Usage of org.apache.flink.optimizer.plantranslate.JobGraphGenerator.<init>() with code examples


This article collects Java code examples for the org.apache.flink.optimizer.plantranslate.JobGraphGenerator.<init>() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of the JobGraphGenerator.<init>() method:
Package path: org.apache.flink.optimizer.plantranslate.JobGraphGenerator
Class name: JobGraphGenerator
Method name: <init>

About JobGraphGenerator.<init>

Creates a new job graph generator that uses the default values for its resource configuration.
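Before the sourced examples, here is a minimal end-to-end sketch of the typical call sequence, assembled from the snippets collected below: build a Plan from a DataSet program, compile it into an OptimizedPlan with the Optimizer, then translate it into a JobGraph. This is an illustrative sketch rather than code from the Flink project; the class name JobGraphGeneratorExample is made up for the example.

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.optimizer.DataStatistics;
import org.apache.flink.optimizer.Optimizer;
import org.apache.flink.optimizer.plan.OptimizedPlan;
import org.apache.flink.optimizer.plantranslate.JobGraphGenerator;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;

// Illustrative class name, not part of Flink.
public class JobGraphGeneratorExample {

  public static void main(String[] args) throws Exception {
    // Build a trivial DataSet program and turn it into a Plan.
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.generateSequence(1, 10).output(new DiscardingOutputFormat<Long>());
    Plan plan = env.createProgramPlan("JobGraphGenerator example");

    // JobGraphGenerator consumes an OptimizedPlan, so run the optimizer first.
    Optimizer optimizer = new Optimizer(new DataStatistics(), new Configuration());
    OptimizedPlan optimizedPlan = optimizer.compile(plan);

    // Default constructor: uses default values for the resource configuration.
    JobGraph jobGraph = new JobGraphGenerator().compileJobGraph(optimizedPlan);

    // A Flink Configuration can be passed instead, as some examples below do:
    // new JobGraphGenerator(flinkConfig).compileJobGraph(optimizedPlan);

    for (JobVertex vertex : jobGraph.getVertices()) {
      System.out.println("Job vertex: " + vertex.getName());
    }
  }
}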

Code examples

Code example from: apache/flink

@Override
public JobExecutionResult execute(String jobName) throws Exception {
  OptimizedPlan op = compileProgram(jobName);
  JobGraphGenerator jgg = new JobGraphGenerator();
  JobGraph jobGraph = jgg.compileJobGraph(op);
  for (Path jarFile: jarFiles) {
    jobGraph.addJar(jarFile);
  }
  jobGraph.setClasspaths(new ArrayList<>(classPaths));
  this.lastJobExecutionResult = jobExecutor.executeJobBlocking(jobGraph);
  return this.lastJobExecutionResult;
}

Code example from: apache/flink

public static JobGraph getJobGraph(Configuration flinkConfig, FlinkPlan optPlan, List<URL> jarFiles, List<URL> classpaths, SavepointRestoreSettings savepointSettings) {
  JobGraph job;
  if (optPlan instanceof StreamingPlan) {
    job = ((StreamingPlan) optPlan).getJobGraph();
    job.setSavepointRestoreSettings(savepointSettings);
  } else {
    JobGraphGenerator gen = new JobGraphGenerator(flinkConfig);
    job = gen.compileJobGraph((OptimizedPlan) optPlan);
  }
  for (URL jar : jarFiles) {
    try {
      job.addJar(new Path(jar.toURI()));
    } catch (URISyntaxException e) {
      throw new RuntimeException("URL is invalid. This should not happen.", e);
    }
  }
  job.setClasspaths(classpaths);
  return job;
}

Code example from: apache/flink

private JobGraph getJobGraph(final Plan plan) {
    final Optimizer pc = new Optimizer(new DataStatistics(), getConfiguration());
    final OptimizedPlan op = pc.compile(plan);
    final JobGraphGenerator jgg = new JobGraphGenerator();
    return jgg.compileJobGraph(op);
  }
}

Code example from: apache/flink

/**
 * Helpers to generate the JobGraph.
 */
  private static JobGraph getJobGraph(Plan plan) {
    Optimizer pc = new Optimizer(new DataStatistics(), new Configuration());
    JobGraphGenerator jgg = new JobGraphGenerator();
    OptimizedPlan op = pc.compile(plan);
    return jgg.compileJobGraph(op);
  }
}

Code example from: apache/flink

/**
 * This test makes sure that only a HYBRIDHASH on the static path is transformed to the cached variant
 */
@Test
public void testLeftSideCountercheck() {
  try {
    
    Plan plan = getTestPlanLeftStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);
    
    OptimizedPlan oPlan = compileNoStats(plan);

    OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
    DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");
    
    // verify correct join strategy
    assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND, innerJoin.getDriverStrategy());
    assertEquals(TempMode.CACHED, innerJoin.getInput1().getTempMode());
    assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());
  
    new JobGraphGenerator().compileJobGraph(oPlan);
  }
  catch (Exception e) {
    System.err.println(e.getMessage());
    e.printStackTrace();
    fail("Test errored: " + e.getMessage());
  }
}

Code example from: apache/flink

@Test
public void testIdentityIteration() {
  try {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(43);
    
    IterativeDataSet<Long> iteration = env.generateSequence(-4, 1000).iterate(100);
    iteration.closeWith(iteration).output(new DiscardingOutputFormat<Long>());
    
    Plan p = env.createProgramPlan();
    OptimizedPlan op = compileNoStats(p);
    
    new JobGraphGenerator().compileJobGraph(op);
  }
  catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example from: apache/flink

/**
 * This test makes sure that only a HYBRIDHASH on the static path is transformed to the cached variant
 */
@Test
public void testRightSideCountercheck() {
  try {
    
    Plan plan = getTestPlanRightStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_FIRST);
    
    OptimizedPlan oPlan = compileNoStats(plan);

    OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
    DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");
    
    // verify correct join strategy
    assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST, innerJoin.getDriverStrategy()); 
    assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
    assertEquals(TempMode.CACHED, innerJoin.getInput2().getTempMode());
  
    new JobGraphGenerator().compileJobGraph(oPlan);
  }
  catch (Exception e) {
    System.err.println(e.getMessage());
    e.printStackTrace();
    fail("Test errored: " + e.getMessage());
  }
}

Code example from: apache/flink

/**
 * This tests whether a HYBRIDHASH_BUILD_FIRST is correctly transformed to a HYBRIDHASH_BUILD_FIRST_CACHED
 * when it is inside an iteration and on the static path.
 */
@Test
public void testLeftSide() {
  try {
    
    Plan plan = getTestPlanLeftStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_FIRST);
    
    OptimizedPlan oPlan = compileNoStats(plan);

    OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
    DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");
    
    // verify correct join strategy
    assertEquals(DriverStrategy.HYBRIDHASH_BUILD_FIRST_CACHED, innerJoin.getDriverStrategy());
    assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
    assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());
  
    new JobGraphGenerator().compileJobGraph(oPlan);
  }
  catch (Exception e) {
    System.err.println(e.getMessage());
    e.printStackTrace();
    fail("Test errored: " + e.getMessage());
  }
}

Code example from: apache/flink

/**
 * This tests whether a HYBRIDHASH_BUILD_SECOND is correctly transformed to a HYBRIDHASH_BUILD_SECOND_CACHED
 * when it is inside an iteration and on the static path.
 */
@Test
public void testRightSide() {
  try {
    
    Plan plan = getTestPlanRightStatic(Optimizer.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);
    
    OptimizedPlan oPlan = compileNoStats(plan);

    OptimizerPlanNodeResolver resolver = getOptimizerPlanNodeResolver(oPlan);
    DualInputPlanNode innerJoin = resolver.getNode("DummyJoiner");
    
    // verify correct join strategy
    assertEquals(DriverStrategy.HYBRIDHASH_BUILD_SECOND_CACHED, innerJoin.getDriverStrategy()); 
    assertEquals(TempMode.NONE, innerJoin.getInput1().getTempMode());
    assertEquals(TempMode.NONE, innerJoin.getInput2().getTempMode());
  
    new JobGraphGenerator().compileJobGraph(oPlan);
  }
  catch (Exception e) {
    System.err.println(e.getMessage());
    e.printStackTrace();
    fail("Test errored: " + e.getMessage());
  }
}

Code example from: apache/flink

@Test
  public void testDisjointFlows() {
    try {
      ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
      
      // generate two different flows
      env.generateSequence(1, 10)
          .output(new DiscardingOutputFormat<Long>());
      env.generateSequence(1, 10)
          .output(new DiscardingOutputFormat<Long>());
      
      Plan p = env.createProgramPlan();
      OptimizedPlan op = compileNoStats(p);
      
      new JobGraphGenerator().compileJobGraph(op);
    }
    catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }
}

Code example from: apache/flink

@Test
public void testUnionReplacement() {
  try {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<String> input1 = env.fromElements("test1");
    DataSet<String> input2 = env.fromElements("test2");

    DataSet<String> union = input1.union(input2);

    union.output(new DiscardingOutputFormat<String>());
    union.output(new DiscardingOutputFormat<String>());

    Plan plan = env.createProgramPlan();
    OptimizedPlan oPlan = compileNoStats(plan);
    JobGraphGenerator jobGen = new JobGraphGenerator();
    jobGen.compileJobGraph(oPlan);
  }
  catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example from: apache/flink

@Test
public void testWorksetIterationNotDependingOnSolutionSet() {
  try {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    
    DataSet<Tuple2<Long, Long>> input = env.generateSequence(1, 100).map(new Duplicator<Long>());
    
    DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration = input.iterateDelta(input, 100, 1);
    
    DataSet<Tuple2<Long, Long>> iterEnd = iteration.getWorkset().map(new TestMapper<Tuple2<Long,Long>>());
    iteration.closeWith(iterEnd, iterEnd)
      .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
    
    Plan p = env.createProgramPlan();
    OptimizedPlan op = compileNoStats(p);
    
    WorksetIterationPlanNode wipn = (WorksetIterationPlanNode) op.getDataSinks().iterator().next().getInput().getSource();
    assertTrue(wipn.getSolutionSetPlanNode().getOutgoingChannels().isEmpty());
    
    JobGraphGenerator jgg = new JobGraphGenerator();
    jgg.compileJobGraph(op);
  }
  catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example from: apache/flink

@Test
public void testEmptyWorksetIteration() {
  try {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(43);
    
    DataSet<Tuple2<Long, Long>> input = env.generateSequence(1, 20)
        .map(new MapFunction<Long, Tuple2<Long, Long>>() {
          @Override
          public Tuple2<Long, Long> map(Long value){ return null; }
        });
        
        
    DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iter = input.iterateDelta(input, 100, 0);
    iter.closeWith(iter.getWorkset(), iter.getWorkset())
      .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
    
    Plan p = env.createProgramPlan();
    OptimizedPlan op = compileNoStats(p);
    
    new JobGraphGenerator().compileJobGraph(op);
  }
  catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example from: apache/flink

@Test
public void testCostComputationWithMultipleDataSinks() {
  final int SINKS = 5;

  try {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    DataSet<Long> source = env.generateSequence(1, 10000);
    DataSet<Long> mappedA = source.map(new IdentityMapper<Long>());
    DataSet<Long> mappedC = source.map(new IdentityMapper<Long>());
    for (int sink = 0; sink < SINKS; sink++) {
      mappedA.output(new DiscardingOutputFormat<Long>());
      mappedC.output(new DiscardingOutputFormat<Long>());
    }
    Plan plan = env.createProgramPlan("Plans With Multiple Data Sinks");
    OptimizedPlan oPlan = compileNoStats(plan);
    new JobGraphGenerator().compileJobGraph(oPlan);
  }
  catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example from: apache/flink

@Override
public JobExecutionResult execute(String jobName) throws Exception {
  Plan plan = createProgramPlan(jobName);
  Optimizer pc = new Optimizer(new Configuration());
  OptimizedPlan op = pc.compile(plan);
  JobGraphGenerator jgg = new JobGraphGenerator();
  JobGraph jobGraph = jgg.compileJobGraph(op);
  String jsonPlan = JsonPlanGenerator.generatePlan(jobGraph);
  // first check that the JSON is valid
  JsonParser parser = new JsonFactory().createJsonParser(jsonPlan);
  while (parser.nextToken() != null) {}
  validator.validateJson(jsonPlan);
  throw new AbortError();
}

Code example from: apache/flink

new JobGraphGenerator().compileJobGraph(op);

Code example from: apache/flink

@Test
  public void testReduce() {
    // construct the plan
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    DataSet<Long> set1 = env.generateSequence(0,1);

    set1.reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce1")
        .output(new DiscardingOutputFormat<Long>()).name("Sink");

    Plan plan = env.createProgramPlan();

    try {
      OptimizedPlan oPlan = compileNoStats(plan);
      JobGraphGenerator jobGen = new JobGraphGenerator();
      jobGen.compileJobGraph(oPlan);
    } catch(CompilerException ce) {
      ce.printStackTrace();
      fail("The pact compiler is unable to compile this plan correctly");
    }
  }
}

Code example from: apache/flink

@Test
public void testTempInIterationTest() throws Exception {
  ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
  DataSet<Tuple2<Long, Long>> input = env.readCsvFile("file:///does/not/exist").types(Long.class, Long.class);
  DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
      input.iterateDelta(input, 1, 0);
  DataSet<Tuple2<Long, Long>> update = iteration.getWorkset()
      .join(iteration.getSolutionSet()).where(0).equalTo(0)
        .with(new DummyFlatJoinFunction<Tuple2<Long, Long>>());
  iteration.closeWith(update, update)
      .output(new DiscardingOutputFormat<Tuple2<Long, Long>>());
  Plan plan = env.createProgramPlan();
  OptimizedPlan oPlan = (new Optimizer(new Configuration())).compile(plan);
  JobGraphGenerator jgg = new JobGraphGenerator();
  JobGraph jg = jgg.compileJobGraph(oPlan);
  boolean solutionSetUpdateChecked = false;
  for(JobVertex v : jg.getVertices()) {
    if(v.getName().equals("SolutionSet Delta")) {
      // check if input of solution set delta is temped
      TaskConfig tc = new TaskConfig(v.getConfiguration());
      assertTrue(tc.isInputAsynchronouslyMaterialized(0));
      solutionSetUpdateChecked = true;
    }
  }
  assertTrue(solutionSetUpdateChecked);
}

Code example from: apache/flink

/**
 * Source -> Map -> Reduce -> Cross -> Reduce -> Cross -> Reduce ->
 * |--------------------------/                  /
 * |--------------------------------------------/
 *
 * First cross has SameKeyFirst output contract
 */
  @Test
  public void testTicket158() {
    // construct the plan
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(DEFAULT_PARALLELISM);
    DataSet<Long> set1 = env.generateSequence(0,1);

    set1.map(new IdentityMapper<Long>()).name("Map1")
        .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce1")
        .cross(set1).with(new IdentityCrosser<Long>()).withForwardedFieldsFirst("*").name("Cross1")
        .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce2")
        .cross(set1).with(new IdentityCrosser<Long>()).name("Cross2")
        .groupBy("*").reduceGroup(new IdentityGroupReducer<Long>()).name("Reduce3")
        .output(new DiscardingOutputFormat<Long>()).name("Sink");

    Plan plan = env.createProgramPlan();
    OptimizedPlan oPlan = compileNoStats(plan);

    JobGraphGenerator jobGen = new JobGraphGenerator();
    jobGen.compileJobGraph(oPlan);
  }
}
