org.apache.flink.api.common.operators.Operator类的使用及代码示例

x33g5p2x  于2022-01-26 转载在 其他  
字(12.0k)|赞(0)|评价(0)|浏览(87)

本文整理了Java中org.apache.flink.api.common.operators.Operator类的一些代码示例,展示了Operator类的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Operator类的具体详情如下:
包路径:org.apache.flink.api.common.operators.Operator
类名称:Operator

Operator介绍

[英]Abstract base class for all operators. An operator is a source, sink, or it applies an operation to one or more inputs, producing a result.
[中]所有运算符的抽象基类。运算符是源、汇,或者它对一个或多个输入应用一个操作,从而产生一个结果。

代码示例

代码示例来源:origin: apache/flink

@Override
  public String toString() {
    // Render as "<SimpleClassName> - <operatorName>" for logs and debugging output.
    final String simpleName = getClass().getSimpleName();
    return simpleName + " - " + getName();
  }
}

代码示例来源:origin: apache/flink

@Override
public boolean preVisit(Operator<?> visitable) {
  // Track the maximum parallelism seen over all distinct operators.
  // Returns true (descend into children) only on the first visit of an operator.
  final boolean firstVisit = visitedOperators.add(visitable);
  if (firstVisit) {
    final int parallelism = visitable.getParallelism();
    if (parallelism > this.maxDop) {
      this.maxDop = parallelism;
    }
  }
  return firstVisit;
}

代码示例来源:origin: apache/flink

@Override
public void accept(Visitor<Operator<?>> visitor) {
  // Pre-order traversal: visit this operator, then both regular inputs,
  // then every broadcast input, and finally post-visit this operator.
  if (!visitor.preVisit(this)) {
    return;
  }
  this.input1.accept(visitor);
  this.input2.accept(visitor);
  for (Operator<?> broadcastInput : this.broadcastInputs.values()) {
    broadcastInput.accept(visitor);
  }
  visitor.postVisit(this);
}

代码示例来源:origin: apache/flink

@Override
public boolean preVisit(Operator<?> visitable) {
  // Visit each operator once; group operators by name and manually descend
  // into iteration bodies, which are not reachable via the regular inputs.
  if (!this.seen.add(visitable)) {
    return false;
  }

  // group this operator under its name
  final String name = visitable.getName();
  List<Operator<?>> operatorsWithSameName = this.map.get(name);
  if (operatorsWithSameName == null) {
    operatorsWithSameName = new ArrayList<Operator<?>>(2);
    this.map.put(name, operatorsWithSameName);
  }
  operatorsWithSameName.add(visitable);

  // iteration bodies must be visited explicitly
  if (visitable instanceof BulkIterationBase) {
    ((BulkIterationBase) visitable).getNextPartialSolution().accept(this);
  } else if (visitable instanceof DeltaIterationBase) {
    ((DeltaIterationBase) visitable).getSolutionSetDelta().accept(this);
    ((DeltaIterationBase) visitable).getNextWorkset().accept(this);
  }

  return true;
}

代码示例来源:origin: apache/flink

private JobVertex createDataSinkVertex(SinkPlanNode node) throws CompilerException {
  // Build the job-graph vertex for a data sink: configure its resources,
  // invokable task class, and the user-code (output format) it executes.
  final OutputFormatVertex sinkVertex = new OutputFormatVertex(node.getNodeName());
  final TaskConfig sinkConfig = new TaskConfig(sinkVertex.getConfiguration());

  sinkVertex.setResources(node.getMinResources(), node.getPreferredResources());
  sinkVertex.setInvokableClass(DataSinkTask.class);
  sinkVertex.setFormatDescription(getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

  // hand the user code and its parameters to the task configuration
  sinkConfig.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
  sinkConfig.setStubParameters(node.getProgramOperator().getParameters());
  return sinkVertex;
}

代码示例来源:origin: apache/flink

if (n instanceof DataSinkNode) {
  type = "sink";
  contents = n.getOperator().toString();
} else if (n instanceof DataSourceNode) {
  type = "source";
  contents = n.getOperator().toString();
  contents = n.getOperator().getName();
  contents = n.getOperator().getName();
  contents = n.getOperator().getName();
    if (inputNum == 0) {
      child1name += child1name.length() > 0 ? ", " : "";
      child1name += source.getOptimizerNode().getOperator().getName() +
        " (id: " + this.nodeIds.get(source) + ")";
    } else if (inputNum == 1) {
      child2name += child2name.length() > 0 ? ", " : "";
      child2name += source.getOptimizerNode().getOperator().getName() +
        " (id: " + this.nodeIds.get(source) + ")";
if (n.getOperator().getCompilerHints() != null) {
  CompilerHints hints = n.getOperator().getCompilerHints();
  CompilerHints defaults = new CompilerHints();

代码示例来源:origin: apache/flink

@SuppressWarnings("unchecked")
public static <T, K> org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> appendKeyExtractor(
    org.apache.flink.api.common.operators.Operator<T> input,
    SelectorFunctionKeys<T, K> key) {
  // Prepends a mapper that turns each element into a (key, element) pair.
  // Unions are transparent, so the extractor is pushed into both branches.
  if (input instanceof Union) {
    // if input is a union, we apply the key extractors recursively to all inputs
    org.apache.flink.api.common.operators.Operator<T> firstInput = ((Union) input).getFirstInput();
    org.apache.flink.api.common.operators.Operator<T> secondInput = ((Union) input).getSecondInput();
    org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> firstInputWithKey =
        appendKeyExtractor(firstInput, key);
    org.apache.flink.api.common.operators.Operator<Tuple2<K, T>> secondInputWithKey =
        appendKeyExtractor(secondInput, key);
    return new Union(firstInputWithKey, secondInputWithKey, input.getName());
  }
  TypeInformation<T> inputType = key.getInputType();
  TypeInformation<Tuple2<K, T>> typeInfoWithKey = createTypeWithKey(key);
  // Use diamond inference instead of raw-type constructions, matching the
  // two-key overload of this method and avoiding unchecked raw-type usage.
  KeyExtractingMapper<T, K> extractor = new KeyExtractingMapper<>(key.getKeyExtractor());
  MapOperatorBase<T, Tuple2<K, T>, MapFunction<T, Tuple2<K, T>>> mapper =
      new MapOperatorBase<T, Tuple2<K, T>, MapFunction<T, Tuple2<K, T>>>(
          extractor,
          new UnaryOperatorInformation<>(inputType, typeInfoWithKey),
          "Key Extractor"
      );
  // the extractor runs with the same parallelism as its input
  mapper.setInput(input);
  mapper.setParallelism(input.getParallelism());
  return mapper;
}

代码示例来源:origin: apache/flink

@Override
public UnaryOperatorInformation<OUT, OUT> getOperatorInfo() {
  // Identity-typed operator: both the input and output type of this operator
  // equal the output type of its predecessor.
  final TypeInformation<OUT> predecessorOutputType = input.getOperatorInfo().getOutputType();
  return new UnaryOperatorInformation<OUT, OUT>(predecessorOutputType, predecessorOutputType);
}

代码示例来源:origin: apache/flink

protected void readUniqueFieldsAnnotation() {
  // Merge any unique-field hints from the operator's compiler hints into this node.
  if (this.operator.getCompilerHints() == null) {
    return;
  }
  final Set<FieldSet> uniqueFieldSets = this.operator.getCompilerHints().getUniqueFields();
  if (uniqueFieldSets == null) {
    return;
  }
  // lazily allocate the target set on first use
  if (this.uniqueFields == null) {
    this.uniqueFields = new HashSet<FieldSet>();
  }
  this.uniqueFields.addAll(uniqueFieldSets);
}

代码示例来源:origin: apache/flink

public ResourceSpec getPreferredResources() {
  // Delegates to the operator wrapped by the template node; this node itself
  // declares no resources of its own.
  return this.template
      .getOperator()
      .getPreferredResources();
}

代码示例来源:origin: apache/flink

public ResourceSpec getMinResources() {
  // Delegates to the operator wrapped by the template node; this node itself
  // declares no resources of its own.
  return this.template
      .getOperator()
      .getMinResources();
}

代码示例来源:origin: apache/flink

private <I, O> org.apache.flink.api.common.operators.Operator<O> translateSingleInputOperator(SingleInputOperator<?, ?, ?> op) {
  // Translate the operator's single input first, then let the operator build
  // its common-API data-flow counterpart on top of the translated input.
  @SuppressWarnings("unchecked")
  SingleInputOperator<I, O, ?> typedOperator = (SingleInputOperator<I, O, ?>) op;
  @SuppressWarnings("unchecked")
  DataSet<I> inputDataSet = (DataSet<I>) op.getInput();
  Operator<I> translatedInput = translate(inputDataSet);
  org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOperator.translateToDataFlow(translatedInput);

  if (op instanceof UdfOperator<?>) {
    @SuppressWarnings("unchecked")
    SingleInputUdfOperator<I, O, ?> udfOperator = (SingleInputUdfOperator<I, O, ?>) op;

    // carry over the user-supplied configuration parameters, if any
    Configuration userParameters = udfOperator.getParameters();
    if (userParameters != null) {
      dataFlowOp.getParameters().addAll(userParameters);
    }

    // forward the semantic properties when the translated operator supports them
    if (dataFlowOp instanceof org.apache.flink.api.common.operators.SingleInputOperator) {
      org.apache.flink.api.common.operators.SingleInputOperator<?, O, ?> unaryDataFlowOp =
          (org.apache.flink.api.common.operators.SingleInputOperator<?, O, ?>) dataFlowOp;
      unaryDataFlowOp.setSemanticProperties(udfOperator.getSemanticProperties());
    }
  }
  return dataFlowOp;
}

代码示例来源:origin: org.apache.flink/flink-optimizer_2.10

if (n instanceof DataSinkNode) {
  type = "sink";
  contents = n.getOperator().toString();
} else if (n instanceof DataSourceNode) {
  type = "source";
  contents = n.getOperator().toString();
  contents = n.getOperator().getName();
  contents = n.getOperator().getName();
  contents = n.getOperator().getName();
    if (inputNum == 0) {
      child1name += child1name.length() > 0 ? ", " : "";
      child1name += source.getOptimizerNode().getOperator().getName() +
        " (id: " + this.nodeIds.get(source) + ")";
    } else if (inputNum == 1) {
      child2name += child2name.length() > 0 ? ", " : "";
      child2name += source.getOptimizerNode().getOperator().getName() +
        " (id: " + this.nodeIds.get(source) + ")";
if (n.getOperator().getCompilerHints() != null) {
  CompilerHints hints = n.getOperator().getCompilerHints();
  CompilerHints defaults = new CompilerHints();

代码示例来源:origin: apache/flink

private InputFormatVertex createDataSourceVertex(SourcePlanNode node) throws CompilerException {
  // Build the job-graph vertex for a data source: configure its resources,
  // invokable task class, user code (input format) and output serializer.
  final InputFormatVertex sourceVertex = new InputFormatVertex(node.getNodeName());
  final TaskConfig sourceConfig = new TaskConfig(sourceVertex.getConfiguration());

  sourceVertex.setResources(node.getMinResources(), node.getPreferredResources());
  sourceVertex.setInvokableClass(DataSourceTask.class);
  sourceVertex.setFormatDescription(getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

  // hand the user code, its parameters and the output serializer to the task configuration
  sourceConfig.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
  sourceConfig.setStubParameters(node.getProgramOperator().getParameters());
  sourceConfig.setOutputSerializer(node.getSerializer());
  return sourceVertex;
}

代码示例来源:origin: apache/flink

@SuppressWarnings("unchecked")
public static <T, K1, K2> org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> appendKeyExtractor(
    org.apache.flink.api.common.operators.Operator<T> input,
    SelectorFunctionKeys<T, K1> key1,
    SelectorFunctionKeys<T, K2> key2) {
  if (input instanceof Union) {
    // if input is a union, we apply the key extractors recursively to all inputs
    org.apache.flink.api.common.operators.Operator<T> firstInput = ((Union) input).getFirstInput();
    org.apache.flink.api.common.operators.Operator<T> secondInput = ((Union) input).getSecondInput();
    org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> firstInputWithKey =
        appendKeyExtractor(firstInput, key1, key2);
    org.apache.flink.api.common.operators.Operator<Tuple3<K1, K2, T>> secondInputWithKey =
        appendKeyExtractor(secondInput, key1, key2);
    return new Union(firstInputWithKey, secondInputWithKey, input.getName());
  }
  TypeInformation<T> inputType = key1.getInputType();
  TypeInformation<Tuple3<K1, K2, T>> typeInfoWithKey = createTypeWithKey(key1, key2);
  TwoKeyExtractingMapper<T, K1, K2> extractor =
      new TwoKeyExtractingMapper<>(key1.getKeyExtractor(), key2.getKeyExtractor());
  MapOperatorBase<T, Tuple3<K1, K2, T>, MapFunction<T, Tuple3<K1, K2, T>>> mapper =
      new MapOperatorBase<T, Tuple3<K1, K2, T>, MapFunction<T, Tuple3<K1, K2, T>>>(
          extractor,
          new UnaryOperatorInformation<>(inputType, typeInfoWithKey),
          "Key Extractor"
      );
  mapper.setInput(input);
  mapper.setParallelism(input.getParallelism());
  return mapper;
}

代码示例来源:origin: apache/flink

public Union(Operator<T> input1, Operator<T> input2, String unionLocationName) {
  // A union's two input types and its output type are all identical, so the
  // first input's output type is used for all three slots of the operator info.
  // (The this(...) call must be the first statement, hence the repeated expression.)
  this(new BinaryOperatorInformation<T, T, T>(input1.getOperatorInfo().getOutputType(),
      input1.getOperatorInfo().getOutputType(), input1.getOperatorInfo().getOutputType()), unionLocationName);
  setFirstInput(input1);
  setSecondInput(input2);
}

代码示例来源:origin: apache/flink

if (getOperator() == null || getOperator().getCompilerHints() == null) {
  return ;
CompilerHints hints = getOperator().getCompilerHints();
if (hints.getOutputSize() >= 0) {
  this.estimatedOutputSize = hints.getOutputSize();

代码示例来源:origin: org.apache.flink/flink-optimizer_2.10

public ResourceSpec getPreferredResources() {
  // Resource information lives on the wrapped operator, not on this node.
  return this.template
      .getOperator()
      .getPreferredResources();
}

代码示例来源:origin: org.apache.flink/flink-optimizer

public ResourceSpec getMinResources() {
  // Resource information lives on the wrapped operator, not on this node.
  return this.template
      .getOperator()
      .getMinResources();
}

代码示例来源:origin: apache/flink

private <I1, I2, O> org.apache.flink.api.common.operators.Operator<O> translateTwoInputOperator(TwoInputOperator<?, ?, ?, ?> op) {
  // Translate both inputs first, then let the operator build its common-API
  // data-flow counterpart on top of the translated inputs.
  @SuppressWarnings("unchecked")
  TwoInputOperator<I1, I2, O, ?> typedOperator = (TwoInputOperator<I1, I2, O, ?>) op;
  @SuppressWarnings("unchecked")
  DataSet<I1> firstDataSet = (DataSet<I1>) op.getInput1();
  @SuppressWarnings("unchecked")
  DataSet<I2> secondDataSet = (DataSet<I2>) op.getInput2();
  Operator<I1> translatedFirst = translate(firstDataSet);
  Operator<I2> translatedSecond = translate(secondDataSet);
  org.apache.flink.api.common.operators.Operator<O> dataFlowOp = typedOperator.translateToDataFlow(translatedFirst, translatedSecond);

  if (op instanceof UdfOperator<?>) {
    @SuppressWarnings("unchecked")
    TwoInputUdfOperator<I1, I2, O, ?> udfOperator = (TwoInputUdfOperator<I1, I2, O, ?>) op;

    // carry over the user-supplied configuration parameters, if any
    Configuration userParameters = udfOperator.getParameters();
    if (userParameters != null) {
      dataFlowOp.getParameters().addAll(userParameters);
    }

    // forward the semantic properties when the translated operator supports them
    if (dataFlowOp instanceof org.apache.flink.api.common.operators.DualInputOperator) {
      org.apache.flink.api.common.operators.DualInputOperator<?, ?, O, ?> binaryDataFlowOp =
          (org.apache.flink.api.common.operators.DualInputOperator<?, ?, O, ?>) dataFlowOp;
      binaryDataFlowOp.setSemanticProperties(udfOperator.getSemanticProperties());
    }
  }
  return dataFlowOp;
}

相关文章