cz.seznam.euphoria.core.client.flow.Flow类的使用及代码示例

x33g5p2x  于2022-01-19 转载在 其他  
字(7.6k)|赞(0)|评价(0)|浏览(107)

本文整理了Java中cz.seznam.euphoria.core.client.flow.Flow类的一些代码示例，展示了Flow类的具体用法。这些代码示例主要来源于GitHub/StackOverflow/Maven等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。Flow类的具体详情如下：
包路径:cz.seznam.euphoria.core.client.flow.Flow
类名称:Flow

Flow介绍

[英]A dependency graph of operators.
[中]运算符的依赖关系图。

代码示例

代码示例来源:origin: seznam/euphoria

/**
 * Creates a new anonymous Flow.
 *
 * <p>The returned flow has no caller-supplied name; it is either unnamed
 * or receives a system-generated name.
 *
 * @return a fresh flow without an explicit name
 */
public static Flow create() {
 // Delegate to the named factory; a null name marks the flow as anonymous.
 String name = null;
 return create(name);
}

代码示例来源:origin: seznam/euphoria

// Registers the given source with the underlying flow and exposes the
// resulting input as this API's Dataset type.
// NOTE(review): "wrap" is presumably the delegated core Flow instance —
// confirm against the enclosing (not visible) class.
public <T> Dataset<T> read(DataSource<T> src) {
  return new Dataset<>(wrap.createInput(src));
 }
}

代码示例来源:origin: seznam/euphoria

// NOTE(review): truncated excerpt — the enclosing method is not visible.
// Snapshot of the flow's operator dependency graph.
Collection<Operator<?, ?>> operators = flow.operators();
// Operators whose inputs have already been resolved during traversal.
Set<Operator<?, ?>> resolvedOperators = new HashSet<>();
// Maps each dataset to the operator that produces it.
Map<Dataset<?>, Operator<?, ?>> datasets = new HashMap<>();
// Flow sources are modeled as synthetic InputOperator producers.
flow.sources().forEach(d -> datasets.put(d, new InputOperator(d)));

代码示例来源:origin: seznam/euphoria

@Test
public void testBuild_ImplicitName() {
 // When no explicit name is given, the operator defaults to its class name.
 Flow flow = Flow.create("TEST");
 Dataset<String> dataset = Util.createMockDataset(flow, 3);
 // output() is invoked only for its side effect of registering the operator.
 Distinct.of(dataset).output();
 Distinct distinct = (Distinct) flow.operators().iterator().next();
 assertEquals("Distinct", distinct.getName());
}

代码示例来源:origin: seznam/euphoria

@Test
public void testBuild() {
 // A named Filter should register itself with the flow under that name.
 final Flow flow = Flow.create("TEST");
 final Dataset<String> input = Util.createMockDataset(flow, 1);
 final Dataset<String> filtered = Filter.named("Filter1")
     .of(input)
     .by(s -> !s.equals(""))
     .output();
 // Exactly one operator — the Filter — must be present in the graph.
 assertEquals(1, flow.size());
 assertEquals(flow, filtered.getFlow());
 final Filter filter = (Filter) flow.operators().iterator().next();
 assertEquals(flow, filter.getFlow());
 assertEquals("Filter1", filter.getName());
 assertNotNull(filter.predicate);
 assertEquals(filtered, filter.output());
}

代码示例来源:origin: seznam/euphoria

// Builds and executes a flow that reads HBase rows, flattens each row's
// cells into byte arrays, and persists them to the configured sink.
private void run() {
 final Flow flow = Flow.create();
 final Dataset<Pair<ImmutableBytesWritable, Result>> rows = flow.createInput(
   Utils.getHBaseSource(input, conf.get()));
 FlatMap.of(rows)
   .using((Pair<ImmutableBytesWritable, Result> row, Collector<byte[]> out) -> {
    // Serialize every cell of the row into the downstream collector.
    writeCellsAsBytes(row.getSecond(), out);
   })
   .output()
   .persist(Utils.getSink(output, conf.get()));
 LOG.info("Starting flow reading from {} and persisting to {}", input, output);
 // Block until the submitted flow finishes.
 executor.submit(flow).join();
}

代码示例来源:origin: seznam/euphoria

// NOTE(review): this excerpt is truncated/garbled — the assignment of
// `translator` and the method's closing brace are missing from the snippet.
public void testRDDCaching() {
 final Flow flow = Flow.create(getClass().getSimpleName());
 final Dataset<Integer> input = flow.createInput(dataSource);
   new SparkFlowTranslator(sparkContext, flow.getSettings(), mockedFactory, new HashMap<>());
 translator.translateInto(flow, StorageLevel.MEMORY_ONLY());

代码示例来源:origin: seznam/euphoria

// NOTE(review): this excerpt is heavily garbled by extraction — the switch
// cases are incomplete and interleaved; do not read it as compilable code.
@SuppressWarnings("unchecked")
private void run() {
 final Dataset<Cell> ds;
 // Cap in-memory list storage for the Flink batch executor.
 flow.getSettings().setInt(
   "euphoria.flink.batch.list-storage.max-memory-elements", 100);
 switch (input.getScheme()) {
   Dataset<Pair<byte[], byte[]>> raw = flow.createInput(
     new KafkaSource(input.getAuthority(),
       getPath(input),
  case "file":
   Dataset<Pair<ImmutableBytesWritable, Cell>> raw = flow.createInput(
     new SequenceFileSource<>(ImmutableBytesWritable.class,
       (Class) KeyValue.class, input.toString()));
   Dataset<Pair<ImmutableBytesWritable, Result>> raw = flow.createInput(
     source);

代码示例来源:origin: seznam/euphoria

@Test
public void testBuild() {
 // Split expands into two Filter operators: a positive and a negative one.
 final String opName = "split";
 final Flow flow = Flow.create("split-test");
 final Dataset<String> input = Util.createMockDataset(flow, 1);
 final Split.Output<String> result = Split.named(opName)
   .of(input)
   .using((UnaryPredicate<String>) anything -> true)
   .output();
 assertEquals(2, flow.size());
 final Filter positive =
   (Filter) getOperator(flow, opName + Split.POSITIVE_FILTER_SUFFIX);
 assertSame(flow, positive.getFlow());
 assertNotNull(positive.getPredicate());
 assertSame(positive.output(), result.positive());
 final Filter negative =
   (Filter) getOperator(flow, opName + Split.NEGATIVE_FILTER_SUFFIX);
 assertSame(flow, negative.getFlow());
 assertNotNull(negative.getPredicate());
 assertSame(negative.output(), result.negative());
}

代码示例来源:origin: seznam/euphoria

// Build the translation-time execution context from the flow's settings.
// NOTE(review): single-statement excerpt; the enclosing method is not visible.
ExecutionContext context = new ExecutionContext(flow.getSettings());

代码示例来源:origin: seznam/euphoria

// NOTE(review): truncated excerpt — the try block and surrounding method are
// not visible.
// Invoke registered callbacks with the freshly created execution environment.
onContextCreationFns.forEach(c -> c.accept(environment));
 Settings settings = flow.getSettings();
    flow.getName(), environment.dumpExecutionPlan());
} catch (Throwable t) {
 // NOTE(review): printStackTrace duplicates the LOG.error(..., t) below —
 // consider dropping one of them when editing the real source.
 t.printStackTrace(System.err);
 LOG.error("Failed to run flow " + flow.getName(), t);
 throw new RuntimeException(t);

代码示例来源:origin: seznam/euphoria

/**
 * Tells whether every input read by the given {@link Flow} is bounded.
 *
 * @param flow the flow whose source datasets are examined
 *
 * @return {@code true} iff no source dataset is unbounded
 */
private boolean isBoundedInput(Flow flow) {
 // A single unbounded source makes the whole input unbounded.
 return flow.sources().stream().allMatch(Dataset::isBounded);
}

代码示例来源:origin: seznam/euphoria

// Lets each attached output sink transform its dataset; when the sink
// reports it took over (prepareDataset returns true), the stale sink
// registration is removed via persist(null).
@SuppressWarnings("unchecked")
private static void applySinkTransforms(Flow flow) {
 // Snapshot the sink-bearing outputs first so the persist(null) calls below
 // cannot interfere with the traversal of the operator graph.
 final List<Dataset<?>> outputs = flow.operators().stream()
   .map(Operator::output)
   .filter(out -> out.getOutputSink() != null)
   .collect(Collectors.toList());
 for (Dataset<?> d : outputs) {
  if (d.getOutputSink().prepareDataset((Dataset) d)) {
   // the sink replaced the dataset; drop the now-obsolete registration
   d.persist(null);
  }
 }
}

代码示例来源:origin: seznam/euphoria

/** Renders this flow as {@code Flow{name='<name>', size=<operator count>}}. */
@Override
public String toString() {
 return new StringBuilder("Flow{")
     .append("name='").append(name).append('\'')
     .append(", size=").append(size())
     .append('}')
     .toString();
}

代码示例来源:origin: seznam/euphoria

@Test
 public void testBuild_ImplicitName() {
  // Without an explicit name the operator defaults to its class name.
  Flow flow = Flow.create("TEST");
  Dataset<String> dataset = Util.createMockDataset(flow, 1);

  // output() is called solely to register the Filter with the flow.
  Filter.of(dataset)
      .by(s -> !s.equals(""))
      .output();

  Filter filter = (Filter) flow.operators().iterator().next();
  assertEquals("Filter", filter.getName());
 }
}

代码示例来源:origin: seznam/euphoria

@Test
public void testBuild() {
 // Union of two datasets should register a single two-input operator.
 final Flow flow = Flow.create("TEST");
 final Dataset<String> first = Util.createMockDataset(flow, 2);
 final Dataset<String> second = Util.createMockDataset(flow, 3);
 final Dataset<String> result = Union.named("Union1")
   .of(first, second)
   .output();
 assertEquals(1, flow.size());
 assertEquals(flow, result.getFlow());
 final Union union = (Union) flow.operators().iterator().next();
 assertEquals(flow, union.getFlow());
 assertEquals("Union1", union.getName());
 assertEquals(result, union.output());
 assertEquals(2, union.listInputs().size());
}

代码示例来源:origin: seznam/euphoria

/**
 * Builds and synchronously executes a flow that reads Avro records from the
 * given source, converts each record via {@code AvroSourceTest::apply} and
 * persists the resulting strings to the given sink.
 *
 * <p>NOTE(review): the original summary says "JSON string" while the operator
 * is named "avro2csv" — confirm which output format {@code apply} produces.
 *
 * @param outSink sink that receives the converted record strings
 * @param inSource source of Avro records paired with {@link NullWritable}
 * @throws Exception if flow submission or execution fails
 */
public static void runFlow(
  DataSink<String> outSink,
  DataSource<Pair<AvroKey<GenericData.Record>, NullWritable>> inSource)
  throws Exception {
 Flow flow = Flow.create("simple read avro");
 Dataset<Pair<AvroKey<GenericData.Record>, NullWritable>> input = flow.createInput(inSource);
 final Dataset<String> output =
   FlatMap.named("avro2csv").of(input).using(AvroSourceTest::apply).output();
 output.persist(outSink);
 Executor executor = new LocalExecutor();
 // get() blocks until the flow completes (or rethrows its failure).
 executor.submit(flow).get();
}

代码示例来源:origin: seznam/euphoria

// Build the translation-time execution context from the flow's settings.
// NOTE(review): single-statement excerpt; the enclosing method is not visible.
ExecutionContext context = new ExecutionContext(flow.getSettings());

代码示例来源:origin: seznam/euphoria

/**
 * Determines the {@link Mode} of the given flow.
 *
 * @param flow the flow to inspect
 *
 * @return the flow's mode; never {@code null}
 */
public static Mode determineMode(Flow flow) {
 // A single unbounded source forces streaming; otherwise batch is the default.
 final boolean anyUnbounded =
   flow.sources().stream().anyMatch(ds -> !ds.isBounded());
 return anyUnbounded ? Mode.STREAMING : Mode.BATCH;
}

代码示例来源:origin: seznam/euphoria

/**
 * Lets each attached output sink transform its dataset: collects every
 * operator output that has a sink, and when the sink reports it has taken
 * over ({@code prepareDataset} returns {@code true}), removes the stale
 * sink registration via {@code persist(null)}.
 */
@SuppressWarnings("unchecked")
private static void applySinkTransforms(Flow flow) {
 // Materialize the list first so un-persisting cannot affect the traversal.
 List<Dataset<?>> outputs = flow.operators().stream()
   .filter(o -> o.output().getOutputSink() != null)
   .map(Operator::output)
   .collect(Collectors.toList());
 outputs.forEach(d -> {
  if (d.getOutputSink().prepareDataset((Dataset) d)) {
   // remove the old output sink
   d.persist(null);
  }
 });
}

相关文章