Usage and code examples of the org.apache.calcite.rel.core.Filter.copy() method

This article collects a number of code examples of the Java method org.apache.calcite.rel.core.Filter.copy() and shows how Filter.copy() is used in practice. The examples come mainly from GitHub, Stack Overflow, Maven, and similar platforms, extracted from selected projects, and should serve as useful references. Details of the Filter.copy() method:

Package path: org.apache.calcite.rel.core.Filter
Class name: Filter
Method name: copy

Introduction to Filter.copy

copy(RelTraitSet traitSet, RelNode input, RexNode condition) returns a copy of this Filter with the given trait set, input, and filter condition. It is declared abstract on Filter and implemented by concrete subclasses such as LogicalFilter. Several examples below instead call the inherited copy(RelTraitSet traitSet, List<RelNode> inputs) overload, which replaces only the input and keeps the current condition.
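
As a quick illustration of the pattern that recurs in the examples below, here is a minimal sketch of a planner rule that rewrites a Filter's condition and materializes the change with the three-argument copy(). The rule class FilterConditionToCnfRule is hypothetical and not taken from any of the projects cited here; it assumes Calcite's classic RelOptRule operand API.

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;

/** Hypothetical rule: rewrites a Filter's condition into CNF and
 * copies the Filter only when the condition actually changed. */
public class FilterConditionToCnfRule extends RelOptRule {

 public FilterConditionToCnfRule() {
  // Match any Filter, regardless of its input.
  super(operand(Filter.class, any()), "FilterConditionToCnfRule");
 }

 @Override public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();

  // Rewrite the condition into conjunctive normal form.
  final RexNode newCondition = RexUtil.toCnf(rexBuilder, filter.getCondition());

  // Bail out if the rewrite did not change anything.
  if (newCondition.toString().equals(filter.getCondition().toString())) {
   return;
  }

  // The three-argument copy() keeps the trait set and input,
  // replacing only the condition.
  final RelNode newFilter =
    filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
  call.transformTo(newFilter);
 }
}

The same shape appears in the apache/hive and apache/drill examples below: the rule returns early when the rewritten condition is textually identical to the original, so copy() is only called when a genuinely new Filter is produced.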

Code examples

Code example source: apache/hive

public RelNode align(Filter rel, List<RelFieldCollation> collations) {
 final RelNode child = dispatchAlign(rel.getInput(), collations);
 return rel.copy(rel.getTraitSet(), ImmutableList.of(child));
}

Code example source: apache/drill

public RelNode align(Filter rel, List<RelFieldCollation> collations) {
 final RelNode child = dispatchAlign(rel.getInput(), collations);
 return rel.copy(rel.getTraitSet(), ImmutableList.of(child));
}

Code example source: apache/drill

protected void apply(RelOptRuleCall call, Project project, Filter filter, TableScan scan) {
 RelOptPlanner planner = call.getPlanner();
 List<RelOptMaterialization> materializations =
   (planner instanceof VolcanoPlanner)
     ? ((VolcanoPlanner) planner).getMaterializations()
     : ImmutableList.<RelOptMaterialization>of();
 if (!materializations.isEmpty()) {
  RelNode root = project.copy(project.getTraitSet(), Collections.singletonList(
    filter.copy(filter.getTraitSet(), Collections.singletonList(
      (RelNode) scan))));
  // Costing is done in transformTo(), so we call it repeatedly with all applicable
  // materialized views and cheapest one will be picked
  List<RelOptMaterialization> applicableMaterializations =
    VolcanoPlanner.getApplicableMaterializations(root, materializations);
  for (RelOptMaterialization materialization : applicableMaterializations) {
   List<RelNode> subs = new MaterializedViewSubstitutionVisitor(
     materialization.queryRel, root, relBuilderFactory).go(materialization.tableRel);
   for (RelNode s : subs) {
    call.transformTo(s);
   }
  }
 }
}

Code example source: apache/hive

@Override
public void onMatch(RelOptRuleCall call) {
 final Filter fil = call.rel(0);
 final TableScan rel = call.rel(1);
 // Add identity
 RelBuilder relBuilder = call.builder();
 relBuilder.push(rel);
 List<RexNode> identityFields = relBuilder.fields(
   ImmutableBitSet.range(0, rel.getRowType().getFieldCount()).asList());
 RelNode newRel = relBuilder
   .project(identityFields, ImmutableList.<String>of(), true)
   .build();
 call.transformTo(fil.copy(fil.getTraitSet(), ImmutableList.of(newRel)));
}

Code example source: apache/hive

protected void perform(RelOptRuleCall call, Filter filter,
   HiveTableScan tScan) {
  // Original table
  RelOptHiveTable hiveTable = (RelOptHiveTable) tScan.getTable();

  // Copy original table scan and table
  HiveTableScan tScanCopy = tScan.copyIncludingTable(tScan.getRowType());
  RelOptHiveTable hiveTableCopy = (RelOptHiveTable) tScanCopy.getTable();

  // Execute partition pruning
  RexNode predicate = filter.getCondition();
  Pair<RexNode, RexNode> predicates = PartitionPrune
    .extractPartitionPredicates(filter.getCluster(), hiveTableCopy, predicate);
  RexNode partColExpr = predicates.left;
  hiveTableCopy.computePartitionList(conf, partColExpr, tScanCopy.getPartOrVirtualCols());

  if (StringUtils.equals(hiveTableCopy.getPartitionListKey(), hiveTable.getPartitionListKey())) {
   // Nothing changed, we do not need to produce a new expression
   return;
  }

  call.transformTo(filter.copy(
    filter.getTraitSet(), Collections.singletonList(tScanCopy)));
 }
}

Code example source: apache/hive

@Override
 public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
  final RexNode condition = RexUtil.pullFactors(rexBuilder, filter.getCondition());
  RexNode newCondition = analyzeRexNode(rexBuilder, condition);
  // If we could not transform anything, we bail out
  if (newCondition.toString().equals(condition.toString())) {
   return;
  }
  RelNode newNode = filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
  call.transformTo(newNode);
 }
}

Code example source: apache/hive

@Override
public void onMatch(RelOptRuleCall call) {
 final Filter filter = call.rel(0);
 final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
 final RelMetadataQuery metadataProvider = call.getMetadataQuery();
 // 1. Recompose filter possibly by pulling out common elements from DNF
 // expressions
 RexNode newFilterCondition = RexUtil.pullFactors(rexBuilder, filter.getCondition());
 // 2. Reduce filter with stats information
 RexReplacer replacer = new RexReplacer(filter, rexBuilder, metadataProvider);
 newFilterCondition = replacer.apply(newFilterCondition);
 // 3. Transform if we have created a new filter operator
 if (!filter.getCondition().toString().equals(newFilterCondition.toString())) {
  Filter newFilter = filter.copy(filter.getTraitSet(), filter.getInput(), newFilterCondition);
  call.transformTo(newFilter);
 }
}

Code example source: apache/drill

@Override
public void onMatch(RelOptRuleCall call) {
 final Filter filter = call.rel(0);
 final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
 final RelMetadataQuery metadataProvider = RelMetadataQuery.instance();
 // 1. Recompose filter possibly by pulling out common elements from DNF
 // expressions
 RexNode newFilterCondition = RexUtil.pullFactors(rexBuilder, filter.getCondition());
 // 2. Reduce filter with stats information
 RexReplacer replacer = new RexReplacer(filter, rexBuilder, metadataProvider);
 newFilterCondition = replacer.apply(newFilterCondition);
 // 3. Transform if we have created a new filter operator
 if (!filter.getCondition().toString().equals(newFilterCondition.toString())) {
  Filter newFilter = filter.copy(filter.getTraitSet(), filter.getInput(), newFilterCondition);
  call.transformTo(newFilter);
 }
}

Code example source: apache/drill

public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final Project project = call.rel(1);
  final List<RexNode> newProjects = new ArrayList<>(project.getProjects());
  newProjects.add(filter.getCondition());
  final RelOptCluster cluster = filter.getCluster();
  RelDataType newRowType =
    cluster.getTypeFactory().builder()
      .addAll(project.getRowType().getFieldList())
      .add("condition", Util.last(newProjects).getType())
      .build();
  final RelNode newProject =
    project.copy(project.getTraitSet(),
      project.getInput(),
      newProjects,
      newRowType);
  final RexInputRef newCondition =
    cluster.getRexBuilder().makeInputRef(newProject,
      newProjects.size() - 1);
  call.transformTo(filter.copy(filter.getTraitSet(), newProject, newCondition));
 }
}

Code example source: apache/drill

public void onMatch(RelOptRuleCall call) {
 final Filter filter = call.rel(0);
 final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
 final RexNode condition = RexUtil.pullFactors(rexBuilder, filter.getCondition());
 // 1. We try to transform possible candidates
 RexTransformIntoInClause transformIntoInClause = new RexTransformIntoInClause(rexBuilder, filter,
     minNumORClauses);
 RexNode newCondition = transformIntoInClause.apply(condition);
 // 2. We merge IN expressions
 RexMergeInClause mergeInClause = new RexMergeInClause(rexBuilder);
 newCondition = mergeInClause.apply(newCondition);
 // 3. If we could not transform anything, we bail out
 if (newCondition.toString().equals(condition.toString())) {
  return;
 }
 // 4. We create the filter with the new condition
 RelNode newFilter = filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
 call.transformTo(newFilter);
}

Code example source: apache/incubator-druid

newWhereFilter = whereFilter.copy(
  whereFilter.getTraitSet(),
  whereFilter.getInput(),

Code example source: apache/hive

RelNode newFilterRel = filterFactory == null ? filter.copy(filter.getTraitSet(),
  project.getInput(), newCondition) : filterFactory.createFilter(project.getInput(),
  newCondition);

Code example source: apache/drill

RelNode newFilterRel = filterFactory == null ? filter.copy(filter.getTraitSet(),
  project.getInput(), newCondition) : filterFactory.createFilter(project.getInput(),
  newCondition);

Code example source: Qihoo360/Quicksql

public RelNode accept(RexShuttle shuttle) {
 RexNode condition = shuttle.apply(this.condition);
 if (this.condition == condition) {
  return this;
 }
 return copy(traitSet, getInput(), condition);
}

Code example source: Qihoo360/Quicksql

public abstract Filter copy(RelTraitSet traitSet, RelNode input,
  RexNode condition);

Code example source: org.apache.calcite/calcite-core

public RelNode accept(RexShuttle shuttle) {
 RexNode condition = shuttle.apply(this.condition);
 if (this.condition == condition) {
  return this;
 }
 return copy(traitSet, getInput(), condition);
}

Code example source: org.apache.calcite/calcite-core

public abstract Filter copy(RelTraitSet traitSet, RelNode input,
  RexNode condition);

Code example source: Qihoo360/Quicksql

protected void apply(RelOptRuleCall call, Filter filter, TableScan scan) {
  final RelOptPlanner planner = call.getPlanner();
  final List<RelOptMaterialization> materializations =
    planner.getMaterializations();
  if (!materializations.isEmpty()) {
   RelNode root = filter.copy(filter.getTraitSet(),
     Collections.singletonList((RelNode) scan));
   List<RelOptMaterialization> applicableMaterializations =
     RelOptMaterializations.getApplicableMaterializations(root, materializations);
   for (RelOptMaterialization materialization : applicableMaterializations) {
    if (RelOptUtil.areRowTypesEqual(scan.getRowType(),
      materialization.queryRel.getRowType(), false)) {
     RelNode target = materialization.queryRel;
     final HepPlanner hepPlanner =
       new HepPlanner(program, planner.getContext());
     hepPlanner.setRoot(target);
     target = hepPlanner.findBestExp();
     List<RelNode> subs = new MaterializedViewSubstitutionVisitor(target, root)
       .go(materialization.tableRel);
     for (RelNode s : subs) {
      call.transformTo(s);
     }
    }
   }
  }
 }
}

Code example source: org.apache.calcite/calcite-core

protected void apply(RelOptRuleCall call, Filter filter, TableScan scan) {
  final RelOptPlanner planner = call.getPlanner();
  final List<RelOptMaterialization> materializations =
    planner.getMaterializations();
  if (!materializations.isEmpty()) {
   RelNode root = filter.copy(filter.getTraitSet(),
     Collections.singletonList((RelNode) scan));
   List<RelOptMaterialization> applicableMaterializations =
     RelOptMaterializations.getApplicableMaterializations(root, materializations);
   for (RelOptMaterialization materialization : applicableMaterializations) {
    if (RelOptUtil.areRowTypesEqual(scan.getRowType(),
      materialization.queryRel.getRowType(), false)) {
     RelNode target = materialization.queryRel;
     final HepPlanner hepPlanner =
       new HepPlanner(program, planner.getContext());
     hepPlanner.setRoot(target);
     target = hepPlanner.findBestExp();
     List<RelNode> subs = new MaterializedViewSubstitutionVisitor(target, root)
       .go(materialization.tableRel);
     for (RelNode s : subs) {
      call.transformTo(s);
     }
    }
   }
  }
 }
}

Code example source: org.apache.calcite/calcite-druid

@Override public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final DruidQuery query = call.rel(1);
  if (!DruidQuery.isValidSignature(query.signature() + 'h')) {
   return;
  }
  final RexNode cond = filter.getCondition();
  final DruidJsonFilter druidJsonFilter = DruidJsonFilter
    .toDruidFilters(cond, query.getTopNode().getRowType(), query);
  if (druidJsonFilter != null) {
   final RelNode newFilter = filter
     .copy(filter.getTraitSet(), Util.last(query.rels), filter.getCondition());
   final DruidQuery newDruidQuery = DruidQuery.extendQuery(query, newFilter);
   call.transformTo(newDruidQuery);
  }
 }
}
