Usage of the org.apache.hadoop.hive.ql.parse.QBParseInfo.getClauseNames() method, with code examples


This article collects a number of Java code examples for the org.apache.hadoop.hive.ql.parse.QBParseInfo.getClauseNames method and shows how it is used in practice. The examples are drawn mainly from GitHub/Stackoverflow/Maven and were extracted from selected open-source projects, so they should serve as useful references. Details of the QBParseInfo.getClauseNames method:
Package path: org.apache.hadoop.hive.ql.parse.QBParseInfo
Class name: QBParseInfo
Method name: getClauseNames

About QBParseInfo.getClauseNames

Returns the set of destination clause names recorded in the query block's parse info. Each destination (insert) clause of a query has its own name, and for a simple single-destination query the set contains exactly one entry, which is why the examples below usually call getClauseNames().iterator().next() and then use the result as a key into per-clause maps (HAVING, GROUP BY, LIMIT, and so on).
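
A minimal sketch of the recurring usage pattern, assuming a Hive QB object and the getQBParseInfo helper used in the examples below are in scope:

// Obtain the parse info attached to the current query block (QB).
QBParseInfo qbp = getQBParseInfo(qb);
// getClauseNames() returns the set of destination clause names; a simple
// single-destination query has exactly one, so callers typically take the
// first element of the iterator.
String destClauseName = qbp.getClauseNames().iterator().next();
// The clause name is then used as a key into per-destination maps, for
// example to look up the HAVING clause AST for that destination:
ASTNode havingClause = qbp.getHavingForClause(destClauseName);

For multi-insert queries the set holds one name per destination, and callers such as the TreeSet example below iterate over all of them.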

Code examples

Code example source: apache/hive

ks.addAll(qbp.getClauseNames());

Code example source: apache/hive

private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel)
  throws SemanticException {
 RelNode gbFilter = null;
 QBParseInfo qbp = getQBParseInfo(qb);
 String destClauseName = qbp.getClauseNames().iterator().next();
 ASTNode havingClause = qbp.getHavingForClause(qbp.getClauseNames().iterator().next());
 if (havingClause != null) {
  if (!(srcRel instanceof HiveAggregate)) {
   // ill-formed query like select * from t1 having c1 > 0;
   throw new CalciteSemanticException("Having clause without any group-by.",
     UnsupportedFeature.Having_clause_without_any_groupby);
  }
  ASTNode targetNode = (ASTNode) havingClause.getChild(0);
  validateNoHavingReferenceToAlias(qb, targetNode);
  if (!qbp.getDestToGroupBy().isEmpty()) {
   final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty()
       || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty());
   // Special handling of grouping function
   targetNode = rewriteGroupingFunctionAST(getGroupByForClause(qbp, destClauseName), targetNode,
     !cubeRollupGrpSetPresent);
  }
  gbFilter = genFilterRelNode(qb, targetNode, srcRel, aliasToRel, null, null, true);
 }
 return gbFilter;
}

Code example source: apache/drill

ks.addAll(qbp.getClauseNames());

Code example source: apache/hive

private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
 HiveRelNode sortRel = null;
 QBParseInfo qbp = getQBParseInfo(qb);
 SimpleEntry<Integer,Integer> entry =
   qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
 Integer offset = (entry == null) ? 0 : entry.getKey();
 Integer fetch = (entry == null) ? null : entry.getValue();
 if (fetch != null) {
  RexNode offsetRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(offset));
  RexNode fetchRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(fetch));
  RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION);
  RelCollation canonizedCollation = traitSet.canonize(RelCollations.EMPTY);
  sortRel = new HiveSortLimit(cluster, traitSet, srcRel, canonizedCollation, offsetRN, fetchRN);
  RowResolver inputRR = relToHiveRR.get(srcRel);
  RowResolver outputRR = inputRR.duplicate();
  ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(
    outputRR, sortRel);
  relToHiveRR.put(sortRel, outputRR);
  relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
 }
 return sortRel;
}

Code example source: apache/drill

private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel)
  throws SemanticException {
 RelNode gbFilter = null;
 QBParseInfo qbp = getQBParseInfo(qb);
 String destClauseName = qbp.getClauseNames().iterator().next();
 ASTNode havingClause = qbp.getHavingForClause(qbp.getClauseNames().iterator().next());
 if (havingClause != null) {
  if (!(srcRel instanceof HiveAggregate)) {
   // ill-formed query like select * from t1 having c1 > 0;
   throw new CalciteSemanticException("Having clause without any group-by.",
     UnsupportedFeature.Having_clause_without_any_groupby);
  }
  ASTNode targetNode = (ASTNode) havingClause.getChild(0);
  validateNoHavingReferenceToAlias(qb, targetNode);
  if (!qbp.getDestToGroupBy().isEmpty()) {
   final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty()
       || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty());
   // Special handling of grouping function
   targetNode = rewriteGroupingFunctionAST(getGroupByForClause(qbp, destClauseName), targetNode,
     !cubeRollupGrpSetPresent);
  }
  gbFilter = genFilterRelNode(qb, targetNode, srcRel, aliasToRel, null, null, true);
 }
 return gbFilter;
}

Code example source: apache/drill

private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException {
 HiveRelNode sortRel = null;
 QBParseInfo qbp = getQBParseInfo(qb);
 SimpleEntry<Integer,Integer> entry =
   qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next());
 Integer offset = (entry == null) ? 0 : entry.getKey();
 Integer fetch = (entry == null) ? null : entry.getValue();
 if (fetch != null) {
  RexNode offsetRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(offset));
  RexNode fetchRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(fetch));
  RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION);
  RelCollation canonizedCollation = traitSet.canonize(RelCollations.EMPTY);
  sortRel = new HiveSortLimit(cluster, traitSet, srcRel, canonizedCollation, offsetRN, fetchRN);
  RowResolver outputRR = new RowResolver();
  if (!RowResolver.add(outputRR, relToHiveRR.get(srcRel))) {
   throw new CalciteSemanticException(
     "Duplicates detected when adding columns to RR: see previous message",
     UnsupportedFeature.Duplicates_in_RR);
  }
  ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap(
    outputRR, sortRel);
  relToHiveRR.put(sortRel, outputRR);
  relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap);
 }
 return sortRel;
}

Code example source: apache/hive

private void setQueryHints(QB qb) throws SemanticException {
 QBParseInfo qbp = getQBParseInfo(qb);
 String selClauseName = qbp.getClauseNames().iterator().next();
 Tree selExpr0 = qbp.getSelForClause(selClauseName).getChild(0);
 if (selExpr0.getType() != HiveParser.QUERY_HINT) return;
 String hint = ctx.getTokenRewriteStream().toString(
   selExpr0.getTokenStartIndex(), selExpr0.getTokenStopIndex());
 LOG.debug("Handling query hints: " + hint);
 ParseDriver pd = new ParseDriver();
 try {
  ASTNode hintNode = pd.parseHint(hint);
  qbp.setHints(hintNode);
 } catch (ParseException e) {
  throw new SemanticException("failed to parse query hint: "+e.getMessage(), e);
 }
}

Code example source: apache/hive

final String selClauseName = qbp.getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty()
    || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty());

Code example source: apache/drill

final String selClauseName = qbp.getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty()
    || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty());

Code example source: apache/hive

final String destClauseName = qb.getParseInfo().getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qb.getParseInfo().getDestRollups().isEmpty()
  || !qb.getParseInfo().getDestGroupingSets().isEmpty()
  || !qb.getParseInfo().getDestCubes().isEmpty());

Code example source: apache/drill

final String destClauseName = qb.getParseInfo().getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qb.getParseInfo().getDestRollups().isEmpty()
    || !qb.getParseInfo().getDestGroupingSets().isEmpty()
    || !qb.getParseInfo().getDestCubes().isEmpty());

Code example source: apache/hive

QBParseInfo qbp = qb.getParseInfo();
TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
Map<String, Operator<? extends OperatorDesc>> inputs = createInputForDests(qb, input, ks);

Code example source: apache/hive

final String selClauseName = qb.getParseInfo().getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qb.getParseInfo().getDestRollups().isEmpty()
  || !qb.getParseInfo().getDestGroupingSets().isEmpty()
  || !qb.getParseInfo().getDestCubes().isEmpty());

Code example source: apache/hive

Set<String> dests = qb.getParseInfo().getClauseNames();
if ( dests.size() == 1 && joinTree.getNoOuterJoin()) {
 String dest = dests.iterator().next();

Code example source: apache/drill

final String selClauseName = qb.getParseInfo().getClauseNames().iterator().next();
final boolean cubeRollupGrpSetPresent = (!qb.getParseInfo().getDestRollups().isEmpty()
    || !qb.getParseInfo().getDestGroupingSets().isEmpty()
    || !qb.getParseInfo().getDestCubes().isEmpty());

Code example source: apache/hive

String detsClauseName = qbp.getClauseNames().iterator().next();

Code example source: apache/drill

QBParseInfo qbp = qb.getParseInfo();
TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
Map<String, Operator<? extends OperatorDesc>> inputs = createInputForDests(qb, input, ks);

Code example source: apache/drill

Set<String> dests = qb.getParseInfo().getClauseNames();
if ( dests.size() == 1 && joinTree.getNoOuterJoin()) {
 String dest = dests.iterator().next();

Code example source: apache/hive

RowResolver hiveRootRR = genRowResolver(hiveRoot, getQB());
opParseCtx.put(hiveRoot, new OpParseContext(hiveRootRR));
String dest = getQB().getParseInfo().getClauseNames().iterator().next();
if (getQB().getParseInfo().getDestSchemaForClause(dest) != null
  && this.getQB().getTableDesc() == null) {

Code example source: apache/drill

RowResolver hiveRootRR = genRowResolver(hiveRoot, getQB());
opParseCtx.put(hiveRoot, new OpParseContext(hiveRootRR));
String dest = getQB().getParseInfo().getClauseNames().iterator().next();
if (getQB().getParseInfo().getDestSchemaForClause(dest) != null
  && this.getQB().getTableDesc() == null) {
