Usage of the org.apache.hadoop.hive.ql.plan.api.Query.getStageGraph() method, with code examples

x33g5p2x · reposted on 2022-01-28

This article collects Java code examples for the org.apache.hadoop.hive.ql.plan.api.Query.getStageGraph method, showing how Query.getStageGraph is used in practice. The examples were extracted from selected projects hosted on GitHub, Stack Overflow, Maven and similar platforms, so they should serve as a useful reference. Details of the Query.getStageGraph method:
Package path: org.apache.hadoop.hive.ql.plan.api.Query
Class name: Query
Method name: getStageGraph

Query.getStageGraph overview

No Javadoc is provided by the upstream sources. Judging from the examples below, getStageGraph() returns the Thrift-generated Graph describing the stages of a compiled query plan (stage nodes plus their adjacency list), and it may be null until the plan has been populated.
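
As a minimal, hedged sketch of how the method might be exercised (the helper class and the way the QueryPlan instance is obtained are assumptions for illustration, not taken from the examples below):

public class StageGraphDump {
 // Prints "parent -> [children]" for every stage adjacency in the plan.
 // Assumes an org.apache.hadoop.hive.ql.QueryPlan named `plan` is available,
 // e.g. from Driver#getPlan() after the query has been compiled.
 public static void dump(org.apache.hadoop.hive.ql.QueryPlan plan) throws java.io.IOException {
  // getQueryPlan() lazily populates the Thrift Query object (see the examples below)
  org.apache.hadoop.hive.ql.plan.api.Query query = plan.getQueryPlan();
  org.apache.hadoop.hive.ql.plan.api.Graph stageGraph = query.getStageGraph();
  if (stageGraph == null || stageGraph.getAdjacencyList() == null) {
   return; // trivial plans may have no stage adjacencies
  }
  for (org.apache.hadoop.hive.ql.plan.api.Adjacency adj : stageGraph.getAdjacencyList()) {
   System.out.println(adj.getNode() + " -> " + adj.getChildren());
  }
 }
}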

Code examples

Code example source: apache/hive

public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
  throws IOException {
 if (query.getStageGraph() == null) {
  populateQueryPlan();
 }
 extractCounters();
 updateCountersInQueryPlan();
 return query;
}

Code example source: apache/drill

public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
 try {
  Graph stageGraph = plan.getQueryPlan().getStageGraph();
  if (stageGraph == null) {
   return;
  }
  List<Adjacency> adjList = stageGraph.getAdjacencyList();
  if (adjList == null) {
   return;
  }
  for (Adjacency adj : adjList) {
   List<String> children = adj.getChildren();
   if (children == null || children.isEmpty()) {
    return;
   }
   conf.setStrings("mapreduce.workflow.adjacency."+adj.getNode(),
     children.toArray(new String[children.size()]));
  }
 } catch (IOException e) {
  // best-effort: if the query plan cannot be read, skip setting workflow adjacencies
 }
}

Code example source: apache/hive

public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
 try {
  Graph stageGraph = plan.getQueryPlan().getStageGraph();
  if (stageGraph == null) {
   return;
  }
  List<Adjacency> adjList = stageGraph.getAdjacencyList();
  if (adjList == null) {
   return;
  }
  for (Adjacency adj : adjList) {
   List<String> children = adj.getChildren();
   if (CollectionUtils.isEmpty(children)) {
    return;
   }
   conf.setStrings("mapreduce.workflow.adjacency." + adj.getNode(),
     children.toArray(new String[0]));
  }
 } catch (IOException e) {
  // best-effort: if the query plan cannot be read, skip setting workflow adjacencies
 }
}
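
As a usage note, the setWorkflowAdjacencies variants above write each stage's children under a mapreduce.workflow.adjacency.<node> key. A hedged sketch of how those keys could be read back from the Configuration (the helper class is illustrative, not from the projects above):

public class WorkflowAdjacencyReader {
 private static final String PREFIX = "mapreduce.workflow.adjacency.";

 // Lists every workflow adjacency entry stored by setWorkflowAdjacencies(...).
 public static void print(org.apache.hadoop.conf.Configuration conf) {
  for (java.util.Map.Entry<String, String> entry : conf) {
   if (entry.getKey().startsWith(PREFIX)) {
    String node = entry.getKey().substring(PREFIX.length());
    // values were written with conf.setStrings(...), so read them back the same way
    String[] children = conf.getStrings(entry.getKey());
    System.out.println(node + " -> " + java.util.Arrays.toString(children));
   }
  }
 }
}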

Code example source: apache/drill

public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
  throws IOException {
 if (query.getStageGraph() == null) {
  populateQueryPlan();
 }
 extractCounters();
 updateCountersInQueryPlan();
 return query;
}

Code example source: twitter/ambrose

Map<String, List<String>> result = Maps.newHashMap();
try {
 Graph stageGraph = queryPlan.getQueryPlan().getStageGraph();
 if (stageGraph == null) {
  return result;
 }
 // ... (remainder of the method truncated in the source article)

Code example source: apache/hive

public String getJSONQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
 StringBuilder sb = new StringBuilder();
 sb.append("{");
 sb.append(getJSONKeyValue("queryId", query.getQueryId()));
 sb.append(getJSONKeyValue("queryType", query.getQueryType()));
 sb.append(getJSONKeyValue("queryAttributes", getJSONMap(query.getQueryAttributes())));
 sb.append(getJSONKeyValue("queryCounters", getJSONMap(query.getQueryCounters())));
 sb.append(getJSONKeyValue("stageGraph", getJSONGraph(query.getStageGraph())));
 // stageList
 List<String> stageList = new ArrayList<String>();
 if (query.getStageList() != null) {
  for (org.apache.hadoop.hive.ql.plan.api.Stage stage : query.getStageList()) {
   stageList.add(getJSONStage(stage));
  }
 }
 sb.append(getJSONKeyValue("stageList", getJSONList(stageList)));
 sb.append(getJSONKeyValue("done", query.isDone()));
 sb.append(getJSONKeyValue("started", query.isStarted()));
 sb.deleteCharAt(sb.length() - 1);
 sb.append("}");
 return sb.toString();
}

Code example source: apache/drill

public String getJSONQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
 StringBuilder sb = new StringBuilder();
 sb.append("{");
 sb.append(getJSONKeyValue("queryId", query.getQueryId()));
 sb.append(getJSONKeyValue("queryType", query.getQueryType()));
 sb.append(getJSONKeyValue("queryAttributes", getJSONMap(query.getQueryAttributes())));
 sb.append(getJSONKeyValue("queryCounters", getJSONMap(query.getQueryCounters())));
 sb.append(getJSONKeyValue("stageGraph", getJSONGraph(query.getStageGraph())));
 // stageList
 List<String> stageList = new ArrayList<String>();
 if (query.getStageList() != null) {
  for (org.apache.hadoop.hive.ql.plan.api.Stage stage : query.getStageList()) {
   stageList.add(getJSONStage(stage));
  }
 }
 sb.append(getJSONKeyValue("stageList", getJSONList(stageList)));
 sb.append(getJSONKeyValue("done", query.isDone()));
 sb.append(getJSONKeyValue("started", query.isStarted()));
 sb.deleteCharAt(sb.length() - 1);
 sb.append("}");
 return sb.toString();
}

Code example source: apache/hive

public Object getFieldValue(_Fields field) {
 switch (field) {
 case QUERY_ID:
  return getQueryId();
 case QUERY_TYPE:
  return getQueryType();
 case QUERY_ATTRIBUTES:
  return getQueryAttributes();
 case QUERY_COUNTERS:
  return getQueryCounters();
 case STAGE_GRAPH:
  return getStageGraph();
 case STAGE_LIST:
  return getStageList();
 case DONE:
  return isDone();
 case STARTED:
  return isStarted();
 }
 throw new IllegalStateException();
}

Code example source: apache/drill

public Object getFieldValue(_Fields field) {
 switch (field) {
 case QUERY_ID:
  return getQueryId();
 case QUERY_TYPE:
  return getQueryType();
 case QUERY_ATTRIBUTES:
  return getQueryAttributes();
 case QUERY_COUNTERS:
  return getQueryCounters();
 case STAGE_GRAPH:
  return getStageGraph();
 case STAGE_LIST:
  return getStageList();
 case DONE:
  return isDone();
 case STARTED:
  return isStarted();
 }
 throw new IllegalStateException();
}

Code example source: apache/hive

// Non-contiguous excerpt; elided code is marked with "// ..."
query.getStageGraph().setNodeType(NodeType.STAGE);
// ...
query.getStageGraph().addToAdjacencyList(childEntry);
// ...
query.getStageGraph().addToAdjacencyList(listEntry);
// ...
} else if (task.getChildTasks() != null) {
 org.apache.hadoop.hive.ql.plan.api.Adjacency entry = /* initializer elided */
 // ...
 query.getStageGraph().addToAdjacencyList(entry);
}
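
These lines appear to come from the method that builds the stage graph (populateQueryPlan, judging by the getQueryPlan examples above), but only a few non-contiguous lines survive in the source article. Below is a hedged reconstruction of the pattern they suggest; the Adjacency setters used here are standard Thrift-generated methods, and the overall logic is illustrative rather than the verbatim Hive code:

public class StageGraphBuilder {
 // Illustrative reconstruction, not the verbatim Hive code: create the Graph,
 // mark it as a graph of stages, and add one Adjacency per parent stage that
 // has child stages.
 public static void addStageEdges(org.apache.hadoop.hive.ql.plan.api.Query query,
   java.util.List<? extends org.apache.hadoop.hive.ql.exec.Task<?>> tasks) {
  if (query.getStageGraph() == null) {
   org.apache.hadoop.hive.ql.plan.api.Graph graph = new org.apache.hadoop.hive.ql.plan.api.Graph();
   graph.setNodeType(org.apache.hadoop.hive.ql.plan.api.NodeType.STAGE);
   query.setStageGraph(graph);
  }
  for (org.apache.hadoop.hive.ql.exec.Task<?> task : tasks) {
   if (task.getChildTasks() == null) {
    continue;
   }
   org.apache.hadoop.hive.ql.plan.api.Adjacency entry = new org.apache.hadoop.hive.ql.plan.api.Adjacency();
   entry.setAdjacencyType(org.apache.hadoop.hive.ql.plan.api.AdjacencyType.CONJUNCTIVE); // assumed type
   entry.setNode(task.getId()); // parent stage id
   for (org.apache.hadoop.hive.ql.exec.Task<?> child : task.getChildTasks()) {
    entry.addToChildren(child.getId()); // child stage ids
   }
   query.getStageGraph().addToAdjacencyList(entry);
  }
 }
}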

Code example source: apache/drill

// Non-contiguous excerpt; elided code is marked with "// ..."
query.getStageGraph().setNodeType(NodeType.STAGE);
// ...
query.getStageGraph().addToAdjacencyList(childEntry);
// ...
query.getStageGraph().addToAdjacencyList(listEntry);
// ...
} else if (task.getChildTasks() != null) {
 org.apache.hadoop.hive.ql.plan.api.Adjacency entry = /* initializer elided */
 // ...
 query.getStageGraph().addToAdjacencyList(entry);
}

Code example source: com.facebook.presto.hive/hive-apache

public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
 try {
  Graph stageGraph = plan.getQueryPlan().getStageGraph();
  if (stageGraph == null) {
   return;
  }
  List<Adjacency> adjList = stageGraph.getAdjacencyList();
  if (adjList == null) {
   return;
  }
  for (Adjacency adj : adjList) {
   List<String> children = adj.getChildren();
   if (children == null || children.isEmpty()) {
    return;
   }
   conf.setStrings("mapreduce.workflow.adjacency."+adj.getNode(),
     children.toArray(new String[children.size()]));
  }
 } catch (IOException e) {
  // best-effort: if the query plan cannot be read, skip setting workflow adjacencies
 }
}

Code example source: org.apache.hadoop.hive/hive-exec

public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
  throws IOException {
 if (query.getStageGraph() == null) {
  populateQueryPlan();
 }
 extractCounters();
 updateCountersInQueryPlan();
 return query;
}

Code example source: com.facebook.presto.hive/hive-apache

public org.apache.hadoop.hive.ql.plan.api.Query getQueryPlan()
  throws IOException {
 if (query.getStageGraph() == null) {
  populateQueryPlan();
 }
 extractCounters();
 updateCountersInQueryPlan();
 return query;
}

Code example source: com.facebook.presto.hive/hive-apache

public String getJSONQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
 StringBuilder sb = new StringBuilder();
 sb.append("{");
 sb.append(getJSONKeyValue("queryId", query.getQueryId()));
 sb.append(getJSONKeyValue("queryType", query.getQueryType()));
 sb.append(getJSONKeyValue("queryAttributes", getJSONMap(query.getQueryAttributes())));
 sb.append(getJSONKeyValue("queryCounters", getJSONMap(query.getQueryCounters())));
 sb.append(getJSONKeyValue("stageGraph", getJSONGraph(query.getStageGraph())));
 // stageList
 List<String> stageList = new ArrayList<String>();
 if (query.getStageList() != null) {
  for (org.apache.hadoop.hive.ql.plan.api.Stage stage : query.getStageList()) {
   stageList.add(getJSONStage(stage));
  }
 }
 sb.append(getJSONKeyValue("stageList", getJSONList(stageList)));
 sb.append(getJSONKeyValue("done", query.isDone()));
 sb.append(getJSONKeyValue("started", query.isStarted()));
 sb.deleteCharAt(sb.length() - 1);
 sb.append("}");
 return sb.toString();
}

Code example source: org.apache.hadoop.hive/hive-exec

public String getJSONQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
 StringBuilder sb = new StringBuilder();
 sb.append("{");
 sb.append(getJSONKeyValue("queryId", query.getQueryId()));
 sb.append(getJSONKeyValue("queryType", query.getQueryType()));
 sb.append(getJSONKeyValue("queryAttributes", getJSONMap(query.getQueryAttributes())));
 sb.append(getJSONKeyValue("queryCounters", getJSONMap(query.getQueryCounters())));
 sb.append(getJSONKeyValue("stageGraph", getJSONGraph(query.getStageGraph())));
 // stageList
 List<String> stageList = new ArrayList<String>();
 if (query.getStageList() != null) {
  for (org.apache.hadoop.hive.ql.plan.api.Stage stage : query.getStageList()) {
   stageList.add(getJSONStage(stage));
  }
 }
 sb.append(getJSONKeyValue("stageList", getJSONList(stageList)));
 sb.append(getJSONKeyValue("done", query.isDone()));
 sb.append(getJSONKeyValue("started", query.isStarted()));
 sb.deleteCharAt(sb.length() - 1);
 sb.append("}");
 return sb.toString();
}

Code example source: org.apache.hadoop.hive/hive-exec

public Object getFieldValue(_Fields field) {
 switch (field) {
 case QUERY_ID:
  return getQueryId();
 case QUERY_TYPE:
  return getQueryType();
 case QUERY_ATTRIBUTES:
  return getQueryAttributes();
 case QUERY_COUNTERS:
  return getQueryCounters();
 case STAGE_GRAPH:
  return getStageGraph();
 case STAGE_LIST:
  return getStageList();
 case DONE:
  return new Boolean(isDone());
 case STARTED:
  return new Boolean(isStarted());
 }
 throw new IllegalStateException();
}

Code example source: com.facebook.presto.hive/hive-apache

public Object getFieldValue(_Fields field) {
 switch (field) {
 case QUERY_ID:
  return getQueryId();
 case QUERY_TYPE:
  return getQueryType();
 case QUERY_ATTRIBUTES:
  return getQueryAttributes();
 case QUERY_COUNTERS:
  return getQueryCounters();
 case STAGE_GRAPH:
  return getStageGraph();
 case STAGE_LIST:
  return getStageList();
 case DONE:
  return Boolean.valueOf(isDone());
 case STARTED:
  return Boolean.valueOf(isStarted());
 }
 throw new IllegalStateException();
}

Code example source: org.apache.hadoop.hive/hive-exec

// Non-contiguous excerpt; elided code is marked with "// ..."
query.getStageGraph().setNodeType(NodeType.STAGE);
// ...
query.getStageGraph().addToAdjacencyList(childEntry);
// ...
query.getStageGraph().addToAdjacencyList(listEntry);
// ...
} else if (task.getChildTasks() != null) {
 org.apache.hadoop.hive.ql.plan.api.Adjacency entry = /* initializer elided */
 // ...
 query.getStageGraph().addToAdjacencyList(entry);
}

Code example source: com.facebook.presto.hive/hive-apache

// Non-contiguous excerpt; elided code is marked with "// ..."
query.getStageGraph().setNodeType(NodeType.STAGE);
// ...
query.getStageGraph().addToAdjacencyList(childEntry);
// ...
query.getStageGraph().addToAdjacencyList(listEntry);
// ...
} else if (task.getChildTasks() != null) {
 org.apache.hadoop.hive.ql.plan.api.Adjacency entry = /* initializer elided */
 // ...
 query.getStageGraph().addToAdjacencyList(entry);
}
