本文整理了Java中backtype.storm.task.TopologyContext.getThisOutputFields()
方法的一些代码示例,展示了TopologyContext.getThisOutputFields()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。TopologyContext.getThisOutputFields()
方法的具体详情如下:
包路径:backtype.storm.task.TopologyContext
类名称:TopologyContext
方法名:getThisOutputFields
[英]Gets the declared output fields for the specified stream id for the component this task is a part of.
[中]获取此任务所属组件的指定流id的声明输出字段。
代码示例来源:origin: alibaba/jstorm
/**
 * Returns the declared output fields of every stream of the component this
 * task belongs to, keyed by stream id.
 *
 * @return map from stream id to the ordered list of that stream's field names
 */
public Map<String, List<String>> getThisOutputFieldsForStreams() {
    Map<String, List<String>> fieldsByStream = new HashMap<>();
    for (String streamId : getThisStreams()) {
        List<String> fieldNames = getThisOutputFields(streamId).toList();
        fieldsByStream.put(streamId, fieldNames);
    }
    return fieldsByStream;
}
代码示例来源:origin: alibaba/jstorm
Map<String, Grouping> component_grouping = entry.getValue();
Fields out_fields = topology_context.getThisOutputFields(stream_id);
代码示例来源:origin: alibaba/mdrill
/**
 * Builds the outbound grouping table for the current task:
 * stream id -> (downstream component id -> MkGrouper).
 *
 * @param topology_context context of the task whose outgoing streams are inspected
 * @return groupers for every target component, indexed by stream id; streams
 *         with no runnable targets are omitted
 */
public static Map<String, Map<String, MkGrouper>> outbound_components(
        TopologyContext topology_context) {
    Map<String, Map<String, MkGrouper>> streamToGroupers =
            new HashMap<String, Map<String, MkGrouper>>();
    // <Stream_id, <component, Grouping>>
    Map<String, Map<String, Grouping>> targets =
            topology_context.getThisTargets();
    for (Entry<String, Map<String, Grouping>> target : targets.entrySet()) {
        String streamId = target.getKey();
        Fields outputFields = topology_context.getThisOutputFields(streamId);
        Map<String, MkGrouper> grouperByComponent =
                new HashMap<String, MkGrouper>();
        for (Entry<String, Grouping> downstream : target.getValue().entrySet()) {
            String componentId = downstream.getKey();
            int taskCount =
                    topology_context.getComponentTasks(componentId).size();
            // A component declared with parallelism 0 has no tasks, so no
            // grouper is created for it.
            if (taskCount > 0) {
                grouperByComponent.put(componentId,
                        new MkGrouper(outputFields, downstream.getValue(),
                                taskCount));
            }
        }
        if (!grouperByComponent.isEmpty()) {
            streamToGroupers.put(streamId, grouperByComponent);
        }
    }
    return streamToGroupers;
}
代码示例来源:origin: com.alibaba.jstorm/jstorm-core
/**
 * Collects the declared output fields for each stream of the component this
 * task is part of.
 *
 * @return map from stream id to the ordered list of that stream's field names
 */
public Map<String, List<String>> getThisOutputFieldsForStreams() {
    Map<String, List<String>> fieldsByStream =
            new HashMap<String, List<String>>();
    for (String streamId : getThisStreams()) {
        fieldsByStream.put(streamId, getThisOutputFields(streamId).toList());
    }
    return fieldsByStream;
}
代码示例来源:origin: com.alibaba.jstorm/jstorm-core
/**
 * get current task's output <Stream_id, <componentId, MkGrouper>>
 *
 * @param topology_context context of the task whose outgoing streams are inspected
 * @param workerData       worker-level state handed to each created MkGrouper
 * @return groupers for every reachable target component, indexed by stream id;
 *         streams without any runnable target component are omitted
 */
public static Map<String, Map<String, MkGrouper>> outbound_components(TopologyContext topology_context, WorkerData workerData) {
    Map<String, Map<String, MkGrouper>> streamToGroupers = new HashMap<String, Map<String, MkGrouper>>();
    // <Stream_id,<component,Grouping>>
    Map<String, Map<String, Grouping>> targets = topology_context.getThisTargets();
    for (Entry<String, Map<String, Grouping>> target : targets.entrySet()) {
        String streamId = target.getKey();
        Fields outputFields = topology_context.getThisOutputFields(streamId);
        Map<String, MkGrouper> grouperByComponent = new HashMap<String, MkGrouper>();
        for (Entry<String, Grouping> downstream : target.getValue().entrySet()) {
            String componentId = downstream.getKey();
            List<Integer> outTasks = topology_context.getComponentTasks(componentId);
            // ATTENTION: If topology set one component parallelism as 0
            // so we don't need send tuple to it
            if (!outTasks.isEmpty()) {
                grouperByComponent.put(componentId, new MkGrouper(topology_context, outputFields, downstream.getValue(), componentId, streamId, workerData));
            }
            LOG.info("outbound_components, {}-{} for task-{} on {}", componentId, outTasks, topology_context.getThisTaskId(), streamId);
        }
        if (!grouperByComponent.isEmpty()) {
            streamToGroupers.put(streamId, grouperByComponent);
        }
    }
    return streamToGroupers;
}
内容来源于网络,如有侵权,请联系作者删除!