本文整理了Java中org.apache.spark.streaming.kafka010.KafkaUtils
类的一些代码示例,展示了KafkaUtils
类的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。KafkaUtils
类的具体详情如下:
包路径:org.apache.spark.streaming.kafka010.KafkaUtils
类名称:KafkaUtils
暂无
代码示例来源:origin: OryxProject/oryx
/**
 * Builds the direct Kafka input DStream that feeds this streaming job.
 *
 * <p>Fails fast (via {@link Preconditions}) when the input topic — or, if
 * configured, the update topic — does not exist yet. Note the two distinct
 * classes named {@code KafkaUtils}: the bare {@code KafkaUtils.topicExists}
 * call is a project-local helper, while the fully-qualified
 * {@code org.apache.spark.streaming.kafka010.KafkaUtils} is Spark's.
 *
 * @param streamingContext the Spark streaming context to attach the stream to
 * @return a direct Kafka stream over the configured input topic
 */
protected final JavaInputDStream<ConsumerRecord<K,M>> buildInputDStream(
    JavaStreamingContext streamingContext) {
  Preconditions.checkArgument(
      KafkaUtils.topicExists(inputTopicLockMaster, inputTopic),
      "Topic %s does not exist; did you create it?", inputTopic);
  if (updateTopic != null && updateTopicLockMaster != null) {
    Preconditions.checkArgument(
        KafkaUtils.topicExists(updateTopicLockMaster, updateTopic),
        "Topic %s does not exist; did you create it?", updateTopic);
  }
  Map<String,Object> kafkaParams = new HashMap<>();
  kafkaParams.put("bootstrap.servers", inputBroker);
  kafkaParams.put("group.id", getGroupID());
  kafkaParams.put("key.deserializer", keyDecoderClass.getName());
  kafkaParams.put("value.deserializer", messageDecoderClass.getName());
  // Don't re-consume old messages from input by default. (Ignored by the
  // Kafka 0.10 Spark integration, which manages offsets itself.)
  kafkaParams.put("auto.offset.reset", "latest");
  return org.apache.spark.streaming.kafka010.KafkaUtils.createDirectStream(
      streamingContext,
      LocationStrategies.PreferConsistent(),
      ConsumerStrategies.<K,M>Subscribe(
          Collections.singleton(inputTopic), kafkaParams, Collections.emptyMap()));
}
代码示例来源:origin: uber/marmaray
kafkaPartitionOffsetToSparkPartitionMap);
final JavaRDD<byte[]> kafkaData = KafkaUtils.<byte[], byte[]>createRDD(
this.jsc.get(),
KafkaUtil.getKafkaParams(this.conf),
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10
JavaRDD<String> rdd1 = KafkaUtils.<String, String>createRDD(
sc,
kafkaParams,
).map(handler);
JavaRDD<String> rdd2 = KafkaUtils.<String, String>createRDD(
sc,
kafkaParams,
代码示例来源:origin: com.cloudera.oryx/oryx-lambda
/**
 * Builds the direct Kafka input DStream that feeds this streaming job.
 *
 * <p>Beware: two distinct classes named {@code KafkaUtils} are in play here.
 * The bare {@code KafkaUtils.topicExists} calls resolve to a project helper
 * (the Spark class has no such method), while the Spark class is referenced
 * fully qualified at the bottom — presumably to disambiguate the import.
 *
 * @param streamingContext the Spark streaming context to attach the stream to
 * @return a direct Kafka stream over the configured input topic
 */
protected final JavaInputDStream<ConsumerRecord<K,M>> buildInputDStream(
JavaStreamingContext streamingContext) {
// Fail fast if the input topic has not been created yet.
Preconditions.checkArgument(
KafkaUtils.topicExists(inputTopicLockMaster, inputTopic),
"Topic %s does not exist; did you create it?", inputTopic);
// The update topic is optional; only validate it when fully configured.
if (updateTopic != null && updateTopicLockMaster != null) {
Preconditions.checkArgument(
KafkaUtils.topicExists(updateTopicLockMaster, updateTopic),
"Topic %s does not exist; did you create it?", updateTopic);
}
String groupID = getGroupID();
Map<String,Object> kafkaParams = new HashMap<>();
kafkaParams.put("group.id", groupID);
// Don't re-consume old messages from input by default
kafkaParams.put("auto.offset.reset", "latest"); // Ignored by Kafka 0.10 Spark integration
kafkaParams.put("bootstrap.servers", inputBroker);
kafkaParams.put("key.deserializer", keyDecoderClass.getName());
kafkaParams.put("value.deserializer", messageDecoderClass.getName());
LocationStrategy locationStrategy = LocationStrategies.PreferConsistent();
// Subscribe to exactly one topic; the empty map means no explicit start offsets.
ConsumerStrategy<K,M> consumerStrategy = ConsumerStrategies.Subscribe(
Collections.singleton(inputTopic), kafkaParams, Collections.emptyMap());
return org.apache.spark.streaming.kafka010.KafkaUtils.createDirectStream(
streamingContext,
locationStrategy,
consumerStrategy);
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10_2.11
JavaRDD<String> rdd1 = KafkaUtils.<String, String>createRDD(
sc,
kafkaParams,
).map(handler);
JavaRDD<String> rdd2 = KafkaUtils.<String, String>createRDD(
sc,
kafkaParams,
代码示例来源:origin: jetoile/hadoop-unit
KafkaUtils.createDirectStream(
scc,
LocationStrategies.PreferConsistent(),
代码示例来源:origin: ebi-wp/kafka-streams-api-websockets
KafkaUtils.createDirectStream(
streamingContext,
LocationStrategies.PreferConsistent(),
代码示例来源:origin: cloudera-labs/envelope
dStream = KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(),
ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams, lastOffsets));
} else {
dStream = KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(),
ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams));
dStream = KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(),
ConsumerStrategies.<byte[], byte[]>Subscribe(topics, kafkaParams, lastOffsets));
} else {
dStream = KafkaUtils.createDirectStream(jssc, LocationStrategies.PreferConsistent(),
ConsumerStrategies.<byte[], byte[]>Subscribe(topics, kafkaParams));
代码示例来源:origin: streampipes/streampipes-ce
/**
 * Creates the Spark streaming source for the i-th input stream of the graph.
 * Currently just kafka is supported as a protocol.
 * TODO Add also jms support
 *
 * @param i                index of the input stream within the graph
 * @param streamingContext the Spark streaming context to attach the source to
 * @return a Kafka direct stream for that input, or {@code null} when the index
 *         is out of range or the stream slot is empty
 */
private JavaInputDStream<ConsumerRecord<String, String>> getStreamSource(int i, JavaStreamingContext streamingContext) {
  // Guard clauses: bail out early instead of nesting if/else.
  if (i > graph.getInputStreams().size() - 1) {
    return null;
  }
  SpDataStream stream = graph.getInputStreams().get(i);
  if (stream == null) {
    return null;
  }
  KafkaTransportProtocol protocol =
      (KafkaTransportProtocol) stream.getEventGrounding().getTransportProtocol();
  return KafkaUtils.createDirectStream(
      streamingContext,
      LocationStrategies.PreferConsistent(),
      ConsumerStrategies.<String, String>Subscribe(
          Arrays.asList(protocol.getTopicDefinition().getActualTopicName()),
          kafkaParams));
}
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10
"-" + System.currentTimeMillis());
JavaInputDStream<ConsumerRecord<String, String>> istream1 = KafkaUtils.createDirectStream(
ssc,
LocationStrategies.PreferConsistent(),
"-" + System.currentTimeMillis());
JavaInputDStream<ConsumerRecord<String, String>> istream2 = KafkaUtils.createDirectStream(
ssc,
LocationStrategies.PreferConsistent(),
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10_2.11
"-" + System.currentTimeMillis());
JavaInputDStream<ConsumerRecord<String, String>> istream1 = KafkaUtils.createDirectStream(
ssc,
LocationStrategies.PreferConsistent(),
"-" + System.currentTimeMillis());
JavaInputDStream<ConsumerRecord<String, String>> istream2 = KafkaUtils.createDirectStream(
ssc,
LocationStrategies.PreferConsistent(),
内容来源于网络,如有侵权,请联系作者删除!