本文整理了Java中org.apache.spark.streaming.kafka.KafkaTestUtils类的一些代码示例,展示了KafkaTestUtils类的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。KafkaTestUtils类的具体详情如下:
包路径:org.apache.spark.streaming.kafka.KafkaTestUtils
类名称:KafkaTestUtils
暂无
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.11
private String[] createTopicAndSendData(String topic) {
  // Create the topic, publish three "<topic>-N" test messages to it, and
  // return the messages so the caller can assert on exactly what was sent.
  String[] messages = new String[3];
  for (int i = 0; i < messages.length; i++) {
    messages[i] = topic + "-" + (i + 1);
  }
  kafkaTestUtils.createTopic(topic);
  kafkaTestUtils.sendMessages(topic, messages);
  return messages;
}
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka
@Before
public void setUp() {
  // Boot the embedded Kafka/ZooKeeper test fixture first, then a local
  // 4-core Spark context named after the concrete test class.
  kafkaTestUtils = new KafkaTestUtils();
  kafkaTestUtils.setup();
  sc = new JavaSparkContext(
      new SparkConf().setAppName(this.getClass().getSimpleName()).setMaster("local[4]"));
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.10
sent.put("c", 10);
kafkaTestUtils.createTopic(topic);
kafkaTestUtils.sendMessages(topic, sent);
kafkaParams.put("zookeeper.connect", kafkaTestUtils.zkAddress());
kafkaParams.put("group.id", "test-consumer-" + random.nextInt(10000));
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
@After
public void tearDown() {
  // Tear down in the reverse order of setUp: streaming context first, then
  // the Kafka fixture. Fields are nulled so no stale references survive.
  if (ssc != null) {
    ssc.stop();
  }
  ssc = null;
  if (kafkaTestUtils != null) {
    kafkaTestUtils.teardown();
  }
  kafkaTestUtils = null;
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
kafkaParams.put("metadata.broker.list", kafkaTestUtils.brokerAddress());
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.10
@Before
public void setUp() {
  // Start the embedded Kafka broker/ZooKeeper pair before creating the
  // local Spark context used by the tests.
  kafkaTestUtils = new KafkaTestUtils();
  kafkaTestUtils.setup();
  SparkConf conf = new SparkConf()
      .setAppName(this.getClass().getSimpleName())
      .setMaster("local[4]");
  sc = new JavaSparkContext(conf);
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.11
sent.put("c", 10);
kafkaTestUtils.createTopic(topic);
kafkaTestUtils.sendMessages(topic, sent);
kafkaParams.put("zookeeper.connect", kafkaTestUtils.zkAddress());
kafkaParams.put("group.id", "test-consumer-" + random.nextInt(10000));
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
@After
public void tearDown() {
  // Stop the Spark context, then shut down the embedded Kafka fixture;
  // clear both fields regardless so the next test starts clean.
  if (sc != null) {
    sc.stop();
  }
  sc = null;
  if (kafkaTestUtils != null) {
    kafkaTestUtils.teardown();
  }
  kafkaTestUtils = null;
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.10
kafkaParams.put("metadata.broker.list", kafkaTestUtils.brokerAddress());
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.11
@Before
public void setUp() {
  // Per-test fixture: embedded Kafka first, then a local[4] Spark context
  // whose app name is the test class's simple name.
  kafkaTestUtils = new KafkaTestUtils();
  kafkaTestUtils.setup();
  sc = new JavaSparkContext(
      new SparkConf().setAppName(this.getClass().getSimpleName()).setMaster("local[4]"));
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka
sent.put("c", 10);
kafkaTestUtils.createTopic(topic);
kafkaTestUtils.sendMessages(topic, sent);
kafkaParams.put("zookeeper.connect", kafkaTestUtils.zkAddress());
kafkaParams.put("group.id", "test-consumer-" + random.nextInt(10000));
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
private String[] createTopicAndSendData(String topic) {
  // Create the topic (second arg presumably the partition count — confirm
  // against KafkaTestUtils), publish three "<topic>-N" messages, and return
  // them so callers can assert on the sent payload.
  String[] messages = new String[3];
  for (int i = 0; i < messages.length; i++) {
    messages[i] = topic + "-" + (i + 1);
  }
  kafkaTestUtils.createTopic(topic, 1);
  kafkaTestUtils.sendMessages(topic, messages);
  return messages;
}
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8_2.11
@After
public void tearDown() {
  // Reverse of setUp: stop the Spark context, then tear down the Kafka
  // fixture, nulling each field after use.
  if (sc != null) {
    sc.stop();
  }
  sc = null;
  if (kafkaTestUtils != null) {
    kafkaTestUtils.teardown();
  }
  kafkaTestUtils = null;
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8_2.11
kafkaParams.put("metadata.broker.list", kafkaTestUtils.brokerAddress());
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
@Before
public void setUp() {
  // Bring up the embedded Kafka fixture, then the local Spark context.
  kafkaTestUtils = new KafkaTestUtils();
  kafkaTestUtils.setup();
  SparkConf conf = new SparkConf()
      .setAppName(this.getClass().getSimpleName())
      .setMaster("local[4]");
  sc = new JavaSparkContext(conf);
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8
sent.put("c", 10);
kafkaTestUtils.createTopic(topic, 1);
kafkaTestUtils.sendMessages(topic, sent);
kafkaParams.put("zookeeper.connect", kafkaTestUtils.zkAddress());
kafkaParams.put("group.id", "test-consumer-" + random.nextInt(10000));
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka_2.10
private String[] createTopicAndSendData(String topic) {
  // Create the topic and send three suffixed test messages ("<topic>-1"
  // through "<topic>-3"); the returned array is what was published.
  String[] messages = new String[3];
  for (int i = 0; i < messages.length; i++) {
    messages[i] = topic + "-" + (i + 1);
  }
  kafkaTestUtils.createTopic(topic);
  kafkaTestUtils.sendMessages(topic, messages);
  return messages;
}
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8_2.11
@After
public void tearDown() {
  // Stop the streaming context before tearing down the broker it consumes
  // from; null both fields so no state crosses test boundaries.
  if (ssc != null) {
    ssc.stop();
  }
  ssc = null;
  if (kafkaTestUtils != null) {
    kafkaTestUtils.teardown();
  }
  kafkaTestUtils = null;
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka
kafkaParams.put("metadata.broker.list", kafkaTestUtils.brokerAddress());
kafkaParams.put("auto.offset.reset", "smallest");
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-8_2.11
@Before
public void setUp() {
  // Embedded Kafka comes up first; the Spark context is created against a
  // local[4] master with the test class's simple name as app name.
  kafkaTestUtils = new KafkaTestUtils();
  kafkaTestUtils.setup();
  sc = new JavaSparkContext(
      new SparkConf().setAppName(this.getClass().getSimpleName()).setMaster("local[4]"));
}
内容来源于网络,如有侵权,请联系作者删除!