Reading from one Kafka cluster and writing to another Kafka cluster

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

/**
 * Created by lmy on 2018/10/10.
 */
public class TEST {

    public static void main(String[] args) {
        // Consumer configuration: the source cluster
        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers", "host11:9092,host13:9092");
        consumerProps.put("auto.offset.reset", "latest");
        consumerProps.put("socket.timeout.ms", "300000");
        consumerProps.put("zookeeper.session.timeout.ms", "300000");
        consumerProps.put("auto.commit.enable", "true");
        // Use a group.id of this job's own: consumers sharing a group id split the
        // topic's partitions between them, so reusing an existing group would leave
        // this job with only part of the data.
        consumerProps.put("group.id", "111");
        /* key deserializer class */
        consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        /* value deserializer class */
        consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        /* create the consumer */
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(consumerProps);
        /* topics to consume; several topics can be subscribed at once */
        consumer.subscribe(Arrays.asList("a3"));

        // Producer configuration: the target cluster (legacy Scala producer API),
        // kept in its own Properties object so consumer and producer settings do not mix
        Properties producerProps = new Properties();
        producerProps.put("metadata.broker.list", "host58:9092,host59:9092,host60:9092");
        producerProps.put("serializer.class", "kafka.serializer.StringEncoder");
        ProducerConfig config = new ProducerConfig(producerProps);
        Producer<String, String> producer = new Producer<String, String>(config);

        /* poll the source topic, blocking for up to 1000 ms per call */
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(1000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.value());
                // forward each value to topic a4 on the target cluster
                KeyedMessage<String, String> message = new KeyedMessage<String, String>("a4", record.value());
                producer.send(message);
            }
        }
    }
}
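
The producer half above uses the legacy Scala producer (kafka.javaapi.producer.Producer with KeyedMessage), which has been removed from newer Kafka releases. If the target cluster can be reached with the current Java client, a minimal sketch of the same forwarding loop using org.apache.kafka.clients.producer.KafkaProducer might look like the following. The broker addresses, topics a3/a4, and group id are copied from the code above; the class name and everything else are assumptions for illustration.

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ForwardWithNewProducer {  // hypothetical class name, not from the original post

    public static void main(String[] args) {
        // Source-cluster consumer, same settings as the original example
        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers", "host11:9092,host13:9092");
        consumerProps.put("group.id", "111");
        consumerProps.put("auto.offset.reset", "latest");
        consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(consumerProps);
        consumer.subscribe(Arrays.asList("a3"));

        // Target-cluster producer using the new client API
        Properties producerProps = new Properties();
        producerProps.put("bootstrap.servers", "host58:9092,host59:9092,host60:9092");
        producerProps.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(producerProps);

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> record : records) {
                    // forward each value to topic a4 on the target cluster
                    producer.send(new ProducerRecord<String, String>("a4", record.value()));
                }
            }
        } finally {
            // flush buffered sends and release client resources on shutdown
            producer.close();
            consumer.close();
        }
    }
}

One practical difference from the original: the new producer batches sends in the background, so closing it on shutdown (as in the finally block) matters if the forwarded messages must not be lost.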
