Official Reactor Kafka reference guide: https://projectreactor.io/docs/kafka/milestone/reference/
Dependencies
<dependency>
    <groupId>io.projectreactor.kafka</groupId>
    <artifactId>reactor-kafka</artifactId>
    <version>1.1.1.RELEASE</version>
</dependency>
Producer
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import reactor.core.publisher.Flux;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;

public void producer(String topic, String value) {
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    SenderOptions<String, String> senderOptions = SenderOptions.<String, String>create(producerProps)
            .withKeySerializer(new StringSerializer())
            .withValueSerializer(new StringSerializer());
    KafkaSender<String, String> kafkaProducer = KafkaSender.create(senderOptions);
    // The third type parameter (Integer) is correlation metadata echoed back in each SenderResult.
    Flux<SenderRecord<String, String, Integer>> senderRecord =
            Flux.just(SenderRecord.create(new ProducerRecord<>(topic, value), 1));
    kafkaProducer.send(senderRecord)
            .doOnError(e -> log.error("ReactorKafka | error occurred when writing to kafka", e))
            .subscribe();
}
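send() returns a Flux<SenderResult<T>>, where T is the correlation metadata passed to SenderRecord.create, so each result can be matched back to the record that produced it. Below is a minimal sketch of a batch variant that logs the partition/offset of each result and closes the sender when the pipeline terminates; producerBatch is a hypothetical method name, and the broker address is a placeholder:

public void producerBatch(String topic) {
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    SenderOptions<String, String> senderOptions = SenderOptions.<String, String>create(producerProps)
            .withKeySerializer(new StringSerializer())
            .withValueSerializer(new StringSerializer());
    KafkaSender<String, String> sender = KafkaSender.create(senderOptions);
    // Use the record index as correlation metadata.
    Flux<SenderRecord<String, String, Integer>> records = Flux.range(1, 10)
            .map(i -> SenderRecord.create(new ProducerRecord<>(topic, "message-" + i), i));
    sender.send(records)
            .doOnNext(result -> log.info("record {} -> partition {} offset {}",
                    result.correlationMetadata(),
                    result.recordMetadata().partition(),
                    result.recordMetadata().offset()))
            .doOnError(e -> log.error("send failed", e))
            // Close the underlying KafkaProducer once the send pipeline terminates.
            .doFinally(signal -> sender.close())
            .subscribe();
}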
Consumer
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import reactor.core.publisher.Flux;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

public static Flux<String> consumer(String topic) {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "sample-group");
    ReceiverOptions<String, String> receiverOptions = ReceiverOptions.<String, String>create(props)
            .withKeyDeserializer(new StringDeserializer())
            .withValueDeserializer(new StringDeserializer())
            .closeTimeout(Duration.of(1, ChronoUnit.MINUTES))
            .subscription(Collections.singleton(topic))
            // Called when partitions are assigned to this consumer during a rebalance.
            .addAssignListener(receiverPartitions -> receiverPartitions.forEach(partition ->
                    log.info("------------ assign listener topic {} and partition {}",
                            partition.topicPartition().topic(), partition.topicPartition().partition())))
            // Called when partitions are revoked from this consumer during a rebalance.
            .addRevokeListener(receiverPartitions -> receiverPartitions.forEach(partition ->
                    log.info("------------ revoke topic {} and partition {} from listener",
                            partition.topicPartition().topic(), partition.topicPartition().partition())))
            // commitBatchSize(0) disables size-based commits; offsets are committed
            // periodically at commitInterval instead.
            .commitInterval(Duration.ofMillis(1000))
            .commitBatchSize(0)
            .pollTimeout(Duration.ofMillis(100));
    // receiveAutoAck() emits one inner Flux per consumer poll batch; records are
    // acknowledged automatically when the inner Flux completes. concatMap flattens
    // the batches in order.
    return KafkaReceiver.create(receiverOptions)
            .receiveAutoAck()
            .concatMap(r -> r)
            .map(ConsumerRecord::value);
}
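receiveAutoAck() trades per-record control for simplicity. If a record should only be acknowledged after it has actually been processed, receive() exposes a ReceiverOffset on every record instead. A minimal sketch under the same options; consumerManualAck is a hypothetical name, not part of the library:

// Same ReceiverOptions as above, but consume with per-record acknowledgement.
public static Flux<String> consumerManualAck(ReceiverOptions<String, String> receiverOptions) {
    return KafkaReceiver.create(receiverOptions)
            .receive() // Flux<ReceiverRecord<String, String>>
            .map(record -> {
                String value = record.value();
                // Mark the record as processed; the offset is committed later
                // according to commitInterval / commitBatchSize.
                record.receiverOffset().acknowledge();
                return value;
            });
}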
Usage
// InitializingBean callback: runs once the Spring bean's properties are set.
@Override
public void afterPropertiesSet() throws Exception {
    producer("test", "test");
    consumer("test")
            .doOnNext(System.out::println)
            .subscribe();
}
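Outside Spring, the same pair can be exercised from a plain main method. A sketch, assuming the methods above live in a class named KafkaDemo (a hypothetical name); since subscribe() is non-blocking, the demo sleeps briefly so the asynchronous send and receive have time to run:

public static void main(String[] args) throws Exception {
    // Subscribe first; with the default auto.offset.reset=latest, a record
    // produced before partitions are assigned may otherwise be missed.
    consumer("test")
            .doOnNext(System.out::println)
            .subscribe();
    new KafkaDemo().producer("test", "hello reactor kafka");
    // subscribe() returns immediately; keep the JVM alive long enough
    // for the consumer to poll and print the record.
    Thread.sleep(5000);
}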
Source: https://blog.csdn.net/Yuan52007298/article/details/100537347