Integrating Kafka with Spring Boot
1.pom.xml
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.8.0</version>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
    <version>1.1.1.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka-test</artifactId>
    <version>1.0.2.RELEASE</version>
    <scope>test</scope>
</dependency>
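The snippet only lists the Kafka-related dependencies; the usual Spring Boot starters (for example spring-boot-starter-web for the REST controller in step 4) are assumed to be in the pom as well, and the kafka_2.10 0.8.0 artifact is the old Scala client, which is probably redundant here since spring-kafka already pulls in the kafka-clients jar it needs. For completeness, the beans in the following steps are picked up from a standard Spring Boot entry point; a minimal sketch (the class name DemoApplication is assumed, not part of the original article):

package com.example.demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Boots the Spring context so that the KafkaConfig and controller shown below are registered
@SpringBootApplication
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}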
2.Configure Kafka (producer and consumer configuration; messages are consumed through MyMessageListener)
package com.example.demo.n1;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.config.ContainerProperties;

import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;

@Configuration
@EnableKafka
public class KafkaConfig {

    /* -------------- producer configuration ----------------- */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "xxx:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /* -------------- consumer configuration ----------------- */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "xxx:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "0");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    @Bean
    ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    // The class that actually consumes messages and runs the business logic
    @Bean
    public MyMessageListener myMessageListener() {
        return new MyMessageListener();
    }

    // Container properties for the consumer
    @Bean
    public ContainerProperties containerProperties() {
        Pattern topicPattern = Pattern.compile("your-topic-name"); // regex that subscribed topics must match
        ContainerProperties containerProperties = new ContainerProperties(topicPattern); // subscribe to every topic matching the pattern
        containerProperties.setMessageListener(myMessageListener()); // messages from the subscribed topics are handled by myMessageListener
        return containerProperties;
    }

    @Bean
    public KafkaMessageListenerContainer<String, String> kafkaMessageListenerContainer() {
        return new KafkaMessageListenerContainer<>(consumerFactory(), containerProperties());
    }

    /* -------------- kafka template configuration ----------------- */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
        kafkaTemplate.setDefaultTopic("defaultTopic");
        return kafkaTemplate;
    }
}
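A side note on the config above: the kafkaListenerContainerFactory bean is not used by the hand-built KafkaMessageListenerContainer; it only comes into play if you consume with the @KafkaListener annotation instead. A minimal sketch of that annotation-driven style (the class AnnotatedConsumer and the topic name myTopic are placeholders for illustration):

package com.example.demo.n1;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

// Annotation-driven alternative to the manual KafkaMessageListenerContainer:
// @EnableKafka plus the kafkaListenerContainerFactory bean turn this method into a consumer.
@Component
public class AnnotatedConsumer {

    // "myTopic" is a placeholder topic name used only in this sketch
    @KafkaListener(topics = "myTopic")
    public void listen(ConsumerRecord<String, String> record) {
        System.out.println("got message: " + record.value());
    }
}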
3.Configure the message listener; this is where the business logic goes
package com.example.demo.n1;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.listener.MessageListener;

// The listener must implement the onMessage method of the MessageListener interface
public class MyMessageListener implements MessageListener<String, String> {

    public final static Logger logger = LoggerFactory.getLogger(MyMessageListener.class);

    @Override // this method handles each incoming message
    public void onMessage(ConsumerRecord<String, String> data) {
        String topic = data.topic(); // topic the record was consumed from
        logger.info("-------------receive message from {} topic-------------", topic);
        logger.info("partition:{}", String.valueOf(data.partition())); // partition of the topic
        logger.info("offset:{}", String.valueOf(data.offset())); // consumer position
        logger.info("get message from {} topic : {}", topic, data.value()); // received payload
    }
}
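The listener above relies on auto-commit (ENABLE_AUTO_COMMIT_CONFIG is true in consumerConfigs). If you want to commit offsets yourself, spring-kafka also ships an AcknowledgingMessageListener; a sketch, assuming ENABLE_AUTO_COMMIT_CONFIG is switched to false and the ack mode on containerProperties() is set to MANUAL_IMMEDIATE:

package com.example.demo.n1;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.AcknowledgingMessageListener;
import org.springframework.kafka.support.Acknowledgment;

// Variant with manual offset commits; requires enable.auto.commit=false and
// containerProperties.setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL_IMMEDIATE)
public class MyAckMessageListener implements AcknowledgingMessageListener<String, String> {

    @Override
    public void onMessage(ConsumerRecord<String, String> data, Acknowledgment acknowledgment) {
        // process the record, then commit its offset explicitly
        System.out.println("got message: " + data.value());
        acknowledgment.acknowledge();
    }
}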
4.Test controller
package com.example.demo.n1;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.web.bind.annotation.*;
@RestController
public class TestController {

    public final static Logger logger = LoggerFactory.getLogger(TestController.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate; // the kafkaTemplate acts as the producer

    @RequestMapping(value = "/{topic}/send", method = RequestMethod.GET)
    public void sendMessage(
            @RequestParam(value = "message", defaultValue = "hello world") String message,
            @PathVariable final String topic) {
        logger.info("start send message to {}", topic);
        // send the message; if the topic does not exist it will be created automatically
        ListenableFuture<SendResult<String, String>> listenableFuture = kafkaTemplate.send(topic, message);
        // register callbacks for successful and failed sends
        listenableFuture.addCallback(
                result -> logger.info("send message to {} success", topic),
                ex -> logger.info("send message to {} failure,error message:{}", topic, ex.getMessage()));
    }

    @RequestMapping(value = "/default/send", method = RequestMethod.GET)
    public void sendMessageToDefault() { // send a message to the default topic
        logger.info("start send message to default topic");
        kafkaTemplate.sendDefault("你好,世界");
    }
}
As you can see, sending a Kafka message boils down to calling kafkaTemplate.send().
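Because send() returns a ListenableFuture, you can also block until the broker acknowledges the record instead of registering callbacks. A short sketch (the helper class, the topic name myTopic and the payload are made up for illustration):

package com.example.demo.n1;

import java.util.concurrent.ExecutionException;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

// Blocking send: wait for the broker's acknowledgement and inspect the record metadata
public class SyncSendExample {

    public static SendResult<String, String> sendAndWait(KafkaTemplate<String, String> kafkaTemplate)
            throws InterruptedException, ExecutionException {
        // get() blocks until the send completes and throws if it ultimately fails
        SendResult<String, String> result = kafkaTemplate.send("myTopic", "hello").get();
        System.out.println("sent to partition " + result.getRecordMetadata().partition()
                + " at offset " + result.getRecordMetadata().offset());
        return result;
    }
}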