Implementing a Kafka Producer and Consumer in Java
I. Producer
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class SimpleKafkaProducer {

    private static KafkaProducer<String, String> producer;
    private final static String TOPIC = "adienTest2";

    public SimpleKafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        // Wait for all in-sync replicas to acknowledge each record.
        props.put("acks", "all");
        props.put("retries", 0);
        // Batch size in bytes and how long to wait for a batch to fill.
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        // Total memory the producer may use to buffer unsent records.
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // Records carry a key, so the default partitioner assigns partitions by key hash.
        producer = new KafkaProducer<String, String>(props);
    }

    public void produce() {
        for (int i = 30; i < 40; i++) {
            String key = String.valueOf(i);
            String data = "hello kafka message:" + key;
            producer.send(new ProducerRecord<String, String>(TOPIC, key, data));
            System.out.println(data);
        }
        producer.close();
    }

    public static void main(String[] args) {
        new SimpleKafkaProducer().produce();
    }
}
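The produce() method above fires each send() without checking whether delivery actually succeeded. As a minimal sketch, the same loop can pass a Callback to send() so each record's delivery result is reported; the class name CallbackProducerSketch and the printed format are mine, not part of the original example.

import java.util.Properties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class CallbackProducerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("acks", "all");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);
        for (int i = 30; i < 40; i++) {
            String key = String.valueOf(i);
            String data = "hello kafka message:" + key;
            producer.send(new ProducerRecord<String, String>("adienTest2", key, data), new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception != null) {
                        // The send failed even after any configured retries.
                        exception.printStackTrace();
                    } else {
                        // Report where the record landed.
                        System.out.println("sent " + metadata.topic() + "-"
                                + metadata.partition() + "@" + metadata.offset());
                    }
                }
            });
        }
        producer.close();
    }
}

Because close() blocks until all buffered records have been sent, the callbacks run before the program exits.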
II. Consumer
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Properties;

public class SimpleKafkaConsumer {

    private static KafkaConsumer<String, String> consumer;
    private final static String TOPIC = "adienTest2";

    public SimpleKafkaConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        // Consumer group id; consumers sharing a group id split the topic's partitions.
        props.put("group.id", "test2");
        // Commit offsets automatically in the background
        // (a manual-commit variant is sketched after this class).
        props.put("enable.auto.commit", "true");
        // How often the consumed offsets are auto-committed.
        props.put("auto.commit.interval.ms", "1000");
        // Session timeout: if no heartbeat arrives within this window,
        // the broker treats the consumer as dead and rebalances the group.
        props.put("session.timeout.ms", "30000");
        // Where to start when there is no committed offset (earliest = from the beginning).
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<String, String>(props);
    }

    public void consume() {
        consumer.subscribe(Arrays.asList(TOPIC));
        while (true) {
            // Wait up to 100 ms for new records.
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d, key = %s, value = %s%n",
                        record.offset(), record.key(), record.value());
            }
        }
    }

    public static void main(String[] args) {
        new SimpleKafkaConsumer().consume();
    }
}
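The consumer above relies on enable.auto.commit=true, so offsets are committed in the background whether or not the records were fully processed. As a minimal sketch of the alternative, the following variant disables auto-commit and calls commitSync() only after each batch has been handled; the class name and the group id test2-manual are illustrative, not from the original.

import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ManualCommitConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "test2-manual");      // illustrative group id for this sketch
        props.put("enable.auto.commit", "false");   // turn off background offset commits
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("adienTest2"));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
                // Commit only after the batch has been processed; a crash before
                // this point means the uncommitted records are re-delivered.
                consumer.commitSync();
            }
        } finally {
            consumer.close();
        }
    }
}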
III. Running the example
1. The producer and consumer run against a locally installed Kafka, so first start ZooKeeper from the command line, then start the Kafka broker, and create the topic used in this example, adienTest2 (see the command sketch after this list).
2. Run the producer code; each message is printed to the console as it is sent.
3. Run the consumer code; each received record's offset, key, and value are printed.
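For step 1, the standard Kafka command-line scripts would look roughly like this; paths are relative to the Kafka installation directory and assume a 2018-era, ZooKeeper-managed Kafka, so adjust for your version and layout.

# Start ZooKeeper, then the Kafka broker (each in its own terminal).
bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties

# Create the topic used by the example.
bin/kafka-topics.sh --create --zookeeper localhost:2181 \
    --replication-factor 1 --partitions 1 --topic adienTest2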