1. Create a Java project. Add the following dependency to the pom.xml file:
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.10.2.0</version>
</dependency>
2. Create the producer code
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class ProducerSend {
    public static void main(String[] args) {
        // 1. Configuration: broker address, acks, batch size, buffer memory, key/value serializers, etc.
        //    (not every setting is required)
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // 2. Create the producer and establish the connection
        Producer<String, String> producer = new KafkaProducer<String, String>(props);
        try {
            // 3. Send messages to the "my-topic" topic
            for (int i = 0; i < 10000; i++) {
                System.out.println(Integer.toString(i));
                producer.send(new ProducerRecord<String, String>("my-topic", Integer.toString(i), Integer.toString(i)));
                Thread.sleep(500);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // 4. Close the producer (flushes any buffered records)
            producer.close();
        }
    }
}
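The send() call above is fire-and-forget: the returned Future and any broker-side error are simply ignored. If delivery needs to be confirmed, the producer API also accepts a callback that is invoked once the broker acknowledges the record. A minimal sketch, reusing the topic and bootstrap address from the example above; the class name ProducerSendWithCallback and the sample key/value are illustrative, not part of the original post:

import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class ProducerSendWithCallback {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<String, String>(props);
        try {
            // The callback runs when the broker acknowledges the record or the send fails
            producer.send(new ProducerRecord<String, String>("my-topic", "key-1", "value-1"),
                    new Callback() {
                        public void onCompletion(RecordMetadata metadata, Exception exception) {
                            if (exception != null) {
                                exception.printStackTrace();
                            } else {
                                System.out.println("sent to partition " + metadata.partition()
                                        + " at offset " + metadata.offset());
                            }
                        }
                    });
        } finally {
            // close() waits for in-flight requests to complete before shutting down
            producer.close();
        }
    }
}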
3. Create the consumer code
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerReceive {
    public static void main(String[] args) {
        // 1. Configuration: not every setting is required
        Properties props = new Properties();
        props.put("bootstrap.servers", "172.16.8.100:9092");
        props.put("auto.commit.interval.ms", "1000");
        // Every consumer must belong to a consumer group, so group.id must be set
        props.put("group.id", "test1");
        props.put("enable.auto.commit", "true");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // 2. Create the consumer and establish the connection
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);

        // 3. Subscribe to the "my-topic" topic
        consumer.subscribe(Arrays.asList("my-topic"));

        // 4. Consume data
        while (true) {
            // poll blocks for up to 100 ms and may return anywhere from 0 to n records per call
            ConsumerRecords<String, String> records = consumer.poll(100);
            // Iterate over the returned records and print them
            for (ConsumerRecord<String, String> record : records) {
                // System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
                System.out.println("Consumed record value: " + record.value());
            }
        }
    }
}
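With enable.auto.commit set to "true", offsets are committed in the background every auto.commit.interval.ms, so a record can be marked as consumed before the application has actually finished processing it. If at-least-once processing is needed, auto commit can be disabled and offsets committed explicitly after processing. A minimal sketch, reusing the broker address, group, and topic from the example above; the class name ConsumerManualCommit and the processing stub are illustrative, not part of the original post:

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class ConsumerManualCommit {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "172.16.8.100:9092");
        props.put("group.id", "test1");
        // Turn off auto commit; offsets are committed explicitly below
        props.put("enable.auto.commit", "false");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("my-topic"));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    // Process each record first ...
                    System.out.println("offset = " + record.offset() + ", value = " + record.value());
                }
                // ... then commit, so offsets only advance after processing succeeds
                consumer.commitSync();
            }
        } finally {
            consumer.close();
        }
    }
}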
Original article: https://www.cnblogs.com/tong775131501/p/12327167.html