
Kafka Learning (2): Producing and Consuming with kafka_2.12-1.1.0

2018-07-05  万总有点菜

Create a Maven Project

Add the Kafka core jar and the Kafka log4j appender jar as dependencies:

<dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka_2.12</artifactId>
      <version>1.1.0</version>
</dependency>
<dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-log4j-appender</artifactId>
      <version>1.1.0</version>
</dependency>

Producer

package com.lczyfz.junit;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;


/**
 * Created by maple on 2018-07-05.
 */
public class ProducerTest {


    public static void send(){
        Properties props = new Properties();

        props.put("bootstrap.servers", "192.168.1.211:9092,192.168.1.212:9092,192.168.1.213:9092");//kafka集群
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        Producer<String, String> producer = new KafkaProducer<String, String>(props);
        for (int i = 0; i < 10; i++){
            System.out.println(i);
            producer.send(new ProducerRecord<String, String>("qweasd", Integer.toString(i), Integer.toString(i)));

        }

        producer.close();
    }
    public static void main(String[] args){
        send();
    }
}

Note: if you run the producer and the messages never reach the Kafka cluster, make the following change.
In config/server.properties under the Kafka home directory, set advertised.listeners as shown below. (Tracking down this cause cost me more than two hours...)
advertised.listeners=PLAINTEXT://<IP of the broker host>:9092
Restart the broker after the change and test again; it works.
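
When chasing problems like this, it helps not to fire-and-forget. KafkaProducer.send() returns a Future<RecordMetadata> and also accepts a Callback, so delivery errors can be logged instead of silently dropped. Below is a minimal sketch of that pattern; the class name ProducerWithCallbackTest and the callback logic are my own additions for illustration, while the broker list and the topic qweasd come from the code above.

package com.lczyfz.junit;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class ProducerWithCallbackTest {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.211:9092,192.168.1.212:9092,192.168.1.213:9092");
        props.put("acks", "all");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources closes the producer and flushes any buffered records
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("qweasd", Integer.toString(i), Integer.toString(i));
                // The callback fires once the broker acknowledges (or rejects) the record.
                producer.send(record, (RecordMetadata metadata, Exception exception) -> {
                    if (exception != null) {
                        // e.g. a TimeoutException when advertised.listeners points to the wrong host
                        System.err.println("send failed: " + exception);
                    } else {
                        System.out.println("sent to " + metadata.topic() + "-"
                                + metadata.partition() + " @ offset " + metadata.offset());
                    }
                });
            }
        }
    }
}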

Consumer

package com.lczyfz.junit;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Properties;


/**
 * Created by maple on 2018-07-05.
 */
public class ConsumerTest {

    private static final Logger logger = LoggerFactory.getLogger(ConsumerTest.class);

    public static void poll(){
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.211:9092,192.168.1.212:9092,192.168.1.213:9092");//Kafka集群
        props.put("group.id", "test");
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("qweasd"));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records)
                System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
        }
    }
    public static void main(String[] args){
        poll();
    }
}
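
The loop above never exits and never calls consumer.close(), which is fine for a throwaway test but means the consumer never leaves the group cleanly. KafkaConsumer.wakeup() is the standard way out: it makes a blocked poll() throw WakeupException, typically triggered from a JVM shutdown hook. The sketch below is my own variant of the consumer above (the class name ConsumerShutdownTest is hypothetical); the properties and topic are unchanged.

package com.lczyfz.junit;

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

public class ConsumerShutdownTest {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.1.211:9092,192.168.1.212:9092,192.168.1.213:9092");
        props.put("group.id", "test");
        props.put("enable.auto.commit", "true");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("qweasd"));

        final Thread mainThread = Thread.currentThread();
        // Ctrl+C runs this hook; wakeup() makes the blocked poll() throw WakeupException.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.wakeup();
            try {
                mainThread.join(); // wait until the loop below has closed the consumer
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records)
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
            }
        } catch (WakeupException e) {
            // expected on shutdown, nothing to do
        } finally {
            consumer.close(); // commits offsets (auto-commit) and leaves the group cleanly
        }
    }
}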