首先,在虚拟机上搭建一个Kafka,可以参考这里:Linux 安装Kafka单机、集群

新建一个spring boot工程,引入maven依赖:kafka-clients,如下:

    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>2.0.0</version>
    </dependency>

接着实现生产者发送消息,如下:

package org.example.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * Producer implemented directly with the official kafka-clients API.
 * Sends two demo messages to {@code TEST_TOPIC} and then closes the producer.
 */
public class OriginalProducer implements Runnable {

    private static final String TOPIC = "TEST_TOPIC";

    /** Broker list; multiple addresses are separated by commas. */
    private static final String BROKER_LIST = "192.168.3.4:9092,192.168.3.5:9092,192.168.3.6:9092";

    /**
     * Acknowledgement policy:
     * 0   - success as soon as the record reaches the client's send buffer
     * 1   - success once the partition leader has appended the record to its log
     * all - success only after all in-sync replicas have replicated the record
     */
    private static final String ACKS_CONFIG = "all";

    /**
     * Fix: batch.size is measured in BYTES, not in number of buffered records
     * (the original comment claimed a record count). A value of "1" effectively
     * disables batching — acceptable for this demo, but it would hurt
     * throughput in production.
     */
    private static final String BATCH_SIZE_CONFIG = "1";

    // Shared, initialized once per JVM; closed at the end of run().
    private static final KafkaProducer<String, String> producer;

    static {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
        properties.setProperty(ProducerConfig.ACKS_CONFIG, ACKS_CONFIG);
        properties.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, BATCH_SIZE_CONFIG);
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producer = new KafkaProducer<>(properties);
    }

    @Override
    public void run() {
        try {
            // Wait for the application startup logs to finish before sending.
            Thread.sleep(10000L);
            String message = "This is a message ";
            for (int i = 0; i < 2; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, message + i);
                producer.send(record, (recordMetadata, e) -> {
                    if (e != null) {
                        System.out.println("发送消息异常!");
                        // Fix: surface the actual failure instead of swallowing it.
                        e.printStackTrace();
                    }
                    if (recordMetadata != null) {
                        // A topic may have several partitions; each consumer group
                        // tracks its own offset per partition.
                        System.out.println("消息发送成功:" + recordMetadata.partition() + "-" + recordMetadata.offset());
                    }
                });
                Thread.sleep(1000L);
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag instead of just logging.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            producer.close();
        }
    }
}

然后实现消费者消费消息,如下:

package org.example.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Consumer built directly on the official kafka-clients API.
 *
 * Topic / partition / group relationships:
 * - A topic may have several partitions and may be read by several consumer groups.
 * - Within one group a partition is assigned to exactly one consumer; the other
 *   consumers of that group read the remaining partitions.
 * - Different groups each keep their own offset, so one partition can be read by
 *   one consumer from every group.
 * - Point-to-point queue: one topic, one group, several consumers in that group.
 * - Publish/subscribe (broadcast): one topic, several groups, one or more consumers each.
 * - Inside a group, the consumer count should not exceed the partition count.
 */
public class OriginalConsumer implements Runnable {

    /** Broker list; multiple addresses are separated by commas. */
    private static final String BROKER_LIST = "192.168.3.4:9092,192.168.3.5:9092,192.168.3.6:9092";

    /** Commit offsets automatically in the background (record counts as consumed on commit). */
    private static final String ENABLE_AUTO_COMMIT_CONFIG = "true";

    /** Auto-commit interval: one second. */
    private static final String AUTO_COMMIT_INTERVAL_MS_CONFIG = "1000";

    /** Several consumers share this topic, so create it with multiple partitions beforehand. */
    private static final String TOPIC = "TEST_TOPIC";

    /** Consumer group A. */
    public static final String GROUP_ID_A = "TEST_GROUP_A";
    /** Consumer group B. */
    public static final String GROUP_ID_B = "TEST_GROUP_B";

    /** Consumer A. */
    public static final String CLIENT_ID_A = "TEST_CLIENT_A";
    /** Consumer B. */
    public static final String CLIENT_ID_B = "TEST_CLIENT_B";
    /** Consumer C. */
    public static final String CLIENT_ID_C = "TEST_CLIENT_C";

    private final KafkaConsumer<String, String> kafkaConsumer;

    private final String consumerName;

    public OriginalConsumer(String groupId, String clientId) {
        this.consumerName = clientId;
        Properties config = new Properties();
        config.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
        config.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, clientId);
        config.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, ENABLE_AUTO_COMMIT_CONFIG);
        config.setProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, AUTO_COMMIT_INTERVAL_MS_CONFIG);
        config.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        kafkaConsumer = new KafkaConsumer<>(config);
        kafkaConsumer.subscribe(Arrays.asList(TOPIC));
    }

    /** Polls once (blocking for at most one second) and prints every record received. */
    private void pollOnce() {
        ConsumerRecords<String, String> batch = kafkaConsumer.poll(Duration.ofSeconds(1L));
        batch.forEach(record -> System.out.println(consumerName + "-消费消息:" + record.toString()));
    }

    @Override
    public void run() {
        try {
            for (;;) {
                pollOnce();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            kafkaConsumer.close();
        }
    }
}

最后调用生产者发送消息、消费者消费消息,如下:

package org.example.kafka;

import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;

@Component
public class KafkaRunner implements ApplicationRunner {

    // NOTE(review): not used in this version of the class; it is exercised by the
    // spring-kafka example added later in the article — verify before removing.
    @Autowired
    private SpringKafkaProducer producer;

    @Override
    public void run(ApplicationArguments args) throws Exception {

        // Start the producer; it sends two messages.
        new Thread(new OriginalProducer()).start();

        // A and B both belong to group A, so each message is consumed by
        // either A or B, never both.
        // Start consumer A.
        new Thread(new OriginalConsumer(OriginalConsumer.GROUP_ID_A, OriginalConsumer.CLIENT_ID_A)).start();
        // Start consumer B.
        new Thread(new OriginalConsumer(OriginalConsumer.GROUP_ID_A, OriginalConsumer.CLIENT_ID_B)).start();

        // Start consumer C; it is alone in group B, so it receives both messages.
        new Thread(new OriginalConsumer(OriginalConsumer.GROUP_ID_B, OriginalConsumer.CLIENT_ID_C)).start();

    }

}

运行结果,生产者发送了两条消息,C消费两条,A、B各消费一条,如下:

消息发送成功:0-21
消息发送成功:1-23
TEST_CLIENT_C-消费消息:ConsumerRecord(topic = TEST_TOPIC, partition = 0, offset = 21, CreateTime = 1615098562063, serialized key size = -1, serialized value size = 19, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = This is a message 0)
TEST_CLIENT_A-消费消息:ConsumerRecord(topic = TEST_TOPIC, partition = 0, offset = 21, CreateTime = 1615098562063, serialized key size = -1, serialized value size = 19, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = This is a message 0)
TEST_CLIENT_C-消费消息:ConsumerRecord(topic = TEST_TOPIC, partition = 1, offset = 23, CreateTime = 1615098563063, serialized key size = -1, serialized value size = 19, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = This is a message 1)
TEST_CLIENT_B-消费消息:ConsumerRecord(topic = TEST_TOPIC, partition = 1, offset = 23, CreateTime = 1615098563063, serialized key size = -1, serialized value size = 19, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = This is a message 1)

下面使用spring-kafka来整合,引入maven依赖,如下:

    <dependency>
      <groupId>org.springframework.kafka</groupId>
      <artifactId>spring-kafka</artifactId>
    </dependency>

修改application.yml配置文件,增加kafka配置信息:

spring:
  application:
    name: demo
  kafka:
    bootstrap-servers: 192.168.3.4:9092,192.168.3.5:9092,192.168.3.6:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: TEST_GROUP
      # 如果为true,消费者的偏移量将在后台按固定间隔自动提交;如果为false,则需要手动提交偏移量(spring-kafka 对手动提交也提供了相应支持)
      enable-auto-commit: true
      auto-commit-interval: 1000
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
server:
  port: 8080

因为spring封装了一层,所以再实现生产者代码就简洁很多了,如下:

package org.example.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

/**
 * Producer based on spring-kafka's {@code KafkaTemplate}.
 */
@Slf4j
@Component
public class SpringKafkaProducer {

    // Fix: parameterize the template (the original used the raw type, which
    // compiled but discarded the String/String generic contract on send()).
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends a message to the given topic.
     * Note: {@code KafkaTemplate.send} is asynchronous — the log line below only
     * means the record was handed to the template, not that the broker acked it.
     *
     * @param topic   destination topic
     * @param message message payload
     */
    public void produce(String topic, String message) {
        kafkaTemplate.send(topic, message);
        log.info("生产者发送消息:{},{}", topic, message);
    }

    /**
     * Sends a pre-built record (topic, optional key/partition, value).
     *
     * @param record record to send
     */
    public void produce(ProducerRecord<String, String> record) {
        kafkaTemplate.send(record);
        log.info("生产者发送消息:{}", record.toString());
    }

}

实现消费者,如下:

package org.example.kafka;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * spring-kafka based consumers.
 *
 * Listeners A and B share group A, so each message is handled by exactly one
 * of them; listener C sits alone in group B and therefore receives every message.
 */
@Slf4j
@Component
public class SpringKafkaConsumer {

    /**
     * Several consumers read this topic, so create it with multiple partitions
     * up front (otherwise some consumers would have no partition to read).
     */
    public static final String SPRING_TEST_TOPIC = "SPRING_TEST_TOPIC";

    public static final String SPRING_TEST_GROUP_A = "SPRING_TEST_GROUP_A";
    public static final String SPRING_TEST_GROUP_A_CLIENT_A = "SPRING_TEST_GROUP_A_CLIENT_A";
    public static final String SPRING_TEST_GROUP_A_CLIENT_B = "SPRING_TEST_GROUP_A_CLIENT_B";

    public static final String SPRING_TEST_GROUP_B = "SPRING_TEST_GROUP_B";
    public static final String SPRING_TEST_GROUP_B_CLIENT_C = "SPRING_TEST_GROUP_B_CLIENT_C";

    /**
     * Consumer A — topic SPRING_TEST_TOPIC, group SPRING_TEST_GROUP_A.
     * Shares group A with consumer B, so a record goes to A or B, never both.
     */
    @KafkaListener(topics = {SPRING_TEST_TOPIC}, groupId = SPRING_TEST_GROUP_A, id = SPRING_TEST_GROUP_A_CLIENT_A)
    public void consumeA(ConsumerRecord<String, String> consumerRecord) {
        log.info("消费者 A 消费消息:{}", consumerRecord.toString());
    }

    /**
     * Consumer B — topic SPRING_TEST_TOPIC, group SPRING_TEST_GROUP_A.
     * Shares group A with consumer A, so a record goes to A or B, never both.
     */
    @KafkaListener(topics = {SPRING_TEST_TOPIC}, groupId = SPRING_TEST_GROUP_A, id = SPRING_TEST_GROUP_A_CLIENT_B)
    public void consumeB(ConsumerRecord<String, String> consumerRecord) {
        log.info("消费者 B 消费消息:{}", consumerRecord.toString());
    }

    /**
     * Consumer C — topic SPRING_TEST_TOPIC, group SPRING_TEST_GROUP_B.
     * The only member of group B, so it consumes every message sent to the topic.
     */
    @KafkaListener(topics = {SPRING_TEST_TOPIC}, groupId = SPRING_TEST_GROUP_B, id = SPRING_TEST_GROUP_B_CLIENT_C)
    public void consumeC(ConsumerRecord<String, String> consumerRecord) {
        log.info("消费者 C 消费消息:{}", consumerRecord.toString());
    }

}

修改KafkaRunner代码,启动时调用生产者发送消息,如下:

        // spring-kafka example:
        // have the injected SpringKafkaProducer send ten messages ("0" .. "9").
        for (int i = 0; i < 10; i++) {
            ProducerRecord<String, String> record = new ProducerRecord<>(SpringKafkaConsumer.SPRING_TEST_TOPIC, String.valueOf(i));
            producer.produce(record);
        }

启动项目运行,可以看到生产者发送10条消息,然后消费者A、B一同消费这10条消息,另一个消费组的消费者C单独消费这10条消息,如下:

2021-03-07 14:29:12.113  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=0, timestamp=null)
2021-03-07 14:29:12.115  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=1, timestamp=null)
2021-03-07 14:29:12.118  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=2, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=3, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=4, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=5, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=6, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=7, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=8, timestamp=null)
2021-03-07 14:29:12.119  INFO 5740 --- [           main] org.example.kafka.SpringKafkaProducer    : 生产者发送消息:ProducerRecord(topic=SPRING_TEST_TOPIC, partition=null, headers=RecordHeaders(headers = [], isReadOnly = true), key=null, value=9, timestamp=null)
2021-03-07 14:29:12.346  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 14, CreateTime = 1615098552118, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 2)
2021-03-07 14:29:12.346  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 5)
2021-03-07 14:29:12.346  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 16, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 8)
2021-03-07 14:29:12.351  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 14, CreateTime = 1615098552115, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 1)
2021-03-07 14:29:12.351  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 4)
2021-03-07 14:29:12.352  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 14, CreateTime = 1615098552118, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 2)
2021-03-07 14:29:12.352  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 16, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 7)
2021-03-07 14:29:12.352  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 5)
2021-03-07 14:29:12.352  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 0, offset = 16, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 8)
2021-03-07 14:29:12.354  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 14, CreateTime = 1615098552115, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 1)
2021-03-07 14:29:12.354  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 4)
2021-03-07 14:29:12.354  INFO 5740 --- [_CLIENT_A-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 A 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 1, offset = 16, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 7)
2021-03-07 14:29:12.410  INFO 5740 --- [_CLIENT_B-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 B 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 12, CreateTime = 1615098552102, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 0)
2021-03-07 14:29:12.410  INFO 5740 --- [_CLIENT_B-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 B 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 13, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 3)
2021-03-07 14:29:12.410  INFO 5740 --- [_CLIENT_B-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 B 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 14, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 6)
2021-03-07 14:29:12.410  INFO 5740 --- [_CLIENT_B-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 B 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 9)
2021-03-07 14:29:12.411  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 12, CreateTime = 1615098552102, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 0)
2021-03-07 14:29:12.411  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 13, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 3)
2021-03-07 14:29:12.411  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 14, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 6)
2021-03-07 14:29:12.411  INFO 5740 --- [_CLIENT_C-0-C-1] org.example.kafka.SpringKafkaConsumer    : 消费者 C 消费消息:ConsumerRecord(topic = SPRING_TEST_TOPIC, partition = 2, offset = 15, CreateTime = 1615098552119, serialized key size = -1, serialized value size = 1, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = 9)

Demo~

https://github.com/191720653/demo

 

 

Logo

为开发者提供学习成长、分享交流、生态实践、资源工具等服务,帮助开发者快速成长。

更多推荐