Reposted from: http://www.cnblogs.com/yuanermen/p/5453339.html

I. Overview

Spring Integration Kafka builds on Apache Kafka and Spring Integration to integrate Kafka into Spring applications, which simplifies development and configuration.

II. Configuration

1. spring-kafka-consumer.xml

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
     xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka
    http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
    http://www.springframework.org/schema/integration
    http://www.springframework.org/schema/integration/spring-integration.xsd
    http://www.springframework.org/schema/beans
    http://www.springframework.org/schema/beans/spring-beans.xsd
    http://www.springframework.org/schema/task
    http://www.springframework.org/schema/task/spring-task.xsd">
 
    <!-- topic test conf -->
    <int:channel id="inputFromKafka">
        <int:dispatcher task-executor="kafkaMessageExecutor"/>
    </int:channel>

    <!-- ZooKeeper configuration; multiple servers may be listed -->
    <int-kafka:zookeeper-connect id="zookeeperConnect"
        zk-connect="192.168.1.237:2181" zk-connection-timeout="6000"
        zk-session-timeout="6000" zk-sync-time="2000"/>

    <!-- Inbound channel adapter; auto-startup="true" is required, otherwise no messages are received -->
    <int-kafka:inbound-channel-adapter id="kafkaInboundChannelAdapter"
        kafka-consumer-context-ref="consumerContext" auto-startup="true" channel="inputFromKafka">
        <int:poller fixed-delay="1" time-unit="MILLISECONDS"/>
    </int-kafka:inbound-channel-adapter>

    <task:executor id="kafkaMessageExecutor"pool-size="8"keep-alive="120"queue-capacity="500"/>

    <bean id="kafkaDecoder"
        class="org.springframework.integration.kafka.serializer.common.StringDecoder"/>
 
    <bean id="consumerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="auto.offset.reset">smallest</prop>
                <prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10M -->
                <prop key="fetch.message.max.bytes">5242880</prop>
                <prop key="auto.commit.interval.ms">1000</prop>
            </props>
        </property>
    </bean>

    <!-- Bean that receives the messages -->
    <bean id="kafkaConsumerService" class="com.sunney.service.impl.KafkaConsumerService"/>

    <!-- Method that handles the received messages -->
    <int:outbound-channel-adapter channel="inputFromKafka"
        ref="kafkaConsumerService" method="processMessage"/>
 
    <int-kafka:consumer-context id="consumerContext"
        consumer-timeout="1000"zookeeper-connect="zookeeperConnect"
        consumer-properties="consumerProperties">
        <int-kafka:consumer-configurations>
            <int-kafka:consumer-configuration
                group-id="default1"value-decoder="kafkaDecoder"key-decoder="kafkaDecoder"
                max-messages="5000">
                <!-- Two topic configurations -->
                <int-kafka:topic id="mytopic" streams="4"/>
                <int-kafka:topic id="sunneytopic" streams="4"/>
            </int-kafka:consumer-configuration>
        </int-kafka:consumer-configurations>
    </int-kafka:consumer-context>
</beans>
    

2. spring-kafka-producer.xml

<?xml version="1.0"encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchemainstance"
     xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka        http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
 http://www.springframework.org/schema/integration 
 http://www.springframework.org/schema/integration/spring-integration.xsd
 http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
 http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
     
   <!-- commons config -->
    <bean id="stringSerializer"class="org.apache.kafka.common.serialization.StringSerializer"/>

    <bean id="kafkaEncoder"
         class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder">
        <constructor-arg value="java.lang.String"/>
    </bean>

    <bean id="producerProperties"
        class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="topic.metadata.refresh.interval.ms">3600000</prop>
                <prop key="message.send.max.retries">5</prop>
                <prop key="serializer.class">kafka.serializer.StringEncoder</prop>
                <prop key="request.required.acks">1</prop>
            </props>
        </property>
    </bean>
     
    <!-- topic test config  -->
     
    <int:channel id="kafkaTopicTest">
        <int:queue />
    </int:channel>
     
    <int-kafka:outbound-channel-adapter
        id="kafkaOutboundChannelAdapterTopicTest"kafka-producer-context-ref="producerContextTopicTest"
        auto-startup="true"channel="kafkaTopicTest"order="3">
        <int:poller fixed-delay="1000"time-unit="MILLISECONDS"
            receive-timeout="1"task-executor="taskExecutor"/>
    </int-kafka:outbound-channel-adapter>

    <task:executor id="taskExecutor"pool-size="5"
        keep-alive="120"queue-capacity="500"/>
    <!-- <bean id="kafkaEncoder"
        class="org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaEncoder">
        <constructor-arg value="com.company.AvroGeneratedSpecificRecord"/>
    </bean> -->

    <int-kafka:producer-context id="producerContextTopicTest"
        producer-properties="producerProperties">
        <int-kafka:producer-configurations>
            <!-- Multiple topic configurations -->
            <int-kafka:producer-configuration
                broker-list="192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
                key-serializer="stringSerializer"
                value-class-type="java.lang.String"
                value-serializer="stringSerializer"
                topic="mytopic"/>
            <int-kafka:producer-configuration
                broker-list="192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
                key-serializer="stringSerializer"
                value-class-type="java.lang.String"
                value-serializer="stringSerializer"
                topic="sunneytopic"/>
        </int-kafka:producer-configurations>
    </int-kafka:producer-context>
</beans>

3. Message-sending interface: KafkaService

package com.sunney.service;

/**
 * KafkaService.java: interface for sending messages
 * @author Sunney 2016-04-30 11:30:53
 */
public interface KafkaService {
    /**
     * Send a message
     * @param topic the topic to send to
     * @param obj   the content to send
     */
    public void sendUserInfo(String topic, Object obj);
}

4. Message-sending implementation: KafkaServiceImpl

package com.sunney.service.impl;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;
import org.springframework.stereotype.Service;

import com.sunney.service.KafkaService;

/**
 * KafkaServiceImpl.java: implementation of the message-sending interface
 * @author Sunney 2016-04-30 11:31:13
 */
@Service("kafkaService")
public class KafkaServiceImpl implements KafkaService {

    @Autowired
    @Qualifier("kafkaTopicTest")
    MessageChannel channel;

    public void sendUserInfo(String topic, Object obj) {
        // Put the payload on the kafkaTopicTest channel; the topic header tells
        // the outbound channel adapter which Kafka topic to publish to.
        channel.send(MessageBuilder.withPayload(obj)
                                   .setHeader(KafkaHeaders.TOPIC, topic)
                                   .build());
    }

}
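
For completeness, here is how the service might be called from a Spring-managed bean. This is a minimal sketch and not part of the original post: the controller class, request mapping, and populated fields are assumptions. It serializes a single UserDto to a JSON string with fastjson, which works with the consumer below, since KafkaConsumerService wraps each received message in square brackets before calling JSON.parseArray.

package com.sunney.web;                       // hypothetical package

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import com.alibaba.fastjson.JSON;
import com.sunney.service.KafkaService;
import com.sunney.service.UserDto;

/**
 * Hypothetical caller that publishes a UserDto as JSON to the "mytopic" topic.
 */
@RestController
public class KafkaDemoController {

    @Autowired
    private KafkaService kafkaService;

    @RequestMapping("/send")
    public String send() {
        UserDto user = new UserDto();         // assumes a no-arg constructor and setters
        // ... populate the user fields here ...
        kafkaService.sendUserInfo("mytopic", JSON.toJSONString(user));
        return "sent";
    }
}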

5. Message consumer class: KafkaConsumerService

package com.sunney.service.impl;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.sunney.service.UserDto;

/**
 * KafkaConsumerService.java: message consumer class
 *
 * @author Sunney 2016-04-30 11:46:14
 */
public class KafkaConsumerService {

    static final Logger logger = LoggerFactory.getLogger(KafkaConsumerService.class);

    /**
     * Invoked by the outbound channel adapter. The incoming map is keyed by topic,
     * then by partition, with the raw message text as the value.
     */
    public void processMessage(Map<String, Map<Integer, String>> msgs) {
        logger.info("===============processMessage===============");
        for (Map.Entry<String, Map<Integer, String>> entry : msgs.entrySet()) {
            logger.info("============Topic:" + entry.getKey());
            LinkedHashMap<Integer, String> messages = (LinkedHashMap<Integer, String>) entry.getValue();
            Set<Integer> keys = messages.keySet();
            for (Integer i : keys) {
                logger.info("======Partition:" + i);
            }
            Collection<String> values = messages.values();
            for (Iterator<String> iterator = values.iterator(); iterator.hasNext();) {
                // Wrap the raw JSON object in brackets so it parses as a one-element array
                String message = "[" + iterator.next() + "]";
                logger.info("=====message:" + message);
                List<UserDto> userList = JSON.parseArray(message, UserDto.class);
                logger.info("=====userList.size:" + userList.size());
            }
        }
    }

}
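
UserDto is referenced by both the producer and the consumer but is not shown in the original post. A minimal sketch is given below; the field names are assumptions purely for illustration, and fastjson only needs a default constructor plus getters and setters.

package com.sunney.service;

import java.io.Serializable;

/**
 * Minimal sketch of the UserDto referenced above; the fields are assumptions.
 */
public class UserDto implements Serializable {

    private static final long serialVersionUID = 1L;

    private String userName;
    private Integer age;

    public String getUserName() { return userName; }
    public void setUserName(String userName) { this.userName = userName; }

    public Integer getAge() { return age; }
    public void setAge(Integer age) { this.age = age; }
}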

6. pom.xml

<dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.integration</groupId>
            <artifactId>spring-integration-kafka</artifactId>
            <version>1.3.0.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro</artifactId>
            <version>1.7.7</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.7</version>
        </dependency>
    </dependencies>
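
The post does not show how the two XML contexts are loaded. Since the pom pulls in spring-boot-starter-web, one straightforward option is a Spring Boot entry point that imports them with @ImportResource. The class name and classpath locations below are assumptions, a minimal sketch rather than the project's actual bootstrap code.

package com.sunney;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ImportResource;

/**
 * Hypothetical entry point that loads the producer and consumer XML contexts.
 */
@SpringBootApplication
@ImportResource({ "classpath:spring-kafka-producer.xml", "classpath:spring-kafka-consumer.xml" })
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}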

VI. Source code: https://github.com/sunney2010/kafka-demo

VII. Problems encountered

1. The consumer side does not receive any messages

Make sure auto-startup is set to "true" on the inbound channel adapter in spring-kafka-consumer.xml.
