Spring Integration Kafka
转自:http://www.cnblogs.com/yuanermen/p/5453339.html
一、概述
Spring Integration Kafka 是基于 Apache Kafka 和 Spring Integration 的集成模块,对 Kafka 的开发配置提供了方便。
二、配置
1、spring-kafka-consumer.xml
?xml version= "1.0" encoding= "UTF-8" ?>
<beans xmlns= "http://www.springframework.org/schema/beans"
xmlns:xsi= "http://www.w3.org/2001/XMLSchema-instance"
xmlns: int = "http://www.springframework.org/schema/integration"
xmlns: int -kafka= "http://www.springframework.org/schema/integration/kafka"
xmlns:task= "http://www.springframework.org/schema/task"
xsi:schemaLocation="http: //www.springframework.org/schema/integration/kafka
http: //www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
http: //www.springframework.org/schema/integration
http: //www.springframework.org/schema/integration/spring-integration.xsd
http: //www.springframework.org/schema/beans
http: //www.springframework.org/schema/beans/spring-beans.xsd
http: //www.springframework.org/schema/task
http: //www.springframework.org/schema/task/spring-task.xsd">
<!-- topic test conf -->
< int :channel id= "inputFromKafka" >
< int :dispatcher task-executor= "kafkaMessageExecutor" />
</ int :channel>
<!-- zookeeper配置 可以配置多个 -->
< int -kafka:zookeeper-connect id= "zookeeperConnect"
zk-connect= "192.168.1.237:2181" zk-connection-timeout= "6000"
zk-session-timeout= "6000" zk-sync-time= "2000" />
<!-- channel配置 auto-startup= "true" 否则接收不发数据 -->
< int -kafka:inbound-channel-adapter id="kafkaInboundChannelAdapter"
kafka-consumer-context-ref= "consumerContext" auto-startup= "true" channel= "inputFromKafka" >
< int :poller fixed-delay= "1" time-unit= "MILLISECONDS" />
</ int -kafka:inbound-channel-adapter>
<task:executor id= "kafkaMessageExecutor" pool-size= "8" keep-alive= "120" queue-capacity= "500" />
<bean id= "kafkaDecoder"
class = "org.springframework.integration.kafka.serializer.common.StringDecoder" />
<bean id= "consumerProperties"
class = "org.springframework.beans.factory.config.PropertiesFactoryBean" >
<property name= "properties" >
<props>
<prop key= "auto.offset.reset" >smallest</prop>
<prop key= "socket.receive.buffer.bytes" > 10485760 </prop> <!-- 10M -->
<prop key= "fetch.message.max.bytes" > 5242880 </prop>
<prop key= "auto.commit.interval.ms" > 1000 </prop>
</props>
</property>
</bean>
<!-- 消息接收的BEEN -->
<bean id= "kafkaConsumerService" class = "com.sunney.service.impl.KafkaConsumerService" />
<!-- 指定接收的方法 -->
< int :outbound-channel-adapter channel= "inputFromKafka"
ref= "kafkaConsumerService" method= "processMessage" />
< int -kafka:consumer-context id= "consumerContext"
consumer-timeout= "1000" zookeeper-connect= "zookeeperConnect"
consumer-properties= "consumerProperties" >
< int -kafka:consumer-configurations>
< int -kafka:consumer-configuration
group-id= "default1" value-decoder= "kafkaDecoder" key-decoder= "kafkaDecoder"
max-messages= "5000" >
<!-- 两个TOPIC配置 -->
< int -kafka:topic id= "mytopic" streams= "4" />
< int -kafka:topic id= "sunneytopic" streams= "4" />
</ int -kafka:consumer-configuration>
</ int -kafka:consumer-configurations>
</ int -kafka:consumer-context>
</beans>
|
2、spring-kafka-producer.xml
<?xml version= "1.0" encoding= "UTF-8" ?>
<beans xmlns= "http://www.springframework.org/schema/beans"
xmlns:xsi= "http://www.w3.org/2001/XMLSchemainstance"
xmlns: int = "http://www.springframework.org/schema/integration"
xmlns: int -kafka= "http://www.springframework.org/schema/integration/kafka"
xmlns:task= "http://www.springframework.org/schema/task"
xsi:schemaLocation="http: //www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
http: //www.springframework.org/schema/integration
http://www.springframework.org/schema/integration/spring-integration.xsd
http: //www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http: //www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">
<!-- commons config -->
<bean id= "stringSerializer" class = "org.apache.kafka.common.serialization.StringSerializer" />
<bean id= "kafkaEncoder"
class = "org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder" >
<constructor-arg value= "java.lang.String" />
</bean>
<bean id= "producerProperties"
class = "org.springframework.beans.factory.config.PropertiesFactoryBean" >
<property name= "properties" >
<props>
<prop key= "topic.metadata.refresh.interval.ms" > 3600000 </prop>
<prop key= "message.send.max.retries" > 5 </prop>
<prop key= "serializer.class" >kafka.serializer.StringEncoder</prop>
<prop key= "request.required.acks" > 1 </prop>
</props>
</property>
</bean>
<!-- topic test config -->
< int :channel id= "kafkaTopicTest" >
< int :queue />
</ int :channel>
< int -kafka:outbound-channel-adapter
id= "kafkaOutboundChannelAdapterTopicTest" kafka-producer-context-ref= "producerContextTopicTest"
auto-startup= "true" channel= "kafkaTopicTest" order= "3" >
< int :poller fixed-delay= "1000" time-unit= "MILLISECONDS"
receive-timeout= "1" task-executor= "taskExecutor" />
</ int -kafka:outbound-channel-adapter>
<task:executor id= "taskExecutor" pool-size= "5"
keep-alive= "120" queue-capacity= "500" />
<!-- <bean id= "kafkaEncoder"
class = "org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaEncoder" >
<constructor-arg value= "com.company.AvroGeneratedSpecificRecord" />
</bean> -->
< int -kafka:producer-context id= "producerContextTopicTest"
producer-properties= "producerProperties" >
< int -kafka:producer-configurations>
<!-- 多个topic配置 -->
< int -kafka:producer-configuration
broker-list= "192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
key-serializer= "stringSerializer"
value- class -type= "java.lang.String"
value-serializer= "stringSerializer"
topic= "mytopic" />
< int -kafka:producer-configuration
broker-list= "192.168.1.237:9090,192.168.1.237:9091,192.168.1.237:9092"
key-serializer= "stringSerializer"
value- class -type= "java.lang.String"
value-serializer= "stringSerializer"
topic= "sunneytopic" />
</ int -kafka:producer-configurations>
</ int -kafka:producer-context>
</beans>
|
3、发消息接口 KafkaService
package com.sunney.service;
/**
 * Contract for publishing messages to Kafka.
 * @author Sunney 2016-04-30 11:30:53
 */
public interface KafkaService {
/**
 * Publish a message to Kafka.
 * @param topic target Kafka topic name
 * @param obj payload to send
 */
public void sendUserInfo(String topic, Object obj);
}
|
4、发消息实现类 KafkaServiceImpl
package com.sunney.service.impl;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;
import org.springframework.stereotype.Service;

import com.sunney.service.KafkaService;

/**
 * Publishes messages to Kafka by sending them onto the "kafkaTopicTest"
 * channel, from which the outbound channel adapter delivers them.
 * @author Sunney 2016-04-30 11:31:13
 */
@Service("kafkaService")
public class KafkaServiceImpl implements KafkaService {

    @Autowired
    @Qualifier("kafkaTopicTest")
    MessageChannel channel;

    public void sendUserInfo(String topic, Object obj) {
        // The KafkaHeaders.TOPIC header tells the adapter which topic to route to.
        channel.send(MessageBuilder.withPayload(obj)
                                   .setHeader(KafkaHeaders.TOPIC, topic)
                                   .build());
    }
}
|
5、消费接收类KafkaConsumerService
package com.sunney.service.impl;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.fastjson.JSON;
import com.sunney.service.UserDto;

/**
 * Message handler invoked by the outbound-channel-adapter configured in
 * spring-kafka-consumer.xml (method "processMessage").
 * @author Sunney 2016-04-30 11:46:14
 */
public class KafkaConsumerService {

    static final Logger logger = LoggerFactory.getLogger(KafkaConsumerService.class);

    /**
     * Handles a batch of consumed records.
     * @param msgs map of topic name to (partition id -> decoded payload);
     *             each payload is presumably a JSON object or array of
     *             UserDto — TODO confirm against the producer
     */
    public void processMessage(Map<String, Map<Integer, String>> msgs) {
        logger.info("===============processMessage===============");
        for (Map.Entry<String, Map<Integer, String>> entry : msgs.entrySet()) {
            logger.info("============Topic:" + entry.getKey());
            // Fix: do not downcast to LinkedHashMap — any Map implementation
            // supplied by the adapter works; the cast could throw ClassCastException.
            Map<Integer, String> messages = entry.getValue();
            for (Integer partition : messages.keySet()) {
                logger.info("======Partition:" + partition);
            }
            for (String payload : messages.values()) {
                // Wrap the payload in brackets so single objects parse as a one-element array.
                String message = "[" + payload + "]";
                logger.info("=====message:" + message);
                List<UserDto> userList = JSON.parseArray(message, UserDto.class);
                logger.info("=====userList.size:" + userList.size());
            }
        }
    }
}
|
6、pom
<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.integration</groupId>
        <artifactId>spring-integration-kafka</artifactId>
        <version>1.3.0.RELEASE</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.11</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.avro</groupId>
        <artifactId>avro</artifactId>
        <version>1.7.7</version>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
        <version>1.2.7</version>
    </dependency>
</dependencies>
|
六、源代码地址:https://github.com/sunney2010/kafka-demo
七、遇到的问题
1、消费端收不到消息
将 spring-kafka-consumer.xml 中 inbound-channel-adapter 的 auto-startup 设置为 true,否则适配器不会启动,消费端收不到消息。
Spring Integration Kafka相关推荐
- Java实时处理 - Spring Integration - MQ Message
目录 Introduction Spring Integration Components Message Channel EndPoint Channel Adapter Messaging Gat ...
- spring集成kafka,以及常见错误解决
spring集成kafka,以及常见错误解决 一.配置kafka 1.引入jar包 <!--Kafka和spring集成的支持类库,spring和kafka通信监听--><!-- h ...
- #翻译NO.3# --- Spring Integration Framework
为什么80%的码农都做不了架构师?>>> 2.4 Message Endpoints A Message Endpoint represents the "filte ...
- Spring Integration学习资料
Spring Integration学习资料 1.1 背景 Spring框架的一个重要主题是控制反转.从广义上来说,Spring处理其上下文中管理的组件的职责.只要组件减轻了职责,它们同时也被 ...
- Spring Integration 4.3.10 发布,Spring 消息通信
Spring Integration 4.3.10 发布了.Spring Integration 能在基于 Spring 的应用中进行简单的消息通信,并通过简单的适配器与外部系统集成.这些适配器提供了 ...
- #翻译NO.5# --- Spring Integration Framework
为什么80%的码农都做不了架构师?>>> 本人觉得这一章很重要,那就是 Spring默认的channel 的实现 DirectChannel,这个要大家多按照原文理解,开发者为 ...
- ESB学习笔记(Spring Integration实战)
http://wangleifire.iteye.com/blog/351749 介绍 Spring Integration是Spring公司的一套ESB框架. 前面ESB介绍中我也做了一定了解.我们 ...
- #翻译NO.4# --- Spring Integration Framework
为什么80%的码农都做不了架构师?>>> Part III. Core Messaging This section covers all aspects of the cor ...
- java中channelmessage,MessageStore支持的QueueChannel与Spring Integration Java Config
Spring Integration reference guide指的是使用MessageStore实现为QueueChannel提供持久性. 提到了很多次,但是所有示例都使用XML配置,即 但是Q ...
最新文章
- layer弹出层闪退_layer弹出层详解
- SaltStack 修复 Stack minion中的提权漏洞 (CVE-2020-28243)
- nhibernate GetType
- learning java AWT 布局管理器CardLayout
- Java学习笔记-Java概述和环境配置
- 《图解算法》第10章之 k最近邻算法
- 2017年微商方向选择
- vue 窗口变小文字_页面字体随窗口变化大小
- java本地文件上传
- 知识图谱属性融合_知识图谱融合_本体概念层的融合方法与技术
- php a5打印设置,word设置打印A5文档的两种方法
- 2017东北四省赛G题
- asp网站如何设置默认页_IIS 7.5 在 Windows Server(R) 2008 R2
- 微软控制云服务器软件,服务器远程控制用什么软件
- vue做移动端适配最佳解决方案,亲测有效
- 一体化伺服电机一圈多少脉冲
- Fortran基础编程(3)——格式化输出
- 中国四大发明原理//2021-2-6
- dnf服务器炸团门票怎么找回,DNF最新BUG,玩家打团买票后材料消失,无法引炸直接退队!...
- ProxmoxVE7.0+Ceph15.2集群搭建