
@KafkaListener and KafkaTemplate Auto-Configuration Analysis

For the required dependencies and configuration, see the companion post on configuring and using @KafkaListener. This post walks through the Spring Boot source code to explain how @KafkaListener and KafkaTemplate are auto-configured.

1. KafkaAutoConfiguration source analysis

KafkaAutoConfiguration creates the producer-side KafkaTemplate bean and the consumer-side ConsumerFactory bean. It also imports KafkaAnnotationDrivenConfiguration, which in turn creates the ConcurrentKafkaListenerContainerFactory bean. That factory is the one @KafkaListener uses by default, so it is what determines which Kafka cluster a listener consumes from.
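
Since both beans are guarded by @ConditionalOnMissingBean, a typical application only sets spring.kafka.* properties and injects the auto-configured beans directly. A minimal usage sketch (the class, topic, and group names below are made up for illustration):

import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

@Component
public class DemoMessaging {

    private final KafkaTemplate<Object, Object> kafkaTemplate;

    public DemoMessaging(KafkaTemplate<Object, Object> kafkaTemplate) {
        // Injected from KafkaAutoConfiguration#kafkaTemplate
        this.kafkaTemplate = kafkaTemplate;
    }

    public void send(String payload) {
        // The underlying ProducerFactory is built from spring.kafka.producer.* properties
        kafkaTemplate.send("demo-topic", payload);
    }

    // containerFactory defaults to the bean named "kafkaListenerContainerFactory"
    // created by KafkaAnnotationDrivenConfiguration (see section 2)
    @KafkaListener(topics = "demo-topic", groupId = "demo-group")
    public void onMessage(String payload) {
        System.out.println("received: " + payload);
    }
}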

package org.springframework.boot.autoconfigure.kafka;

import java.io.IOException;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.security.jaas.KafkaJaasLoginModuleInitializer;
import org.springframework.kafka.support.LoggingProducerListener;
import org.springframework.kafka.support.ProducerListener;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.transaction.KafkaTransactionManager;

@Configuration(
    proxyBeanMethods = false
)
// Load this configuration class only when KafkaTemplate is on the classpath
@ConditionalOnClass({KafkaTemplate.class})  
@EnableConfigurationProperties({KafkaProperties.class})
// Import the KafkaAnnotationDrivenConfiguration and KafkaStreamsAnnotationDrivenConfiguration configuration classes
@Import({KafkaAnnotationDrivenConfiguration.class, KafkaStreamsAnnotationDrivenConfiguration.class})  
public class KafkaAutoConfiguration {

    private final KafkaProperties properties;

    public KafkaAutoConfiguration(KafkaProperties properties) {
        this.properties = properties;
    }

    @Bean  
    // Create this KafkaTemplate bean only when no KafkaTemplate bean is already defined
    @ConditionalOnMissingBean({KafkaTemplate.class})
    public KafkaTemplate<?, ?> kafkaTemplate(ProducerFactory<Object, Object> kafkaProducerFactory, ProducerListener<Object, Object> kafkaProducerListener, ObjectProvider<RecordMessageConverter> messageConverter) {
        KafkaTemplate<Object, Object> kafkaTemplate = new KafkaTemplate(kafkaProducerFactory);
        messageConverter.ifUnique(kafkaTemplate::setMessageConverter);
        kafkaTemplate.setProducerListener(kafkaProducerListener);
        kafkaTemplate.setDefaultTopic(this.properties.getTemplate().getDefaultTopic());
        return kafkaTemplate;
    }

    @Bean
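    // Create the ProducerFactory that backs KafkaTemplate, built from spring.kafka.producer.* properties, only when no ProducerFactory bean is defined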
    @ConditionalOnMissingBean({ProducerFactory.class})
    public ProducerFactory<?, ?> kafkaProducerFactory(ObjectProvider<DefaultKafkaProducerFactoryCustomizer> customizers) {
        DefaultKafkaProducerFactory<?, ?> factory = new DefaultKafkaProducerFactory(this.properties.buildProducerProperties());
        String transactionIdPrefix = this.properties.getProducer().getTransactionIdPrefix();
        if (transactionIdPrefix != null) {
            factory.setTransactionIdPrefix(transactionIdPrefix);
        }

        customizers.orderedStream().forEach((customizer) -> {
            customizer.customize(factory);
        });
        return factory;
    }

    @Bean
    @ConditionalOnMissingBean({ProducerListener.class})
    public ProducerListener<Object, Object> kafkaProducerListener() {
        return new LoggingProducerListener();
    }

    @Bean
    // Create this ConsumerFactory bean only when no ConsumerFactory bean is already defined
    @ConditionalOnMissingBean({ConsumerFactory.class})
    public ConsumerFactory<?, ?> kafkaConsumerFactory(ObjectProvider<DefaultKafkaConsumerFactoryCustomizer> customizers) {
        DefaultKafkaConsumerFactory<Object, Object> factory = new DefaultKafkaConsumerFactory(this.properties.buildConsumerProperties());
        customizers.orderedStream().forEach((customizer) -> {
            customizer.customize(factory);
        });
        return factory;
    }

    @Bean
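    // Create a KafkaTransactionManager only when spring.kafka.producer.transaction-id-prefix is configured and no other bean is defined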
    @ConditionalOnProperty(
        name = {"spring.kafka.producer.transaction-id-prefix"}
    )
    @ConditionalOnMissingBean
    public KafkaTransactionManager<?, ?> kafkaTransactionManager(ProducerFactory<?, ?> producerFactory) {
        return new KafkaTransactionManager(producerFactory);
    }

    @Bean
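    // Initialize the Kafka JAAS login module only when the spring.kafka.jaas.enabled property is set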
    @ConditionalOnProperty(
        name = {"spring.kafka.jaas.enabled"}
    )
    @ConditionalOnMissingBean
    public KafkaJaasLoginModuleInitializer kafkaJaasInitializer() throws IOException {
        KafkaJaasLoginModuleInitializer jaas = new KafkaJaasLoginModuleInitializer();
        KafkaProperties.Jaas jaasProperties = this.properties.getJaas();
        if (jaasProperties.getControlFlag() != null) {
            jaas.setControlFlag(jaasProperties.getControlFlag());
        }

        if (jaasProperties.getLoginModule() != null) {
            jaas.setLoginModule(jaasProperties.getLoginModule());
        }

        jaas.setOptions(jaasProperties.getOptions());
        return jaas;
    }

    @Bean
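    // Create a KafkaAdmin bean from the spring.kafka.admin.* properties only when none is defined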
    @ConditionalOnMissingBean
    public KafkaAdmin kafkaAdmin() {
        KafkaAdmin kafkaAdmin = new KafkaAdmin(this.properties.buildAdminProperties());
        kafkaAdmin.setFatalIfBrokerNotAvailable(this.properties.getAdmin().isFailFast());
        return kafkaAdmin;
    }
}
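
Because every bean above is conditional, the auto-configuration backs off as soon as the application defines its own bean of the same type. A hedged sketch of overriding the default ConsumerFactory, for example to point the consumers at a different cluster (the class name and bootstrap address are invented for illustration):

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
public class CustomConsumerFactoryConfig {

    // With this bean present, KafkaAutoConfiguration#kafkaConsumerFactory is skipped
    // because of @ConditionalOnMissingBean({ConsumerFactory.class})
    @Bean
    public ConsumerFactory<Object, Object> kafkaConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "other-cluster:9092"); // hypothetical address
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "demo-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }
}

Note that a hand-built factory like this no longer picks up spring.kafka.consumer.* properties, so the property-based route is usually preferable when only one cluster is involved.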

2. KafkaAnnotationDrivenConfiguration source analysis

KafkaAnnotationDrivenConfiguration collects the optional customization beans (message converters, error handlers, transaction manager, rebalance listener, and so on) through ObjectProvider and uses them, together with the spring.kafka.listener.* properties, to build the default kafkaListenerContainerFactory.

package org.springframework.boot.autoconfigure.kafka;

import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties.Listener.Type;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerConfigUtils;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.AfterRollbackProcessor;
import org.springframework.kafka.listener.BatchErrorHandler;
import org.springframework.kafka.listener.ConsumerAwareRebalanceListener;
import org.springframework.kafka.listener.ErrorHandler;
import org.springframework.kafka.listener.RecordInterceptor;
import org.springframework.kafka.support.converter.BatchMessageConverter;
import org.springframework.kafka.support.converter.BatchMessagingMessageConverter;
import org.springframework.kafka.support.converter.MessageConverter;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.transaction.KafkaAwareTransactionManager;

/**
 * Configuration for Kafka annotation-driven support.
 *
 * @author Gary Russell
 * @author Eddú Meléndez
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(EnableKafka.class)
class KafkaAnnotationDrivenConfiguration {

    private final KafkaProperties properties;

    private final RecordMessageConverter messageConverter;

    private final BatchMessageConverter batchMessageConverter;

    private final KafkaTemplate<Object, Object> kafkaTemplate;

    private final KafkaAwareTransactionManager<Object, Object> transactionManager;

    private final ConsumerAwareRebalanceListener rebalanceListener;

    private final ErrorHandler errorHandler;

    private final BatchErrorHandler batchErrorHandler;

    private final AfterRollbackProcessor<Object, Object> afterRollbackProcessor;

    private final RecordInterceptor<Object, Object> recordInterceptor;

    KafkaAnnotationDrivenConfiguration(KafkaProperties properties,
          ObjectProvider<RecordMessageConverter> messageConverter,
          ObjectProvider<BatchMessageConverter> batchMessageConverter,
          ObjectProvider<KafkaTemplate<Object, Object>> kafkaTemplate,
          ObjectProvider<KafkaAwareTransactionManager<Object, Object>> kafkaTransactionManager,
          ObjectProvider<ConsumerAwareRebalanceListener> rebalanceListener, ObjectProvider<ErrorHandler> errorHandler,
          ObjectProvider<BatchErrorHandler> batchErrorHandler,
          ObjectProvider<AfterRollbackProcessor<Object, Object>> afterRollbackProcessor,
          ObjectProvider<RecordInterceptor<Object, Object>> recordInterceptor) {
       this.properties = properties;
       this.messageConverter = messageConverter.getIfUnique();
       this.batchMessageConverter = batchMessageConverter
             .getIfUnique(() -> new BatchMessagingMessageConverter(this.messageConverter));
       this.kafkaTemplate = kafkaTemplate.getIfUnique();
       this.transactionManager = kafkaTransactionManager.getIfUnique();
       this.rebalanceListener = rebalanceListener.getIfUnique();
       this.errorHandler = errorHandler.getIfUnique();
       this.batchErrorHandler = batchErrorHandler.getIfUnique();
       this.afterRollbackProcessor = afterRollbackProcessor.getIfUnique();
       this.recordInterceptor = recordInterceptor.getIfUnique();
    }

    @Bean
    @ConditionalOnMissingBean
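    // Applies the KafkaProperties and the optional beans collected in the constructor to any ConcurrentKafkaListenerContainerFactory handed to it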
    ConcurrentKafkaListenerContainerFactoryConfigurer kafkaListenerContainerFactoryConfigurer() {
       ConcurrentKafkaListenerContainerFactoryConfigurer configurer = new ConcurrentKafkaListenerContainerFactoryConfigurer();
       configurer.setKafkaProperties(this.properties);
       MessageConverter messageConverterToUse = (this.properties.getListener().getType().equals(Type.BATCH))
             ? this.batchMessageConverter : this.messageConverter;
       configurer.setMessageConverter(messageConverterToUse);
       configurer.setReplyTemplate(this.kafkaTemplate);
       configurer.setTransactionManager(this.transactionManager);
       configurer.setRebalanceListener(this.rebalanceListener);
       configurer.setErrorHandler(this.errorHandler);
       configurer.setBatchErrorHandler(this.batchErrorHandler);
       configurer.setAfterRollbackProcessor(this.afterRollbackProcessor);
       configurer.setRecordInterceptor(this.recordInterceptor);
       return configurer;
    }

    @Bean
    @ConditionalOnMissingBean(name = "kafkaListenerContainerFactory")  
    // Create the default container factory used by @KafkaListener, but only when no bean named "kafkaListenerContainerFactory" is already defined
    ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory(
          ConcurrentKafkaListenerContainerFactoryConfigurer configurer,
          ConsumerFactory<Object, Object> kafkaConsumerFactory) {
       ConcurrentKafkaListenerContainerFactory<Object, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
       configurer.configure(factory, kafkaConsumerFactory);
       return factory;
    }

    @Configuration(proxyBeanMethods = false)
    @EnableKafka
    @ConditionalOnMissingBean(name = KafkaListenerConfigUtils.KAFKA_LISTENER_ANNOTATION_PROCESSOR_BEAN_NAME)
    static class EnableKafkaConfiguration {

    }

}
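
Because the default container factory is resolved by the bean name kafkaListenerContainerFactory, a @KafkaListener without a containerFactory attribute uses it automatically, while a listener that must consume from another cluster can point at a different factory bean. A sketch under that assumption; the topic and group names and the secondKafkaListenerContainerFactory bean are invented for illustration:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class MultiClusterListener {

    // No containerFactory attribute: the bean named "kafkaListenerContainerFactory"
    // created by KafkaAnnotationDrivenConfiguration is used
    @KafkaListener(topics = "demo-topic", groupId = "demo-group")
    public void onDefaultCluster(ConsumerRecord<Object, Object> record) {
        System.out.println("default cluster: " + record.value());
    }

    // Explicit containerFactory: "secondKafkaListenerContainerFactory" is a hypothetical
    // bean built on a ConsumerFactory that points at another cluster
    @KafkaListener(topics = "demo-topic", groupId = "demo-group-2",
            containerFactory = "secondKafkaListenerContainerFactory")
    public void onSecondCluster(ConsumerRecord<Object, Object> record) {
        System.out.println("second cluster: " + record.value());
    }
}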
