KafkaTemplate in a transactional KafkaListener publishes messages even when the transaction rolls back

Asked: 2019-03-06 12:27:19

Tags: apache-kafka kafka-producer-api spring-kafka

I am testing exactly-once delivery with spring-kafka. My understanding is that the consumer offsets and any messages published through the KafkaTemplate will belong to the same transaction.

However, when I then throw an exception after publishing, the messages are still published even though the consumer offsets are never committed.

Is something wrong with my setup? Do I also need @Transactional on each @KafkaListener class? Even when I remove it, I see transactions being created in the spring-kafka debug logs.

@Transactional("kafkaTransactionManager")
@KafkaListener(
    id = "\${messaging.command.consumer-group-id}",
    clientIdPrefix = "\${messaging.command.consumer-group-id}",
    topics = ["\${messaging.command.topic}"],
    concurrency = "\${messaging.command.listener-count}"
)
fun processCommand1(@Payload command: EntityCommand<JsonNode>, record: ConsumerRecord<String, Array<Byte>>) {
    testEventPublisher.publish(record.key(), "test")
    testEventPublisher.publish(randomUUID().toString(), "test")
    // Exception thrown after publishing; I expect both sends to be rolled back
    throw RuntimeException("test")
}
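For comparison, this is the behavior I expected, as a minimal sketch (processCommand2, the injected kafkaTemplate, and "test-topic" are illustrative, not part of my real setup): a plain send() on the transactional template should join the transaction the container opened for the polled record, so the exception should roll back the send along with the offsets.

// Sketch: publish directly on the transactional KafkaTemplate instead of via
// executeInTransaction. With the KafkaTransactionManager set on the container,
// this send() should join the transaction the container started for the polled
// record, so the exception should abort the send together with the offset commit.
@KafkaListener(topics = ["\${messaging.command.topic}"])
fun processCommand2(record: ConsumerRecord<String, Array<Byte>>) {
    kafkaTemplate.send("test-topic", record.key(), "test") // "test-topic" is illustrative
    throw RuntimeException("test") // expected: the send above is aborted
}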

The publisher (I added executeInTransaction() while trying to get this working):

class TransactionalTopicPublisher<TYPE>(val kafkaTemplate: KafkaTemplate<String, Any>, val topic: String) {

    fun publish(key: String, message: TYPE) {
        kafkaTemplate.executeInTransaction {
            kafkaTemplate.send(
                topic,
                key,
                message
            )
        }
    }
}
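If I read the KafkaTemplate documentation correctly, executeInTransaction always runs the callback in its own local transaction, committed when the callback returns, independently of any transaction already bound to the thread; that would explain why these sends survive the listener rollback. A sketch of the publisher without the wrapper, so that, as far as I can tell, a send can only succeed inside the surrounding transaction:

// Sketch: same publisher without executeInTransaction. The send() now enlists
// in whatever Kafka transaction is already active on this thread (for example
// the one the listener container started); with a transactional producer
// factory and no active transaction it fails instead of committing on its own.
class TransactionalTopicPublisher<TYPE>(val kafkaTemplate: KafkaTemplate<String, Any>, val topic: String) {

    fun publish(key: String, message: TYPE) {
        kafkaTemplate.send(topic, key, message)
    }
}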

Producer configuration:

    @Bean
    fun kafkaTemplate(producerFactory: ProducerFactory<String, Any>): KafkaTemplate<String, Any> {
        return KafkaTemplate(producerFactory)
    }

    @Bean(KAFKA_TRANSACTION_MANAGER)
    fun kafkaTransactionManager(kafkaProducerFactory: ProducerFactory<String, Any>): KafkaTransactionManager<String, Any> {
        val kafkaTransactionManager = KafkaTransactionManager<String, Any>(kafkaProducerFactory)
        return kafkaTransactionManager
    }

    @Bean
    fun kafkaProducerFactory(kafkaJsonSerializer: JsonSerializer<Any>): ProducerFactory<String, Any> {
        val factory = DefaultKafkaProducerFactory<String, Any>(producerConfig())
        factory.setTransactionIdPrefix(transactionIdPrefix)
        factory.setValueSerializer(kafkaJsonSerializer)
        return factory
    }

    @Bean
    fun producerConfig(): Map<String, Any> {
        return mapOf(
            ProducerConfig.BOOTSTRAP_SERVERS_CONFIG to bootstrapServers,
            ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG to StringSerializer::class.java.name,
            ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG to true,
            ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION to 1,
            ProducerConfig.ACKS_CONFIG to "all",
            ProducerConfig.BATCH_SIZE_CONFIG to 16384,
            ProducerConfig.LINGER_MS_CONFIG to 1,
            ProducerConfig.BUFFER_MEMORY_CONFIG to 33554432,
            ProducerConfig.INTERCEPTOR_CLASSES_CONFIG to ProducerInterceptor::class.java.name
        )
    }

    @Bean
    fun kafkaJsonSerializer(kafkaObjectMapper: ObjectMapper): JsonSerializer<Any> {
        val jsonSerializer = JsonSerializer<Any>(kafkaObjectMapper)
        jsonSerializer.isAddTypeInfo = false
        return jsonSerializer
    }
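As a sanity check (verifyTransactional is my own helper, not a spring-kafka API): setting transactionIdPrefix is what switches the factory into transactional mode, and that can be asserted directly.

// Sanity check: DefaultKafkaProducerFactory reports transactionCapable() == true
// once a transactionIdPrefix has been set, so the KafkaTemplate built from it
// runs every send inside a Kafka transaction.
fun verifyTransactional(factory: ProducerFactory<String, Any>) {
    check(factory.transactionCapable()) { "producer factory is not transactional" }
}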

Consumer configuration:

    @Bean
    fun kafkaListenerContainerFactory(
        kafkaTransactionManager: KafkaTransactionManager<String, Any>,
        stringJsonMessageConverter: StringJsonMessageConverter
    ): ConcurrentKafkaListenerContainerFactory<String, String> {
        val factory = ConcurrentKafkaListenerContainerFactory<String, String>()
        factory.consumerFactory = consumerFactory()
        factory.setMessageConverter(stringJsonMessageConverter)
        factory.setErrorHandler(messagingErrorHandler())
        factory.containerProperties.transactionManager = kafkaTransactionManager
        return factory
    }

    @Bean
    fun stringJsonMessageConverter(kafkaObjectMapper: ObjectMapper) =
        StringJsonMessageConverter(kafkaObjectMapper)

    @Bean
    fun messagingErrorHandler() =
        MessagingErrorHandler()

    @Bean
    fun consumerFactory(): ConsumerFactory<String, Any> {
        val consumerFactory = DefaultKafkaConsumerFactory<String, Any>(consumerConfig())
        return consumerFactory
    }

    @Bean
    fun consumerConfig(): Map<String, Any> {
        return mapOf(
            ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to bootstrapServers,

            ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,
            ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,

            ConsumerConfig.ISOLATION_LEVEL_CONFIG to IsolationLevel.READ_COMMITTED.toString().toLowerCase(Locale.ROOT),
            ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG to false,
            ConsumerConfig.AUTO_OFFSET_RESET_CONFIG to "earliest",
            ConsumerConfig.MAX_POLL_RECORDS_CONFIG to MAX_POLL_RECORD,

            ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG to ConsumerInterceptor::class.java.name,

            JsonDeserializer.USE_TYPE_INFO_HEADERS to false
        )
    }
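To see what actually lands on the topic during these tests, I use a throwaway consumer with read_uncommitted (debugConsumerConfig is a local helper, not part of the app config): unlike the read_committed consumer above, it also returns records from aborted transactions, so "written but aborted" and "actually committed" are distinguishable.

// Sketch of a debugging config: read_uncommitted (the Kafka default) returns
// records from aborted transactions as well, unlike the read_committed
// consumer above, which filters them out.
fun debugConsumerConfig(): Map<String, Any> = mapOf(
    ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to bootstrapServers,
    ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,
    ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java.name,
    ConsumerConfig.ISOLATION_LEVEL_CONFIG to "read_uncommitted",
    ConsumerConfig.AUTO_OFFSET_RESET_CONFIG to "earliest"
)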

OffsetCommittingAndDeadLetterPublishingRecoverer:

@Transactional(KAFKA_TRANSACTION_MANAGER)
class OffsetCommittingAndDeadLetterPublishingRecoverer(val template: KafkaTemplate<Any, Any>) :
    DeadLetterPublishingRecoverer(template) {

    override fun accept(record: ConsumerRecord<*, *>, exception: Exception) {
        super.accept(record, exception)

        val topicPartition = TopicPartition(record.topic(), record.partition())
        // Commit the offset past the failed record so it is not redelivered
        val offsetAndMetadata = OffsetAndMetadata(record.offset() + 1)

        template.executeInTransaction {
            template.sendOffsetsToTransaction(
                mapOf(topicPartition to offsetAndMetadata)
            )
        }
    }
}
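For context, the recoverer would be plugged into the container roughly as below; this is a hypothetical wiring using the stock SeekToCurrentErrorHandler from spring-kafka 2.2 rather than my MessagingErrorHandler.

// Hypothetical wiring, not my actual MessagingErrorHandler: hand the recoverer
// to a SeekToCurrentErrorHandler so it runs once a record has failed three
// delivery attempts.
@Bean
fun recoveringErrorHandler(template: KafkaTemplate<Any, Any>) =
    SeekToCurrentErrorHandler(OffsetCommittingAndDeadLetterPublishingRecoverer(template), 3)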

0 Answers
