import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
@Configuration
@EnableKafka
@RequiredArgsConstructor
-public class KafkaTemplateConfig<T> {
+public class KafkaConfig<T> {
private final KafkaProperties kafkaProperties;
return new DefaultKafkaConsumerFactory<>(consumerConfigProperties);
}
+ /**
+ * A legacy Kafka event template for executing high-level operations. The legacy producer factory ensures this.
+ *
+ * @return an instance of legacy Kafka template.
+ */
+ @Bean
+ @Primary
+ public KafkaTemplate<String, T> legacyEventKafkaTemplate() {
+ final KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(legacyEventProducerFactory());
+ kafkaTemplate.setConsumerFactory(legacyEventConsumerFactory());
+ return kafkaTemplate;
+ }
+
+ /**
+ * A legacy concurrent Kafka listener container factory.
+ *
+ * @return an instance of a concurrent Kafka listener container factory
+ */
+ @Bean
+ public ConcurrentKafkaListenerContainerFactory<String, T> legacyEventConcurrentKafkaListenerContainerFactory() {
+ final ConcurrentKafkaListenerContainerFactory<String, T> containerFactory =
+ new ConcurrentKafkaListenerContainerFactory<>();
+ containerFactory.setConsumerFactory(legacyEventConsumerFactory());
+ return containerFactory;
+ }
+
/**
* This sets the strategy for creating cloud Kafka producer instance from kafka properties defined into
* application.yml with CloudEventSerializer.
return new DefaultKafkaConsumerFactory<>(consumerConfigProperties);
}
- /**
- * A legacy Kafka event template for executing high-level operations. The legacy producer factory ensure this.
- *
- * @return an instance of legacy Kafka template.
- */
- @Bean
- @Primary
- public KafkaTemplate<String, T> legacyEventKafkaTemplate() {
- final KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(legacyEventProducerFactory());
- kafkaTemplate.setConsumerFactory(legacyEventConsumerFactory());
- return kafkaTemplate;
- }
/**
* A cloud Kafka event template for executing high-level operations. The cloud producer factory ensure this.
return kafkaTemplate;
}
+ /**
+ * A concurrent CloudEvent Kafka listener container factory.
+ *
+ * @return an instance of a concurrent Kafka listener container factory
+ */
+ @Bean
+ public ConcurrentKafkaListenerContainerFactory<String, CloudEvent>
+ cloudEventConcurrentKafkaListenerContainerFactory() {
+ final ConcurrentKafkaListenerContainerFactory<String, CloudEvent> containerFactory =
+ new ConcurrentKafkaListenerContainerFactory<>();
+ containerFactory.setConsumerFactory(cloudEventConsumerFactory());
+ return containerFactory;
+ }
+
}
KafkaProducer<String, CloudEvent> producer = new KafkaProducer<>(eventProducerConfigProperties(CloudEventSerializer))
producer.send(record)
and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(100)
+ TimeUnit.MILLISECONDS.sleep(300)
then: 'the event has only been forwarded for the correct type'
expectedNUmberOfCallsToPublishForwardedEvent * mockEventsPublisher.publishCloudEvent(*_)
where: 'the following event types are used'
KafkaProducer<String, String> producer = new KafkaProducer<>(eventProducerConfigProperties(StringSerializer))
producer.send(record)
and: 'wait a little for async processing of message'
- TimeUnit.MILLISECONDS.sleep(100)
+ TimeUnit.MILLISECONDS.sleep(300)
then: 'the event is not processed by this consumer'
0 * mockEventsPublisher.publishCloudEvent(*_)
}