2 * Copyright © 2019 IBM.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
17 package org.onap.ccsdk.cds.blueprintsprocessor.message.service
19 import kotlinx.coroutines.channels.Channel
20 import kotlinx.coroutines.delay
21 import kotlinx.coroutines.launch
22 import kotlinx.coroutines.runBlocking
23 import org.apache.kafka.clients.CommonClientConfigs
24 import org.apache.kafka.clients.consumer.Consumer
25 import org.apache.kafka.clients.consumer.ConsumerConfig
26 import org.apache.kafka.clients.consumer.KafkaConsumer
27 import org.apache.kafka.common.serialization.ByteArrayDeserializer
28 import org.apache.kafka.common.serialization.StringDeserializer
29 import org.onap.ccsdk.cds.blueprintsprocessor.message.KafkaBasicAuthMessageConsumerProperties
30 import org.onap.ccsdk.cds.controllerblueprints.core.logger
31 import java.nio.charset.Charset
32 import java.time.Duration
33 import kotlin.concurrent.thread
/**
 * Kafka message consumer service configured from [KafkaBasicAuthMessageConsumerProperties].
 * Consumed record payloads are delivered to callers as Strings through a coroutine [Channel].
 *
 * NOTE(review): despite the "BasicAuth" name, no authentication properties are applied in the
 * consumer configuration visible in this file (a security TODO remains) — confirm upstream.
 */
open class KafkaBasicAuthMessageConsumerService(
    private val messageConsumerProperties: KafkaBasicAuthMessageConsumerProperties)
    : BlueprintMessageConsumerService {

    /** Channel through which consumed message payloads are handed to subscribers. */
    val channel = Channel<String>()

    /** Kafka consumer instance; null until subscribe() creates it. */
    var kafkaConsumer: Consumer<String, ByteArray>? = null

    val log = logger(KafkaBasicAuthMessageConsumerService::class)
46 fun kafkaConsumer(additionalConfig: Map<String, Any>? = null): Consumer<String, ByteArray> {
47 val configProperties = hashMapOf<String, Any>()
48 configProperties[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = messageConsumerProperties.bootstrapServers
49 configProperties[ConsumerConfig.GROUP_ID_CONFIG] = messageConsumerProperties.groupId
50 configProperties[ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG] = messageConsumerProperties.autoCommit
52 * earliest: automatically reset the offset to the earliest offset
53 * latest: automatically reset the offset to the latest offset
55 configProperties[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = messageConsumerProperties.autoOffsetReset
56 configProperties[ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG] = StringDeserializer::class.java
57 configProperties[ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG] = ByteArrayDeserializer::class.java
58 if (messageConsumerProperties.clientId != null) {
59 configProperties[ConsumerConfig.CLIENT_ID_CONFIG] = messageConsumerProperties.clientId!!
61 /** To handle Back pressure, Get only configured record for processing */
62 if (messageConsumerProperties.pollRecords > 0) {
63 configProperties[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = messageConsumerProperties.pollRecords
65 // TODO("Security Implementation based on type")
66 /** add or override already set properties */
67 additionalConfig?.let { configProperties.putAll(it) }
68 /** Create Kafka consumer */
69 return KafkaConsumer(configProperties)
72 override suspend fun subscribe(additionalConfig: Map<String, Any>?): Channel<String> {
73 /** get to topic names */
74 val consumerTopic = messageConsumerProperties.topic?.split(",")?.map { it.trim() }
75 check(!consumerTopic.isNullOrEmpty()) { "couldn't get topic information" }
76 return subscribe(consumerTopic, additionalConfig)
    /**
     * Creates the Kafka consumer, subscribes it to [consumerTopic], and starts a dedicated
     * daemon-style thread that polls records and forwards each record value into [channel]
     * as a default-charset String.
     *
     * NOTE(review): several lines of this method are elided in this chunk (gaps in the
     * original numbering): the polling loop construct, the coroutine wrapper needed to call
     * the suspending channel.send from a plain thread, the else-branch brace before the
     * "Channel is closed" log, closing braces, and the `return channel` statement — confirm
     * against the full file before changing anything here.
     */
    override suspend fun subscribe(consumerTopic: List<String>, additionalConfig: Map<String, Any>?): Channel<String> {
        /** Create Kafka consumer */
        kafkaConsumer = kafkaConsumer(additionalConfig)
        checkNotNull(kafkaConsumer) {
            "failed to create kafka consumer for " +
                "server(${messageConsumerProperties.bootstrapServers})'s " +
                // NOTE(review): copy-paste bug — this interpolates bootstrapServers a second
                // time; the message should show the topic(s) instead.
                "topics(${messageConsumerProperties.bootstrapServers})"
        }
        kafkaConsumer!!.subscribe(consumerTopic)
        log.info("Successfully consumed topic($consumerTopic)")
        // Polling happens on a dedicated thread so the suspend caller returns the channel
        // immediately and consumes asynchronously.
        thread(start = true, name = "KafkaConsumer") {
            // use {} closes the consumer when the polling block exits.
            kafkaConsumer!!.use { kc ->
                val consumerRecords = kc.poll(Duration.ofMillis(messageConsumerProperties.pollMillSec))
                log.info("Consumed Records : ${consumerRecords.count()}")
                consumerRecords?.forEach { consumerRecord ->
                    /** execute the command block */
                    // Records with a null value are skipped.
                    consumerRecord.value()?.let {
                        // Drop the message (with an error log below) once the channel is
                        // closed for send, e.g. after shutDown().
                        if (!channel.isClosedForSend) {
                            channel.send(String(it, Charset.defaultCharset()))
                            log.error("Channel is closed to receive message")
        log.info("message listener shutting down.....")
    /**
     * Gracefully stops the consumer: the comments below describe stopping the poll loop and
     * closing the channel, then this method waits one full poll interval so an in-flight
     * poll() can complete before the consumer is torn down.
     */
    override suspend fun shutDown() {
        /** stop the polling loop */
        // NOTE(review): the statement that clears the polling flag is not visible in this
        // chunk (line elided) — confirm against the full file.
        /** Close the Channel */
        // NOTE(review): the channel.close() call is likewise not visible here — confirm.
        /** TO shutdown gracefully, need to wait for the maximum poll time */
        delay(messageConsumerProperties.pollMillSec)