sdc-BE TLS support
[sdc.git] catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/SdcKafkaConsumer.java
index 04df4e1..81da06d 100644
@@ -25,14 +25,10 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
-import java.util.UUID;
-import org.apache.kafka.clients.CommonClientConfigs;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.KafkaException;
-import org.apache.kafka.common.config.SaslConfigs;
 import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
@@ -42,30 +38,17 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 public class SdcKafkaConsumer {
 
     private static final Logger log = Logger.getLogger(SdcKafkaConsumer.class.getName());
-    private static final String DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512";
-
     private final DistributionEngineConfiguration deConfiguration;
-    private KafkaConsumer<String, String> kafkaConsumer;
+    private final KafkaConsumer<String, String> kafkaConsumer;
 
     /**
      * Constructor setting up the KafkaConsumer from a predefined set of configurations
      */
     public SdcKafkaConsumer(DistributionEngineConfiguration deConfiguration){
         log.info("Create SdcKafkaConsumer via constructor");
-        Properties properties = new Properties();
+        KafkaCommonConfig kafkaCommonConfig = new KafkaCommonConfig(deConfiguration);
+        Properties properties = kafkaCommonConfig.getConsumerProperties();
         this.deConfiguration = deConfiguration;
-
-        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerId()+ "-consumer-" + UUID.randomUUID());
-        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
-        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,  "org.apache.kafka.common.serialization.StringDeserializer");
-        properties.put(ConsumerConfig.GROUP_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerGroup());
-        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, deConfiguration.getKafkaBootStrapServers());
-        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
-        properties.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
-        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
-        properties.put(SaslConfigs.SASL_MECHANISM, getKafkaSaslMechanism());
-
-        properties.put(SaslConfigs.SASL_JAAS_CONFIG, getKafkaSaslJaasConfig());
         kafkaConsumer = new KafkaConsumer<>(properties);
     }
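
The new KafkaCommonConfig class is not part of this file's diff, so the sketch below is only an assumption of what getConsumerProperties() presumably assembles, reconstructed from the properties the constructor used to set inline above; the constructor signature and the placement of the security settings are guesses.

import java.util.Properties;
import java.util.UUID;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.openecomp.sdc.be.config.DistributionEngineConfiguration;

// Hypothetical sketch of KafkaCommonConfig; the real class is introduced elsewhere in this change.
public class KafkaCommonConfig {

    private final DistributionEngineConfiguration deConfiguration;

    public KafkaCommonConfig(DistributionEngineConfiguration deConfiguration) {
        this.deConfiguration = deConfiguration;
    }

    // Rebuilds the consumer settings that SdcKafkaConsumer previously set inline.
    public Properties getConsumerProperties() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG,
            deConfiguration.getDistributionStatusTopic().getConsumerId() + "-consumer-" + UUID.randomUUID());
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerGroup());
        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, deConfiguration.getKafkaBootStrapServers());
        properties.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        // Security settings (security.protocol, SASL mechanism/JAAS, and the new TLS options)
        // would be added by shared logic; see the sketch after the removed SASL helpers below.
        return properties;
    }
}
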
 
@@ -80,28 +63,6 @@ public class SdcKafkaConsumer {
         this.kafkaConsumer = kafkaConsumer;
     }
 
-    /**
-     *
-     * @return the Sasl Jass Config
-     */
-    private String getKafkaSaslJaasConfig() {
-        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
-        if(saslJaasConfFromEnv != null) {
-            return saslJaasConfFromEnv;
-        } else {
-            throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
-        }
-    }
-    
-    private static String getKafkaSaslMechanism() throws KafkaException {
-        String saslMechanism = System.getenv("SASL_MECHANISM");
-        if(saslMechanism != null) {
-            return saslMechanism;
-        } else {
-            return DEFAULT_SASL_MECHANISM;
-        }
-    }
-
     /**
      *
      * @param topic Topic in which to subscribe
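
The helpers removed in the hunk above read the SASL mechanism and JAAS config from environment variables. The diff does not show where that logic moved, but it presumably now sits behind KafkaCommonConfig as well; the sketch below simply relocates the removed behaviour, and the class and method names are hypothetical. Given the commit subject, the real shared config presumably also allows an SSL/TLS security protocol instead of the hard-coded SASL_PLAINTEXT, which is not shown here.

import java.util.Properties;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.SaslConfigs;

// Hypothetical home for the security settings removed from SdcKafkaConsumer;
// class and method names are illustrative only.
final class KafkaSecurityConfigSketch {

    private static final String DEFAULT_SASL_MECHANISM = "SCRAM-SHA-512";

    // Mirrors the security-related properties the old constructor set inline.
    static void addSaslProperties(Properties properties) {
        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        properties.put(SaslConfigs.SASL_MECHANISM, getKafkaSaslMechanism());
        properties.put(SaslConfigs.SASL_JAAS_CONFIG, getKafkaSaslJaasConfig());
    }

    // Same behaviour as the removed helper: fail fast when the JAAS config is absent.
    private static String getKafkaSaslJaasConfig() {
        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
        if (saslJaasConfFromEnv != null) {
            return saslJaasConfFromEnv;
        }
        throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
    }

    // Same behaviour as the removed helper: environment override with a SCRAM default.
    private static String getKafkaSaslMechanism() {
        String saslMechanism = System.getenv("SASL_MECHANISM");
        return saslMechanism != null ? saslMechanism : DEFAULT_SASL_MECHANISM;
    }
}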