Revert "[SDC-BE] Add kafka ssl config" 51/134751/3
author: MichaelMorris <michael.morris@est.tech>
Fri, 2 Jun 2023 12:46:04 +0000 (12:46 +0000)
committer: Vasyl Razinkov <vasyl.razinkov@est.tech>
Fri, 2 Jun 2023 16:20:24 +0000 (16:20 +0000)
This reverts commit 3405456c46937352863ce19c39266a51dd7760db.

Reason for revert: deployment issues with TLS

Change-Id: I58aa51f7d563cf74d3747a5ff59104906b294d18
Signed-off-by: MichaelMorris <michael.morris@est.tech>
Issue-ID: SDC-4476

catalog-be/pom.xml
catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/KafkaCommonConfig.java [deleted file]
catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/KafkaHandler.java
catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/SdcKafkaConsumer.java
catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/SdcKafkaProducer.java
catalog-be/src/test/java/org/openecomp/sdc/be/components/kafka/KafkaHandlerTest.java
catalog-be/src/test/java/org/openecomp/sdc/be/components/kafka/SdcKafkaConsumerTest.java
catalog-be/src/test/java/org/openecomp/sdc/be/components/kafka/SdcKafkaProducerTest.java
pom.xml

index 051eb2e..d503b0b 100644 (file)
             <version>${mockitoJupiter.version}</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.junit-pioneer</groupId>
-            <artifactId>junit-pioneer</artifactId>
-            <version>2.0.1</version>
-            <scope>test</scope>
-        </dependency>
+
         <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-test</artifactId>
diff --git a/catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/KafkaCommonConfig.java b/catalog-be/src/main/java/org/openecomp/sdc/be/components/kafka/KafkaCommonConfig.java
deleted file mode 100644 (file)
index bf65c52..0000000
+++ /dev/null
@@ -1,95 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2023 Nordix Foundation. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-package org.openecomp.sdc.be.components.kafka;
-
-import java.util.Properties;
-import java.util.UUID;
-import org.apache.kafka.clients.CommonClientConfigs;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.apache.kafka.common.KafkaException;
-import org.apache.kafka.common.config.SaslConfigs;
-import org.apache.kafka.common.config.SslConfigs;
-import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class KafkaCommonConfig {
-
-    private static final Logger log = LoggerFactory.getLogger(KafkaCommonConfig.class.getName());
-
-    private final DistributionEngineConfiguration deConfiguration;
-
-    public KafkaCommonConfig(DistributionEngineConfiguration config){
-        this.deConfiguration = config;
-    }
-
-    public Properties getConsumerProperties(){
-        Properties props = new Properties();
-        setCommonProperties(props);
-        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
-        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,  "org.apache.kafka.common.serialization.StringDeserializer");
-        props.put(ConsumerConfig.CLIENT_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerId() + "-consumer-" + UUID.randomUUID());
-        props.put(ConsumerConfig.GROUP_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerGroup());
-        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
-        return props;
-    }
-
-    public Properties getProducerProperties(){
-        Properties props = new Properties();
-        setCommonProperties(props);
-        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
-        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,  "org.apache.kafka.common.serialization.StringSerializer");
-        props.put(ProducerConfig.CLIENT_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerId() + "-producer-" + UUID.randomUUID());
-
-        return props;
-    }
-
-    private void setCommonProperties(Properties props) {
-        String securityProtocolConfig = System.getenv().getOrDefault("SECURITY_PROTOCOL", "SASL_PLAINTEXT");
-        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocolConfig);
-        props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, deConfiguration.getKafkaBootStrapServers());
-
-        if("SSL".equals(securityProtocolConfig)) {
-            props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, deConfiguration.getSSLConfig().getKeystorePath());
-            props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, deConfiguration.getSSLConfig().getKeystorePass());
-            props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, deConfiguration.getSSLConfig().getKeyManagerPassword());
-            props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
-            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, deConfiguration.getSSLConfig().getTruststorePath());
-            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, deConfiguration.getSSLConfig().getTruststorePass());
-        } else {
-            props.put(SaslConfigs.SASL_JAAS_CONFIG, getKafkaSaslJaasConfig());
-            props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
-        }
-    }
-
-    /**
-     * @return The Sasl Jaas Configuration
-     */
-    private String getKafkaSaslJaasConfig() throws KafkaException {
-        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
-        if(saslJaasConfFromEnv != null) {
-            return saslJaasConfFromEnv;
-        } else {
-            throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
-        }
-    }
-
-}
index 5a36980..2a5590e 100644 (file)
@@ -22,6 +22,7 @@ package org.openecomp.sdc.be.components.kafka;
 import com.google.gson.Gson;
 import com.google.gson.JsonSyntaxException;
 import fj.data.Either;
+import lombok.Getter;
 import lombok.Setter;
 import org.apache.http.HttpStatus;
 import org.apache.kafka.common.KafkaException;
index 5350445..8879bf0 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2022-2023 Nordix Foundation. All rights reserved.
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -25,10 +25,14 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
+import java.util.UUID;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
 import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
@@ -39,16 +43,27 @@ public class SdcKafkaConsumer {
 
     private static final Logger log = Logger.getLogger(SdcKafkaConsumer.class.getName());
     private final DistributionEngineConfiguration deConfiguration;
-    private final KafkaConsumer<String, String> kafkaConsumer;
+    private KafkaConsumer<String, String> kafkaConsumer;
 
     /**
      * Constructor setting up the KafkaConsumer from a predefined set of configurations
      */
     public SdcKafkaConsumer(DistributionEngineConfiguration deConfiguration){
         log.info("Create SdcKafkaConsumer via constructor");
-        KafkaCommonConfig kafkaCommonConfig = new KafkaCommonConfig(deConfiguration);
-        Properties properties = kafkaCommonConfig.getConsumerProperties();
+        Properties properties = new Properties();
         this.deConfiguration = deConfiguration;
+
+        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerId()+ "-consumer-" + UUID.randomUUID());
+        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
+        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,  "org.apache.kafka.common.serialization.StringDeserializer");
+        properties.put(ConsumerConfig.GROUP_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerGroup());
+        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, deConfiguration.getKafkaBootStrapServers());
+        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
+        properties.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, false);
+        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
+        properties.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
+
+        properties.put(SaslConfigs.SASL_JAAS_CONFIG, getKafkaSaslJaasConfig());
         kafkaConsumer = new KafkaConsumer<>(properties);
     }
 
@@ -63,6 +78,19 @@ public class SdcKafkaConsumer {
         this.kafkaConsumer = kafkaConsumer;
     }
 
+    /**
+     *
+     * @return the Sasl Jass Config
+     */
+    private String getKafkaSaslJaasConfig() {
+        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
+        if(saslJaasConfFromEnv != null) {
+            return saslJaasConfFromEnv;
+        } else {
+            throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
+        }
+    }
+
     /**
      *
      * @param topic Topic in which to subscribe
index 9e31da6..bdc984d 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2022-2023 Nordix Foundation. All rights reserved.
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,9 +21,13 @@ package org.openecomp.sdc.be.components.kafka;
 
 import com.google.common.annotations.VisibleForTesting;
 import java.util.Properties;
+import java.util.UUID;
+import org.apache.kafka.clients.CommonClientConfigs;
 import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.common.config.SaslConfigs;
 import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -34,15 +38,22 @@ import org.slf4j.LoggerFactory;
 public class SdcKafkaProducer {
     private static final Logger log = LoggerFactory.getLogger(SdcKafkaProducer.class.getName());
 
-    private final KafkaProducer<String, String> kafkaProducer;
+    private KafkaProducer<String, String> kafkaProducer;
 
     /**
      * Constructor setting up the KafkaProducer from a predefined set of configurations
      */
     public SdcKafkaProducer(DistributionEngineConfiguration deConfiguration) {
         log.info("Create SdcKafkaProducer via constructor");
-        KafkaCommonConfig kafkaCommonConfig = new KafkaCommonConfig(deConfiguration);
-        Properties properties = kafkaCommonConfig.getProducerProperties();
+        Properties properties = new Properties();
+
+        properties.put(ProducerConfig.CLIENT_ID_CONFIG, deConfiguration.getDistributionStatusTopic().getConsumerId() + "-producer-" + UUID.randomUUID());
+        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
+        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,  "org.apache.kafka.common.serialization.StringSerializer");
+        properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, deConfiguration.getKafkaBootStrapServers());
+        properties.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
+        properties.put(SaslConfigs.SASL_JAAS_CONFIG, getKafkaSaslJaasConfig());
+        properties.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
         kafkaProducer = new KafkaProducer<>(properties);
     }
 
@@ -55,9 +66,22 @@ public class SdcKafkaProducer {
         this.kafkaProducer = kafkaProducer;
     }
 
+    /**
+     * @return The Sasl Jaas Configuration
+     */
+    private static String getKafkaSaslJaasConfig() throws KafkaException {
+        String saslJaasConfFromEnv = System.getenv("SASL_JAAS_CONFIG");
+        if(saslJaasConfFromEnv != null) {
+            return saslJaasConfFromEnv;
+        } else {
+            throw new KafkaException("sasl.jaas.config not set for Kafka Consumer");
+        }
+    }
+
     /**
      * @param message A message to Send
      * @param topicName The name of the topic to publish to
+     * @return The status of the send request
      */
     public void send(String message, String topicName) throws KafkaException {
         ProducerRecord<String, String> kafkaMessagePayload = new ProducerRecord<>(topicName, "PartitionKey", message);
index de7d8bf..91ee023 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2022-2023 Nordix Foundation. All rights reserved.
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,25 +21,29 @@ package org.openecomp.sdc.be.components.kafka;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.when;
 
 import com.google.gson.JsonSyntaxException;
-import fj.data.Either;
-import java.util.ArrayList;
-import java.util.List;
 import org.apache.kafka.common.KafkaException;
-import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
-import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+
+import java.util.ArrayList;
+import fj.data.Either;
+import java.util.List;
+
 import org.openecomp.sdc.be.components.distribution.engine.CambriaErrorResponse;
-import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
 import org.openecomp.sdc.be.components.distribution.engine.NotificationDataImpl;
+import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
 import org.openecomp.sdc.be.distribution.api.client.CambriaOperationStatus;
 
+
 @ExtendWith(MockitoExtension.class)
 public class KafkaHandlerTest {
 
@@ -49,6 +53,8 @@ public class KafkaHandlerTest {
     @Mock
     private SdcKafkaProducer mockSdcKafkaProducer;
 
+    private KafkaHandler kafkaHandler;
+
     @Test
     public void testIsKafkaActiveTrue(){
         KafkaHandler kafkaHandler = new KafkaHandler(mockSdcKafkaConsumer, mockSdcKafkaProducer, true);
index 8db9a32..0a4a834 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2022-2023 Nordix Foundation. All rights reserved.
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -21,14 +21,20 @@ package org.openecomp.sdc.be.components.kafka;
 
 import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import org.apache.kafka.common.KafkaException;
+import org.junit.jupiter.api.Test;
+
 import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.when;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
 
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
+import java.util.Collection;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -37,16 +43,11 @@ import java.util.Set;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.KafkaException;
 import org.apache.kafka.common.TopicPartition;
 import org.jetbrains.annotations.NotNull;
-import org.junit.jupiter.api.Test;
-import org.junitpioneer.jupiter.SetEnvironmentVariable;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
+
 import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
 
-@SetEnvironmentVariable(key = "SASL_JAAS_CONFIG", value = "org.apache.kafka.common.security.scram.ScramLoginModule required username=admin password=admin-secret;")
 public class SdcKafkaConsumerTest {
 
     @Test
index 4264602..23322cc 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2022-2023 Nordix Foundation. All rights reserved.
+ * Copyright (C) 2022 Nordix Foundation. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  */
 package org.openecomp.sdc.be.components.kafka;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
+import org.junit.jupiter.api.Test;
+
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
 
-import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.common.KafkaException;
-import org.junit.jupiter.api.Test;
-import org.junitpioneer.jupiter.SetEnvironmentVariable;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
+
+import org.openecomp.sdc.be.catalog.api.IStatus;
 import org.openecomp.sdc.be.config.DistributionEngineConfiguration;
 
-@SetEnvironmentVariable(key = "SASL_JAAS_CONFIG", value = "org.apache.kafka.common.security.scram.ScramLoginModule required username=admin password=admin-secret;")
 public class SdcKafkaProducerTest {
 
     @Test
diff --git a/pom.xml b/pom.xml
index 581be0a..4f7201c 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -209,7 +209,7 @@ Modifications copyright (c) 2018-2019 Nokia
             <dependency>
                 <groupId>org.apache.kafka</groupId>
                 <artifactId>kafka-clients</artifactId>
-                <version>3.4.0</version>
+                <version>3.3.1</version>
             </dependency>
             <dependency>
                 <groupId>com.google.guava</groupId>