[AAI] Helm changes for DMaaP deprecation 90/137990/3
author Kv <vani.kv@accenture.com>
Mon, 20 May 2024 06:01:17 +0000 (11:31 +0530)
committer Kv <vani.kv@accenture.com>
Mon, 20 May 2024 10:26:49 +0000 (15:56 +0530)
- Helm changes for DMaaP deprecation in A&AI

Issue-ID: OOM-3297
Change-Id: I6a601c4c9958fb6817872520c259525eb650d0c9
Signed-off-by: Kv <vani.kv@accenture.com>
16 files changed:
kubernetes/aai/components/aai-graphadmin/resources/config/application.properties
kubernetes/aai/components/aai-graphadmin/resources/config/logback.xml
kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml [new file with mode: 0644]
kubernetes/aai/components/aai-graphadmin/templates/deployment.yaml
kubernetes/aai/components/aai-graphadmin/values.yaml
kubernetes/aai/components/aai-resources/resources/config/application.properties
kubernetes/aai/components/aai-resources/resources/config/logback.xml
kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml [new file with mode: 0644]
kubernetes/aai/components/aai-resources/templates/deployment.yaml
kubernetes/aai/components/aai-resources/values.yaml
kubernetes/aai/components/aai-traversal/resources/config/application.properties
kubernetes/aai/components/aai-traversal/resources/config/logback.xml
kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml [new file with mode: 0644]
kubernetes/aai/components/aai-traversal/templates/deployment.yaml
kubernetes/aai/components/aai-traversal/values.yaml
kubernetes/aai/values.yaml

index b63cd83..83689da 100644 (file)
@@ -55,8 +55,15 @@ server.ssl.enabled=false
 
 # JMS bind address host port
 jms.bind.address=tcp://localhost:61649
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# dmaap is deprecated now kafka is used
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
index fd79f70..875b1de 100644 (file)
           <includeCallerData>true</includeCallerData>
           <appender-ref ref="translog"/>
         </appender>
-        <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
             <level>WARN</level>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
             <pattern>${"errorPattern"}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>INFO</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
             </fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
             <pattern>${debugPattern}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>DEBUG</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
             <pattern>${debugPattern}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>INFO</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
     </if>
     <appender-ref ref="STDOUT"/>
   </logger>
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer"/>
-        <appender-ref ref="dmaapAAIEventConsumerDebug"/>
-        <appender-ref ref="dmaapAAIEventConsumerMetric"/>
+        <appender-ref ref="kafkaAAIEventConsumer"/>
+        <appender-ref ref="kafkaAAIEventConsumerDebug"/>
+        <appender-ref ref="kafkaAAIEventConsumerMetric"/>
       </then>
     </if>
     <appender-ref ref="STDOUT"/>
diff --git a/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml b/kubernetes/aai/components/aai-graphadmin/templates/aai-graph-kafka-user.yml
new file mode 100644 (file)
index 0000000..b028df7
--- /dev/null
@@ -0,0 +1,31 @@
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
index cad213a..9a0ca76 100644 (file)
@@ -118,6 +118,13 @@ spec:
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-graphadmin/resources/etc/appprops/janusgraph-realtime.properties
           name: config
index 8b6b5f9..46e81c8 100644 (file)
@@ -25,6 +25,8 @@
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiGraphKafkaUser: aai-graph-kafka-user
   cassandra:
     #This will instantiate AAI cassandra cluster, default:shared cassandra.
     localCluster: false
@@ -123,9 +125,11 @@ config:
 
   # Specify the profiles for the graphadmin microservice
   profiles:
-
-    active: dmaap
-
+    active: kafka
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}'
+    someConfig: graphrandom
+    aaiTopic: AAI-EVENT
   # Specifies the timeout limit for the REST API requests
   timeout:
     enabled: true
@@ -292,3 +296,20 @@ serviceAccount:
 log:
   path: /var/log/onap
 logConfigMapNamePrefix: '{{ include "common.fullname" . }}'
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-graph-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
index 1b7bdf8..5762460 100644 (file)
@@ -58,8 +58,15 @@ server.ssl.enabled=false
 
 # JMS bind address host port
 jms.bind.address=tcp://localhost:61647
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# dmaap is deprecated now kafka is used
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
index b523185..54b352a 100644 (file)
         <appender-ref ref="translog"/>
       </appender>
 
-      <appender name="dmaapAAIEventConsumer"
+      <appender name="kafkaAAIEventConsumer"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
           <level>WARN</level>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
 
       </appender>
 
-      <appender name="dmaapAAIEventConsumerDebug"
+      <appender name="kafkaAAIEventConsumerDebug"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>DEBUG</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${debugPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerInfo"
+      <appender name="kafkaAAIEventConsumerInfo"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${auditPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerMetric"
+      <appender name="kafkaAAIEventConsumerMetric"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
         <appender-ref ref="asyncMETRIC"/>
       </logger>
       <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO">
-        <appender-ref ref="dmaapAAIEventConsumerMetric"/>
+        <appender-ref ref="kafkaAAIEventConsumerMetric"/>
       </logger>
       <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN">
         <appender-ref ref="asyncERROR"/>
       </logger>
       <logger name="com.att.nsa.mr" level="INFO">
-        <appender-ref ref="dmaapAAIEventConsumerInfo"/>
+        <appender-ref ref="kafkaAAIEventConsumerInfo"/>
       </logger>
     </then>
   </if>
     <appender-ref ref="STDOUT"/>
   </logger>
 
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer"/>
-        <appender-ref ref="dmaapAAIEventConsumerDebug"/>
+        <appender-ref ref="kafkaAAIEventConsumer"/>
+        <appender-ref ref="kafkaAAIEventConsumerDebug"/>
       </then>
     </if>
     <appender-ref ref="STDOUT"/>
diff --git a/kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml b/kubernetes/aai/components/aai-resources/templates/aai-kafka-user.yml
new file mode 100644 (file)
index 0000000..e4fa84a
--- /dev/null
@@ -0,0 +1,31 @@
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
index 122e522..7cccfb1 100644 (file)
@@ -162,6 +162,13 @@ spec:
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-resources/resources/etc/appprops/janusgraph-realtime.properties
           name: {{ include "common.fullname" . }}-config
index b1d3202..5e34181 100644 (file)
@@ -20,6 +20,8 @@
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiKafkaUser: aai-kafka-user
   cassandra:
     #Service Name of the cassandra cluster to connect to.
     #Override it to aai-cassandra if localCluster is enabled.
@@ -50,7 +52,7 @@ global: # global defaults
 
     # Active spring profiles for the resources microservice
     profiles:
-      active: production,dmaap
+      active: production,kafka
 
     # Notification event specific properties
     notification:
@@ -96,6 +98,10 @@ global: # global defaults
     # Specifies which clients should always default to realtime graph connection
     realtime:
       clients: SDNC,MSO,SO,robot-ete
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}'
+    someConfig: random
+    aaiTopic: AAI-EVENT
 
 api_list:
   - 11
@@ -123,7 +129,7 @@ aai_enpoints:
     url: external-system
 
 # application image
-image: onap/aai-resources:1.13.0
+image: onap/aai-resources:1.13.4
 pullPolicy: Always
 restartPolicy: Always
 flavor: small
@@ -374,3 +380,20 @@ accessLogback:
   logToFileEnabled: false
   maxHistory: 7
   totalSizeCap: 1GB
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
index 276dbfe..1b58ad6 100644 (file)
@@ -49,8 +49,15 @@ server.ssl.enabled=false
 
 # JMS bind address host port
 jms.bind.address=tcp://localhost:61647
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# dmaap is deprecated now kafka is used
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
index 3dc4867..75fec54 100644 (file)
         <includeCallerData>true</includeCallerData>
         <appender-ref ref="translog" />
       </appender>
-      <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
           <level>WARN</level>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${errorPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>DEBUG</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${debugPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${auditPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
   <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumerMetric" />
+        <appender-ref ref="kafkaAAIEventConsumerMetric" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
     <appender-ref ref="STDOUT" />
   </logger>
 
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer" />
-        <appender-ref ref="dmaapAAIEventConsumerDebug" />
+        <appender-ref ref="kafkaAAIEventConsumer" />
+        <appender-ref ref="kafkaAAIEventConsumerDebug" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
   <logger name="com.att.nsa.mr" level="INFO">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumerInfo" />
+        <appender-ref ref="kafkaAAIEventConsumerInfo" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
diff --git a/kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml b/kubernetes/aai/components/aai-traversal/templates/aai-trav-kafka-user.yml
new file mode 100644 (file)
index 0000000..1754227
--- /dev/null
@@ -0,0 +1,31 @@
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
index 6627a80..67e1b99 100644 (file)
@@ -183,6 +183,13 @@ spec:
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties
           name: {{ include "common.fullname" . }}-config
index aca1ec7..a644698 100644 (file)
@@ -20,7 +20,8 @@
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
-
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiTravKafkaUser: aai-trav-kafka-user
   cassandra:
     #Service Name of the cassandra cluster to connect to.
     #Override it to aai-cassandra if localCluster is enabled.
@@ -59,7 +60,7 @@ global: # global defaults
 
     # Active spring profiles for the resources microservice
     profiles:
-      active: production,dmaap
+      active: production,kafka
 
     # Notification event specific properties
     notification:
@@ -105,9 +106,13 @@ global: # global defaults
     # Specifies which clients should always default to realtime graph connection
     realtime:
       clients: SDNC,MSO,SO,robot-ete
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}'
+    someConfig: random
+    aaiTopic: AAI-EVENT
 
 # application image
-image: onap/aai-traversal:1.12.3
+image: onap/aai-traversal:1.13.4
 pullPolicy: Always
 restartPolicy: Always
 flavor: small
@@ -369,3 +374,20 @@ serviceAccount:
 log:
   path: /var/log/onap
 logConfigMapNamePrefix: '{{ include "common.fullname" . }}'
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-trav-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
index 523cd8c..a257ee0 100644 (file)
@@ -212,7 +212,7 @@ global: # global defaults
     # Active spring profiles for the resources microservice
     # aaf-auth profile will be automatically set if aaf enabled is set to true
     profiles:
-      active: production,dmaap #,aaf-auth
+      active: production,kafka #,aaf-auth
 
     # Notification event specific properties
     notification: