# JMS bind address host port
 jms.bind.address=tcp://localhost:61649
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# DMaaP is deprecated; Kafka is now used for event publishing
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
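+# ${BOOTSTRAP_SERVERS} and ${JAAS_CONFIG} are injected as environment variables
+# by the graphadmin deployment, the latter from the Strimzi-managed KafkaUser
+# secret. For SCRAM-SHA-512 the JAAS value takes the form (illustrative placeholders):
+#   org.apache.kafka.common.security.scram.ScramLoginModule required username="..." password="...";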
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
 
           <includeCallerData>true</includeCallerData>
           <appender-ref ref="translog"/>
         </appender>
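+        <!-- dmaap* appenders are renamed to kafka* as part of the DMaaP-to-Kafka
+             migration; their log files move from ${logDirectory}/dmaapAAIEventConsumer
+             to ${logDirectory}/kafkaAAIEventConsumer -->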
-        <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
             <level>WARN</level>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
             <pattern>${"errorPattern"}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>INFO</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
             </fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
             <pattern>${debugPattern}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>DEBUG</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
             <pattern>${debugPattern}</pattern>
           </encoder>
         </appender>
-        <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
           <filter class="ch.qos.logback.classic.filter.LevelFilter">
             <level>INFO</level>
             <onMatch>ACCEPT</onMatch>
             <onMismatch>DENY</onMismatch>
           </filter>
-          <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+          <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
           <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-            <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+            <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
             <maxHistory>${maxHistory}</maxHistory>
             <totalSizeCap>${totalSizeCap}</totalSizeCap>
           </rollingPolicy>
     </if>
     <appender-ref ref="STDOUT"/>
   </logger>
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer"/>
-        <appender-ref ref="dmaapAAIEventConsumerDebug"/>
-        <appender-ref ref="dmaapAAIEventConsumerMetric"/>
+        <appender-ref ref="kafkaAAIEventConsumer"/>
+        <appender-ref ref="kafkaAAIEventConsumerDebug"/>
+        <appender-ref ref="kafkaAAIEventConsumerMetric"/>
       </then>
     </if>
     <appender-ref ref="STDOUT"/>
 
--- /dev/null
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
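+# The Strimzi User Operator reconciles this KafkaUser into a Secret of the same
+# name; its sasl.jaas.config key is what the deployment consumes as JAAS_CONFIG.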
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
 
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
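+        # Kafka connection details: the Strimzi bootstrap service and the SASL
+        # JAAS credentials from the KafkaUser secret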
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-graphadmin/resources/etc/appprops/janusgraph-realtime.properties
           name: config
 
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiGraphKafkaUser: aai-graph-kafka-user
   cassandra:
     #This will instantiate AAI cassandra cluster, default:shared cassandra.
     localCluster: false
 
   # Specify the profiles for the graphadmin microservice
   profiles:
-
-    active: dmaap
-
+    active: kafka
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiGraphKafkaUser }}'
+    someConfig: graphrandom
+    aaiTopic: AAI-EVENT
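+    # aaiTopic must match the topic granted in the KafkaUser ACLs (AAI-EVENT)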
   # Specifies the timeout limit for the REST API requests
   timeout:
     enabled: true
 log:
   path: /var/log/onap
 logConfigMapNamePrefix: '{{ include "common.fullname" . }}'
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-graph-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
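+# If jaasConfExternalSecret resolves to an existing secret it is referenced
+# directly; otherwise the sasl.jaas.config entry is generated (policy: generate).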
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
 
 
 # JMS bind address host port
 jms.bind.address=tcp://localhost:61647
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# DMaaP is deprecated; Kafka is now used for event publishing
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
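+# SASL_PLAINTEXT authenticates via SCRAM over an unencrypted connection, matching
+# the plain listener exposed on port 9092 of the Strimzi bootstrap service.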
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
 
         <appender-ref ref="translog"/>
       </appender>
 
-      <appender name="dmaapAAIEventConsumer"
+      <appender name="kafkaAAIEventConsumer"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
           <level>WARN</level>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
 
       </appender>
 
-      <appender name="dmaapAAIEventConsumerDebug"
+      <appender name="kafkaAAIEventConsumerDebug"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>DEBUG</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${debugPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerInfo"
+      <appender name="kafkaAAIEventConsumerInfo"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${auditPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerMetric"
+      <appender name="kafkaAAIEventConsumerMetric"
                 class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
         <appender-ref ref="asyncMETRIC"/>
       </logger>
       <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO">
-        <appender-ref ref="dmaapAAIEventConsumerMetric"/>
+        <appender-ref ref="kafkaAAIEventConsumerMetric"/>
       </logger>
       <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN">
         <appender-ref ref="asyncERROR"/>
       </logger>
       <logger name="com.att.nsa.mr" level="INFO">
-        <appender-ref ref="dmaapAAIEventConsumerInfo"/>
+        <appender-ref ref="kafkaAAIEventConsumerInfo"/>
       </logger>
     </then>
   </if>
     <appender-ref ref="STDOUT"/>
   </logger>
 
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer"/>
-        <appender-ref ref="dmaapAAIEventConsumerDebug"/>
+        <appender-ref ref="kafkaAAIEventConsumer"/>
+        <appender-ref ref="kafkaAAIEventConsumerDebug"/>
       </then>
     </if>
     <appender-ref ref="STDOUT"/>
 
--- /dev/null
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
 
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-resources/resources/etc/appprops/janusgraph-realtime.properties
           name: {{ include "common.fullname" . }}-config
 
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiKafkaUser: aai-kafka-user
   cassandra:
     #Service Name of the cassandra cluster to connect to.
     #Override it to aai-cassandra if localCluster is enabled.
 
     # Active spring profiles for the resources microservice
     profiles:
-      active: production,dmaap
+      active: production,kafka
 
     # Notification event specific properties
     notification:
     # Specifies which clients should always default to realtime graph connection
     realtime:
       clients: SDNC,MSO,SO,robot-ete
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiKafkaUser }}'
+    someConfig: random
+    aaiTopic: AAI-EVENT
 
 api_list:
   - 11
     url: external-system
 
 # application image
-image: onap/aai-resources:1.13.0
+image: onap/aai-resources:1.13.4
 pullPolicy: Always
 restartPolicy: Always
 flavor: small
   logToFileEnabled: false
   maxHistory: 7
   totalSizeCap: 1GB
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
 
 
 # JMS bind address host port
 jms.bind.address=tcp://localhost:61647
-dmaap.ribbon.listOfServers=message-router.{{ include "common.namespace" . }}:3904
-dmaap.ribbon.transportType=http
+
+# DMaaP is deprecated; Kafka is now used for event publishing
+spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
+spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
+spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
+spring.kafka.producer.properties.sasl.jaas.config=${JAAS_CONFIG}
+spring.kafka.producer.retries=3
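+# The spring.kafka.producer.* properties are picked up by Spring Boot's Kafka
+# auto-configuration; retries=3 retries transient send failures before surfacing
+# an error to the publisher.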
 
 # Schema related attributes for the oxm and edges
 # Any additional schema related attributes should start with prefix schema
 
         <includeCallerData>true</includeCallerData>
         <appender-ref ref="translog" />
       </appender>
-      <appender name="dmaapAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
           <level>WARN</level>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${errorPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>DEBUG</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${debugPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/dmaap-transaction.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
           <pattern>${auditPattern}</pattern>
         </encoder>
       </appender>
-      <appender name="dmaapAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+      <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
         <filter class="ch.qos.logback.classic.filter.LevelFilter">
           <level>INFO</level>
           <onMatch>ACCEPT</onMatch>
           <onMismatch>DENY</onMismatch>
         </filter>
-        <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+        <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
         <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
+          <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
           </fileNamePattern>
           <maxHistory>${maxHistory}</maxHistory>
           <totalSizeCap>${totalSizeCap}</totalSizeCap>
   <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumerMetric" />
+        <appender-ref ref="kafkaAAIEventConsumerMetric" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
     <appender-ref ref="STDOUT" />
   </logger>
 
-  <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+  <logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumer" />
-        <appender-ref ref="dmaapAAIEventConsumerDebug" />
+        <appender-ref ref="kafkaAAIEventConsumer" />
+        <appender-ref ref="kafkaAAIEventConsumerDebug" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
   <logger name="com.att.nsa.mr" level="INFO">
     <if condition='property("logToFileEnabled").contains("true")'>
       <then>
-        <appender-ref ref="dmaapAAIEventConsumerInfo" />
+        <appender-ref ref="kafkaAAIEventConsumerInfo" />
       </then>
     </if>
     <appender-ref ref="STDOUT" />
 
--- /dev/null
+{{/*
+# Copyright © 2022-23 Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+*/}}
+apiVersion: kafka.strimzi.io/v1beta2
+kind: KafkaUser
+metadata:
+  name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}
+  labels:
+    strimzi.io/cluster: {{ include "common.release" . }}-strimzi
+spec:
+  authentication:
+    type: scram-sha-512
+  authorization:
+    type: simple
+    acls:
+    - resource:
+        type: topic
+        name: AAI-EVENT
+      operation: All
\ No newline at end of file
 
           value: {{ .Values.service.internalPort2 | quote }}
         - name: INTERNAL_PORT_3
           value: {{ .Values.service.internalPort3 | quote }}
+        - name: BOOTSTRAP_SERVERS
+          value: {{ include "common.release" . }}-strimzi-kafka-bootstrap:9092
+        - name: JAAS_CONFIG
+          valueFrom:
+            secretKeyRef:
+              name: {{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}
+              key: sasl.jaas.config
         volumeMounts:
         - mountPath: /opt/app/aai-traversal/resources/etc/appprops/janusgraph-realtime.properties
           name: {{ include "common.fullname" . }}-config
 
 # Declare variables to be passed into your templates.
 global: # global defaults
   nodePortPrefix: 302
-
+  kafkaBootstrap: strimzi-kafka-bootstrap
+  aaiTravKafkaUser: aai-trav-kafka-user
   cassandra:
     #Service Name of the cassandra cluster to connect to.
     #Override it to aai-cassandra if localCluster is enabled.
 
     # Active spring profiles for the resources microservice
     profiles:
-      active: production,dmaap
+      active: production,kafka
 
     # Notification event specific properties
     notification:
     # Specifies which clients should always default to realtime graph connection
     realtime:
       clients: SDNC,MSO,SO,robot-ete
+    kafkaBootstrap: strimzi-kafka-bootstrap
+    jaasConfExternalSecret: '{{ include "common.release" . }}-{{ .Values.global.aaiTravKafkaUser }}'
+    someConfig: random
+    aaiTopic: AAI-EVENT
 
 # application image
-image: onap/aai-traversal:1.12.3
+image: onap/aai-traversal:1.13.4
 pullPolicy: Always
 restartPolicy: Always
 flavor: small
 log:
   path: /var/log/onap
 logConfigMapNamePrefix: '{{ include "common.fullname" . }}'
+#################################################################
+# Secrets metaconfig
+#################################################################
+secrets:
+  - uid: aai-trav-kafka-user
+    externalSecret: '{{ tpl (default "" .Values.config.jaasConfExternalSecret) . }}'
+    type: genericKV
+    envs:
+      - name: sasl.jaas.config
+        value: '{{ .Values.config.someConfig }}'
+        policy: generate
+kafkaUser:
+  authenticationType: scram-sha-512
+  acls:
+    - name: AAI-EVENT
+      type: topic
+      operations: [Read, Write]
\ No newline at end of file
 
     # Active spring profiles for the resources microservice
     # aaf-auth profile will be automatically set if aaf enabled is set to true
     profiles:
-      active: production,dmaap #,aaf-auth
+      active: production,kafka #,aaf-auth
 
     # Notification event specific properties
     notification: