tosca.mappings.config=${CONFIG_HOME}/tosca-mappings.json
spring.application.name=aai-babel
-spring.sleuth.enabled={{ .Values.tracing.enabled }}
-spring.zipkin.baseUrl={{ .Values.tracing.collector.baseUrl }}
+spring.sleuth.enabled={{ default .Values.global.tracing.enabled .Values.tracing.enabled }}
+spring.zipkin.baseUrl={{ default .Values.global.tracing.collector.baseUrl .Values.tracing.collector.baseUrl }}
spring.sleuth.messaging.jms.enabled = false
spring.sleuth.trace-id128=true
-spring.sleuth.sampler.probability={{ .Values.tracing.sampling.probability }}
+spring.sleuth.sampler.probability={{ default .Values.global.tracing.sampling.probability .Values.tracing.sampling.probability }}
spring.sleuth.propagation.type=w3c, b3
spring.sleuth.supports-join=false
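# Note on the `default` calls above: Helm's default takes the fallback first, so a value
# set under .Values.tracing takes precedence and .Values.global.tracing is used otherwise
# (default treats false/0/"" as unset). Illustrative rendering with only the global
# defaults from values.yaml (tracing disabled, Jaeger collector URL):
#   spring.sleuth.enabled=false
#   spring.zipkin.baseUrl=http://jaeger-collector.istio-system:9411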
#################################################################
# Global configuration defaults.
#################################################################
-global: {}
+global:
+ tracing:
+ enabled: false
+ collector:
+ baseUrl: http://jaeger-collector.istio-system:9411
+ sampling:
+ probability: 1.0
#################################################################
# Application configuration defaults.
memory: "1Gi"
unlimited: {}
+##
+## Use this to override the global tracing defaults
tracing:
- enabled: false
- collector:
- baseUrl: http://jaeger-collector.istio-system:9411
- sampling:
- probability: 1.0 # percentage of requests that are sampled (between 0-1/0%-100%)
+# enabled: false
+ collector: {}
+# baseUrl: http://jaeger-collector.istio-system:9411
+ sampling: {}
+#    probability: 1.0 # fraction of requests that are sampled (0.0 - 1.0, i.e. 0% - 100%)
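# Example override (illustrative values only): enable tracing for this component and
# sample half of the requests, while the collector baseUrl still falls back to the
# global default:
# tracing:
#   enabled: true
#   sampling:
#     probability: 0.5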
# adds JVM args for remote debugging of the application
debug:
{{- if or (.Values.global.config.basic.auth.enabled) ( include "common.onServiceMesh" .) }}
aai.tools.enableBasicAuth=true
-aai.tools.username={{ .Values.global.config.basic.auth.username }}
-aai.tools.password={{ .Values.global.config.basic.auth.passwd }}
+aai.tools.username={{ (index .Values.global.config.basic.auth.users 0).username }}
+aai.tools.password={{ (index .Values.global.config.basic.auth.users 0).password }}
{{- end }}
aai.notification.current.version={{ .Values.global.config.schema.version.api.default }}
aai.datagrooming.enableghost2checkoff=false
aai.datagrooming.enableghost2fixon=false
aai.datagrooming.enablef=false
+aai.datagrooming.enableskipindexupdatefix=true
# used by dataGrooming to set the time window, in minutes
aai.datagrooming.timewindowminutesvalue=10500
spring.profiles.active={{ .Values.config.profiles.active }}
spring.jersey.application-path=${schema.uri.base.path}
#The max number of active threads in this pool
-server.tomcat.max-threads=200
+server.tomcat.max-threads=50
#The minimum number of threads always kept alive
-server.tomcat.min-Spare-Threads=25
+server.tomcat.min-Spare-Threads=5
#The number of milliseconds before an idle thread shuts down, unless the number of active threads is less than or equal to minSpareThreads
server.tomcat.max-idle-time=60000
server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
server.port=8449
-security.require-ssl=false
-server.ssl.enabled=false
-# JMS bind address host port
-jms.bind.address=tcp://localhost:61649
-# dmaap is deprecated now kafka is used
spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
aai.actuator.echo.enabled={{ .Values.actuator.echo.enabled }}
aai.graph.properties.path=${server.local.startpath}/etc/appprops/janusgraph-realtime.properties
+aai.basic-auth.enabled={{ .Values.global.config.basic.auth.enabled }}
+{{- range $index, $user := .Values.global.config.basic.auth.users }}
+aai.basic-auth.users[{{ $index }}].username={{ $user.username }}
+aai.basic-auth.users[{{ $index }}].password={{ $user.password }}
+{{- end }}
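+# Illustrative rendering of the loop above with the default users from values.yaml:
+#   aai.basic-auth.users[0].username=aai@aai.onap.org
+#   aai.basic-auth.users[1].username=AAI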
basic:
auth:
enabled: true
- username: AAI
- passwd: AAI
+ users:
+ - username: aai@aai.onap.org
+ password: demo123456!
+ - username: AAI
+ password: AAI
# Notification event specific properties
notification:
eventType: AAI-EVENT
clients: SDNC,-1|MSO,-1|SO,-1|robot-ete,-1
# application image
-image: onap/aai-graphadmin:1.15.4
+image: onap/aai-graphadmin:1.15.6
pullPolicy: Always
restartPolicy: Always
flavor: small
# config override for the cassandra driver
# see: https://docs.janusgraph.org/master/configs/configuration-reference/#storagecqlinternal
cassandraDriver:
- configuration: advanced.metadata.schema.debouncer.window = 1 second
+ configuration: advanced.metadata.schema.debouncer.window = 2 second
# Default maximum records to fix for the data grooming and dupeTool
maxFix:
dataGrooming: 150
BOOTSTRAP_SERVERS: onap-strimzi-kafka-bootstrap:9092
DATA_SNAPSHOT_TASKS_ENABLED: false
DATA_SNAPSHOT_CLEANUP_ENABLED: false
+ HISTORY_TRUNCATE_TASK_ENABLED: false
nodeSelector: {}
server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
server.port=8447
-security.require-ssl=false
-server.ssl.enabled=false
-# JMS bind address host port
-jms.bind.address=tcp://localhost:61647
-
-# dmaap is deprecated now kafka is used
spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+++ /dev/null
-{{/*
-<!--
-
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- Modifications Copyright © 2018 Amdocs, Bell Canada
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
-
- ECOMP is a trademark and service mark of AT&T Intellectual Property.
-
--->
-*/}}
-<configuration>
-  <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
-
-  <property name="logToFileEnabled" value='{{.Values.accessLogback.logToFileEnabled}}' />
-  <property name="maxHistory" value='{{.Values.accessLogback.maxHistory}}' />
-  <property name="totalSizeCap" value='{{.Values.accessLogback.totalSizeCap}}' />
-  <property name="livenessAccessLogEnabled"
-    value='{{.Values.accessLogback.livenessAccessLogEnabled}}' />
-
-
-  <if condition='property("logToFileEnabled").contains("true")'>
-    <then>
-      <appender name="ACCESS"
-        class="ch.qos.logback.core.rolling.RollingFileAppender">
-        <file>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log</file>
-        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log.%d{yyyy-MM-dd}.zip
-          </fileNamePattern>
-          <maxHistory>${maxHistory}</maxHistory>
-          <totalSizeCap>${totalSizeCap}</totalSizeCap>
-        </rollingPolicy>
-        <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
-          <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId}
-            %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU}
-            %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST}
-            %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter}
-            %i{X-AAI-SSL-Client-DN} %D</Pattern>
-        </encoder>
-      </appender>
-      <appender-ref ref="ACCESS" />
-    </then>
-  </if>
-
-  <appender name="STDOUTACCESS" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
-      <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId}
-        %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O}
-        %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C}
-        %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D -
-        "logType": "access"</Pattern>
-    </encoder>
-    <if condition='property("livenessAccessLogEnabled").contains("false")'>
-      <then>
-        <filter class="ch.qos.logback.core.filter.EvaluatorFilter">
-          <evaluator class="ch.qos.logback.access.net.URLEvaluator">
-            <URL>/aai/util/echo</URL>
-          </evaluator>
-          <OnMismatch>NEUTRAL</OnMismatch>
-          <OnMatch>DENY</OnMatch>
-        </filter>
-      </then>
-    </if>
-  </appender>
-
-  <appender-ref ref="STDOUTACCESS" />
-
-</configuration>
-{{/*
-<!--
-%a - Remote IP address
-%A - Local IP address
-%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
-%B - Bytes sent, excluding HTTP headers
-%h - Remote host name
-%H - Request protocol
-%l - Remote logical username from identd (always returns '-')
-%m - Request method
-%p - Local port
-%q - Query string (prepended with a '?' if it exists, otherwise an empty string
-%r - First line of the request
-%s - HTTP status code of the response
-%S - User session ID
-%t - Date and time, in Common Log Format format
-%u - Remote user that was authenticated
-%U - Requested URL path
-%v - Local server name
-%I - current request thread name (can compare later with stacktraces)
-
-%z - Custom pattern that parses the cert for the subject
-%y - Custom pattern determines rest or dme2
--->
-*/}}
<property resource="application.properties" />
- <property name="maxHistory" value='{{.Values.logback.maxHistory}}' />
- <property name="totalSizeCap" value='{{.Values.logback.totalSizeCap}}' />
- <property name="queueSize" value='{{.Values.logback.queueSize}}'/>
-
- <property name="logToFileEnabled" value='{{.Values.logback.logToFileEnabled}}'/>
-
<property name="namespace" value="aai-resources"/>
<property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
<jmxConfigurator />
<property name="logDirectory" value="${AJSC_HOME}/logs" />
- <!-- Old patterns
- <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfAuditLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%ecompServiceName|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%ecompResponseCode|%ecompResponseDescription|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfTransLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{partnerName}:%m%n"/>
- -->
+
<property name="p_tim" value="%d{"yyyy-MM-dd'T'HH:mm:ss.SSSXXX", UTC}"/>
<property name="p_lvl" value="%level"/>
<property name="p_log" value="%logger"/>
<conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender name="SANE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/sane.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/sane.log.%d{yyyy-MM-dd}.zip</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
- </pattern>
- </encoder>
- </appender>
-
- <appender name="asyncSANE" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="SANE"/>
- </appender>
- <appender name="METRIC" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/metrics.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/metrics.log.%d{yyyy-MM-dd}.zip</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${metricPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncMETRIC" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="METRIC"/>
- </appender>
-
- <appender name="DEBUG"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${logDirectory}/rest/debug.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/debug.log.%d{yyyy-MM-dd}.zip</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncDEBUG" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <appender-ref ref="DEBUG"/>
- <includeCallerData>true</includeCallerData>
- </appender>
- <appender name="ERROR"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/error.log</file>
- <rollingPolicy
- class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/error.log.%d{yyyy-MM-dd}.zip</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <encoder>
- <pattern>${errorPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncERROR" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <appender-ref ref="ERROR"/>
- </appender>
-
- <appender name="AUDIT"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/audit.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/audit.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${auditPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asyncAUDIT" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="AUDIT"/>
- </appender>
-
- <appender name="translog"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${logDirectory}/rest/translog.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/translog.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${transLogPattern}</pattern>
- </encoder>
- </appender>
-
- <appender name="asynctranslog" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="translog"/>
- </appender>
-
- <appender name="kafkaAAIEventConsumer"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${errorPattern}</pattern>
- </encoder>
-
- </appender>
-
- <appender name="kafkaAAIEventConsumerDebug"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
- <appender name="kafkaAAIEventConsumerInfo"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${auditPattern}</pattern>
- </encoder>
- </appender>
- <appender name="kafkaAAIEventConsumerMetric"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${metricPattern}</pattern>
- </encoder>
- </appender>
- <appender name="external"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <file>${logDirectory}/external/external.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/external/external.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
- <appender name="auth"
- class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>DEBUG</level>
- </filter>
- <file>${logDirectory}/auth/auth.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/auth/auth.log.%d{yyyy-MM-dd}.zip
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}[%thread] %-5level %logger{1024} - %msg%n</pattern>
- </encoder>
- </appender>
- <appender name="asyncAUTH" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="auth"/>
- </appender>
- </then>
- </if>
+ <property name="CONSOLE_LOG_PATTERN"
+ value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} [%X{traceId},%X{spanId}] [%thread] %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />
+ <!-- Appender to log to console -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
- <providers>
- <timestamp><fieldName>timestamp</fieldName></timestamp>
- <message/>
- <mdc>
- <fieldName>context</fieldName>
- <excludeMdcKeyName>ServerIPAddress</excludeMdcKeyName>
- <excludeMdcKeyName>EntryTimestamp</excludeMdcKeyName>
- <excludeMdcKeyName>InvokeTimestamp</excludeMdcKeyName>
- <excludeMdcKeyName>ErrorCode</excludeMdcKeyName>
- <excludeMdcKeyName>ErrorDesc</excludeMdcKeyName>
- </mdc>
- <stackTrace>
- <fieldName>exception</fieldName>
- <throwableConverter class="net.logstash.logback.stacktrace.ShortenedThrowableConverter">
- <exclude>^sun\.reflect\..*\.invoke</exclude>
- <exclude>^net\.sf\.cglib\.proxy\.MethodProxy\.invoke</exclude>
- <rootCauseFirst>true</rootCauseFirst>
- </throwableConverter>
- </stackTrace>
- <threadName><fieldName>thread</fieldName></threadName>
- <loggerName>
- <fieldName>logger</fieldName>
- <shortenedLoggerNameLength>36</shortenedLoggerNameLength>
- </loggerName>
- <logLevel/>
- <pattern>
- <pattern>{"logType":"app"}</pattern>
- </pattern>
- </providers>
+ <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <!-- Minimum logging level to be presented in the console logs -->
+ <level>DEBUG</level>
+ </filter>
+ <encoder>
+ <pattern>${CONSOLE_LOG_PATTERN}</pattern>
+ <charset>utf8</charset>
</encoder>
</appender>
<!-- logback internals logging -->
-
<logger name="ch.qos.logback.classic" level="WARN" />
<logger name="ch.qos.logback.core" level="WARN" />
<logger name="org.onap.aai" level={{ .Values.log.level.base | upper | quote }} additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncDEBUG"/>
- <appender-ref ref="asyncSANE"/>
- </then>
- </if>
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.onap.aai.aaf.auth" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncAUTH"/>
- </then>
- </if>
<appender-ref ref="STDOUT"/>
</logger>
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <!-- These loggers are not additive and will be redirected to the parent logger.
- Sending events to log is handled by parent loggers-->
- <logger name="org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog" level="INFO">
- <appender-ref ref="asyncAUDIT"/>
- </logger>
- <logger name="org.onap.logging.filter.base.AbstractAuditLogFilter" level={{ .Values.log.level.audit | upper | quote }}>
- <appender-ref ref="asyncAUDIT"/>
- </logger>
- <logger name="org.onap.aai.aailog.logs.AaiDBMetricLog" level={{ .Values.log.level.dbMetric | upper | quote }}>
- <appender-ref ref="asyncMETRIC"/>
- </logger>
- <logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO">
- <appender-ref ref="kafkaAAIEventConsumerMetric"/>
- </logger>
- <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN">
- <appender-ref ref="asyncERROR"/>
- </logger>
- <logger name="com.att.nsa.mr" level="INFO">
- <appender-ref ref="kafkaAAIEventConsumerInfo"/>
- </logger>
- </then>
- </if>
-
<logger name="org.onap.aai.interceptors.post" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asynctranslog"/>
- </then>
- </if>
<appender-ref ref="STDOUT"/>
</logger>
<logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="kafkaAAIEventConsumer"/>
- <appender-ref ref="kafkaAAIEventConsumerDebug"/>
- </then>
- </if>
<appender-ref ref="STDOUT"/>
</logger>
<root level={{ .Values.log.level.root | upper | quote }}>
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="external"/>
- </then>
- </if>
<appender-ref ref="STDOUT"/>
</root>
</configuration>
labels: {{- include "common.labels" . | nindent 4 }}
data:
{{ tpl (.Files.Glob "resources/config/logback.xml").AsConfig . | indent 2 }}
-{{ tpl (.Files.Glob "resources/config/localhost-access-logback.xml").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/janusgraph-realtime.properties").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/aaiconfig.properties").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/application.properties").AsConfig . | indent 2 }}
- mountPath: /opt/app/aai-resources/resources/logback.xml
name: {{ include "common.fullname" . }}-config
subPath: logback.xml
- - mountPath: /opt/app/aai-resources/resources/localhost-access-logback.xml
- name: {{ include "common.fullname" . }}-config
- subPath: localhost-access-logback.xml
- mountPath: /opt/app/aai-resources/resources/etc/auth/realm.properties
name: {{ include "common.fullname" . }}-config
subPath: realm.properties
server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
server.port=8446
-security.require-ssl=false
-server.ssl.enabled=false
-# dmaap is deprecated now kafka is used
spring.kafka.producer.bootstrap-servers=${BOOTSTRAP_SERVERS}
spring.kafka.producer.properties.security.protocol=SASL_PLAINTEXT
spring.kafka.producer.properties.sasl.mechanism=SCRAM-SHA-512
+++ /dev/null
-{{/*<!--
- ============LICENSE_START=======================================================
- org.onap.aai
- ================================================================================
- Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- Modifications Copyright © 2018 Amdocs, Bell Canada
- Modifications Copyright © 2020 Orange
- ================================================================================
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- ============LICENSE_END=========================================================
--->
-*/}}
-<configuration scan="true" scanPeriod="60 seconds" debug="false">
-  <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
-  <property name="maxHistory" value='{{.Values.accessLogback.maxHistory}}' />
-  <property name="totalSizeCap" value='{{.Values.accessLogback.totalSizeCap}}' />
-  <property name="logToFileEnabled" value='{{.Values.accessLogback.logToFileEnabled}}' />
-  <property name="livenessAccessLogEnabled"
-    value='{{.Values.accessLogback.livenessAccessLogEnabled}}' />
-
-  <if condition='property("logToFileEnabled").contains("true")'>
-    <then>
-      <appender name="ACCESS" class="ch.qos.logback.core.rolling.RollingFileAppender">
-        <file>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log</file>
-        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-          <fileNamePattern>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log.%d{yyyy-MM-dd}
-          </fileNamePattern>
-          <maxHistory>${maxHistory}</maxHistory>
-          <totalSizeCap>${totalSizeCap}</totalSizeCap>
-        </rollingPolicy>
-        <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
-          <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId}
-            %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU}
-            %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST}
-            %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter}
-            %i{X-AAI-SSL-Client-DN} %D</Pattern>
-        </encoder>
-      </appender>
-      <appender-ref ref="ACCESS" />
-    </then>
-  </if>
-
-  <appender name="STDOUTACCESS" class="ch.qos.logback.core.ConsoleAppender">
-    <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
-      <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId}
-        %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O}
-        %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C}
-        %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D -
-        "logType": "access"</Pattern>
-    </encoder>
-    <if condition='property("livenessAccessLogEnabled").contains("false")'>
-      <then>
-        <filter class="ch.qos.logback.core.filter.EvaluatorFilter">
-          <evaluator class="ch.qos.logback.access.net.URLEvaluator">
-            <URL>/aai/util/echo</URL>
-          </evaluator>
-          <OnMismatch>NEUTRAL</OnMismatch>
-          <OnMatch>DENY</OnMatch>
-        </filter>
-      </then>
-    </if>
-  </appender>
-  <appender-ref ref="STDOUTACCESS" />
-</configuration>
-{{/*<!--
-%a - Remote IP address
-%A - Local IP address
-%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
-%B - Bytes sent, excluding HTTP headers
-%h - Remote host name
-%H - Request protocol
-%l - Remote logical username from identd (always returns '-')
-%m - Request method
-%p - Local port
-%q - Query string (prepended with a '?' if it exists, otherwise an empty string
-%r - First line of the request
-%s - HTTP status code of the response
-%S - User session ID
-%t - Date and time, in Common Log Format format
-%u - Remote user that was authenticated
-%U - Requested URL path
-%v - Local server name
-%I - current request thread name (can compare later with stacktraces)
-
-%z - Custom pattern that parses the cert for the subject
-%y - Custom pattern determines rest or dme2
--->*/}}
<property resource="application.properties" />
- <property name="maxHistory" value='{{.Values.logback.maxHistory}}' />
- <property name="totalSizeCap" value='{{.Values.logback.totalSizeCap}}' />
- <property name="queueSize" value='{{.Values.logback.queueSize}}'/>
- <property name="logToFileEnabled" value='{{.Values.logback.logToFileEnabled}}'/>
-
<property name="namespace" value="aai-traversal" />
<property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
<jmxConfigurator />
<property name="logDirectory" value="${AJSC_HOME}/logs" />
- <!-- Old patterns
- <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfAuditLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%ecompServiceName|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%ecompResponseCode|%ecompResponseDescription|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
- <property name="eelfTransLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%ecompServiceName|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{partnerName}:%m%n"/>
- -->
+
<property name="p_tim" value="%d{"yyyy-MM-dd'T'HH:mm:ss.SSSXXX", UTC}" />
<property name="p_lvl" value="%level" />
<property name="p_log" value="%logger" />
</encoder>
</appender>
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender name="SANE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/sane.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/sane.log.%d{yyyy-MM-dd}</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
- </pattern>
- </encoder>
- </appender>
- <appender name="asyncSANE" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="SANE" />
- </appender>
- <appender name="METRIC" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/metrics.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${metricPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncMETRIC" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="METRIC" />
- </appender>
- <appender name="DEBUG" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${logDirectory}/rest/debug.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncDEBUG" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <appender-ref ref="DEBUG" />
- <includeCallerData>true</includeCallerData>
- </appender>
- <appender name="ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/error.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/error.log.%d{yyyy-MM-dd}</fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <encoder>
- <pattern>${errorPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncERROR" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <appender-ref ref="ERROR" />
- </appender>
- <appender name="AUDIT" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logDirectory}/rest/audit.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/audit.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${auditPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asyncAUDIT" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="AUDIT" />
- </appender>
- <appender name="translog" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <file>${logDirectory}/rest/translog.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/rest/translog.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${transLogPattern}</pattern>
- </encoder>
- </appender>
- <appender name="asynctranslog" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="translog" />
- </appender>
- <appender name="kafkaAAIEventConsumer" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/error.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/error.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${errorPattern}</pattern>
- </encoder>
- </appender>
- <appender name="kafkaAAIEventConsumerDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>DEBUG</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/debug.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
- <appender name="kafkaAAIEventConsumerInfo" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/kafka-transaction.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${auditPattern}</pattern>
- </encoder>
- </appender>
- <appender name="kafkaAAIEventConsumerMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.LevelFilter">
- <level>INFO</level>
- <onMatch>ACCEPT</onMatch>
- <onMismatch>DENY</onMismatch>
- </filter>
- <File>${logDirectory}/kafkaAAIEventConsumer/metrics.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/kafkaAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${metricPattern}</pattern>
- </encoder>
- </appender>
- <appender name="external" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>WARN</level>
- </filter>
- <file>${logDirectory}/external/external.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/external/external.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>${debugPattern}</pattern>
- </encoder>
- </appender>
- <appender name="auth" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
- <level>DEBUG</level>
- </filter>
- <file>${logDirectory}/auth/auth.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
- <fileNamePattern>${logDirectory}/auth/auth.log.%d{yyyy-MM-dd}
- </fileNamePattern>
- <maxHistory>${maxHistory}</maxHistory>
- <totalSizeCap>${totalSizeCap}</totalSizeCap>
- </rollingPolicy>
- <encoder>
- <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}[%thread] %-5level %logger{1024} - %msg%n</pattern>
- </encoder>
- </appender>
- <appender name="asyncAUTH" class="ch.qos.logback.classic.AsyncAppender">
- <queueSize>${queueSize}</queueSize>
- <includeCallerData>true</includeCallerData>
- <appender-ref ref="auth" />
- </appender>
- </then>
- </if>
-
<!-- logback internals logging -->
<logger name="ch.qos.logback.classic" level="WARN" />
<logger name="ch.qos.logback.core" level="WARN" />
<logger name="org.onap.aai" level={{ .Values.log.level.base | upper | quote }} additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncDEBUG" />
- <appender-ref ref="asyncSANE" />
- </then>
- </if>
- <appender-ref ref="STDOUT" />
- </logger>
-
- <logger name="org.onap.aai.aaf.auth" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncAUTH" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog" level="INFO" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncAUDIT" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.logging.filter.base.AbstractAuditLogFilter" level="INFO" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncAUDIT" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.aai.aailog.logs.AaiDBMetricLog" level="INFO" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncMETRIC" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.aai.aailog.logs.AaiDmaapMetricLog" level="INFO" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="kafkaAAIEventConsumerMetric" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
- <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asyncERROR" />
- </then>
- </if>
+ <logger name="org.onap.aai.logging.ErrorLogHelper" level="INFO" additivity="false">
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.aai.interceptors.post" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="asynctranslog" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<logger name="org.onap.aai.kafka" level="DEBUG" additivity="false">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="kafkaAAIEventConsumer" />
- <appender-ref ref="kafkaAAIEventConsumerDebug" />
- </then>
- </if>
- <appender-ref ref="STDOUT" />
- </logger>
-
- <logger name="com.att.nsa.mr" level="INFO">
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="kafkaAAIEventConsumerInfo" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</logger>
<root level={{ .Values.log.level.root | upper | quote }}>
- <if condition='property("logToFileEnabled").contains("true")'>
- <then>
- <appender-ref ref="external" />
- </then>
- </if>
<appender-ref ref="STDOUT" />
</root>
</configuration>
metadata: {{- include "common.resourceMetadata" . | nindent 2 }}
data:
{{ tpl (.Files.Glob "resources/config/logback.xml").AsConfig . | indent 2 }}
-{{ tpl (.Files.Glob "resources/config/localhost-access-logback.xml").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/janusgraph-realtime.properties").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/aaiconfig.properties").AsConfig . | indent 2 }}
{{ tpl (.Files.Glob "resources/config/application.properties").AsConfig . | indent 2 }}
- mountPath: /opt/app/aai-traversal/resources/logback.xml
name: {{ include "common.fullname" . }}-config
subPath: logback.xml
- - mountPath: /opt/app/aai-traversal/resources/localhost-access-logback.xml
- name: {{ include "common.fullname" . }}-config
- subPath: localhost-access-logback.xml
- mountPath: /opt/app/aai-traversal/resources/etc/auth/realm.properties
name: {{ include "common.fullname" . }}-config
subPath: realm.properties
- mountPath: /opt/app/aai-traversal/resources/logback.xml
name: {{ include "common.fullname" . }}-config
subPath: logback.xml
- - mountPath: /opt/app/aai-traversal/resources/localhost-access-logback.xml
- name: {{ include "common.fullname" . }}-config
- subPath: localhost-access-logback.xml
- mountPath: /opt/app/aai-traversal/resources/application.properties
name: {{ include "common.fullname" . }}-config
subPath: application.properties