Merge "Add message-router dependency in blueprint-processor for kafka-listener."
diff --git a/kubernetes/clamp/charts/clamp-dash-logstash/resources/config/pipeline.conf b/kubernetes/clamp/charts/clamp-dash-logstash/resources/config/pipeline.conf
index 317b428..05d8085 100644
@@ -57,13 +57,13 @@ filter {
         drop { }
     }
 
-    if [http_request_failure] or [@metadata][code] != "200" {
+    if [http_request_failure] or [@metadata][code] != 200 {
        mutate {
                   add_tag => [ "error" ]
        }
     }
 
-    if "dmaap_source" in [tags] {
+    if "dmaap_source" in [@metadata][request][tags] {
         #
         # Dmaap provides a json list, whose items are Strings containing the event
         # provided to Dmaap, which itself is an escaped json.
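Two fixes land in this first hunk. The HTTP response code the input records under [@metadata][code] is an integer, so the old inequality against the string "200" was always true and tagged every event, successful or not, as an error; the new form compares against the number 200. In the same spirit, the dmaap_source routing tag is now read from [@metadata][request][tags] instead of the indexed top-level [tags] field. A minimal sketch of where that metadata could come from, assuming an http_poller-style input with metadata_target left at its @metadata default (the input block is an illustration, not part of this change; the URL and schedule are hypothetical):

    # Illustration only: with metadata_target => "@metadata", http_poller
    # stores the numeric HTTP response code in [@metadata][code] and the
    # request description in [@metadata][request].
    input {
        http_poller {
            urls => { "dmaap_source" => "${dmaap_base_url}" }   # hypothetical URL
            schedule => { "every" => "1m" }
            metadata_target => "@metadata"
        }
    }

    filter {
        # Integer comparison: [@metadata][code] holds 200, not "200".
        if [http_request_failure] or [@metadata][code] != 200 {
            mutate { add_tag => [ "error" ] }
        }
    }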
@@ -76,14 +76,7 @@ filter {
             source => "[message]"
             target => "message"
         }
-        ruby {
-            code => "
-            for ev in event.get('message', [])
-                ev.set('@metadata', event.get('@metadata'))
-            end
-            "
-        }
-       
+
         split {
             field => "message"
         }
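The deleted ruby block was meant to copy the parent event's @metadata onto every element of the [message] array before the split, but it was broken twice over: event.get takes a single field reference (there is no two-argument form with a default), and the array elements are plain Ruby hashes, which have no set method. It is also redundant in current Logstash releases, where split emits one clone of the whole parent event per array element, [@metadata] included (presumably the workaround dated from a release whose clone dropped metadata). A minimal sketch of the behaviour now relied on:

    filter {
        json {
            source => "[message]"
            target => "message"
        }
        # split clones the parent event once per element of [message];
        # each clone keeps every parent field, including the [@metadata]
        # subtree, so no ruby copy step is needed.
        split {
            field => "message"
        }
        # Still visible after the split (hypothetical check, for illustration):
        if [@metadata][code] == 200 {
            mutate { add_tag => [ "dmaap_event" ] }
        }
    }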
@@ -194,7 +187,7 @@ filter {
     }
 
 
-    if "error" not in [tags] {
+    if "error" not in [@metadata][request][tags]{
         #
         # Creating data for a secondary index
         #
@@ -203,7 +196,7 @@ filter {
             add_tag => [ "event-cl-aggs" ]
         }
         
-        if  "event-cl-aggs" in [tags] {
+        if  "event-cl-aggs" in [@metadata][request][tags]{
             #
             # we only need a few fields for aggregations; remove all fields from clone except :
             #   vmName,vnfName,vnfType,requestID,closedLoopAlarmStart, closedLoopControlName,closedLoopAlarmEnd,abated,nbrDmaapevents,finalFailure
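These two hunks apply the same relocation to the remaining tag checks: "error" and "event-cl-aggs" are now looked up under [@metadata][request][tags] rather than the top-level [tags] field, which keeps routing state out of the indexed documents. Note that mutate and clone add_tag still write to [tags], so something upstream has to mirror those tags into the request metadata for these conditionals to fire. For the field whitelist described in the comment just above, a hedged sketch using the prune filter (the actual pipeline may achieve the same with mutate remove_field):

    filter {
        if "event-cl-aggs" in [@metadata][request][tags] {
            # Keep only the fields needed for aggregations; everything else
            # is dropped from the cloned event.
            prune {
                whitelist_names => [
                    "^vmName$", "^vnfName$", "^vnfType$", "^requestID$",
                    "^closedLoopAlarmStart$", "^closedLoopControlName$",
                    "^closedLoopAlarmEnd$", "^abated$",
                    "^nbrDmaapevents$", "^finalFailure$"
                ]
            }
        }
    }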
@@ -226,6 +219,8 @@ output {
         elasticsearch {
             codec => "json"
             hosts => ["${elasticsearch_base_url}"]
+            user => ["${logstash_user}"]
+            password => ["${logstash_pwd}"]
             index => "errors-%{+YYYY.MM.DD}"
             doc_as_upsert => true
         }
@@ -234,6 +229,8 @@ output {
         elasticsearch {
             codec => "json"
             hosts => ["${elasticsearch_base_url}"]
+            user => ["${logstash_user}"]
+            password => ["${logstash_pwd}"]
             document_id => "%{requestID}"
             index => "events-cl-%{+YYYY.MM.DD}" # creates daily indexes for control loop
             doc_as_upsert => true
@@ -244,6 +241,8 @@ output {
         elasticsearch {
             codec => "json"
             hosts => ["${elasticsearch_base_url}"]
+            user => ["${logstash_user}"]
+            password => ["${logstash_pwd}"]
             index => "events-%{+YYYY.MM.DD}" # creates daily indexes
             doc_as_upsert => true
         }
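All three elasticsearch outputs gain the same basic-auth credentials, resolved from ${logstash_user} and ${logstash_pwd} (environment variables or keystore entries) when the pipeline is loaded. user and password are string settings; the single-element arrays used above are accepted and unwrapped by the config validator, but the scalar form below is the usual spelling. One output spelled out as a sketch; incidentally, Joda-style DD in the committed index pattern is day-of-year, so the sketch uses dd for calendar-day indexes:

    output {
        elasticsearch {
            codec         => "json"
            hosts         => ["${elasticsearch_base_url}"]
            # Basic-auth credentials, resolved at startup; equivalent to the
            # single-element array form added in the diff.
            user          => "${logstash_user}"
            password      => "${logstash_pwd}"
            index         => "events-%{+YYYY.MM.dd}"   # dd = day of month
            doc_as_upsert => true
        }
    }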