skip processing for log 78/71478/3
authorosgn422w <gervais-martial.ngueko@intl.att.com>
Tue, 30 Oct 2018 16:04:03 +0000 (17:04 +0100)
committerosgn422w <gervais-martial.ngueko@intl.att.com>
Tue, 30 Oct 2018 16:27:34 +0000 (17:27 +0100)
skip specific http processing in case of logfile events

Issue-ID: CLAMP-240
Change-Id: I26d38ea99d46b186fa81fda01b63ab207c881057
Signed-off-by: osgn422w <gervais-martial.ngueko@intl.att.com>
pom.xml
src/main/docker/logstash/pipeline/logstash.conf

diff --git a/pom.xml b/pom.xml
index 9918a3f..0f1eab2 100644 (file)
--- a/pom.xml
+++ b/pom.xml
                                <dependency>
                                                <groupId>org.onap.policy.engine</groupId>
                                                <artifactId>PolicyEngineAPI</artifactId>
-                                               <version>1.3.0</version>
+                                               <version>1.3.1</version>
                                                <exclusions>
                                                                <exclusion>
                                                                                <groupId>com.google.guava</groupId>
index e6cee9c..6fe9d96 100644 (file)
@@ -61,48 +61,54 @@ input {
 }
 
 filter {
-    # avoid noise if no entry in the list
-    if [message] == "[]" {
-        drop { }
-    }
+    if [type] != "dmaap_log" {
+    #only execute this section for dmaap events arriving via http request;
+    #it does not apply to dmaap events read from the log file
 
-    if [http_request_failure] or [@metadata][code] != "200" {
-       mutate {
-                  add_tag => [ "error" ]
-       }
-    }
+           # avoid noise if no entry in the list
+           if [message] == "[]" {
+               drop { }
+           }
 
-    if "dmaap_source" in [tags] {
-        #
-        # Dmaap provides a json list, whose items are Strings containing the event
-        # provided to Dmaap, which itself is an escaped json.
-        #
-        # We first need to parse the json as we have to use the plaintext as it cannot
-        # work with list of events, then split that list into multiple string events,
-        # that we then transform into json.
-        #
-        json {
-            source => "[message]"
-            target => "message"
-        }
-        ruby {
-            code => "
-            for ev in event.get('message', [])
-                ev.set('@metadata', event.get('@metadata'))
-            end
-            "
-        }
-       
-        split {
-            field => "message"
-        }
-        json {
-            source => "message"
-        }
-        mutate {
-            remove_field => [ "message" ]
-        }
-    }
+           if [http_request_failure] or [@metadata][code] != "200" {
+              mutate {
+                  add_tag => [ "error" ]
+              }
+           }
+
+           if "dmaap_source" in [tags] {
+               #
+               # Dmaap provides a json list, whose items are Strings containing the event
+               # provided to Dmaap, which itself is an escaped json.
+               #
+               # We first need to parse the json as we have to use the plaintext as it cannot
+               # work with list of events, then split that list into multiple string events,
+               # that we then transform into json.
+               #
+               json {
+                   source => "[message]"
+                   target => "message"
+               }
+               ruby {
+                   code => "
+                   for ev in event.get('message', [])
+                       ev.set('@metadata', event.get('@metadata'))
+                   end
+                   "
+               }
+       
+               split {
+                   field => "message"
+               }
+               json {
+                   source => "message"
+               }
+               mutate {
+                   remove_field => [ "message" ]
+               }
+           }
+       }
+       #now start the processing common to both http request and log file events
 
     #
     # Some timestamps are expressed as milliseconds, some are in microseconds
@@ -250,7 +256,7 @@ output {
         elasticsearch {
             codec => "json"
             hosts => ["${elasticsearch_base_url}"]
-            index => "events-%{+YYYY.MM.DD}" # creates daily indexes
+            index => "events-raw-%{+YYYY.MM.dd}" # creates daily indexes (dd = day-of-month; DD would be Joda day-of-year)
             doc_as_upsert => true
         }
     }