Restore ELK5.x configuration on ELK6.x
[clamp.git] extra/docker/elk/logstash-conf/logstash.conf
index 2b5a24e..2b71686 100644
@@ -8,6 +8,7 @@ input {
                     Accept => "application/json"
                 }
                 add_field => { "topic" => "${event_topic}" }
+                type => "dmaap_event"
             }
             notification_queue => {
                 method => get
@@ -16,6 +17,7 @@ input {
                     Accept => "application/json"
                 }
                 add_field => { "topic" => "${notification_topic}" }
+                type => "dmaap_notification"
             }
             request_queue => {
                 method => get
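
Each DMaaP queue polled by the http_poller input now stamps its documents with a distinct type (dmaap_event, dmaap_notification and dmaap_request), so the filter and output stages can tell the three streams apart; the poll cadence itself moves from the removed interval setting to a schedule hash further down. A minimal sketch of how a later stage could branch on that field, assuming only the type values set in this file (the mutate body is purely illustrative):

    filter {
        if [type] == "dmaap_notification" {
            # illustrative only: tag notification documents for easier querying
            mutate { add_tag => [ "notification" ] }
        }
    }
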
@@ -24,45 +26,83 @@ input {
                     Accept => "application/json"
                 }
                 add_field => { "topic" => "${request_topic}" }
+                type => "dmaap_request"
             }
         }
         socket_timeout => 30
         request_timeout => 30
-        interval => 15
         codec => "plain"
+        schedule => { "every" => "1m" }
+  }
+}
+
+input {
+  file {
+    path => [
+      "/log-input/dmaap_evt.log"
+    ]
+    type => "dmaap_log"
+    codec => "json"
   }
 }
 
 filter {
-    # avoid noise if no entry in the list
-    if [message] == "[]" {
-       drop { }
-    }
 
     # parse json, split the list into multiple events, and parse each event
-    json {
-         source => "[message]"
-         target => "message"
-    }
-    split {
-          field => "message"
-    }
-    json {
-         source => "message"
+    if [type] != "dmaap_log" {
+           # avoid noise if no entry in the list
+           if [message] == "[]" {
+              drop { }
+           }
+           
+           json {
+                source => "[message]"
+                target => "message"
+           }
+#          ruby {
+#              code => "event.get('message').each{|m| m.set('type', event.get('type'))}"
+#          }
+           split {
+                 field => "message"
+                 add_field => {
+                       "type" => "%{type}"
+                       "topic" => "%{topic}"
+                 }
+           }
+           
+           json {
+                source => "message"
+           }
+           
+           mutate { remove_field => [ "message" ] }
     }
-    mutate { remove_field => [ "message" ] }
+    
     # express timestamps in milliseconds instead of microseconds
-    ruby {
-        code => "event.set('closedLoopAlarmStart', Integer(event.get('closedLoopAlarmStart')))"
-    }
-    date {
-        match => [ "closedLoopAlarmStart", UNIX_MS ]
-        target => "closedLoopAlarmStart"
+    if [closedLoopAlarmStart] {
+        ruby {
+            code => "
+                     if event.get('closedLoopAlarmStart').to_s.to_i(10) > 9999999999999
+                       event.set('closedLoopAlarmStart', event.get('closedLoopAlarmStart').to_s.to_i(10) / 1000)
+                     else
+                       event.set('closedLoopAlarmStart', event.get('closedLoopAlarmStart').to_s.to_i(10))
+                     end
+                    "
+        }
+        date {
+            match => [ "closedLoopAlarmStart", UNIX_MS ]
+            target => "closedLoopAlarmStart"
+        }
     }
 
     if [closedLoopAlarmEnd] {
         ruby {
-            code => "event.set('closedLoopAlarmEnd', Integer(event.get('closedLoopAlarmEnd')))"
+            code => "
+                    if event.get('closedLoopAlarmEnd').to_s.to_i(10) > 9999999999999  
+                      event.set('closedLoopAlarmEnd', event.get('closedLoopAlarmEnd').to_s.to_i(10) / 1000)
+                    else
+                      event.set('closedLoopAlarmEnd', event.get('closedLoopAlarmEnd').to_s.to_i(10))
+                    end
+                    "
         }
         date {
             match => [ "closedLoopAlarmEnd", UNIX_MS ]
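
In the reworked filter chain, the JSON-list handling (drop empty "[]" polls, parse, split, re-parse each element) only runs for documents that did not come from the new dmaap_evt.log file input (type != dmaap_log); the split filter copies type and topic onto every element it produces. The ruby blocks then normalise closedLoopAlarmStart and closedLoopAlarmEnd to a millisecond epoch: any value above 9999999999999 (more than 13 digits) is taken to be microseconds and divided by 1000 before the date filter parses it as UNIX_MS. A throwaway pipeline along the lines below, combined with the filter block from this file, could exercise that conversion locally; the generator input, the sample value and the stdout output are assumptions for testing only, not part of this file:

    input {
      generator {
        # hypothetical sample: a microsecond epoch, expected to be reduced to
        # milliseconds by the ruby block before the date filter parses it
        lines => [ '{"closedLoopAlarmStart":1515418800000000,"type":"dmaap_log"}' ]
        count => 1
        codec => "json"
      }
    }
    output { stdout { codec => rubydebug } }
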
@@ -88,12 +128,21 @@ output {
         codec => rubydebug
     }
 
-    elasticsearch {
-        codec => "json"
-        hosts => [elasticsearch]
-        index => "logstash-%{+YYYY.MM.DD}" # creates daily indexes
-        doc_as_upsert => true
+    if [http_request_failure] {
+        elasticsearch {
+            codec => "json"
+            hosts => ["${elasticsearch_hosts}"]
+            index => "errors-%{+YYYY.MM.dd}"
+            doc_as_upsert => true
+        }
+    } else {
+        elasticsearch {
+            codec => "json"
+            hosts => ["${elasticsearch_hosts}"]
+            index => "events-%{+YYYY.MM.dd}" # creates daily indexes
+            doc_as_upsert => true
 
+        }
     }
 
 }
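
On the output side, any document carrying an http_request_failure field (which http_poller attaches when a poll fails) is routed to a daily errors-* index, while everything else lands in events-*; the Elasticsearch host list comes from the elasticsearch_hosts environment variable via Logstash's ${VAR} substitution. The same mechanism, with a ${VAR:default} fallback, could make the index prefix configurable as well; the index_prefix variable below is hypothetical and not defined anywhere in this repository:

    elasticsearch {
        codec => "json"
        hosts => ["${elasticsearch_hosts}"]
        # hypothetical: fall back to the "events" prefix when index_prefix is unset
        index => "${index_prefix:events}-%{+YYYY.MM.dd}"
        doc_as_upsert => true
    }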