Push JSON into Elasticsearch directly

My architecture is:

Filebeat (remote) > Logstash (Beats input, grok filter, file output plugin) > stores the parsed data as JSON in a .log file


Sample log lines

0.0.111.12 16.0.0.199 58 58 0.0.111.12 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 404 - [30/Nov/2019:08:36:28 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 601 0.601 default task-152 "-" "application/json" "-" "-" "-" "-"
0.0.111.13 16.0.0.199 58 58 0.0.111.13 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 404 - [30/Nov/2019:08:36:57 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 647 0.647 default task-243 "-" "application/json" "-" "-" "-" "-"
0.0.111.12 16.0.0.199 81 81 0.0.111.12 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 302 - [30/Nov/2019:08:42:06 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 220 0.220 default task-78 "-" "application/json" "-" "-" "-" "-"
0.0.111.10 16.0.0.199 311 311 0.0.111.10 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/create HTTP/1.1 400 - [30/Nov/2019:08:42:24 +0000] - /ContactServices/api/contact/create xxyy.zzz.com 1565 1.565 default task-274 "-" "application/json" "-" "-" "-" "-"
0.0.111.11 16.0.0.199 81 81 0.0.111.11 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 302 - [30/Nov/2019:08:42:34 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 178 0.178 default task-99 "-" "application/json" "-" "-" "-" "-"
0.0.111.12 16.0.0.199 81 81 0.0.111.12 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 302 - [30/Nov/2019:08:42:35 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 186 0.186 default task-319 "-" "application/json" "-" "-" "-" "-"
0.0.111.11 16.0.0.199 311 311 0.0.111.11 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/create HTTP/1.1 400 - [30/Nov/2019:08:42:51 +0000] - /ContactServices/api/contact/create xxyy.zzz.com 1540 1.540 default task-210 "-" "application/json" "-" "-" "-" "-"
0.0.111.13 16.0.0.199 81 81 0.0.111.13 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/prematch HTTP/1.1 302 - [30/Nov/2019:08:44:49 +0000] - /ContactServices/api/contact/prematch xxyy.zzz.com 205 0.205 default task-173 "-" "application/json" "-" "-" "-" "-"
0.0.111.13 16.0.0.199 311 311 0.0.111.13 HTTP/1.1 - POST 8743 - POST /ContactServices/api/contact/create HTTP/1.1 400 - [30/Nov/2019:08:45:06 +0000] - /ContactServices/api/contact/create xxyy.zzz.com 1563 1.563 default task-306 "-" "application/json" "-" "-" "-" "-"

logstash.conf

input {
  beats {
    port => 5044
    host => "199.99.00.11"
  }
}

filter {
  grok {
    match => ["message", '%{IPV4:remoteIP}\s+%{IPV4:localIP}\s+%{INT:throughtputData:int}\s+%{INT}\s+%{IPV4}\s+%{DATA:requestProtocol}\s+%{DATA:remoteLogicalUserName}\s+%{DATA:requestMethod}\s+%{DATA:port}\s+%{DATA}\s+%{DATA}\s+/ContactServices/api/contact%{DATA}\s+%{DATA:requestProtocol2}\s+%{INT:requestStatusCode}\s+%{DATA:userSessionID}\s+\[%{HTTPDATE:logTimeStamp}\]\s+%{DATA:remoteUser}\s+/ContactServices/api/contact%{DATA:requestedURL2}\s+%{DATA:serverName}\s+%{INT:timeTakenInMilliSec:int}\s+%{NUMBER}\s+default\s+task-%{INT}\s+"%{DATA:authorization}"\s+"%{DATA}"\s+"%{DATA}"\s+"%{DATA}"\s+"%{DATA}"\s+"%{DATA}"']
  }

  if "_grokparsefailure" in [tags] {
    drop {}
  }

  if "_groktimeout" in [tags] {
    drop {}
  }

  date {
    match => ["logTimeStamp", "dd/MMM/yyyy:HH:mm:ss Z"]
  }

  mutate {
    remove_field => ["message","host","input","type","@version","prospector","beat","garbageData","offset"]
  }
}

output {
  file {
    path => "/opt/jboss/elk/filtered_logs/filtered_accesslog_itg29_%{+YYYY-MM-dd}.log"
    id => "filtered_accesslog_itg29"
  }
}
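
One option I am looking at is adding the elasticsearch output plugin next to the existing file output, so that every event goes both to the daily file and to Elasticsearch. A minimal sketch, untested — the hosts value and index name below are placeholders for my environment:

output {
  file {
    path => "/opt/jboss/elk/filtered_logs/filtered_accesslog_itg29_%{+YYYY-MM-dd}.log"
    id => "filtered_accesslog_itg29"
  }
  # sketch: also send every filtered event to Elasticsearch
  elasticsearch {
    hosts => ["http://localhost:9200"]                  # placeholder endpoint
    index => "filtered_accesslog_itg29-%{+YYYY.MM.dd}"  # placeholder daily index
  }
}

If that works, Kibana should only need an index pattern matching filtered_accesslog_itg29-* for the data to show up in Discover.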

After filtering, the file output writes filtered_accesslog_itg29_%{+YYYY-MM-dd}.log:

{"serverName":"xxxxxxxxxxx","tags":["beats_input_codec_plain_applied"],"remoteLogicalUserName":"-","logTimeStamp":"22/Nov/2019:03:42:19 +0000","@timestamp":"2019-11-22T03:42:19.000Z","port":"8743","authorization":"-","throughtputData":196,"requestStatusCode":"400","source":"/opt/jbossapp/mdm/cs/logs/access_log.2019-11-22.log","requestedURL2":"/create","remoteUser":"-","localIP":"0.0.111.111","timeTakenInMilliSec":490,"requestProtocol2":"HTTP/1.1","userSessionID":"-","remoteIP":"16.11.0.111","requestProtocol":"HTTP/1.1","requestMethod":"POST"}
{"serverName":"xxxxxxxxxxx","tags":["beats_input_codec_plain_applied"],"remoteLogicalUserName":"-","logTimeStamp":"22/Nov/2019:03:58:17 +0000","@timestamp":"2019-11-22T03:58:17.000Z","port":"8743","authorization":"-","throughtputData":2120,"requestStatusCode":"200","source":"/opt/jbossapp/mdm/cs/logs/access_log.2019-11-22.log","requestedURL2":"/create","remoteUser":"-","localIP":"0.0.111.111","timeTakenInMilliSec":2648,"requestProtocol2":"HTTP/1.1","userSessionID":"-","remoteIP":"16.11.0.111","requestProtocol":"HTTP/1.1","requestMethod":"POST"}
{"serverName":"xxxxxxxxxxx","tags":["beats_input_codec_plain_applied"],"remoteLogicalUserName":"-","logTimeStamp":"22/Nov/2019:04:35:17 +0000","@timestamp":"2019-11-22T04:35:17.000Z","port":"8743","authorization":"-","throughtputData":2394,"requestStatusCode":"200","source":"/opt/jbossapp/mdm/cs/logs/access_log.2019-11-22.log","requestedURL2":"/create","remoteUser":"-","localIP":"0.0.111.111","timeTakenInMilliSec":3065,"requestProtocol2":"HTTP/1.1","userSessionID":"-","remoteIP":"16.11.0.111","requestProtocol":"HTTP/1.1","requestMethod":"POST"}

These filtered events (JSON written to a .log file) are the same as the JSON documents we see in the Discover tab.

How can I push this filtered data into Elasticsearch and make it available in Kibana? Any suggestions?
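
Alternatively, since each line of the filtered file is already one complete JSON document, I assume a second Logstash pipeline with a file input and a json codec could read those files back and ship them to Elasticsearch. A rough sketch, untested — the path, hosts, and index name are placeholders:

input {
  file {
    # pick up the daily filtered files written by the pipeline above
    path => "/opt/jboss/elk/filtered_logs/filtered_accesslog_itg29_*.log"
    start_position => "beginning"
    sincedb_path => "/dev/null"  # forget read positions between runs (testing only)
    codec => "json"              # each line is parsed as one JSON event
  }
}

output {
  elasticsearch {
    hosts => ["http://localhost:9200"]                  # placeholder endpoint
    index => "filtered_accesslog_itg29-%{+YYYY.MM.dd}"  # placeholder daily index
  }
}

The json codec should map each field straight onto the event, so no further grok would be needed on this second pass.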
