Hello Everyone!
I am having trouble getting the Date Filter Plugin to work properly with Elasticsearch. Below is my Logstash configuration file:
input {
  beats {
    port => "5043"
  }
}
filter {
  grok {
    match => ["message", '%{GREEDYDATA:IPs} - [ %{GREEDYDATA:auth_message} ] [%{HTTPDATE:logtime}] "%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})" %{NUMBER:response} (?:%{NUMBER:bytes}|-) %{NUMBER:anothernumber} %{NUMBER:someothernumber}']
  }
  date {
    match => ["logtime", "dd/MMM/yyyy:HH:mm:ss Z"]
  }
  kv {
    source => "auth_message"
    value_split => ":"
  }
}
output {
  # elasticsearch { hosts => ["localhost:9200"] }
  stdout { codec => rubydebug }
}
When I set the output to stdout, Logstash correctly parses my log and updates @timestamp from the logtime field, as shown below:
{
    "request" => "/XXXXX-XXX/vX/logout",
    "anothernumber" => "X",
    "offset" => XXXXXXX,
    "input_type" => "log",
    "verb" => "XXX",
    "source" => "/XXX/XXX/XXX/awsLogs.log",
    "someothernumber" => "X",
    "message" => "XXX.XXX.XXX.XXX, XXX.XX.XXXX.XX, XXX.X.XXX.XX - [ auth : no-auth | correlation-id : XXX-XXXX | remote-addr : XX.XXX.XX.XXX | request_method : XXX | request_resource : XXXX | service_name : XXXX-XX ] [02/Jun/2017:16:42:14 +0000] \"XXX /XXX-XXX/vX/logout?redirectUrl=https%3A%2F%2Fapp.hubspot.com%2Flogin&loggedout=true HTTP/1.0\" 307 0 0 0",
    "type" => "log",
    "IPs" => "XX.XXX.XX.XXX, XXX.XX.XXX.XX, XXX.X.XXX.XX",
    "tags" => [
        [0] "beats_input_codec_plain_applied"
    ],
    "@timestamp" => 2017-06-02T16:42:14.000Z,
    ...
    "host" => "ip-XX-XXX-XXX-XX",
    "httpversion" => "X.X",
    "logtime" => "02/Jun/2017:16:42:14 +0000"
}
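If it's useful for reproducing this, the filter block can also be exercised on its own with a minimal generator config like the one below (the sample line is a sanitized stand-in for one of my real log lines):

input {
  generator {
    # sanitized stand-in for one of my real log lines
    lines => ['XXX.XXX.XXX.XXX - [ auth : no-auth ] [02/Jun/2017:16:42:14 +0000] "GET /logout HTTP/1.0" 307 0 0 0']
    count => 1
  }
}
# same filter block as above
output {
  stdout { codec => rubydebug }
}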
However, when I send the output to Elasticsearch instead (by uncommenting that line in my configuration file), my log message loses a particular field, the key-value pairs are not parsed, and Kibana shows a "_grokparsefailure" tag.
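To pin down which events are failing, one thing I was going to try is routing anything tagged with _grokparsefailure to a local file via a conditional output, along these lines (the file path is just a placeholder):

output {
  # send events whose grok match failed to a file for inspection
  if "_grokparsefailure" in [tags] {
    file { path => "/tmp/grok_failures.log" }  # placeholder path
  } else {
    elasticsearch { hosts => ["localhost:9200"] }
  }
  stdout { codec => rubydebug }
}

That way Elasticsearch only receives clean events while I can inspect the raw failures locally.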
Please let me know if anyone has a resolution to this issue!