Logstash grok filter thinks we are still in 2015


#1

Hello there, weird problem: I have the following config, which worked fine in 2015:

input {
    tcp {
      port => 5000
      type => syslog
    }

    udp {
      port => 5000
      type => syslog
    }

    udp {
      port => 12201
      type => windows
      queue_size => 1000000
      buffer_size => 65536
      workers => 4
    }
}


filter {

    if [message] =~ /default send string/ {
      drop {}
    }

    if [type] == "syslog" and [message] =~ "run-parts" {
      grok {
       match => { "message" => "%{SYSLOG5424PRI:syslog_pri}%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{PROG:syslog_program}\(%{URIPATH:syslog_uri}\)\[%{POSINT:syslog_pid}\]%{GREEDYDATA:syslog_message}" }
      }

      syslog_pri { }

      date {
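        # Note: these timestamp patterns contain no year, so the date filter has to infer one.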
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        timezone => "America/Chicago"
      }
    }

    if [type] == "syslog" {
      grok {
       match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\]?)?: %{GREEDYDATA:syslog_message}" }
      }

      syslog_pri { }

      date {
        match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
        timezone => "America/Chicago"
      }
    }

    if [host] == "0:0:0:0:0:0:0:1" or [host] == "127.0.0.1" {
      mutate {
        replace => { "host" => "%{syslog_hostname}" }
      }
    }

    if [message] =~ /The Windows Filtering Platform has permitted a connection/ {
      drop {}
    }

    if [type] == "windows" {
      json {
        source => "message"
      }
      mutate {
        rename => { "Hostname" => "host" }
        remove_field => [ "SourceModuleType", "SourceModuleName", "EventTime", "EventReceivedTime", "EventType", "Keywords", "full_message" ]
      }
    }
}


output {
    elasticsearch {
      hosts => ["10.36.52.82:9200", "10.36.52.83:9200", "10.36.52.84:9200", "10.36.52.85:9200", "10.36.52.86:9200"]
      workers => 4
      template_overwrite => true
    }
}

Now it is sending all syslog events into logstash-2015-xx-xx daily indices instead of logstash-2016-xx-xx, and the timestamps on the logs are also 2015 instead of 2016. Only the logs that Logstash cannot parse (tagged with _grokparsefailure) end up in the correct index; everything else goes to 2015 :slight_smile:
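
If I understand the defaults correctly, the elasticsearch output derives the daily index name from each event's @timestamp (we don't set index explicitly in the output above), which would explain why a 2015 timestamp also lands in a 2015 index:

output {
    elasticsearch {
      # Default index pattern; the date portion comes from @timestamp,
      # so an event stamped 2015 is routed to a 2015 daily index.
      index => "logstash-%{+YYYY.MM.dd}"
    }
}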

Any idea why this is happening?


(Shaunak Kashyap) #2

This is almost certainly being caused by this bug: https://github.com/logstash-plugins/logstash-filter-date/issues/3

In short: syslog timestamps carry no year, so the date filter has to infer one, and because of this bug the inferred year is effectively frozen at the time the pipeline starts. Events processed after New Year's Eve therefore keep getting stamped with the previous year.

The bug has been fixed, but you may not have the latest version of the logstash-filter-date plugin installed. Can you check which version you currently have by running ./bin/plugin list --verbose logstash-filter-date from inside your Logstash installation directory?


#3

Here you go:

bin/plugin list --verbose logstash-filter-date
logstash-filter-date (2.0.2)

So if this is the bug you mentioned, then a simple restart of the Logstash service will solve the issue until next year, right?


(Shaunak Kashyap) #4

Yes, a restart will fix it for the current year, but there's a better, more permanent solution: upgrade Logstash to the latest version. Is that possible for you to do?
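
If upgrading all of Logstash isn't possible right away, updating just this one plugin should also pick up the fix (assuming a 2.x install; run from the Logstash installation directory):

bin/plugin update logstash-filter-date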


#5

Restarted, and it fixed the issue. I cannot upgrade right now, but I will do it ASAP. Thanks for the help!

