Hello,
I've been running Logstash on virtual machines for a while and am trying to move it to Kubernetes to help it scale and to better manage configuration. When Logstash runs in Kubernetes, events bound for Elasticsearch never arrive if I try to set the timestamp from the syslog_timestamp field extracted by grok. If I remove the date filter the events are sent, but with it enabled they are not. The same config dropped onto a VM with the same version of Elasticsearch works.
I've run Logstash with debug enabled and can see that the date is parsed successfully, but no output is sent to Elasticsearch. Are there further steps I can take to troubleshoot? I've tried several 2.x versions of Logstash and different versions of the logstash-filter-date plugin, to no avail.
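To isolate the filter from Kafka and Elasticsearch, a minimal stdin/stdout pipeline along these lines is what I can run inside the container; the grok pattern is simplified for the test, and the date filter matches the one in my full config below:

# minimal test pipeline: same date filter, no Kafka or Elasticsearch
input { stdin { } }

filter {
  grok {
    match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{GREEDYDATA:syslog_message}" }
  }
  date {
    match => [ "syslog_timestamp", "MMM d HH:mm:ss" ]
  }
}

output { stdout { codec => rubydebug } }

Feeding it a sample line such as "Feb 10 12:34:56 test message" should show whether @timestamp gets rewritten inside the container. I'd also watch the rubydebug output for a _dateparsefailure tag, which the date filter adds by default when parsing fails, although the debug logs suggest parsing succeeds.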
Version:
[root@logstash-syslog-df3r2 /]# /opt/logstash/bin/logstash --version
logstash 2.2.0
Config:
input {
  kafka {
    zk_connect => 'zookeeper:2181'
    group_id => 'logstash_consumer-linux_syslog'
    consumer_threads => 1
    decorate_events => true
    topic_id => 'linux_syslog'
  }
}

filter {
  if [type] == "linux_syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss" ]
    }
  }
}

output {
  if [type] == "linux_syslog" {
    elasticsearch {
      index => "linux-%{+YYYY.MM.dd}"
      hosts => ["elasticsearch00", "elasticsearch01", "elasticsearch02"]
    }
  } else {
    elasticsearch {
      index => "logstash-%{+YYYY.MM.dd}"
      hosts => ["elasticsearch00", "elasticsearch01", "elasticsearch02"]
    }
  }
  stdout {
    codec => rubydebug { metadata => true }
  }
}
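One thing I'm not sure about: the container image may lack the timezone data the VM has, and when no timezone is given the date filter falls back to the platform default. If that could be a factor, pinning the timezone explicitly would be one way to test it; a sketch, assuming the source logs are UTC:

date {
  match => [ "syslog_timestamp", "MMM d HH:mm:ss" ]
  # timezone is a standard logstash-filter-date option;
  # "UTC" is just an assumption about my source logs
  timezone => "UTC"
}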