I have upgraded my ELK stack to 5.0. Everything appears to be running, but I'm not receiving any events in Elasticsearch from Logstash. The following error occurs in the logstash-plain.log file:
[2016-10-31T10:59:30,638][ERROR][logstash.pipeline ] A plugin had an unrecoverable error. Will restart this plugin.
Plugin: <LogStash::Inputs::Beats port=>5000, codec=><LogStash::Codecs::Plain charset=>"ISO-8859-1", id=>"3c73c6dd66bc469b2a0c9384a730be709186aa24-1", enable_metric=>true>, id=>"3c73c6dd66bc469b2a0c9384a730be709186aa24-2", enable_metric=>true, host=>"0.0.0.0", ssl=>false, ssl_verify_mode=>"none", include_codec_tag=>true, ssl_handshake_timeout=>10000, congestion_threshold=>5, target_field_for_codec=>"message", tls_min_version=>1, tls_max_version=>1.2, cipher_suites=>["TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA38", "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256"], client_inactivity_timeout=>60>
Error: event executor terminated
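Before the full config: as a sanity check, a stripped-down pipeline with only the beats input (the port and the stdout output below are placeholders for testing, not my real settings) should show whether the beats plugin can start cleanly on its own:
input {
  beats {
    port => 5044
  }
}
output {
  # print every incoming event to the console for debugging
  stdout { codec => rubydebug }
}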
My logstash.conf file is as follows:
input {
  beats {
    port => 5000
    codec => plain {
      charset => "ISO-8859-1"
    }
  }
  tcp {
    port => 5000
    type => syslog
    codec => plain {
      charset => "ISO-8859-1"
    }
  }
  udp {
    port => 5000
    type => syslog
    codec => plain {
      charset => "ISO-8859-1"
    }
  }
}
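One thing I'm unsure about: the beats, tcp, and udp inputs above all listen on port 5000. In case that shared port is relevant, a variant with the beats listener moved to its own port would look roughly like this (the port numbers are just placeholders):
input {
  beats {
    port => 5044    # moved off 5000 so it no longer shares the TCP port with the tcp input
    codec => plain { charset => "ISO-8859-1" }
  }
  tcp {
    port => 5000
    type => syslog
    codec => plain { charset => "ISO-8859-1" }
  }
  udp {
    port => 5000    # udp is a different protocol, so it can keep 5000
    type => syslog
    codec => plain { charset => "ISO-8859-1" }
  }
}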
First filter:
filter {
  # ignore log comments
  if [message] =~ "^#" {
    drop {}
  }
  grok {
    patterns_dir => "./patterns"
    match => [
      "message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:agent} %{NOTSPACE:referrer} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}",
      "message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:agent} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}",
      "message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:agent} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status}"
    ]
  }
  date {
    match => [ "timestamp", "yyyy-MM-dd HH:mm:ss" ]
    locale => "en"
  }
}
Second filter:
filter {
  if "_grokparsefailure" in [tags] {
  } else {
    # on success, remove the message field to save space
    mutate {
      remove_field => ["message", "timestamp"]
    }
  }
}
output {
  elasticsearch {
    hosts => ["172.24.80.86:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
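To check whether events are making it through the pipeline at all, a temporary stdout output could sit alongside the elasticsearch output, something like:
output {
  elasticsearch {
    hosts => ["172.24.80.86:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
  # temporary debug output: prints each event to the Logstash log/console
  stdout { codec => rubydebug }
}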
Any idea what might be causing this?