After upgrading from filebeat-6.2.4 to filebeat-6.3.0, none of my log messages make it into Logstash. I did not make any filebeat.yml or logstash.conf changes during the upgrade. The logstash.stdout is full of errors like this...
[2018-06-14T16:36:44,073][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.06.14", :_type=>"doc", :_routing=>nil}, #<LogStash::E
vent:0x7915b5b2>], :response=>{"index"=>{"_index"=>"logstash-2018.06.14", "_type"=>"doc", "_id"=>"uPUo_2MB8dpigLNDCRGl", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse [host]", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:353"}}}}}
I reverted back to filebeat-6.2.4, everything works again and the logstash.stdout errors went away. I noticed the fifth bullet point in the 6.3.0 release notes mentions the addition of a host.name field that could break logstash configs. That bullet point also links to github PR 7051 where it says "...and it could causes issues with any LS config that expects [host] to be a string..."
I just want to confirm that this breaking change is indeed what I'm running into, and I'm wondering if there is a workaround I can use in filebeat.yml or logstash.conf to mitigate it.
My filebeat.yml and logstash.conf files are below just in case anyone wants to look at them.
Thank you for your time.
filebeat.yml
# Filebeat inputs. NOTE(review): the original read "beat.prospectors:", which is
# not a key Filebeat recognizes — the 6.x key is "filebeat.prospectors".
# Each prospector is one entry in a YAML sequence, so each needs a leading "- ".
filebeat.prospectors:
  # Snort alert CSVs from both monitored interfaces.
  - type: log
    enabled: true
    paths:
      - /var/log/snort-eth1/alert.csv
      - /var/log/snort-eth2/alert.csv
  # SSH auth failures only (valid users only), tagged for downstream filtering.
  - type: log
    enabled: true
    paths:
      - /var/log/auth.log
    include_lines: ['Failed password for']
    exclude_lines: ['invalid user']
    tags: ["failedpw"]
  # fail2ban activity log.
  - type: log
    enabled: true
    paths:
      - /var/log/fail2ban.log
  # Apache logs, dropping health-check noise.
  - type: log
    enabled: true
    paths:
      - /var/log/apache2/*.log
    exclude_lines: ['GET /server-status', 'GET /st.html']

# Ship events to the local Logstash beats input (see logstash.conf).
output.logstash:
  hosts: ["localhost:5044"]

setup.kibana:
  host: "127.0.0.1:5601"

# Filebeat's own logging.
logging.level: info
logging.to_files: true
logging.metrics.enabled: true
logging.metrics.period: 60s
logging.files:
  path: /var/log/filebeat
  name: filebeat
logstash.conf
# Receive events from Filebeat.
input {
  beats {
    port => 5044
  }
}

# Apache access logs: grok, timestamp, field cleanup, reverse DNS.
filter {
  if [source] =~ "access" {
    mutate { replace => { type => "apache_access" } }
    # append response_time_us onto predefined COMBINEDAPACHELOG
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG} %{NUMBER:response_time_us}" }
    }
    date { match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ] }
    mutate {
      # rename 'response' field to 'status'
      rename => {"response" => "status"}
      # make a copy of IP for name resolution
      copy => {"clientip" => "client_dns_name"}
    }
    dns {
      # resolve the IP
      reverse => ["client_dns_name"]
      action => "replace"
    }
  }
}

# Apache error logs: just retype the event.
filter {
  if [source] =~ "error" {
    mutate {
      replace => { type => "apache_error" }
    }
  }
}

# fail2ban logs: just retype the event.
filter {
  if [source] =~ "fail2ban" {
    mutate {
      replace => { type => "fail2ban" }
    }
  }
}

# Snort eth1 alerts: parse the CSV, retype, reverse-resolve src/dst.
filter {
  if [source] =~ "snort-eth1" {
    csv {
      separator => ","
      columns => ["timestamp","sig_generator","sig_id","sig_rev","msg","proto","src","srcport","dst","dstport","ethsrc","ethdst","ethlen","tcpflags","tcpseq","tcpack","tcplen","tcpwindow","ttl","tos","id","dgmlen","iplen","icmptype","icmpcode","icmpid","icmpseq"]
    }
    mutate {
      #change type
      replace => { type => "snort-eth1" }
      #make a copy of IP addrs for name resolution
      copy => {"src" => "src_dns_name"}
      copy => {"dst" => "dst_dns_name"}
    }
    dns {
      # resolve the addresses
      reverse => ["src_dns_name","dst_dns_name"]
      action => "replace"
    }
  }
}

# Snort eth2 alerts: same pipeline as eth1.
filter {
  if [source] =~ "snort-eth2" {
    csv {
      separator => ","
      columns => ["timestamp","sig_generator","sig_id","sig_rev","msg","proto","src","srcport","dst","dstport","ethsrc","ethdst","ethlen","tcpflags","tcpseq","tcpack","tcplen","tcpwindow","ttl","tos","id","dgmlen","iplen","icmptype","icmpcode","icmpid","icmpseq"]
    }
    mutate {
      #change type
      replace => { type => "snort-eth2" }
      #make a copy of IP addrs for name resolution
      copy => {"src" => "src_dns_name"}
      copy => {"dst" => "dst_dns_name"}
    }
    dns {
      # resolve the addresses
      reverse => ["src_dns_name","dst_dns_name"]
      action => "replace"
    }
  }
}

# NOTE(review): Filebeat 6.3 sends [host] as an object (host.name, ...), which
# breaks the existing Elasticsearch mapping where [host] is a string — this is
# the mapper_parsing_exception in the question. A common workaround is to add,
# in a filter block:
#   mutate { remove_field => [ "[host]" ] }
# (or rename it) before output. Confirm against the 6.3 breaking-changes notes.
output {
  elasticsearch {
    hosts => ["localhost:9200"]
  }
  # send debug data to stdout
  #stdout { codec => rubydebug }
}