Hi, I am sending logs to Kibana using Filebeat, and I am facing the error below while sending logs. Some of the events don't have a host in the raw data, and I am facing this error for those events.
[2020-09-24T09:20:40,591][WARN ][logstash.outputs.elasticsearch][main] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"filetest-", :routing=>nil, :_type=>"_doc"}, #LogStash::Event:0x6eb22ef8], :response=>{"index"=>{"_index"=>"filetest-", "_type"=>"_doc", "_id"=>"keBFwHQB9GB4WNUD5v0d", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [host.hostname] of type [keyword] in document with id 'keBFwHQB9GB4WNUD5v0d'. Preview of field's value: '{}'", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:603"}}}}}
My Logstash config is:
# Beats input: listens for events shipped by Filebeat.
input {
beats {
# Bind address and port that Filebeat's output.logstash must point at.
host => "192.168.1.242"
port => 5526
}
}
# Parse the raw syslog line and normalise the hostname into [host][hostname].
filter {
grok {
# Two alternative patterns for "message". Each "message"/pattern pair in
# the array form must be separated by a comma — the original config was
# missing the comma after the first pattern, which is a syntax error.
match => [
"message", "%%{CISCO_REASON:network}-%{INT}-%{CISCO_REASON}:",
"message", "(%{TIME}|%{TIMESTAMP_ISO8601}|%{YEAR}) %{HOSTNAME:[host][hostname]} %{WORD:[event][category]}.*?:%{GREEDYDATA:summary}"
]
# Capture straight into [host][hostname] and overwrite the Beats-supplied
# value only when the pattern actually matches.
#
# Why the original failed: it captured into the top-level [host] field and
# then unconditionally renamed [host] to [host][hostname]. For events whose
# raw data contains no hostname the grok match fails, [host] remains the
# object that Beats adds to every event, and renaming that whole object
# into the keyword field host.hostname produces the
# mapper_parsing_exception ("Can't get text on a START_OBJECT") seen in
# the error log. Dropping the mutate/rename and targeting the nested field
# directly avoids ever putting an object into a keyword field.
overwrite => ["[host][hostname]"]
}
}
output {
elasticsearch {
hosts => ["192.168.1.242:9200"]
# NOTE(review): the trailing '-' suggests a date suffix was intended,
# e.g. "filebeattest-%{+YYYY.MM.dd}" — confirm. Also note the pasted error
# log shows index "filetest-", which does not match this value, so the
# running pipeline may differ from this file.
index => "filebeattest-"
}
# Mirror each event to stdout for debugging.
stdout {
codec => rubydebug
}
}
Thanks in advance.