Issue with Logstash parsing MySQL error logs

Hello,

I am using Logstash to parse my MySQL error logs and I am getting this error:

"error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"

So I modified my configuration and added
mutate {
  remove_field => [ "%host%" ]
}

But I am still getting the same error. I am using Logstash v6.6, and below is my Logstash configuration:

input {
  file {
    path => ["/var/log/mysql/error.log"]
    start_position => "beginning"
    sincedb_path => "/dev/null"
    type => "mysql"
  }
}
filter {
  grok {
    match => { "message" => [
      "%{LOCALDATETIME:[mysql][error][timestamp]} (\[%{DATA:[mysql][error][level]}\] )?%{GREEDYDATA:[mysql][error][message]}",
      "%{TIMESTAMP_ISO8601:[mysql][error][timestamp]} %{NUMBER:[mysql][error][thread_id]} \[%{DATA:[mysql][error][level]}\] %{GREEDYDATA:[mysql][error][message1]}",
      "%{GREEDYDATA:[mysql][error][message2]}"
    ] }
    pattern_definitions => {
      "LOCALDATETIME" => "[0-9]+ %{TIME}"
    }
    remove_field => "message"
  }
  mutate {
    rename => { "[mysql][error][message1]" => "[mysql][error][message]" }
  }
  mutate {
    rename => { "[mysql][error][message2]" => "[mysql][error][message]" }
  }
  date {
    match => [ "[mysql][error][timestamp]", "ISO8601", "YYMMdd H:m:s" ]
    remove_field => "[apache2][access][time]"
  }
  mutate {
    remove_field => [ "%host%" ]
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => "http://localhost:9200"
    index => "customlogs-mysql"
  }
}

Thanks in advance.

Remove the % and just use

remove_field => [ "host" ]

The problem is that Beats creates [host] as an object, so if you try to index [host] as a string, that creates a mapping conflict.
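
For reference, here is a minimal sketch of the corrected mutate block, assuming the top-level host field added by the input is not needed downstream. The commented-out rename is a hypothetical alternative if you would rather keep the hostname under the object structure the existing mapping expects (e.g. [host][name], as Beats uses):

filter {
  mutate {
    # drop the string-valued host field so it never hits the object mapping
    remove_field => [ "host" ]
    # alternative: instead of removing it, move the value into the object
    # rename => { "host" => "[host][name]" }
  }
}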

Hello @Badger,

Thanks for your prompt response; the issue is fixed now.
