Through logstash I sent my log messages to elasticsearch and a message has the following structure:
{
"message" => "{\"@version\":1,\"level\":\"INFO\",\"logger_name\":\"it.test.elk.ELKTestApplication\",\"appName\":\"docker-elk-master\",\"thread_name\":\"main\",\"message\":\"LAT: 45, LON: 13.69\"}\r",
"tags" => [
[0] "geoinfo"
],
"@version" => "1",
"@timestamp" => 2018-05-11T07:59:49.447Z,
"host" => "gateway",
"location" => {
"longitude" => 13.69,
"latitude" => 45.0
},
"port" => 59586
}
I created a template in the following way (the creation of the standard logstash index follows this model, or at least I thought it worked)
PUT _template/mytemplate
{
"index_patterns": ["logstash-*"],
"mappings": {
"doc": {
"properties": {
"location": { "type": "geo_point" }
}
}
}
}
However, I get parsing errors:
Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.05.11", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x5a226948>], :response=>{"index"=>{"_index"=>"logstash-2018.05.11", "_type"=>"doc", "_id"=>"qKs2TmMBEkGQJoRknI5B", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse", "caused_by"=>{"type"=>"parse_exception", "reason"=>"field must be either [lat], [lon] or [geohash]"}}}}}
I do not understand why
Also, this is the configuration for logstash:
input {
tcp {
port => 5000
}
}
filter {
grok {
match => {"message" => "LAT: %{NUMBER:LAT:float}, LON: %{NUMBER:LON:float}"}
add_tag => ["geoinfo"]
}
if "_grokparsefailure" in [tags] {
drop {}
}
else {
mutate {
rename => { "LAT" => "[location][latitude]" }
rename => { "LON" => "[location][longitude]" }
}
}
}
output {
stdout {
codec => rubydebug
}
elasticsearch {
hosts => "elasticsearch:9200"
}
}