Geo_point and geohash in logstash

Hi,
I am trying to use a coordinate map in Kibana for my visualization.
I rename my longitude and latitude fields with mutate and convert them to float, but when I try it in Kibana it says there is no compatible geo_point field.

Logstash:

input {
    file {
        path => "G:/Transit/*.csv"
        start_position => "beginning"
        sincedb_path => "F:\ELKstack\logstash\data\plugins\inputs\file\transit.txt"
    }
}

filter {
    csv {
        separator => ","
        columns => ["Unit#","Unit Name","Add1","City","Pr","Pcode","Country","Tel1","Access","W_GMT","S_GMT","LATITUDE","LONGTITUDE","NAME_FRN","ADDR_1_FRN","Hrs_Mon_Op","Hrs_Mon_Cls","Hrs_Tue_Op","Hrs_Tue_Cls","Hrs_Wed_Op","Hrs_Wed_Cls","Hrs_Thu_Op","Hrs_Thu_Cls","Hrs_Fri_Op","Hrs_Fri_Cls","Hrs_Sat_Op","Hrs_Sat_Cls","Hrs_Sun_Op","Hrs_Sun_Cls","LANG_SERVICES","Close Date","Forced Relo?","FY 2019","F24"]
    }

    mutate { convert => { "LONGTITUDE" => "float" } }
    mutate { convert => { "LATITUDE" => "float" } }

    mutate { rename => ["LONGTITUDE", "Location(lon)"] }
    mutate { rename => ["LATITUDE", "Location(lat)"] }
    mutate { rename => ["Unit#", "TransitId"] }
    mutate { rename => ["City", "City"] }
    mutate { rename => ["Add1", "Address"] }
    mutate { rename => ["Pr", "Province"] }
    mutate { rename => ["Pcode", "Postal Code"] }
    #mutate { rename => ["my_Region", "Region"] }

    mutate { remove_field => ["Unit Name","Country","Tel1","Access","W_GMT","S_GMT","LATITUDE","LONGTITUDE","NAME_FRN","ADDR_1_FRN","Hrs_Mon_Op","Hrs_Mon_Cls","Hrs_Tue_Op","Hrs_Tue_Cls","Hrs_Wed_Op","Hrs_Wed_Cls","Hrs_Thu_Op","Hrs_Thu_Cls","Hrs_Fri_Op","Hrs_Fri_Cls","Hrs_Sat_Op","Hrs_Sat_Cls","Hrs_Sun_Op","Hrs_Sun_Cls","LANG_SERVICES","Close Date","Forced Relo?","FY 2019","F24","my_Region"] }
}

output {
    stdout { codec => rubydebug }

    elasticsearch {
        action => "index"
        hosts => "https://127.0.0.1:9200"
        index => "branchres-transit"
        cacert => "/ELKstack/logstash/config/certs/ca/ca.crt"
        user => "logstash_writer"
        password => "password"
        manage_template => false
    }
}

That should be

mutate {rename => { "LONGTITUDE" => "[Location][lon]" } }

Also, you will need an index template that tells elasticsearch that Location is a geo_point.
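For example, something along these lines (I'm taking the index name from your config; the template name is arbitrary):

PUT _template/transit
{
  "index_patterns": ["branchres-transit"],
  "mappings": {
    "properties": {
      "Location": { "type": "geo_point" }
    }
  }
}

With Location mapped as a geo_point, elasticsearch will accept an object with lat and lon keys.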

Thanks Badger.

Hi,
I am getting an error. Can you help me resolve it?

[2019-10-10T10:17:16,107][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"branchres-transit", :_type=>"_doc", :routing=>nil}, #LogStash::Event:0x66b9b5b], :response=>{"index"=>{"_index"=>"branchres-transit", "_type"=>"_doc", "_id"=>"x-kHtm0Bu8ReViWd7er9", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [location] of type [geo_point]", "caused_by"=>{"type"=>"parse_exception", "reason"=>"unsupported symbol [L] in geohash [LONGTITUDE LATITUDE]", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"unsupported symbol [L] in geohash [LONGTITUDE LATITUDE]"}}}}}}

My Index template:
PUT _template/transit
{
  "index_patterns": "branchres-transit",
  "mappings": {
    "properties": {
      "location": {
        "properties": {
          "lat": {
            "type": "geo_point"
          },
          "lon": {
            "type": "geo_point"
          }
        }
      }
    }
  }
}

Logstash config:

input {
    file {
        path => "G:/Transit/*.csv"
        start_position => "beginning"
        sincedb_path => "F:\ELKstack\logstash\data\plugins\inputs\file\transit.txt"
    }
}

filter {
    csv {
        separator => ","
        columns => ["Unit#","Unit Name","Add1","City","Pr","Pcode","Country","Tel1","Access","W_GMT","S_GMT","LATITUDE","LONGTITUDE","NAME_FRN","ADDR_1_FRN","Hrs_Mon_Op","Hrs_Mon_Cls","Hrs_Tue_Op","Hrs_Tue_Cls","Hrs_Wed_Op","Hrs_Wed_Cls","Hrs_Thu_Op","Hrs_Thu_Cls","Hrs_Fri_Op","Hrs_Fri_Cls","Hrs_Sat_Op","Hrs_Sat_Cls","Hrs_Sun_Op","Hrs_Sun_Cls","LANG_SERVICES","Close Date","Forced Relo?","FY 2019","F24"]
    }

    mutate {
        add_field => { "location" => "%{[Location][lon]} %{[Location][lat]}" }
        rename => { "LONGTITUDE" => "[Location][lon]" }
        rename => { "LATITUDE" => "[Location][lat]" }
        convert => { "[LONGTITUDE]" => "float" }
        convert => { "[LATITUDE]" => "float" }
    }

    mutate { rename => ["Unit#", "TransitId"] }
    mutate { rename => ["City", "City"] }
    mutate { rename => ["Add1", "Address"] }
    mutate { rename => ["Pr", "Province"] }
    mutate { rename => ["Pcode", "Postal Code"] }
    #mutate { rename => ["Region", "Region"] }
    mutate { remove_field => ["Unit Name","Country","Tel1","Access","W_GMT","S_GMT","LATITUDE","LONGTITUDE","NAME_FRN","ADDR_1_FRN","Hrs_Mon_Op","Hrs_Mon_Cls","Hrs_Tue_Op","Hrs_Tue_Cls","Hrs_Wed_Op","Hrs_Wed_Cls","Hrs_Thu_Op","Hrs_Thu_Cls","Hrs_Fri_Op","Hrs_Fri_Cls","Hrs_Sat_Op","Hrs_Sat_Cls","Hrs_Sun_Op","Hrs_Sun_Cls","LANG_SERVICES","Close Date","Forced Relo?","FY 2019","F24","Region"] }
}

output {
    stdout { codec => rubydebug }

    elasticsearch {
        action => "index"
        hosts => "https://127.0.0.1:9200"
        index => "branchres-transit"
        cacert => "/ELKstack/logstash/config/certs/ca/ca.crt"
        user => "logstash_writer"
        password => "password"
        manage_template => false
        template_name => "transit"
    }
}

When specifying a geo_point as a string, the values should be comma separated. Without a comma, elasticsearch tries to parse the string as a geohash, which is why the error complains about an unsupported symbol in the geohash [LONGTITUDE LATITUDE]. So this should be

add_field => { "location" => "%{[Location][lon]},%{[Location][lat]}" }

Thanks. I've changed the add_field, but now I am getting a different kind of error:

[2019-10-10T13:00:22,403][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"branchres-transit", :_type=>"_doc", :routing=>nil}, #LogStash::Event:0x2fb4c248], :response=>{"index"=>{"_index"=>"branchres-transit", "_type"=>"_doc", "_id"=>"CRudtm0Bu8ReViWdQQSG", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [location] tried to parse field [location] as object, but found a concrete value"}}}}

Can you delete the index and start over?
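A template is only applied when an index is created, and you cannot change the mapping of a field that already exists, so the template change will not take effect until the index is recreated. Something like this in the Kibana Dev Tools console:

DELETE branchres-transit

Then re-run logstash so the index is created with the new template.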

No luck even after deleting the index and starting over - I got the same mapper_parsing_exception.

In your stdout rubydebug output, what does the location field look like?

Hi Badger,

In the rubydebug output, "location" => "-123.5100, 45.4243" and "Location" has lon/lat sub-fields:

{
    "Province" => "po",
    "@version" => "1",
    "@timestamp" => 2019-10-10T18:51:22.850Z,
    "location" => "-123.5100, 45.4243",
    "TransitId" => "300",
    "Address" => "1114 RD",
    "Postal Code" => "xxxxx",
    "host" => "xxxxx",
    "City" => "ED",
    "path" => "G:/Tran/2019-09-05.csv",
    "message" => "372 & 49TH , MAIN ST,3H1,CA,6042352472,True,-8,-7,41.228806,-103.1078,\"\",\"\",09:30,18:00,09:30,18:00,09:30,18:00,09:30,18:00,09:30,18:00,09:00,16:00,\"\",\"\",\"English, French, Mandarin Chinese, \",,,,\r",
    "Location" => {
        "lon" => "-123.5100",
        "lat" => "45.4243"
    }
}

Your index template should include something like

"mappings": {
  "properties": {
    "location": {
      "type": "geo_point"
    }
  }
}

I suspect it doesn't.
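You can verify what the index actually got with

GET branchres-transit/_mapping

If [location] shows up as an object with lat/lon properties instead of "type": "geo_point", the template is not the one elasticsearch is applying.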


I've changed my index template, and now I'm getting the same "mapper_parsing_exception" error again:

[2019-10-10T15:20:59,482][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"branchres-transit", :_type=>"_doc", :routing=>nil}, #LogStash::Event:0x77fec1ff], :response=>{"index"=>{"_index"=>"branchres-transit", "_type"=>"_doc", "id"=>"rUYdt20Bu8ReViWd_h5", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [location] of type [geo_point]", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"illegal latitude value [-104.6098] for location"}}}}}

That's an improvement! Latitude goes from -90 to +90. What do you want -104.6098 to mean? Do you have latitude and longitude the wrong way around?
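If so, note that when elasticsearch parses a geo_point from a string it expects "lat,lon" order, so the add_field would need the two fields swapped:

add_field => { "location" => "%{[Location][lat]},%{[Location][lon]}" }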

Thanks.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.