Can't get the longitude and latitude

Hi everyone, I'm new to ELK.
I wrote a Python script to generate over 100 persons, each with a random id, random longitude/latitude, and timestamps, and I stored them in MongoDB. I'm trying to generate a heatmap of the stored data. I later heard about ELK, so I installed it on a VirtualBox VM, and to try it out I created a simple JSON file with 10 docs. Here it is:
[{"id":1,"longitude":49.47664,"latitude":1.09213},
{"id":2,"longitude":49.47765,"latitude":1.09113},
{"id":3,"longitude":49.47563,"latitude":1.09313},
...............................................................................]
and here is my configuration file:
input {
  file {
    path => "/home/cesiadmin/Bureau/data/parc-visitvc.json"
    type => "json"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}

filter {
  json {
    source => "json_body"
    remove_field => ["message", "body", "json_body"]
  }

  mutate {
    add_field => ["[geoip][location]", "%{longitude}"]
    add_field => ["[geoip][location]", "%{[latitude]}"]
  }

  mutate {
    convert => ["[geoip][location]", "float"]
  }
}

output {
  stdout {}
  elasticsearch {
    hosts => "localhost"
    index => "parcours18"
  }
}

and here is what I get when running Logstash:

cesiadmin@mongodb1:~/logstash-6.3.2$ bin/logstash -f /home/cesiadmin/Bureau/data/logstash_cesi3.config
Sending Logstash's logs to /home/cesiadmin/logstash-6.3.2/logs which is now configured via log4j2.properties
[2018-08-06T14:31:27,954][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-08-06T14:31:28,126][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.3.2"}
[2018-08-06T14:31:29,176][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50}
[2018-08-06T14:31:29,371][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2018-08-06T14:31:29,374][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:9200/, :path=>"/"}
[2018-08-06T14:31:29,455][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2018-08-06T14:31:29,517][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>5}
[2018-08-06T14:31:29,520][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost"]}
[2018-08-06T14:31:29,525][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-08-06T14:31:29,550][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-08-06T14:31:29,928][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x57922e08@/home/cesiadmin/logstash-6.3.2/logstash-core/lib/logstash/pipeline.rb:245 sleep>"}
[2018-08-06T14:31:29,938][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2018-08-06T14:31:30,068][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
{
      "@version" => "1",
          "host" => "mongodb1",
          "type" => "json",
          "path" => "/home/cesiadmin/Bureau/data/parc-visitvc.json",
    "@timestamp" => 2018-08-06T13:31:30.049Z,
       "message" => "[{\"id\":1,\"langue\":\"Portugais\",\"longitude\":49.47664,\"latitude\":1.09213},",
          "tags" => [
        [0] "_grokparsefailure"
    ],
         "geoip" => {
        "location" => [
            [0] 0.0,
            [1] 0.0
        ]
    }
}
{
      "@version" => "1",
          "host" => "mongodb1",
          "type" => "json",
          "path" => "/home/cesiadmin/Bureau/data/parc-visitvc.json",
    "@timestamp" => 2018-08-06T13:31:30.059Z,
       "message" => "{\"id\":3,\"langue\":\"Portugais\",\"longitude\":49.47563,\"latitude\":1.09313},",
          "tags" => [
        [0] "_grokparsefailure"
    ],
         "geoip" => {
        "location" => [
            [0] 0.0,
            [1] 0.0
        ]
    }
}
{
      "@version" => "1",
          "host" => "mongodb1",
          "type" => "json",
          "path" => "/home/cesiadmin/Bureau/data/parc-visitvc.json",
    "@timestamp" => 2018-08-06T13:31:30.061Z,
       "message" => "",
          "tags" => [
        [0] "_grokparsefailure"
    ],
         "geoip" => {
        "location" => [
            [0] 0.0,
            [1] 0.0
        ]
    }
}
{
      "@version" => "1",
          "host" => "mongodb1",
          "type" => "json",
          "path" => "/home/cesiadmin/Bureau/data/parc-visitvc.json",
    "@timestamp" => 2018-08-06T13:31:30.059Z,
       "message" => "{\"id\":2,\"langue\":\"Portugais\",\"longitude\":49.47765,\"latitude\":1.09113},",
          "tags" => [
        [0] "_grokparsefailure"
    ],
         "geoip" => {
        "location" => [
            [0] 0.0,
            [1] 0.0
        ]
    }
}
{
      "@version" => "1",
          "host" => "mongodb1",
          "type" => "json",
          "path" => "/home/cesiadmin/Bureau/data/parc-visitvc.json",
    "@timestamp" => 2018-08-06T13:31:30.059Z,
       "message" => "{\"id\":4,\"langue\":\"Portugais\",\"longitude\":49.47464,\"latitude\":1.09413},",
          "tags" => [
        [0] "_grokparsefailure"
    ],
         "geoip" => {
        "location" => [
            [0] 0.0,
            [1] 0.0
        ]
    }
}

......

Thanks a lot.

Hi @sofian,

I suspect a Logstash configuration error more than anything else.

I don't think you need the json filter if you have already read the data as JSON on the input (for example with a json codec).
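
For example, something like this (an untested sketch, reusing the path from your config) should parse each line into fields directly, with no separate json filter. Note it assumes one JSON object per line, since the file input reads line by line; a JSON array spread over several lines won't parse.

input {
  file {
    path => "/home/cesiadmin/Bureau/data/parc-visitvc.json"
    start_position => "beginning"
    sincedb_path => "/dev/null"
    # parse each line as a JSON object at read time
    codec => "json"
  }
}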

You can test your Logstash filter from stdin.

Here's one example of what I have tried:

input { stdin { codec => "json" } }
#input { stdin { } }

filter {
  translate {
    field           => "[jsonPayload][level]"
    destination     => "[jsonPayload][level]"
    override        => true
    fallback        => "0"
    dictionary_path => "/root/tmp/dict.yaml"
  }
}
output {
  stdout { codec => rubydebug }
}

Start Logstash with:

logstash-6.3.1/bin/logstash -f test.conf
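
Then pipe a test event into it, for example (a made-up event shaped like what the translate filter above looks up):

echo '{"jsonPayload":{"level":"debug"}}' | logstash-6.3.1/bin/logstash -f test.conf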

I have only done location lookups based on public IP addresses, so I'm not sure how to do it from longitude and latitude.
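
That said, here is an untested sketch of how it might be done from your fields. It assumes the events already contain numeric longitude and latitude fields (i.e. the JSON was actually parsed) and that [geoip][location] is mapped as geo_point, as in the default template shown in your log:

filter {
  mutate {
    # appending to the same field twice turns it into an array;
    # a geo_point given as an array expects [lon, lat] order
    add_field => ["[geoip][location]", "%{longitude}"]
    add_field => ["[geoip][location]", "%{latitude}"]
  }
  mutate {
    # if %{longitude} was never resolved, the literal string is kept,
    # and converting that string to float yields 0.0
    convert => ["[geoip][location]", "float"]
  }
}

This is essentially what your config already does, so the 0.0 values suggest the %{longitude} and %{latitude} references were never substituted, i.e. the JSON was not parsed into fields in the first place.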

OK, thank you A_B, I'll try it and I hope it resolves the issue. The

"geoip" => {
    "location" => [
        [0] 0.0,
        [1] 0.0

part shouldn't give 0.0; it should give me the exact longitude and latitude values:

[0] longitude value
[1] latitude value
