Hi Team,
I have latitude and longitude information in my MySQL database, and I want to push this data into Elasticsearch and create a map in Kibana from these values.
I have browsed various discussions but have been unable to piece the steps together.
Could you please provide the proper steps to achieve this?
My Logstash configuration is as follows:
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://10.0.30.14:3306/p_gw"
    jdbc_user => "db_user"
    jdbc_password => "db_password"
    # The path to our downloaded JDBC driver
    jdbc_driver_library => "C:/Users/saroj/Downloads/mysql-connector-java-5.1.46/mysql-connector-java-5.1.46/mysql-connector-java-5.1.46-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    # Our query, run every minute; tm.id must be selected because it is
    # used as the tracking column and as the document id in the output
    schedule => "* * * * *"
    statement => "select tm.id, si.latitude, si.longitude from pg_master tm left join p_ledger pl on tm.tran_id=pl.gRefNumber left join pg_store_info si on pl.storeID=si.store_id where tm.id > :sql_last_value order by tm.id ASC limit 10000"
    use_column_value => true
    tracking_column => "id"
    last_run_metadata_path => "C:/elk/logstash-5.2.2/config/.last_txn_with_loc_run_metadata_path"
  }
}
filter {
  if [latitude] and [longitude] {
    # Build a location object with lat/lon subfields. The conversion to
    # float happens in a second mutate because, within a single mutate,
    # convert runs before add_field and would not see the new fields.
    mutate {
      add_field => {
        "[location][lat]" => "%{latitude}"
        "[location][lon]" => "%{longitude}"
      }
    }
    mutate {
      convert => {
        "[location][lat]" => "float"
        "[location][lon]" => "float"
      }
    }
  }
  mutate {
    add_field => {
      "[@metadata][document_id]" => "%{id}"
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "pgw-txn-loc"
    sniffing => true
    manage_template => false
    document_type => "data"
    document_id => "%{[@metadata][document_id]}"
  }
  stdout { codec => rubydebug }
}
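Since manage_template is set to false, my understanding is that Logstash will not install any index template, so the location field has to be mapped as geo_point before the first document is indexed, otherwise Kibana's Tile Map will not recognize it. Is something like the following the right way to create the index manually? (A minimal sketch for Elasticsearch 5.x; the index name and type match my config above, but I am not sure this is correct.)

curl -XPUT "http://localhost:9200/pgw-txn-loc" -H 'Content-Type: application/json' -d'
{
  "mappings": {
    "data": {
      "properties": {
        "location":  { "type": "geo_point" },
        "latitude":  { "type": "float" },
        "longitude": { "type": "float" }
      }
    }
  }
}'

After the index exists with this mapping, I assume I would create the index pattern in Kibana, at which point location should show up as a geo_point field that the Tile Map visualization can use.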
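Alternatively, some posts build location as a single "lat,lon" string instead of an object with lat/lon subfields; as far as I can tell, a geo_point field accepts that format as well, which would avoid the convert step entirely. Would this simpler filter (a sketch that would replace the two location mutates above) also work?

filter {
  if [latitude] and [longitude] {
    # geo_point also accepts a "lat,lon" string, so no float conversion
    # is needed here
    mutate {
      add_field => { "location" => "%{latitude},%{longitude}" }
    }
  }
}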