I want to take latitude/longitude values from a MySQL database and map them as a geo_point field in an Elasticsearch index.

Below is my logstash configuration file.

input {
  jdbc {
    jdbc_driver_library => "mysql-connector-java-8.0.16/mysql-connector-java-8.0.16.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://localhost:3306/VAMT_NX"
    # TODO: supply real credentials; prefer env-var substitution, e.g. "${MYSQL_USER}"
    jdbc_user => ""
    jdbc_password => ""
    jdbc_paging_enabled => true
    # Incremental sync: only rows whose date_mod is newer than the last seen
    # unix_ts_in_secs are fetched on each scheduled run (every 5 seconds).
    tracking_column => "unix_ts_in_secs"
    use_column_value => true
    tracking_column_type => "numeric"
    schedule => "*/5 * * * * *"
    # NOTE: concat(latitude, ',', longitude) produces a "lat,lon" STRING.
    # Elasticsearch only accepts that format for a field explicitly mapped
    # as geo_point — see the comment on the elasticsearch output.
    statement => "SELECT t1.id, t3.name AS entity, t1.name AS asset_code, t1.serial AS serial, t1.date_mod,
      t4.name AS branch, CONCAT(t4.latitude, ',', t4.longitude) AS location, t5.name AS domain,
      t6.name AS computer_model, t7.name AS computer_type, t8.name AS manufacturer, t9.name AS user,
      t1.date_creation, t1.uniqueqrcode,
      t2.macaddressfield, t2.ipaddressfield, t2.defaultgatewayfield, t2.subnetmaskfield,
      t2.dnsentryfield, t2.employeenumberfield, t2.employeenamefield, t2.employeecontactfield,
      t2.employeedesignationfield, t10.name AS cpu_make, t11.name AS cpu_speed,
      t12.name AS hard_disk,
      t13.name AS ram, -- BUGFIX: was t12.name (hard disk) selected twice; the t13 RAM join was unused
      t14.name AS service_type, t15.name AS os_name, t16.name AS os_bit, t17.name AS asset_status,
      t18.name AS status_remarks, t1.is_deleted,
      UNIX_TIMESTAMP(t1.date_mod) AS unix_ts_in_secs
      FROM glpi_computers t1
      LEFT JOIN glpi_plugin_fields_computercomputers t2 ON t1.id = t2.items_id
      LEFT JOIN glpi_entities t3 ON t1.entities_id = t3.id
      LEFT JOIN glpi_locations t4 ON t1.locations_id = t4.id
      LEFT JOIN glpi_domains t5 ON t1.domains_id = t5.id
      LEFT JOIN glpi_computermodels t6 ON t1.computermodels_id = t6.id
      LEFT JOIN glpi_computertypes t7 ON t1.computertypes_id = t7.id
      LEFT JOIN glpi_manufacturers t8 ON t1.manufacturers_id = t8.id
      LEFT JOIN glpi_users t9 ON t1.users_id = t9.id
      LEFT JOIN glpi_plugin_fields_cpumakefielddropdowns t10 ON t2.plugin_fields_cpumakefielddropdowns_id = t10.id
      LEFT JOIN glpi_plugin_fields_cpuspeedfielddropdowns t11 ON t2.plugin_fields_cpuspeedfielddropdowns_id = t11.id
      LEFT JOIN glpi_plugin_fields_harddiskfielddropdowns t12 ON t2.plugin_fields_harddiskfielddropdowns_id = t12.id
      LEFT JOIN glpi_plugin_fields_ramfielddropdowns t13 ON t2.plugin_fields_ramfielddropdowns_id = t13.id
      LEFT JOIN glpi_plugin_fields_servicetypefielddropdowns t14 ON t2.plugin_fields_servicetypefielddropdowns_id = t14.id
      LEFT JOIN glpi_plugin_fields_osnamefielddropdowns t15 ON t2.plugin_fields_osnamefielddropdowns_id = t15.id
      LEFT JOIN glpi_plugin_fields_osbitfielddropdowns t16 ON t2.plugin_fields_osbitfielddropdowns_id = t16.id
      LEFT JOIN glpi_plugin_fields_assetstatusfielddropdowns t17 ON t2.plugin_fields_assetstatusfielddropdowns_id = t17.id
      LEFT JOIN glpi_plugin_fields_statusremarkfielddropdowns t18 ON t2.plugin_fields_statusremarkfielddropdowns_id = t18.id
      WHERE UNIX_TIMESTAMP(t1.date_mod) > :sql_last_value
        AND t1.date_mod < NOW()
      ORDER BY t1.date_mod ASC"
  }
}
filter {
# Preserve the MySQL primary key in @metadata so the elasticsearch output can
# use it as the document _id (upserts instead of duplicates on each run),
# then strip fields that should not be indexed: the raw id, Logstash's
# @version, and the tracking column used only for incremental sync.
mutate {
copy => { "id" => "[@metadata][_id]"}
remove_field => ["id", "@version", "unix_ts_in_secs"]
}
}
output {

# Debug output: prints each event to the console; remove in production.
stdout { codec => "rubydebug"}

# Writes to the default host (localhost:9200). document_id comes from the
# MySQL primary key copied into @metadata by the filter, so re-runs update
# documents in place rather than duplicating them.
#
# NOTE(review): for "location" ("lat,lon" string) to be queryable as a
# geo_point, the nx_logs index must be created — BEFORE Logstash first
# writes to it — with an explicit mapping (or index template) declaring
# "location" as type geo_point. Elasticsearch's dynamic mapping will
# otherwise infer it as text/keyword, which cannot be changed afterwards
# without reindexing.
elasticsearch {
index => "nx_logs"
document_id => "%{[@metadata][_id]}"
}
}

So what do you end up with in the location field?

You are going to need an explicit mapping: Elasticsearch's dynamic mapping will infer a "lat,lon" string as text/keyword, not geo_point, so you must create the index (or an index template) with "location" mapped as type geo_point before Logstash first writes to it. This thread might help, but I have not tested what I suggested there.

Once I run this Logstash pipeline, I am not able to map the location field as a geo_point, because the index has already been created with a dynamically inferred mapping.