Hi
Yes, I use the JDBC plugin.
Logstash config:
`input {
# Pulls rows from MySQL once per pipeline run via the JDBC input plugin.
jdbc {
jdbc_driver_library => "mysql-connector-java-8.0.17.jar"
jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
# NOTE(review): this line is truncated in the paste (ends mid-parameter,
# no closing quote) — the real config presumably continues with more
# JDBC URL parameters; confirm against the original file.
jdbc_connection_string => "jdbc:mysql://localhost:3306/db_trecobat?useUnicode=true&useJDBCCompliantTimezoneShift=tru$
jdbc_user => "user"
jdbc_password => "password"
# One event per client row; aliases (cli_id, latitude, longitude, ...)
# become event fields consumed by the filter/output sections below.
# NOTE(review): the SELECT list line ending in "c$" is also truncated in
# the paste — the latitude alias referenced later is presumably defined
# there; verify. Also note COUNT/SUM mix non-aggregated columns with a
# per-cli_id GROUP BY (relies on MySQL's relaxed ONLY_FULL_GROUP_BY).
statement => "SELECT sit_id, sit_nom,COUNT(cli_id) AS nb_clients,cli_date_client,cli_id,
SUM(if(cli_marque_construction in ('mnb','primobois'),1,0)) AS nb_clients_bois, cit_long as longitude,c$
ROUND((sum(if(cat_cli_type='actif',1,0))/SUM(cli_id)*100),0) AS pourcent_prospect_actif,
sum(if(ev_type='R1',1,0)) AS nb_R1,
sum(if(ev_type='R2',1,0)) AS nb_R2,
sum(if(ev_type='R3',1,0)) AS nb_R3
FROM clients
LEFT OUTER JOIN client_events ON ev_cli_id=cli_id AND ev_type IN ('R1','R2','R3') AND ev_actif
INNER JOIN sites ON cli_site_propect=sit_id
INNER JOIN categories on cli_cat_origine=cat_id
INNER JOIN tables_diverses.cities ON cli_cp_chantier=tables_diverses.cities.cit_code
WHERE cli_marque_construction
not in('extenbois','directmob','isytec','Mureno','office_sante')
AND cli_site_propect in(7,45,5,18,6,3,9,10,16,53,17,11,14,47,15)
group by cli_id"
}
}
filter {
# BUG FIX: mutate's `convert` takes a field NAME, not a sprintf reference.
# The original `convert => [ "%{latitude}", "float" ]` looked up a field
# literally named "%{latitude}", which does not exist, so nothing was ever
# converted. Additionally, inside a single mutate block `convert` runs
# before `add_field`, so the copied [location] sub-fields would have stayed
# strings anyway. Two mutate blocks guarantee copy-then-convert order.
mutate {
# Build the geo_point-shaped object expected by the Elasticsearch mapping.
add_field => {
"[location][lat]" => "%{latitude}"
"[location][lon]" => "%{longitude}"
}
}
mutate {
# Convert both the source fields and the final [location] sub-fields,
# so the indexed values are numeric rather than strings.
convert => {
"latitude" => "float"
"longitude" => "float"
"[location][lat]" => "float"
"[location][lon]" => "float"
}
}
}
output {
# Echo each event as one JSON line for debugging alongside the ES output.
stdout { codec => json_lines }
elasticsearch {
hosts => "localhost:9200"
index => "index_trecobat"
# NOTE(review): document_type is deprecated — mapping types were removed
# in Elasticsearch 7.x; confirm the target ES version before keeping it.
document_type => "reporting"
# Using cli_id as the document id makes re-runs upsert instead of
# duplicating documents (one document per client row).
document_id => "%{cli_id}"
}
}`
But with my template the import works fine.