Hello Guys , so lately i've trying to insert some data from txt file into elasticsearch index , but i'm having a problem converting string field to date type , here is my logsatsh conf :
input {
file {
path => "C:/Elastic/Rapport_finale.txt"
start_position => "beginning"
}
}
filter {
csv {
separator => ";"
columns =>["ID","DATE_SITUATION","HEURE","COMPANY","NOM_COMPANY","DATE_SIT","NBRE_OPERATION_TOTAL","OPERATION_CAISSE","LR_LATITUDE","LR_LONGITUDE","SOMME","LIB_REGION","LIB_ZONE","LIB_AGENCE","DATE_UNIQ"]
}
date {
match => ["DATE_SIT", "yyyy-MM-dd HH:mm:ss"]
}
mutate {
add_field => { "[location][lat]" => "%{LR_LONGITUDE}" }
add_field => { "[location][lon]" => "%{LR_LATITUDE}" }
}
mutate {
convert => {"[location][lat]" => "float"}
convert => {"[location][lon]" => "float"}
}
}
output{
elasticsearch {
hosts => "*.*.*.*:9200"
index => "rapport_index_dg_finale_test"
#document_type => "my_type"
#action => "update"
#doc_as_upsert => true
#document_id => "%{id}"
user => "logstash_admin"
password => "********"
}
stdout{ codec => rubydebug}
}
and this is the output in elasticsearch
any idea what am i doing wrong !!
elasticsearch version 7.4
logstash version 7.4
thanks.