Hi guys,
I've created a Logstash config file with a CSV filter, shown below. It works, but I need to index the timestamp field as a "date" type in Elasticsearch. I tried the mutate/convert function, but it isn't working. Can you help?
```
input {
  # Tail every TWAMP export CSV in the ECHOVOULT directory.
  file {
    path           => "/home/elastic/ECHOVOULT/AEDS-TWAMP-*.csv"
    # Read existing files from the top instead of only new lines.
    start_position => "beginning"
    # Disable sincedb persistence so files are re-read on every restart
    # (useful while testing; remove for production).
    sincedb_path   => "/dev/null"
  }
}
filter {
  # Parse each CSV line; the first row of each file supplies the column names.
  csv {
    separator                 => ","
    skip_header               => true
    autodetect_column_names   => true
    autogenerate_column_names => true
    skip_empty_columns        => true
  }
  # Split destination_name into its site/region components using the
  # custom patterns defined under patterns_dir.
  grok {
    patterns_dir => ["/home/elastic/ECHOVOULT/pattern"]
    match => { "destination_name" => "%{REGION_ID:region_id}%{NAZWA:sitename_a}%{LITERKA:kandydat}%{ENODEB_ID:enodeb_id}%{REST:czesc_adresu}%{PHB:phb}"}
  }
  # BUG FIX: mutate/convert has no "date" type (only integer, float, string,
  # boolean, ...), so `convert => {"timestamp" => "date"}` did nothing.
  # The date filter is the correct tool: it parses the string and stores a
  # real Logstash Timestamp, which Elasticsearch maps as a `date` field.
  date {
    # NOTE(review): "ISO8601" is an assumption — replace with the actual
    # time format used in the CSV (e.g. "yyyy-MM-dd HH:mm:ss") after
    # checking a sample row.
    match  => ["timestamp", "ISO8601"]
    # Write the parsed value back into `timestamp` instead of the
    # default `@timestamp`, preserving the original field name.
    target => "timestamp"
  }
}
output {
  # Ship parsed events to the test index on the Elasticsearch node.
  elasticsearch {
    hosts => "http://172.16.63.27:9200"
    index => "trm_echo_test"
  }
  # Mirror every event to the console for debugging.
  stdout { codec => rubydebug }
}
```