Hello! I'm trying to send data to Elasticsearch with Logstash on a Linux server. For some reason, I don't get any errors, but nothing is ever sent to the index. Here is my pipeline configuration:
input {
  file {
    path => ["/srv/XXX/csv_receivers/XXX_receiver/*"]
    ignore_older => 10000000000
    start_position => "beginning"
    # /dev/null is the null device on Linux ("NUL" only works on Windows)
    sincedb_path => "/dev/null"
    codec => plain {
      charset => "ANSI_X3.4-1968"
    }
  }
}
filter {
  csv {
    separator => ";"
    columns => ["sender", "receiver", "flow_type", "start_traitement", "end_traitement", "start_size", "end_size", "start_format", "end_format", "start_platform", "end_platform", "start_transport", "end_transport", "prod"]
  }
  # parse the start/end timestamps into real date fields
  date {
    match => ["start_traitement", "dd/MM/yy HH:mm:ss,SSS000000"]
    target => "start_traitement_true"
  }
  date {
    match => ["end_traitement", "dd/MM/yy HH:mm:ss,SSS000000"]
    target => "end_traitement_true"
  }
  # convert the parsed dates to epoch seconds and compute the processing duration
  ruby {
    code => 'event.set("date_epoch_end", event.get("end_traitement_true").to_i)'
  }
  ruby {
    code => 'event.set("date_epoch_start", event.get("start_traitement_true").to_i)'
  }
  ruby {
    code => 'event.set("time_between", event.get("date_epoch_end") - event.get("date_epoch_start"))'
  }
  # strip the trailing carriage return from the last CSV column and tag the category
  ruby {
    code => 'event.set("fixedProd", event.get("prod").tr("\r", ""))'
  }
  ruby {
    code => 'event.set("cat", "XXX")'
  }
  mutate {
    convert => {
      "sender" => "string"
      "receiver" => "string"
      "flow_type" => "string"
      "start_size" => "integer"
      "end_size" => "integer"
      "start_format" => "string"
      "end_format" => "string"
      "start_platform" => "string"
      "end_platform" => "string"
      "date_epoch_end" => "float"
      "date_epoch_start" => "float"
      "start_transport" => "string"
      "end_transport" => "string"
      "prod" => "string"
    }
  }
  # drop the raw "message" field so only the parsed columns are indexed
  prune {
    blacklist_names => ["message"]
  }
}
output {
  elasticsearch {
    hosts => "XXXXXXXXX:9200"
    index => "testindex"
    user => "elastic"
    password => "XXXXXXXX"
  }
}
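
Not sure if this is useful, but as a debugging step I was thinking of temporarily adding a stdout output next to the elasticsearch one, just to see whether any events make it through the filters at all. This reuses the same (anonymised) elasticsearch settings as above; the stdout part is only a sketch I haven't validated:

output {
  elasticsearch {
    hosts => "XXXXXXXXX:9200"
    index => "testindex"
    user => "elastic"
    password => "XXXXXXXX"
  }
  # temporary debug output: prints every event that reaches the output stage
  stdout {
    codec => rubydebug
  }
}

If events get printed to the console but nothing lands in "testindex", the problem is on the Elasticsearch side; if nothing is printed at all, the file input or one of the filters is dropping everything. I could also run Logstash with --config.test_and_exit first to confirm the configuration parses.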
Thanks for the help! <3