Hi,
I'm trying to parse and integrate multiple .csv files into Elasticsearch through the Logstash csv plugin. However, as I have around 4000 csv files, is there a way to make the Elasticsearch output index match the input csv filename? To give some context, every csv file is named "Amp_N.csv", where "N" is an integer. Is there any way to make the index name "Amp_N", with "N" being the corresponding digit(s) from the filename?
input {
  file {
    # Glob matching all per-measurement CSVs, e.g. Amp_1.csv ... Amp_4000.csv.
    path => "/home/emanuel/ondas/amp/Amp_*.csv"
    # Read each file from the top instead of tailing only new lines.
    start_position => "beginning"
    # Discard read-position bookkeeping so every file is re-read from scratch
    # on each Logstash restart (useful for one-shot bulk imports).
    sincedb_path => "/dev/null"
  }
}
filter {
  # Parse each line into named columns.
  csv {
    separator => ","
    columns => ["id", "Amplitude", "time", "dist"]
    remove_field => ["True"]
  }
  mutate {convert => ["id", "integer"] }
  mutate {convert => ["Amplitude", "float"] }
  mutate {convert => ["time", "float"] }
  mutate {convert => ["dist", "float"] }

  # Extract the file number from the source path the file input stores in
  # the "path" field, e.g. /home/emanuel/ondas/amp/Amp_37.csv -> 37.
  # [@metadata] fields are visible to sprintf in the output section but are
  # never written into the Elasticsearch documents themselves.
  grok {
    match => { "path" => "Amp_%{INT:[@metadata][file_no]}\.csv$" }
  }
  # Elasticsearch index names must be lowercase, so use "amp_N", not "Amp_N".
  mutate {
    add_field => { "[@metadata][target_index]" => "amp_%{[@metadata][file_no]}" }
  }
}
output {
  elasticsearch {
    hosts => "192.168.20.32:9200"
    document_type => "data"
    # One index per source file: amp_1, amp_2, ..., amp_4000.
    # NOTE(review): events whose path fails the grok match (tagged
    # _grokparsefailure) would get a literal, invalid index name — confirm
    # all files follow the Amp_<digits>.csv pattern.
    index => "%{[@metadata][target_index]}"
  }
  stdout { codec => json_lines }
}