Hi All,
I've been testing out the very basics of ELK and was able to get an index going by reading in a simple CSV. As a test, I tried to build a second index with the same timestamp format but different columns, and every event came back tagged with _dateparsefailure. I even deleted my whole Logstash setup, re-ran it against the same CSV as the original index, and it still failed with the same error.
Any pointers would be appreciated. Below are my logstash.conf and a sample of the CSV:
input {
  file {
    path => "PiT.csv"
    start_position => "beginning"
  }
}

filter {
  csv {
    separator => ","
    columns => ["DATE","QUEUE","MAX","JLU","PEND","RUN","SUSP"]
  }
  date {
    match => ["DATE", "MM/dd/YYYY HH:mm:ss"]
    target => "@timestamp"
  }
  mutate { convert => ["MAX", "float"] }
  mutate { convert => ["JLU", "float"] }
  mutate { convert => ["PEND", "float"] }
  mutate { convert => ["RUN", "float"] }
  mutate { convert => ["SUSP", "float"] }
}

output {
  elasticsearch {
    action => "index"
    hosts => ["localhost:9201"]
    index => "queue2"
    workers => 1
  }
  stdout {}
}
Sample CSV (PiT.csv):
DATE,QUEUE,MAX,JLU,PEND,RUN,SUSP
07/06/2017 10:05:16, x,3000,2000,0,500,0
07/06/2017 10:05:16, y,1100,1000,0,58,0
07/06/2017 10:05:16, z,4500,1600,92,1328,20
07/06/2017 10:10:19, a,3000,2000,0,478,0
07/06/2017 10:10:19, b,1100,1000,0,58,0
07/06/2017 10:10:19, c,4500,1600,84,1328,20
07/06/2017 10:15:32, a,3000,2000,0,440,0
07/06/2017 10:15:32, c,1100,1000,0,58,0
07/06/2017 10:15:32, d,4500,1600,76,1248,20
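
For completeness, I'm invoking Logstash with something along these lines (run from the Logstash install directory; the exact path to my config file may differ on my machine):

bin/logstash -f logstash.conf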