Hello,
Could someone help me with the messages below? I'm not sure whether the issue is my configuration or the .csv file.
This is a snippet from my debug output:
Pushing flush onto pipeline {:level=>:debug, :file=>"logstash/pipeline.rb", :line=>"453", :method=>"flush"}
Pushing flush onto pipeline {:level=>:debug, :file=>"logstash/pipeline.rb", :line=>"453", :method=>"flush"}
_globbed_files: /opt/logstash/csv/*.csv: glob is: ["/opt/logstash/csv/alohamon-alerts-2016-10-07.csv"] {:level=>:debug, :file=>"filewatch/watch.rb", :line=>"346", :method=>"_globbed_files"}
Pushing flush onto pipeline {:level=>:debug, :file=>"logstash/pipeline.rb", :line=>"453", :method=>"flush"}
Pushing flush onto pipeline {:level=>:debug, :file=>"logstash/pipeline.rb", :line=>"453", :method=>"flush"}
Pushing flush onto pipeline {:level=>:debug, :file=>"logstash/pipeline.rb", :line=>"453", :method=>"flush"}
_globbed_files: /opt/logstash/csv/*.csv: glob is: ["/opt/logstash/csv/alohamon-alerts-2016-10-07.csv"] {:level=>:debug, :file=>"filewatch/watch.rb", :line=>"346", :method=>"_globbed_files"}
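From what I've read, this pattern (the glob keeps matching the file, but no events ever come through) can happen when the file input has already recorded the end of the file in its sincedb, so there is nothing "new" left to read. Assuming that's what's happening here, this is the input block I was planning to test with; sincedb_path => "/dev/null" stops Logstash from remembering the read position between runs:

input {
  file {
    path => "/opt/logstash/csv/*.csv"
    type => "csv"
    start_position => "beginning"
    # testing only: don't persist the read position, so the whole
    # file is re-read on every run
    sincedb_path => "/dev/null"
  }
}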
My logstash.conf:
input {
  file {
    path => "/opt/logstash/csv/*.csv"
    type => "csv" # a type to identify those logs (will need this later)
    start_position => "beginning"
  }
}
filter {
  csv {
    columns => ["ID","URL","Brouha","Action","Last Action before Clear","Resolve/Close Reason","In Process","Chronic","Service Affecting","Created","AM PM","From","Till","Duration (minutes)","Customers","STBs","TTA","TTI","TTS","TTR","By","Region","DAC","Division","Device","IP","Upstreams","Reason","Comment","Root Cause","Corrective Action Taken","SI Ticket","JB Ticket"]
    separator => ","
  }
  mutate {
    # note: "TempOut" is not one of the columns listed above
    convert => ["TempOut", "float"]
  }
}
#output {
#  elasticsearch {
#    hosts => "localhost" # it used to be "host" and "port" pre-2.0
#    index => "avsdata"
#  }
#  stdout {
#    codec => rubydebug
#  }
#}
output {
  stdout { codec => rubydebug }
}
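While pasting this I also noticed that the mutate converts "TempOut", which is not one of the columns my csv filter creates, so that convert probably never does anything. If the conversion is supposed to run, I'm guessing it has to name a real column, something like this (using "Duration (minutes)" purely as an example):

filter {
  mutate {
    # assumption: convert an actual csv column instead of the
    # non-existent "TempOut" field
    convert => ["Duration (minutes)", "float"]
  }
}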
Thank you,
Keith