Good day,
I am trying to parse multiple CSV files in a directory for trend analysis and report generation. The index is being created, but not all of the information is being parsed, and the error below is displayed:
[ERROR] 2017-11-21 21:27:27.067 [Ruby-0-Thread-3: /usr/share/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.23/lib/stud/task.rb:22] agent - An unexpected error occurred!
{:message=>"Permission denied - /etc/logstash/conf.d/logstash-simple.conf",
 :class=>"Errno::EACCES",
 :backtrace=>[
   "org/jruby/RubyIO.java:3804:in `read'",
   "org/jruby/RubyIO.java:3987:in `read'",
   "/usr/share/logstash/logstash-core/lib/logstash/config/loader.rb:80:in `local_config'",
   "org/jruby/RubyArray.java:1613:in `each'",
   "/usr/share/logstash/logstash-core/lib/logstash/config/loader.rb:73:in `local_config'",
   "/usr/share/logstash/logstash-core/lib/logstash/config/loader.rb:46:in `load_config'",
   "/usr/share/logstash/logstash-core/lib/logstash/config/loader.rb:15:in `format_config'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:304:in `fetch_config'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:312:in `reload_pipeline!'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:111:in `reload_state!'",
   "org/jruby/RubyHash.java:1342:in `each'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:108:in `reload_state!'",
   "org/jruby/ext/thread/Mutex.java:149:in `synchronize'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:107:in `reload_state!'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:75:in `execute'",
   "org/jruby/RubyProc.java:281:in `call'",
   "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.23/lib/stud/interval.rb:20:in `interval'",
   "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:75:in `execute'",
   "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:320:in `execute'",
   "org/jruby/RubyProc.java:281:in `call'",
   "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.23/lib/stud/task.rb:24:in `initialize'"]}
Please find my config file (logstash-simple.conf) below:
input {
  file {
    path => "/home/africom/backup3/data*.csv"
    # "end" only tails lines appended after startup; content already
    # in the files is skipped even with the sincedb disabled
    start_position => "end"
    sincedb_path => "/dev/null"
    type => "data_record"
  }
}

filter {
  if [type] == "data_record" {
    csv {
      separator => "|"
      skip_empty_columns => true
    }
    # rename the auto-generated columnN fields to meaningful names
    mutate {
      rename => {
        "column1"  => "index"
        "column3"  => "system_time"
        "column5"  => "msdn"
        "column8"  => "imsi"
        "column13" => "session_start"
        "column16" => "session_end"
        "column17" => "data_volume"
        "column74" => "needs_to_be_id"
      }
    }
  }
}

output {
  elasticsearch {
    hosts => ["192.168.88.101:9200"]
    index => "fourth"
  }
  stdout { codec => rubydebug }
}
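As a sanity check, the pipeline syntax can be validated without starting the service, using Logstash's own test flag (path per the standard package layout shown in the trace above):

# Parse the pipeline and exit, reporting any syntax errors
sudo /usr/share/logstash/bin/logstash --config.test_and_exit \
  -f /etc/logstash/conf.d/logstash-simple.conf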