Today I started learning about ES and LS using latest versions.
Like any other curious newbie, experienced various issues and solved them one by one. I am hoping i.e. below mentioned problem will the last one before I will create my first pipeline.
Below is the config file I am using, trying to pass csv file has a mix of integers, date, text(lot of, which include /", ' )
Help:- Error pointing at row # 23 column 37. I am assuming it referring to config file, Despite of many changes I am made to config file like pass the less column names or use autodetec_column_names . Still its pointing to same place.
Please review both stuffs and suggest me what I am missing and if my interpretation from logs is not correct help me to understand it better.
FYI.. I started programming after 14 years and i am enjoying it..
// My logstacsh.config
input {
file {
path => "C:\Users\Documents\Chander\Elastic\Data\May_Dec 17.csv"
start_position => "beginning"
sincedb_path => "/dev/null"
}
}
filter{
csv {
separator => ","
# skip_empty_columns => true
columns => [ "Month", "Quarter", "Year", "INCIDENT_ID", "REQ_ID", "COUNTRY", "SERVICE", "ASSIGNED_GROUP", "STATUS","DUPLICATE_CALL_FLAG", "ASSIGNEE_LOGIN_ID", "LAST_MODIFIED_BY" ] **(Due to limitation of words removed few column names)**
}
mutate {convert => ["REQ_ID", "integer"]}
mutate {convert => ["ASSIGNEE_LOGIN_ID", "integer"]}
mutate {convert => ["SLA_RESUME_MIN", "integer"]}
mutate {gsub => ["RESOLUTION", "['"\\]","0"]
mutate {gsub => ["SUMMARY", "['"\\]","0"]
}
output{
elasticsearch {
hosts => "localhost"
index => "reports"
document_type => "Inc details"
}
stdout{}
}
// Error I have been experiencing.
[2018-02-25T21:50:03,909][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.1"}
[2018-02-25T21:50:04,293][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2018-02-25T21:50:04,622][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, {, ,, ] at line 23, column 37 (byte 1933) after filter{\n\n csv {\n \tseparator => \",\"\n#\tskip_empty_columns => true\n#\tcolumns => [ \"Month\", \"Quarter\", \"Year\", \"INCIDENT_ID\", \"REQ_ID\", \"COUNTRY\", \"SERVICE\", \"ASSIGNED_GROUP\", \"STATUS\", \"STATUS_REASON\", \"SERVICE_TYPE\", #\"PRIORITY\", \"URGENCY\", \"IMPACT\", \"ASSIGNED_SUPPORT_ORGANIZATION\", \"ASSIGNED_SUPPORT_COMPANY\", #\"PIN\", \"FIRST_NAME\", \"LAST_NAME\", \"INTERNET_E_MAIL\", \"VIP\", \"CONTACT_SENSITIVITY\", \"ASSIGNEE\", \"SUBMITTER\", \"OWNER\", \"OWNER_SUPPORT_COMPANY\", #\"OWNER_GROUP\", \"OWNER_SUPPORT_ORGANIZATION\", \"DIRECT_CONTACT_COMPANY\", \"RESOLUTION\", \"RESOLUTION_CATEGORY\", \"RESOLUTION_CATEGORY_TIER_2\" , #\"RESOLUTION_CATEGORY_TIER_3\", \"CLOSURE_PRODUCT_CATEGORY_TIER1\", \"CLOSURE_PRODUCT_CATEGORY_TIER2\", \"CLOSURE_PRODUCT_CATEGORY_TIER3\", \"SLA_RESUME_MIN\", #\"SLA_GOAL\", \"INC_SLA\", \"SLA_OVERALLSTARTTIME\", \"SLA_OVERALLSTOPTIME\", \"DUPLICATE_CALL_FLAG\", \"ASSIGNEE_LOGIN_ID\", \"LAST_MODIFIED_BY\" ]\n }\n\tmutate {convert => [\"Month\", \"integer\"]}\n\tmutate {convert => [\"Quarter\", \"integer\"]}\n \tmutate {convert => [\"Year\", \"integer\"]}\n \tmutate {convert => [\"INCIDENT_ID\", \"integer\"]}\n \tmutate {convert => [\"REQ_ID\", \"integer\"]}\n \tmutate {convert => [\"ASSIGNEE_LOGIN_ID\", \"integer\"]}\n \tmutate {convert => [\"SLA_RESUME_MIN\", \"integer\"]}\n\tmutate {gsub => [\"RESOLUTION\", \"['\"", :backtrace=>["C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'", "org/jruby/RubyArray.java:2486:in `map'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/pipeline.rb:51:in `initialize'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/pipeline.rb:169:in `initialize'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:315:in `block in converge_state'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:312:in `block in converge_state'", "org/jruby/RubyArray.java:1734:in `each'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:299:in `converge_state'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:166:in `block in converge_state_and_update'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:164:in `converge_state_and_update'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/agent.rb:90:in `execute'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/logstash-core/lib/logstash/runner.rb:348:in `block in execute'", "C:/Users/1480587/Documents/Chander/Elastic/logstash-6.2.1/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"]}