I am getting the Logstash error below for a multiline JSON file. The error and my config file follow.
Error:
[2018-02-19T19:59:01,332][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, {, } at line 9, column 25 (byte 210) after input {\n\t#stdin { }\n\tfile {\n\t\tpath => \"/home/sdc/kibana_test.json\"\n\t\tstart_position => \"beginning\"\n\t\tsincedb_path => \"/dev/null\"\n # codec => \"json\"\n\t\tcodec => multiline {\n\t\t\tpattern => \"^(\\n+|\\{\"", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'", "org/jruby/RubyArray.java:2486:in `map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:51:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:171:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:335:in `block in converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:332:in `block in converge_state'", "org/jruby/RubyArray.java:1734:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:319:in `converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:166:in `block in converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:164:in `converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:105:in `block in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/interval.rb:18:in `interval'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:94:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:343:in `block in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"]}" "
Conf file:
input {
    #stdin { }
    file {
        path => "/home/sdc/kibana_test.json"
        start_position => "beginning"
        sincedb_path => "/dev/null"
        # codec => "json"
        codec => multiline {
            pattern => "^(\n+|\{)"
            what => "previous"
            negate => "true"
            auto_flush_interval => 1
            max_lines => 10000000
            max_bytes => "200 MiB"
        }
    }
}
filter {
    mutate {
        remove_field => [ "path", "tags", "host", "@version", "@timestamp" ]
    }
    mutate {
        convert => { "duration" => "integer" }
        convert => { "runid" => "integer" }
        convert => { "changelist" => "integer" }
        convert => { "serverid" => "integer" }
        convert => { "testpathid" => "integer" }
        convert => { "resultrootid" => "integer" }
        convert => { "statusid" => "integer" }
        convert => { "resultid" => "integer" }
    }
    date {
        match => [ "starttime", "YYYY-MM-dd HH:mm:ss" ]
        target => "starttime"
    }
    date {
        match => [ "endtime", "YYYY-MM-dd HH:mm:ss" ]
        target => "endtime"
    }
}
output {
    stdout {
        codec => rubydebug
    }
    #if ![tags] {
    elasticsearch {
        hosts => ["10.177.219.149:9200"]
        index => "mrtf_index"
        user => "elastic"
        password => "mrkibana"
        #sniffing => true
        #manage_template => true
    }
    #} else {
    #    file {
    #        path => "/home/sdc/kibana_data/std_mrtf.log"
    #        codec => rubydebug
    #    }
    #}
}
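
For reference, a minimal sketch of the kind of multi-line record this config is meant to consume: one pretty-printed JSON object per entry, starting with "{" at the beginning of a line (which is what the multiline pattern keys on). The field names are taken from the filter block above; the values are placeholders, not the real data.

{
  "runid": 1234,
  "duration": 87,
  "changelist": 5678901,
  "serverid": 12,
  "testpathid": 3456,
  "resultrootid": 789,
  "statusid": 2,
  "resultid": 991,
  "starttime": "2018-02-19 18:05:12",
  "endtime": "2018-02-19 18:06:39"
}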