An unexpected error occurred! {:error=>#<Errno::EPIPE: Broken pipe - <STDOUT>>

I'm getting a fatal error while running Logstash. This is my Logstash log file:

[2018-07-03T17:14:51,463][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"/home/automaton/logtesting/logstash-6.1.0/modules/fb_apache/configuration"}
[2018-07-03T17:14:51,482][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"/home/automaton/logtesting/logstash-6.1.0/modules/netflow/configuration"}
[2018-07-03T17:14:52,659][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-07-03T17:14:53,865][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.1.0"}
[2018-07-03T17:14:54,772][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>13002}
[2018-07-03T17:15:05,816][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:13003/]}}
[2018-07-03T17:15:05,838][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://localhost:13003/, :path=>"/"}
[2018-07-03T17:15:06,143][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://localhost:13003/"}
[2018-07-03T17:15:06,206][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>nil}
[2018-07-03T17:15:06,228][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-07-03T17:15:06,261][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "omit_norms"=>true}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"string", "index"=>"analyzed", "omit_norms"=>true, "fielddata"=>{"format"=>"disabled"}, "fields"=>{"raw"=>{"type"=>"string", "index"=>"not_analyzed", "doc_values"=>true, "ignore_above"=>256}}}}}, {"float_fields"=>{"match"=>"*", "match_mapping_type"=>"float", "mapping"=>{"type"=>"float", "doc_values"=>true}}}, {"double_fields"=>{"match"=>"*", "match_mapping_type"=>"double", "mapping"=>{"type"=>"double", "doc_values"=>true}}}, {"byte_fields"=>{"match"=>"*", "match_mapping_type"=>"byte", "mapping"=>{"type"=>"byte", "doc_values"=>true}}}, {"short_fields"=>{"match"=>"*", "match_mapping_type"=>"short", "mapping"=>{"type"=>"short", "doc_values"=>true}}}, {"integer_fields"=>{"match"=>"*", "match_mapping_type"=>"integer", "mapping"=>{"type"=>"integer", "doc_values"=>true}}}, {"long_fields"=>{"match"=>"*", "match_mapping_type"=>"long", "mapping"=>{"type"=>"long", "doc_values"=>true}}}, {"date_fields"=>{"match"=>"*", "match_mapping_type"=>"date", "mapping"=>{"type"=>"date", "doc_values"=>true}}}, {"geo_point_fields"=>{"match"=>"*", "match_mapping_type"=>"geo_point", "mapping"=>{"type"=>"geo_point", "doc_values"=>true}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "doc_values"=>true}, "@version"=>{"type"=>"string", "index"=>"not_analyzed", "doc_values"=>true}, "geoip"=>{"type"=>"object", "dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip", "doc_values"=>true}, "location"=>{"type"=>"geo_point", "doc_values"=>true}, "latitude"=>{"type"=>"float", "doc_values"=>true}, "longitude"=>{"type"=>"float", "doc_values"=>true}}}}}}}}
[2018-07-03T17:15:06,315][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:13003"]}
[2018-07-03T17:15:06,514][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>1, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>125, :thread=>"#<Thread:0x1a6c8e2b run>"}
[2018-07-03T17:15:07,070][INFO ][logstash.pipeline ] Pipeline started {"pipeline.id"=>"main"}
[2018-07-03T17:15:07,262][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}
[2018-07-03T17:16:04,121][FATAL][logstash.runner ] An unexpected error occurred! {:error=>#<Errno::EPIPE: Broken pipe - <STDOUT>>, :backtrace=>["org/jruby/RubyIO.java:1457:in `write'", "org/jruby/RubyIO.java:1428:in `write'", "/home/automaton/logtesting/logstash-6.1.0/vendor/bundle/jruby/2.3.0/gems/logstash-output-stdout-3.1.3/lib/logstash/outputs/stdout.rb:44:in `block in multi_receive_encoded'", "org/jruby/RubyArray.java:1734:in `each'", "/home/automaton/logtesting/logstash-6.1.0/vendor/bundle/jruby/2.3.0/gems/logstash-output-stdout-3.1.3/lib/logstash/outputs/stdout.rb:43:in `multi_receive_encoded'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/outputs/base.rb:90:in `multi_receive'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/output_delegator_strategies/single.rb:15:in `block in multi_receive'", "org/jruby/ext/thread/Mutex.java:148:in `synchronize'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/output_delegator_strategies/single.rb:14:in `multi_receive'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/output_delegator.rb:50:in `multi_receive'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/pipeline.rb:487:in `block in output_batch'", "org/jruby/RubyHash.java:1343:in `each'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/pipeline.rb:486:in `output_batch'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/pipeline.rb:438:in `worker_loop'", "/home/automaton/logtesting/logstash-6.1.0/logstash-core/lib/logstash/pipeline.rb:393:in `block in start_workers'"]}
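From what I understand, Errno::EPIPE on <STDOUT> means that whatever was reading Logstash's standard output went away while the stdout output plugin was still writing to it, for example a pipe to another process that exited, or a terminal session that closed. As a hypothetical illustration (test.conf is a made-up file name), piping Logstash into a command that exits early triggers the same broken-pipe failure:

bin/logstash -f test.conf | head -n 5

Once head prints five lines and exits, the next write from the stdout output raises EPIPE and the whole pipeline crashes, which matches the backtrace above.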

My config file is:

input {
  file {
    path => "/home/automaton/logtesting/logstash-6.1.0/bin/repoprolog.log"
    start_position => "beginning"
    codec => multiline {
      pattern => "%{DATE}_%{TIME} - "
      negate => true
      what => "previous"
      auto_flush_interval => 1
      charset => "ISO-8859-1"
    }
    sincedb_path => "/dev/null"
    ignore_older => 0
  }
}
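If I understand the multiline codec correctly, negate => true with what => "previous" means any line that does not start with the %{DATE}_%{TIME} - prefix gets appended to the event before it, and auto_flush_interval => 1 pushes out the pending event after a second even if no new line arrives. So for hypothetical input like this (lines invented to match the pattern):

07-03-18_17:15:06 - 1001 pending OK first message
    continuation line without a timestamp
07-03-18_17:15:07 - 1002 done OK second message

the first two physical lines should be flushed as a single event.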

filter {
  grok {
    match => { "message" => "%{DATE}_%{TIME} - %{POSINT:value} %{DATA:status} %{WORD:result} %{GREEDYDATA:final}" }
  }
}
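Assuming the hypothetical sample line above ("07-03-18_17:15:06 - 1001 pending OK first message"), the grok filter should extract roughly these fields (a sketch of rubydebug-style output, not captured from a real run):

{
     "value" => "1001",
    "status" => "pending",
    "result" => "OK",
     "final" => "first message"
}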

output {
  elasticsearch {
    hosts => ["localhost:13003"]
  }
  stdout {
    codec => rubydebug
  }
}
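Since the backtrace points at logstash-output-stdout-3.1.3, I assume the fatal error should go away if nothing writes to the dead STDOUT. A minimal sketch of the output section with the console output dropped (keeping only the Elasticsearch output) would be:

output {
  elasticsearch {
    hosts => ["localhost:13003"]
  }
  # stdout { codec => rubydebug } removed here: it is only needed for debugging,
  # and it is the plugin that raises EPIPE once the console or pipe goes away
}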
