I use Logstash to export Elasticsearch-indexed data to another system. The Elasticsearch indexes are created per month. When I start Logstash, the exception below is logged every second. I would expect the process to stop with exit code 1, but instead it keeps running until I stop it manually.
Error
{:timestamp=>"2018-04-11T16:59:20.552000-0400", :message=>"A plugin had an unrecoverable error. Will restart this plugin.\n Plugin: <LogStash::Inputs::Elasticsearch hosts=>["myhost.com:19301"], index=>"events-2018-04", docinfo=>true, docinfo_fields=>["_id"], query=>"{ \"query\": { \"match_all\": {} }}", codec=><LogStash::Codecs::JSON charset=>"UTF-8">, scan=>true, size=>1000, scroll=>"1m", docinfo_target=>"@metadata", ssl=>false>\n Error: [404] {"error":{"root_cause":[{"type":"index_not_found_exception","reason":"no such index","index":"events-2018-04","resource.type":"index_or_alias","resource.id":"events-2018-04"}],"type":"index_not_found_exception","reason":"no such index","index":"events-2018-04","resource.type":"index_or_alias","resource.id":"events-2018-04"},"status":404}", :level=>:error}
Pipeline configuration file:
# Read every document from one Elasticsearch index and dump it to a file.
# HOST, PORT, INDEX and FILENAME are supplied via environment variables.
#
# NOTE(review): per the error log above, when ${INDEX} does not exist the
# elasticsearch input fails with a 404 (index_not_found_exception) and
# Logstash restarts the plugin indefinitely instead of exiting — confirm
# the (per-month) index exists before starting, or guard the launch script.
input {
elasticsearch {
hosts => ["${HOST}:${PORT}"]          # source cluster, host:port from env
index => "${INDEX}"                   # e.g. "events-2018-04" (monthly index)
docinfo => true                       # expose document metadata in [@metadata]
docinfo_fields => ["_id"]             # only the document id is needed downstream
query => '{ "query": { "match_all": {} }}'  # export everything in the index
}
}
filter {
mutate {
# Drop Logstash bookkeeping fields so the output contains only source data.
remove_field => [ "@version", "@timestamp"]
# Surface the Elasticsearch document id as a regular "_id" field,
# since [@metadata] is not written to outputs.
add_field => { "_id" => "%{[@metadata][_id]}" }
}
}
output {
# stdout { codec => rubydebug }        # uncomment for interactive debugging
file {
path => "${FILENAME}"                 # destination file, from env
}
}
Thanks for your help