Hi all....
Here I am trying to load Elasticsearch data into another Elasticsearch index (or a file),
but the problem is
in my input elasticsearch index i have 1000 docs
when it reaches the output Elasticsearch index I end up with more docs, because
the Elasticsearch input keeps re-ingesting the same data over and over.
here is my conf
input {
  elasticsearch {
    hosts => "192.168.1.75:9200"
    index => "titanic"
    #query => '{ "query":{"range" : { "@timestamp" : { "gt" : "now-400d" } } }, "sort": [ {"PassengerId" : {"order" : "asc"}} ] }'
    # query => '{ "query": { "match_all": {} } }'
    size => 1000
    scroll => "1m"
    type => "logs"
    codec => "json"
    # Expose each hit's _index/_type/_id under [@metadata] so the output can
    # reuse the original document id instead of letting ES generate a new one.
    docinfo => true
  }
}
output {
  elasticsearch {
    index => "atitanic"
    #template_overwrite => true
    #template => "/tmp/current-loc.json"
    # document_type => "%{type}"
    # Index each event under its source _id: when the input plugin restarts
    # and re-reads the same 1000 docs, they overwrite themselves instead of
    # piling up as duplicates with auto-generated ids.
    document_id => "%{[@metadata][_id]}"
    hosts => ["http://192.168.1.59:9233"]
  }
  stdout { codec => rubydebug }
}
# NOTE(review): the 400 "Unrecognized token 'DnF1ZXJ5VGhlbkZldGNo...'" in the
# log is the scroll_id being POSTed as a bare string instead of JSON — that
# looks like an outdated logstash-input-elasticsearch plugin talking to ES 5.x;
# upgrading the plugin (bin/logstash-plugin update logstash-input-elasticsearch)
# should stop the restart loop that triggers the re-ingestion. TODO confirm
# plugin/ES versions.
I am getting this error on the console:
2017-09-25T17:15:49,317][ERROR][logstash.pipeline ] A plugin had an unrecoverable error. Will restart this plugin.
Plugin: <LogStash::Inputs::Elasticsearch hosts=>["192.168.1.75:9200"], index=>"titanic", query=>"{ \"query\":{\"range\" : { \"@timestamp\" : { \"gt\" : \"now-400d\" } } }, \"sort\": [ {\"PassengerId\" : {\"order\" : \"asc\"}} ] }", size=>1000, scroll=>"1m", type=>"ela", id=>"20146ff68c8e2c66a88816e21d260df8c2752774-1", enable_metric=>true, codec=><LogStash::Codecs::JSON id=>"json_060098d1-e59b-4f90-8792-2e9538924d70", enable_metric=>true, charset=>"UTF-8">, docinfo=>false, docinfo_target=>"@metadata", docinfo_fields=>["_index", "_type", "_id"], ssl=>false>
Error: [400] {"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"Failed to parse request body"}],"type":"illegal_argument_exception","reason":"Failed to parse request body","caused_by":{"type":"json_parse_exception","reason":"Unrecognized token 'DnF1ZXJ5VGhlbkZldGNoBQAAAAAADR': was expecting ('true', 'false' or 'null')\n at [Source: org.elasticsearch.transport.netty4.ByteBufStreamInput@252d947c; line: 1, column: 32]"}},"status":400}
Thank You.