[1.5.0] elasticsearch output error

I get the following error messages and cannot send log data to the Elasticsearch server, possibly after updating from Java 7 to Java 8.


{:timestamp=>"2015-06-05T10:18:17.134000+0000", :message=>"Got error to send bulk of actions to elasticsearch server at logsene-receiver.sematext.com : undefined method map' for \"items\":String", :level=>:error} {:timestamp=>"2015-06-05T10:18:17.134000+0000", :message=>"Failed to flush outgoing items", :outgoing_count=>1, :exception=>#<NoMethodError: undefined methodmap' for "items":String>, :backtrace=>["/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch/protocol.rb:93:in normalize_bulk_response'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch/protocol.rb:108:inbulk'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:437:in submit'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:462:inflush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:219:in buffer_flush'", "org/jruby/RubyHash.java:1341:ineach'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:216:in buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:193:inbuffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:159:in buffer_receive'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:426:inreceive'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/outputs/base.rb:88:in handle'", "(eval):31:inoutput_func'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/pipeline.rb:244:in outputworker'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/pipeline.rb:166:instart_outputs'"], :level=>:warn}
{:timestamp=>"2015-06-05T10:18:18.236000+0000", :message=>"Got error to send bulk of actions to elasticsearch server at logsene-receiver.sematext.com : undefined method map' for \"items\":String", :level=>:error} {:timestamp=>"2015-06-05T10:18:18.237000+0000", :message=>"Failed to flush outgoing items", :outgoing_count=>1, :exception=>#<NoMethodError: undefined methodmap' for "items":String>, :backtrace=>["/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch/protocol.rb:93:in normalize_bulk_response'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch/protocol.rb:108:inbulk'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:437:in submit'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:462:inflush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:219:in buffer_flush'", "org/jruby/RubyHash.java:1341:ineach'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:216:in buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:193:inbuffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.19/lib/stud/buffer.rb:159:in buffer_receive'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch.rb:426:inreceive'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/outputs/base.rb:88:in handle'", "(eval):31:inoutput_func'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/pipeline.rb:244:in outputworker'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-1.5.0-java/lib/logstash/pipeline.rb:166:instart_outputs'"], :level=>:warn}

My Logstash configuration is below:

input {
tcp {
type => "lsAALog"
port => 4560
codec => json_lines
}
}

output {
if [logType] == "AA" {
elasticsearch {
protocol => "http"
ssl => "true"
host => "logsene-receiver.sematext.com"
port => 443
index => "12ekgiuyg2-42e1-lkjkd1duygaiswd-xxxxx"
manage_template => false
}
} else if [logType] == "USR" {
elasticsearch {
protocol => "http"
ssl => "true"
host => "logsene-receiver.sematext.com"
port => 443
index => "7d03a35a-87fd-asdf2ef2iu1h2ekyg-xxxxx"
manage_template => false
}
} else {
elasticsearch {
protocol => "http"
ssl => "true"
host => "logsene-receiver.sematext.com"
port => 443
index => "1ekyg12euyg-488e-fcb4b8c4a2b1-xxxxx"
manage_template => false
}
}
}

I found a Logstash bug in logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-0.2.4-java/lib/logstash/outputs/elasticsearch/protocol.rb:93

The bulk_response variable is just a JSON-formatted String object, not a hash (map).

I inserted JSON parsing code and tested that it works well.

This is the bulk method into which I've inserted the JSON parsing code:

require 'rubygems'
require 'json'
...
      # Sends a batch of actions to Elasticsearch via the client's bulk API and
      # returns the normalized response.
      #
      # Each element of `actions` is an (action, args, source) triple; the source
      # document is appended after its action header only when present, and the
      # nested pairs are flattened into the single body list the bulk API expects.
      def bulk(actions)
        body = actions.map do |action, args, source|
          header = { action => args }
          source ? [header, source] : header
        end.flatten

        bulk_response = @client.bulk(:body => body)

        # Workaround: some client/Java combinations hand back the raw JSON
        # string instead of a parsed hash — decode it before normalizing.
        bulk_response = JSON.parse(bulk_response) if bulk_response.is_a?(String)
        self.class.normalize_bulk_response(bulk_response)
      end # def bulk

I am not seeing this behavior using Java 8.

can you try updating to the latest plugin version via

bin/plugin update logstash-output-elasticsearch

The @client object is the Elasticsearch Ruby client, so I am not sure how
the data type of the response could have become a String.