I am using Logstash 5.1.2 with the logstash-input-kafka 4.1.1 plugin to read Avro data from Kafka 0.9, but shortly after starting, Logstash crashes with a fatal error and stops.
I am using logstash-codec-avro 3.0.0-java. I am able to read messages from Kafka without the avro codec, so connectivity works. Below is my Logstash config.
input {
  kafka {
    bootstrap_servers => "server1:9092,server2:9092,server3:9092"
    topics => ["applogs"]
    group_id => "logstash-1"
    # FIX: the input's default value deserializer (StringDeserializer) decodes
    # each message as UTF-8 text, which corrupts binary Avro payloads before
    # the codec ever sees them — matching the fatal
    # "ArgumentError: negative length" raised inside avro/io.rb's read_bytes.
    # Hand the raw bytes through instead.
    # NOTE(review): requires a logstash-input-kafka version that supports
    # value_deserializer_class — upgrade the plugin if 4.1.1 rejects it.
    value_deserializer_class => "org.apache.kafka.common.serialization.ByteArrayDeserializer"
    codec => avro {
      # Avro schema used to decode every message on the topic.
      schema_uri => "schema.avsc"
    }
  }
}
filter {
# Parse the epoch-milliseconds field "ts" and use it as the event timestamp.
date {
match => ["ts","UNIX_MS"]
target => "@timestamp"
# NOTE(review): UNIX_MS values are absolute instants, so this timezone
# setting should not affect parsing here — confirm it is intentional.
timezone => "America/New_York"
}
}
output {
# Ship events to the Elasticsearch cluster.
elasticsearch {
hosts => ["host1:9200","host2:9200","host3:9200"]
# One index per day, named from the event's @timestamp, with a "-1" suffix.
index => "logstash-applogs-%{+YYYY.MM.dd}-1"
}
}
Error Message:
[2017-01-30T02:49:04,285][INFO ][logstash.pipeline ] Pipeline main started
[2017-01-30T02:49:04,354][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-01-30T02:49:04,788][FATAL][logstash.runner ] An unexpected error occurred! {:error=>#<ArgumentError: negative length -1552376 given>, :backtrace=>["org/jruby/ext/stringio/StringIO.java:829:in read'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:106:in
read'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:93:in read_bytes'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:99:in
read_string'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:299:in read_data'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:362:in
read_map'", "org/jruby/RubyFixnum.java:275:in times'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:360:in
read_map'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:308:in read_data'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:384:in
read_record'", "org/jruby/RubyArray.java:1613:in each'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:382:in
read_record'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:310:in read_data'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/avro-1.8.1/lib/avro/io.rb:275:in
read'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/logstash-codec-avro-3.0.0-java/lib/logstash/codecs/avro.rb:73:in decode'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/logstash-input-kafka-4.1.1/lib/logstash/inputs/kafka.rb:163:in
thread_runner'", "file:/apps/logstash-5.1.2/vendor/jruby/lib/jruby.jar!/jruby/java/java_ext/java.lang.rb:12:in each'", "/apps/logstash-5.1.2/vendor/bundle/jruby/1.9/gems/logstash-input-kafka-4.1.1/lib/logstash/inputs/kafka.rb:162:in
thread_runner'"]}