Hello Team,
I recently upgraded Logstash from 1.4.0 to 2.1.1. The pipeline works fine with my product; however, the following errors are surfacing in /var/log/logstash/logstash.log:
{:timestamp=>"2016-01-14T17:50:09.683000+0000", :message=>"Failed to flush outgoing items", :outgoing_count=>15, :exception=>"Redis::CommandError", :backtrace=>["/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis/client.rb:114:in call'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:1030:in
rpush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:57:in synchronize'", "/opt/logstash/vendor/jruby/lib/ruby/1.9/monitor.rb:211:in
mon_synchronize'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:57:in synchronize'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:1029:in
rpush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-redis-2.0.2/lib/logstash/outputs/redis.rb:200:in flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:221:in
buffer_flush'", "org/jruby/RubyHash.java:1342:in each'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:216:in
buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:193:in buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:112:in
buffer_initialize'", "org/jruby/RubyKernel.java:1479:in loop'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:110:in
buffer_initialize'"], :level=>:warn}
{:timestamp=>"2016-01-14T17:50:09.685000+0000", :message=>"Failed to send backlog of events to Redis", :identity=>"default", :exception=>#<Redis::CommandError: MISCONF Errors writing to the AOF file: Read-only file system>, :backtrace=>["/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis/client.rb:114:in call'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:1030:in
rpush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:57:in synchronize'", "/opt/logstash/vendor/jruby/lib/ruby/1.9/monitor.rb:211:in
mon_synchronize'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:57:in synchronize'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/redis-3.2.2/lib/redis.rb:1029:in
rpush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-redis-2.0.2/lib/logstash/outputs/redis.rb:200:in flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:221:in
buffer_flush'", "org/jruby/RubyHash.java:1342:in each'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:216:in
buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:193:in buffer_flush'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:112:in
buffer_initialize'", "org/jruby/RubyKernel.java:1479:in loop'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/stud-0.0.22/lib/stud/buffer.rb:110:in
buffer_initialize'"], :level=>:warn}
Redis output config:
redis {
  host => [
    '10.8.10.100',
    '10.8.10.101'
  ]
  port => 6379
  db => 0
  password => 'XXXXXXXXX'
  shuffle_hosts => true
  data_type => 'list'
  key => 'logstash:stallion'
  workers => 4
  batch => true
}
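Since shuffle_hosts is on and there are two Redis hosts, I assume the failures could be coming from just one of them. To narrow it down I was going to try the same RPUSH the plugin's flush does against each host individually, roughly like this (again only a sketch; key and password are as in the config above):

require "redis"

# Try the write the plugin performs (RPUSH for data_type => 'list') against each
# configured host, to see which instance returns the MISCONF error.
["10.8.10.100", "10.8.10.101"].each do |host|
  r = Redis.new(host: host, port: 6379, db: 0, password: "XXXXXXXXX")
  begin
    r.rpush("logstash:stallion", "manual test event")
    puts "#{host}: write OK"
  rescue Redis::CommandError => e
    puts "#{host}: #{e.message}"   # expect the MISCONF message from the affected host
  end
end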
And the Redis version on my Redis queue hosts (10.8.10.100/101) is:
Redis server v=2.8.9 sha=00000000:0 malloc=jemalloc-3.2.0 bits=64 build=4066a43f25169284
Thanks/AM