Hello, I have a brand new logstash installation which is failing to start with the following error:
[2017-08-07T12:18:11,073][ERROR][logstash.pipeline ] Logstash failed to create queue {"exception"=>"integer 2147483648 too big to convert to `int'", "backtrace"=>["org/logstash/ackedqueue/ext/JrubyAckedQueueExtLibrary.java:61:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_acked_queue.rb:30:in `create_file_based'", "/usr/share/logstash/logstash-core/lib/logstash/queue_factory.rb:29:in `create'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:159:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:286:in `create_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:95:in `register_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:314:in `execute'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/clamp-0.6.5/lib/clamp/command.rb:67:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:209:in `run'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/clamp-0.6.5/lib/clamp/command.rb:132:in `run'", "/usr/share/logstash/lib/bootstrap/environment.rb:71:in `(root)'"]}
[2017-08-07T12:18:11,090][ERROR][logstash.agent ] Cannot create pipeline {:reason=>"integer 2147483648 too big to convert to `int'", :backtrace=>["org/logstash/ackedqueue/ext/JrubyAckedQueueExtLibrary.java:61:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_acked_queue.rb:30:in `create_file_based'", "/usr/share/logstash/logstash-core/lib/logstash/queue_factory.rb:29:in `create'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:159:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:286:in `create_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:95:in `register_pipeline'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:314:in `execute'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/clamp-0.6.5/lib/clamp/command.rb:67:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:209:in `run'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/clamp-0.6.5/lib/clamp/command.rb:132:in `run'", "/usr/share/logstash/lib/bootstrap/environment.rb:71:in `(root)'"]}
the logstash.yml looks like this:
path.data: /var/lib/logstash
pipeline.workers: 2
pipeline.output.workers: 1
pipeline.batch.size: 125
pipeline.batch.delay: 5
path.config: /etc/logstash/conf.d
config.reload.automatic: true
config.reload.interval: 3
config.debug: true
queue.type: persisted
path.queue: /var/lib/logstash/queue
queue.page_capacity: 2048mb
queue.max_events: 0
queue.max_bytes: 10240mb
queue.checkpoint.acks: 1024
queue.checkpoint.writes: 1024
queue.checkpoint.interval: 1000
http.host: "127.0.0.1"
http.port: 9600-9700
log.level: info
path.logs: /var/log/logstash
and the conf file looks like the following:
input {
beats {
host => "0.0.0.0"
port => "5044"
}
}
filter {
if [type == "auth"] {
grok {
match => { 'message' => '%{CISCOTIMESTAMP:timestamp} %{IPORHOST:host} %{WORD:type}%{NOTSPACE:pid} %{GREEDYDATA:message}' }
}
}
}
output {
stdout {
codec => rubydebug
}
elasticsearch {
hosts => ["http://10.0.0.1:9200", "http://10.0.0.2:9200"]
user => elastic
password => changeme
}
}
Any help would be greatly appreciated.