I'm trying to set up the ELK stack under Rancher/Docker and am having problems with Logstash. Elasticsearch and Kibana are up and running from your images. I'm running two Logstash instances: a collector that pushes messages into Redis (which works) and an indexer that drains Redis and writes to Elasticsearch. When I start the indexer I get a plugin registration error I don't understand:
logstash-indexer_1 | 2017-04-07T14:56:25.697034709Z [2017-04-07T14:56:25,691][ERROR][logstash.pipeline ] Error registering plugin {:plugin=>"#<LogStash::OutputDelegator:0x283d61fb @namespaced_metric=#<LogStash::Instrument::NamespacedMetric:0x37ad53d0 @metric=#<LogStash::Instrument::Metric:0x7a619a7 @collector=#<LogStash::Instrument::Collector:0x52681773 @agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x1119967c @store=#<Concurrent::Map:0x4cf7745c @default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x7bab2744>, @fast_lookup=#<Concurrent::Map:0x696a64cd @default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plugins, :outputs, :\"27a302c857007510c987773d3622f41f4bd66ffd-2\"]>, @metric=#<LogStash::Instrument::NamespacedMetric:0x433f62a1 @metric=#<LogStash::Instrument::Metric:0x7a619a7 @collector=#<LogStash::Instrument::Collector:0x52681773 @agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x1119967c @store=#<Concurrent::Map:0x4cf7745c @default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x7bab2744>, @fast_lookup=#<Concurrent::Map:0x696a64cd @default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plugins, :outputs]>, @logger=#<LogStash::Logging::Logger:0x5bfaa065 @logger=#<Java::OrgApacheLoggingLog4jCore::Logger:0x2d137b6c>>, @strategy=#<LogStash::OutputDelegatorStrategies::Shared:0x3a833c8c @output=<LogStash::Outputs::ElasticSearch hosts=>[//elasticsearch:9200], index=>\"dev-logstash-%{+YYYY.MM.dd}\", user=>\"elastic\\t\", password=><password>, id=>\"27a302c857007510c987773d3622f41f4bd66ffd-2\", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>\"plain_066d4705-b852-41bb-9238-7b533e298848\", enable_metric=>true, charset=>\"UTF-8\">, workers=>1, manage_template=>true, template_name=>\"logstash\", template_overwrite=>false, idle_flush_time=>1, doc_as_upsert=>false, script_type=>\"inline\", script_lang=>\"painless\", script_var_name=>\"event\", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, action=>\"index\", ssl_certificate_verification=>true, sniffing=>false, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, healthcheck_path=>\"/\", absolute_healthcheck_path=>false, resurrect_delay=>5, validate_after_inactivity=>10000>>, @id=\"27a302c857007510c987773d3622f41f4bd66ffd-2\", @metric_events=#<LogStash::Instrument::NamespacedMetric:0x3ced078d @metric=#<LogStash::Instrument::Metric:0x7a619a7 @collector=#<LogStash::Instrument::Collector:0x52681773 @agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x1119967c @store=#<Concurrent::Map:0x4cf7745c @default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x7bab2744>, @fast_lookup=#<Concurrent::Map:0x696a64cd @default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plugins, :outputs, :\"27a302c857007510c987773d3622f41f4bd66ffd-2\", :events]>, @output_class=LogStash::Outputs::ElasticSearch>", :error=>"bad component(expected userinfo component or user component): elastic\t"}
logstash-indexer_1 | 2017-04-07T14:56:25.702296464Z [2017-04-07T14:56:25,696][ERROR][logstash.agent ] Pipeline aborted due to error {:exception=>#<URI::InvalidComponentError: bad component(expected userinfo component or user component): elastic >, :backtrace=>["/usr/share/logstash/vendor/jruby/lib/ruby/1.9/uri/generic.rb:412:in `check_user'", "/usr/share/logstash/vendor/jruby/lib/ruby/1.9/uri/generic.rb:483:in `user='", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/http_client.rb:272:in `host_to_url'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/http_client.rb:260:in `build_pool'", "org/jruby/RubyArray.java:2414:in `map'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/http_client.rb:260:in `build_pool'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/http_client.rb:58:in `initialize'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/http_client_builder.rb:80:in `build'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch.rb:207:in `build_client'", "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-6.2.6-java/lib/logstash/outputs/elasticsearch/common.rb:20:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/output_delegator_strategies/shared.rb:8:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/output_delegator.rb:41:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:257:in `register_plugin'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:268:in `register_plugins'", "org/jruby/RubyArray.java:1613:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:268:in `register_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:277:in `start_workers'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:207:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:389:in `start_pipeline'"]}
The relevant docker-compose service definition is:
# this is the main worker that drains redis, parses and inserts into elasticsearch
logstash-indexer:
  image: docker.elastic.co/logstash/logstash:5.3.0
  command: "logstash -f /etc/logstash/conf.d/logstash-indexer-simple.rb"
  volumes:
    - logstash:/etc/logstash/conf.d
  volume_driver: convoy
  links:
    - redis:redis
    - elasticsearch-data:elasticsearch
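For context, the redis and elasticsearch-data services those links point to are defined elsewhere in the same compose file. A trimmed, illustrative sketch (the image tags and settings here are approximations, not my exact definitions):

# illustrative sketch only -- my real definitions differ in detail
redis:
  image: redis:3.2
elasticsearch-data:
  image: docker.elastic.co/elasticsearch/elasticsearch:5.3.0

The links above make them reachable from the indexer under the aliases redis and elasticsearch.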
and the indexer config file (logstash-indexer-simple.rb) is:
input {
  redis {
    host => "redis"
    port => 6379
    key => "logstash"
    data_type => "list"
  }
}
output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "dev-logstash-%{+YYYY.MM.dd}"
    user => "elastic "
    password => 'changeme'
  }
}
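For completeness, the collector instance that feeds Redis (the part that works) writes with a matching redis output. A trimmed sketch of its config, where the tcp input is just an illustrative stand-in for my real inputs:

input {
  # illustrative stand-in; my actual collector has different inputs
  tcp {
    port => 5000
  }
}
output {
  # must match the indexer's redis input: same key and data_type
  redis {
    host => "redis"
    port => 6379
    key => "logstash"
    data_type => "list"
  }
}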
Note: I'm using the default admin credentials because I haven't managed to get a new logstash-writer user to stop returning 401 errors.
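For what it's worth, I created that user roughly like this via the X-Pack security API (the role name and its privileges are my own invention, which may well be the source of the 401s):

# rough sketch of my attempt; logstash_writer_role is a custom role I defined,
# and the password here is a placeholder
curl -u elastic:changeme -XPOST 'http://elasticsearch:9200/_xpack/security/user/logstash-writer' \
  -H 'Content-Type: application/json' \
  -d '{ "password": "not-my-real-password", "roles": ["logstash_writer_role"] }'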