Hi all. Super strange problem here. I upgraded my Logstash installation, which is managed by the Puppet Forge module elasticsearch/logstash, from 1.5.0 beta 1 to 1.5.0-1. The uninstall of the beta worked fine, and the install of the new version went fine as well.
What I'm seeing is that when I start LS from the service definition the Puppet module creates, it starts up, but no events are ever processed. If I start LS with the same config file from the CLI, events are processed fine. I've been looking at this for hours now and cannot figure it out. Both process strings are identical in "ps -ef" output... I'm just stumped.
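In case it matters, by "the process strings are the same" I just mean the ps command lines match. Below is a rough Python sketch (placeholder PIDs, Linux /proc layout assumed, has to run as root or as the same user as both processes) of how the two processes could be compared beyond that, e.g. effective user, working directory, and environment:

import os

# Placeholder PIDs for the service-started and CLI-started Logstash processes.
SERVICE_PID = 1234
CLI_PID = 5678

def describe(pid):
    # Collect the effective user, working directory, and environment
    # of a running process straight from /proc.
    with open("/proc/%d/environ" % pid, "rb") as f:
        env = set(f.read().decode().split("\0"))
    return {
        "uid": os.stat("/proc/%d" % pid).st_uid,
        "cwd": os.readlink("/proc/%d/cwd" % pid),
        "env": env,
    }

svc, cli = describe(SERVICE_PID), describe(CLI_PID)
print("uid: %s vs %s" % (svc["uid"], cli["uid"]))
print("cwd: %s vs %s" % (svc["cwd"], cli["cwd"]))
# Environment variables present in one process but not the other.
print("env diff: %s" % (svc["env"] ^ cli["env"]))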
When running LS as a service with --debug, I do see everything load successfully:
{:timestamp=>"2015-05-18T21:49:46.552000-0400", :message=>"config LogStash::Outputs::ElasticSearch/@retry_max_interval = 5", :level=>:debug, :file=>"logstash/config/mixin.rb", :line=>"112", :method=>"config_init"}
{:timestamp=>"2015-05-18T21:49:46.905000-0400", :message=>"Registering file input", :path=>["/var/log/httpd/kibana3_access_ssl.log"], :level=>:info, :file=>"logstash/inputs/file.rb", :line=>"76", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:46.907000-0400", :message=>"Registering file input", :path=>["/var/log/httpd/kibana3_error_ssl.log"], :level=>:info, :file=>"logstash/inputs/file.rb", :line=>"76", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:46.910000-0400", :message=>"_sincedb_open: /tmp/access: No such file or directory - /tmp/access", :level=>:debug, :file=>"filewatch/tail.rb", :line=>"213", :method=>"_sincedb_open"}
{:timestamp=>"2015-05-18T21:49:46.912000-0400", :message=>"_discover_file_glob: /var/log/httpd/kibana3_access_ssl.log: glob is: []", :level=>:debug, :file=>"filewatch/watch.rb", :line=>"132", :method=>"_discover_file"}
{:timestamp=>"2015-05-18T21:49:46.916000-0400", :message=>"_sincedb_open: /tmp/error: No such file or directory - /tmp/error", :level=>:debug, :file=>"filewatch/tail.rb", :line=>"213", :method=>"_sincedb_open"}
{:timestamp=>"2015-05-18T21:49:46.917000-0400", :message=>"Adding type with date config", :type=>"", :field=>"timestamp", :format=>"dd/MMM/yyyy:HH:mm:ss Z", :level=>:debug, :file=>"logstash/filters/date.rb", :line=>"174", :method=>"setupMatcher"}
{:timestamp=>"2015-05-18T21:49:48.672000-0400", :message=>"Registering kafka producer", :topic_id=>"app_kibana3_raw", :broker_list=>"bdprodk01.dbhotelcloud.com:9092,bdprodk02.dbhotelcloud.com:9092", :level=>:info, :file=>"logstash/outputs/kafka.rb", :line=>"144", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:48.683000-0400", :message=>"Create client to elasticsearch server on bdprodes02.dbhotelcloud.com:", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"340", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:50.338000-0400", :message=>"Automatic template management enabled", :manage_template=>"true", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"348", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:50.753000-0400", :message=>"Using mapping template", :template=>{"template"=>"logstash-*", "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"_all"=>{"enabled"=>true, "omit_norms"=>true}, "dynamic_templates"=>#<Java::JavaUtil::ArrayList:0x702e7fb5>, "properties"=>{"@version"=>{"type"=>"string", "index"=>"not_analyzed"}, "geoip"=>{"type"=>"object", "dynamic"=>true, "properties"=>{"location"=>{"type"=>"geo_point"}}}}}}}, :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"396", :method=>"get_template"}
{:timestamp=>"2015-05-18T21:49:50.821000-0400", :message=>"New Elasticsearch output", :cluster=>"elasticsearch-prod", :host=>["bdprodes02.dbhotelcloud.com"], :port=>"9300-9305", :embedded=>false, :protocol=>"transport", :level=>:info, :file=>"logstash/outputs/elasticsearch.rb", :line=>"357", :method=>"register"}
{:timestamp=>"2015-05-18T21:49:50.831000-0400", :message=>"Pipeline started", :level=>:info, :file=>"logstash/pipeline.rb", :line=>"86", :method=>"run"}
The output is the same when starting it from the CLI, except that the events from the Apache server actually get processed.
I'm very confused by this behavior, to say the least.
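One thing I notice in the service-mode debug output is the _discover_file_glob line reporting "glob is: []" for the access log path. As far as I understand it, that debug line is just reporting the result of globbing the configured path, roughly like this Python illustration (paths copied from my config; it would have to be run as whatever user the service starts Logstash as, which I'm not sure of):

import glob, os, pwd

# Paths straight from the config below; whether they glob to anything
# (and are readable) can depend on which user this runs as.
paths = ["/var/log/httpd/kibana3_access_ssl.log",
         "/var/log/httpd/kibana3_error_ssl.log"]

print("running as: %s" % pwd.getpwuid(os.geteuid()).pw_name)
for p in paths:
    # An empty list here would mirror the "glob is: []" debug line above.
    print("%s -> %s readable=%s" % (p, glob.glob(p), os.access(p, os.R_OK)))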
Here is my logstash.conf file:
input {
  file {
    path => "/var/log/httpd/kibana3_access_ssl.log"
    type => "kibana3.access.bdprodm05"
    sincedb_path => "/tmp/access"
  }
  file {
    path => "/var/log/httpd/kibana3_error_ssl.log"
    type => "kibana3.error.bdprodm05"
    sincedb_path => "/tmp/error"
  }
}

filter {
  date {
    match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
    tag_on_failure => [ ]
  }
  geoip {
    source => "clientip"
    target => "geoip"
    add_field => { "[geoip][coordinates]" => "%{[geoip][longitude]}" }
    add_field => { "[geoip][coordinates]" => "%{[geoip][latitude]}" }
  }
  # Fix types
  mutate {
    convert => { "[geoip][coordinates]" => "float" }
  }
}

output {
  # stdout { codec => rubydebug }
  kafka {
    batch_num_messages => 10
    broker_list => "mykafkahost1:9092,mykafkahost2:9092"
    client_id => "logstash"
    compression_codec => "gzip"
    producer_type => "async"
    topic_id => "app_kibana3_raw"
  }
  elasticsearch {
    bind_host => "myeshost1"
    cluster => "elasticsearch-prod"
    flush_size => 10
    host => "myeshost1"
    index => "app-kibana3-%{+YYYYMMdd}"
    document_type => "app_kibana3"
    node_name => "logstash-bdprodm05"
    protocol => "transport"
  }
}
Does anyone have any idea what might be going on? Something I can try?
Thanks so much for your time.
Chris