I recently upgraded the Elastic Stack from 7.0.1 to 7.5.1. Elasticsearch and Kibana upgraded without issue, but I am having some trouble with Logstash.
When I run Logstash 7.0.1, everything works as normal with no issues. After upgrading to 7.5.1, CPU usage went through the roof: it sits at 100% utilization while starting and running Logstash. We are still in pre-prod, so I have a pretty basic setup, and we only ingest about 15 documents a minute, so it is not ingestion volume that is causing the high utilization.
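If a thread dump would help, I can grab one from the Logstash monitoring API once the process is up (it starts on port 9600, per the log below). I would pull it with something like:
curl -XGET "http://localhost:9600/_node/hot_threads?human=true&threads=10"
Happy to post that output if it is useful.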
The config directory is exactly the same between 7.0.1 and 7.5.1, and the log for 7.5.1 doesn't indicate anything fishy going on either. I have attached it below, along with my logstash.conf. Any thoughts would be very helpful. Thanks.
logstash-plain.log for Logstash 7.5.1
[2020-01-07T14:25:54,661][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2020-01-07T14:25:54,699][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.5.1"}
[2020-01-07T14:26:03,790][INFO ][org.reflections.Reflections] Reflections took 90 ms to scan 1 urls, producing 20 keys and 40 values
[2020-01-07T14:26:07,358][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@localhost:9200/]}}
[2020-01-07T14:26:08,098][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2020-01-07T14:26:08,221][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>7}
[2020-01-07T14:26:08,228][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2020-01-07T14:26:08,384][WARN ][logstash.outputs.elasticsearch] DEPRECATION WARNING: Connecting to an OSS distribution of Elasticsearch using the default distribution of Logstash will stop working in Logstash 8.0.0. Please upgrade to the default distribution of Elasticsearch, or use the OSS distribution of Logstash {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2020-01-07T14:26:08,456][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://localhost:9200"]}
[2020-01-07T14:26:08,493][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@localhost:9200/]}}
[2020-01-07T14:26:08,508][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2020-01-07T14:26:08,517][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>7}
[2020-01-07T14:26:08,543][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2020-01-07T14:26:08,637][WARN ][logstash.outputs.elasticsearch] DEPRECATION WARNING: Connecting to an OSS distribution of Elasticsearch using the default distribution of Logstash will stop working in Logstash 8.0.0. Please upgrade to the default distribution of Elasticsearch, or use the OSS distribution of Logstash {:url=>"http://elastic:xxxxxx@localhost:9200/"}
[2020-01-07T14:26:08,674][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://localhost:9200"]}
[2020-01-07T14:26:09,181][INFO ][logstash.outputs.elasticsearch] Using default mapping template
[2020-01-07T14:26:09,333][INFO ][logstash.outputs.elasticsearch] Index Lifecycle Management is set to 'auto', but will be disabled - Index Lifecycle management is not available in your Elasticsearch cluster
[2020-01-07T14:26:09,337][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2020-01-07T14:26:09,466][INFO ][logstash.outputs.elasticsearch] Using default mapping template
[2020-01-07T14:26:09,586][INFO ][logstash.outputs.elasticsearch] Index Lifecycle Management is set to 'auto', but will be disabled - Index Lifecycle management is not available in your Elasticsearch cluster
[2020-01-07T14:26:09,587][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2020-01-07T14:26:10,128][WARN ][org.logstash.instrument.metrics.gauge.LazyDelegatingGauge] A gauge metric of an unknown type (org.jruby.specialized.RubyArrayOneObject) has been create for key: cluster_uuids. This may result in invalid serialization. It is recommended to log an issue to the responsible developer/development team.
[2020-01-07T14:26:10,146][INFO ][logstash.javapipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>250, "pipeline.sources"=>["D:/ELK/logstash-7.5.1/config/logstash.conf"], :thread=>"#<Thread:0x677e4b9e run>"}
[2020-01-07T14:26:12,624][INFO ][logstash.inputs.beats ] Beats inputs: Starting input listener {:address=>"0.0.0.0:5044"}
[2020-01-07T14:26:13,656][INFO ][logstash.javapipeline ] Pipeline started {"pipeline.id"=>"main"}
[2020-01-07T14:26:14,581][INFO ][org.logstash.beats.Server] Starting server on port: 5044
[2020-01-07T14:26:15,580][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2020-01-07T14:26:20,134][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
logstash.conf file
input {
  jms {
    type => "jms"
    yaml_file => "D:\ELK\logstash-7.0.1\config\jms.yml"
    yaml_section => "activemq"
    interval => 0
    destination => "preprod.logging.event.q.1"
    pub_sub => false
    include_header => false
    include_properties => false
    include_body => true
    use_jms_timestamp => true
  }
  jms {
    type => "jms"
    yaml_file => "D:\ELK\logstash-7.0.1\config\jms.yml"
    yaml_section => "activemq"
    interval => 0
    destination => "tst.event.logging.q.1"
    pub_sub => false
    include_header => false
    include_properties => false
    include_body => true
    use_jms_timestamp => true
  }
  beats {
    type => "beats"
    port => 5044
  }
}
filter {
  if [type] == "beats" {
    if [log][file][path] =~ /(dcma-all.log)/ or [log][file][path] =~ /(dcma-user.log)/ {
      grok {
        match => { "message" => "%{IPV4:ephesoftVersion} %{CISCO_REASON:operatingSystem} %{TIMESTAMP_ISO8601:date} %{LOGLEVEL:logLevel} (?<container>[^\s]+) %{JAVACLASS:javaClass} \- %{GREEDYDATA:body}" }
      }
    }
    else if [log][file][path] =~ /(dcma_report_all.log)/ {
      grok {
        match => { "message" => "%{IPV4:ephesoftVersion} %{CISCO_REASON:operatingSystem} \[%{WORD:logLevel} %{DATESTAMP:date}\] %{NOTSPACE:javaClass} \- %{GREEDYDATA:body}" }
      }
    }
    else {
      grok {
        match => { "message" => "%{TIMESTAMP_ISO8601:date} %{WORD:logLevel} (?<container>[^\-]+) \- %{GREEDYDATA:body}" }
      }
    }
    date {
      match => [ "date", "dd/MM/yyyy HH:mm:ss,SSS", "ISO8601" ]
    }
  }
  if [type] == "jms" {
    json {
      source => "message"
      remove_field => ["message"]
    }
  }
}
output {
  if [type] == "jms" {
    elasticsearch {
      hosts => ["http://localhost:9200"]
      index => "talendesb-log"
      user => "myUsername"
      password => "myPassword"
    }
  }
  if [type] == "beats" {
    elasticsearch {
      hosts => ["http://localhost:9200"]
      index => "ephesoft-log"
      user => "myUsername"
      password => "myPassword"
    }
  }
}
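For reference, I start Logstash from the command line with the pipeline config passed via -f (which is why the pipelines.yml warning appears at the top of the log), roughly like this:
D:\ELK\logstash-7.5.1\bin\logstash.bat -f D:\ELK\logstash-7.5.1\config\logstash.conf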