I have a problem with Logstash.
It boots with no errors in the logs, but I don't see anything in Kibana.
Kibana and Elasticsearch are on another node; both apps run in Docker at version 8.5.3.
Logstash is on a separate node from the Kibana and Elasticsearch instance.
It is installed from the .deb package, version 7.5.2.
Logstash logs:
[2022-12-16T11:25:02,163][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.5.2"}
[2022-12-16T11:25:05,808][INFO ][org.reflections.Reflections] Reflections took 54 ms to scan 1 urls, producing 20 keys and 40 values
[2022-12-16T11:25:09,003][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elastic:xxxxxx@*******:9200/]}}
[2022-12-16T11:25:09,440][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://elastic:xxxxxx@********:9200/"}
[2022-12-16T11:25:09,525][INFO ][logstash.outputs.elasticsearch][main] ES Output version determined {:es_version=>8}
[2022-12-16T11:25:09,531][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>8}
[2022-12-16T11:25:09,588][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://********:9200"]}
[2022-12-16T11:25:09,664][INFO ][logstash.outputs.elasticsearch][main] Using default mapping template
[2022-12-16T11:25:09,733][INFO ][logstash.outputs.elasticsearch][main] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>80001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1, "index.lifecycle.name"=>"logstash-policy", "index.lifecycle.rollover_alias"=>"logstash"}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2022-12-16T11:25:09,836][WARN ][org.logstash.instrument.metrics.gauge.LazyDelegatingGauge][main] A gauge metric of an unknown type (org.jruby.specialized.RubyArrayOneObject) has been create for key: cluster_uuids. This may result in invalid serialization. It is recommended to log an issue to the responsible developer/development team.
[2022-12-16T11:25:09,843][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, "pipeline.sources"=>["/etc/logstash/conf.d/logstash-ehcos.conf"], :thread=>"#<Thread:0x4206d5d6 run>"}
[2022-12-16T11:25:10,153][INFO ][logstash.inputs.file ][main] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/var/lib/logstash/plugins/inputs/file/.sincedb_3feb19d4a444dd4ad1171c743f654398", :path=>["/opt/ehcos-server/logs/ehcos-mule*.log"]}
[2022-12-16T11:25:10,201][INFO ][logstash.inputs.file ][main] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/var/lib/logstash/plugins/inputs/file/.sincedb_434f34af7890f1e23f7c131792fad38f", :path=>["/opt/ehcos-server/logs/**/ehCS*.log"]}
[2022-12-16T11:25:10,207][INFO ][logstash.inputs.file ][main] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/var/lib/logstash/plugins/inputs/file/.sincedb_4735d8287188199c63e5087acec18301", :path=>["/opt/ehcos-server/logs/querys*.log"]}
[2022-12-16T11:25:10,213][INFO ][logstash.inputs.file ][main] No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/var/lib/logstash/plugins/inputs/file/.sincedb_55d9daaa2bd1fe556ac82936ae4f71ae", :path=>["/opt/ehcos-server/logs/**/ehHIS*.log"]}
[2022-12-16T11:25:10,226][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2022-12-16T11:25:10,263][INFO ][filewatch.observingtail ][main] START, creating Discoverer, Watch with file and sincedb collections
[2022-12-16T11:25:10,280][INFO ][filewatch.observingtail ][main] START, creating Discoverer, Watch with file and sincedb collections
[2022-12-16T11:25:10,301][INFO ][filewatch.observingtail ][main] START, creating Discoverer, Watch with file and sincedb collections
[2022-12-16T11:25:10,301][INFO ][filewatch.observingtail ][main] START, creating Discoverer, Watch with file and sincedb collections
[2022-12-16T11:25:10,369][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2022-12-16T11:25:11,006][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
My config:
input {
  file {
    type => "Clinic"
    path => "/opt/ehcos-server/logs/**/ehCS*.log"
    start_position => "beginning"
    codec => multiline {
      pattern => "%{TIME}.*"
      negate => "true"
      what => "previous"
    }
  }
  file {
    type => "PMG"
    path => "/opt/ehcos-server/logs/**/ehHIS*.log"
    start_position => "beginning"
    codec => multiline {
      pattern => "%{TIME}.*"
      negate => "true"
      what => "previous"
    }
  }
  file {
    type => "Mule"
    path => "/opt/ehcos-server/logs/ehcos-mule*.log"
    start_position => "beginning"
    codec => multiline {
      pattern => "%{TIME}.*"
      negate => "true"
      what => "previous"
    }
  }
  file {
    type => "Query"
    path => "/opt/ehcos-server/logs/querys*.log"
    start_position => "beginning"
    codec => multiline {
      pattern => "%{TIME}.*"
      negate => "true"
      what => "previous"
    }
  }
}
filter {
  if ([message] =~ /ERROR/ and [message] !~ /Exception/) or [message] =~ /INFO/ {
    drop { }
  }
  if [type] == "Mule" {
    grok { match => [ "message", "%{TIME:time} %{LOGLEVEL:level}" ] }
  }
  if [type] == "Query" {
    mutate {
      gsub => ["message", "^.*?Method", "method"]
    }
    kv {
      source => "message"
      field_split_pattern => ", "
      value_split => ":"
      exclude_keys => [ "Tables", "Success", "Type", "Batch", "QuerySize", "BatchSize", "Query", "Params" ]
    }
    mutate {
      rename => {"Name" => "tenant"}
      gsub => ["tenant", "^.*?jdbc/", ""]
      remove_field => [ "message" ]
    }
  }
  else {
    grok { match => [ "message", "%{TIME:time} %{LOGLEVEL:level} %{NOTSPACE:thread} %{NOTSPACE:tenant}" ] }
  }
}
output {
  elasticsearch {
    hosts => ["http://*******:9200"]
    user => "elastic"
    password => "******"
  }
}
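
One thing I could try to narrow this down: temporarily add a stdout output next to the elasticsearch one, to confirm that events actually reach the output stage and aren't all dropped by the filter (just a debug sketch, using the standard stdout plugin with the rubydebug codec):

output {
  elasticsearch {
    hosts => ["http://*******:9200"]
    user => "elastic"
    password => "******"
  }
  # temporary debug output: prints each event that reaches the outputs
  stdout { codec => rubydebug }
}

If events show up on stdout but never in Elasticsearch/Kibana, that would point at the output or index side rather than at the file inputs or the filter.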