root@elk:/etc# cat /etc/logstash/conf.d/communigate-syslog.conf
input {
  tcp {
    port => 1601
    type => "communigate"
    codec => multiline {
      pattern => "^%{TIMESTAMP_ISO8601} "
      negate => true
      what => "previous"
    }
  }
  syslog {
    port => 5514
    type => "syslog"
    codec => multiline {
      pattern => "^%{SYSLOGTIMESTAMP}"
      negate => true
      what => "previous"
    }
  }
}
filter {
  if [type] == "communigate" {
    # filter for communigate logs
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:loglevel} \[%{DATA:thread}\] %{JAVACLASS:class} - %{GREEDYDATA:logmessage}" }
    }
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  } else if [type] == "syslog" {
    # filter for syslog logs
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
    }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
output {
  if [type] == "communigate" {
    elasticsearch {
      hosts => ["localhost:9200"]
      index => "communigate-%{+YYYY.MM.dd}"
    }
  } else if [type] == "syslog" {
    elasticsearch {
      hosts => ["localhost:9200"]
      index => "syslog-%{+YYYY.MM.dd}"
    }
  }
}
root@elk:/etc#
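Before (re)starting the service it is worth parse-checking this pipeline file. A minimal sketch, assuming the stock package layout (binaries under /usr/share/logstash, settings under /etc/logstash):

# Validate the pipeline syntax and exit without starting any listeners
sudo -u logstash /usr/share/logstash/bin/logstash \
    --path.settings /etc/logstash --config.test_and_exit

On 7.x a successful run ends with a "Config Validation Result: OK" line; any grok or bracket mistake is reported with the offending position instead.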
Part of the output of tail -f /var/log/logstash/logstash-plain.log:
th.data" setting.
[2023-03-31T10:06:55,246][FATAL][org.logstash.Logstash ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby-complete-9.2.20.1.jar:?]
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby-complete-9.2.20.1.jar:?]
at usr.share.logstash.lib.bootstrap.environment.<main>(/usr/share/logstash/lib/bootstrap/environment.rb:94) ~[?:?]
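The truncated `th.data" setting.` fragment just above the FATAL line looks like the tail of Logstash's usual "there is already another instance using the configured data directory ... you must change the "path.data" setting" message, i.e. a second Logstash process (or a stale lock) was holding the data directory when this start was attempted. A quick check, assuming the default path.data of /var/lib/logstash:

# Is another Logstash process still running?
ps -ef | grep '[l]ogstash'

# Is a lock file left behind in the data directory?
ls -l /var/lib/logstash/.lock

If no process is running but the .lock file is still present after an unclean stop, removing it and restarting the service normally clears this up.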
[2023-03-31T10:06:57,162][DEBUG][logstash.instrument.periodicpoller.cgroup] One or more required cgroup files or directories not found: /proc/self/cgroup, /sys/fs/cgroup/cpuacct, /sys/fs/cgroup/cpu
[2023-03-31T10:06:57,695][DEBUG][org.logstash.execution.PeriodicFlush][main] Pushing flush onto pipeline.
[2023-03-31T10:07:00,238][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2023-03-31T10:07:00,238][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[2023-03-31T10:07:02,164][DEBUG][logstash.instrument.periodicpoller.cgroup] One or more required cgroup files or directories not found: /proc/self/cgroup, /sys/fs/cgroup/cpuacct, /sys/fs/cgroup/cpu
[2023-03-31T10:07:02,695][DEBUG][org.logstash.execution.PeriodicFlush][main] Pushing flush onto pipeline.
[2023-03-31T10:07:05,242][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ParNew"}
[2023-03-31T10:07:05,242][DEBUG][logstash.instrument.periodicpoller.jvm] collector name {:name=>"ConcurrentMarkSweep"}
[... the same three DEBUG messages (cgroup files not found, PeriodicFlush, GC collector names) repeat every few seconds up to 10:07:57 ...]
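The repeating cgroup/PeriodicFlush/GC-collector lines are ordinary DEBUG chatter, not errors. If the level was raised to DEBUG for troubleshooting, it can be lowered again without a restart through the API endpoint on port 9600 (started a few lines below), or permanently via log.level in /etc/logstash/logstash.yml. A sketch of the runtime call; the logger name assumes the hierarchical "logstash" logger covers the modules shown above:

# Drop the logstash loggers back to INFO at runtime
curl -XPUT 'localhost:9600/_node/logging?pretty' \
    -H 'Content-Type: application/json' \
    -d '{"logger.logstash": "INFO"}'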
[2023-03-31T10:08:34,301][INFO ][logstash.runner ] Log4j configuration path used is: /etc/logstash/log4j2.properties
[2023-03-31T10:08:34,311][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.17.9", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.18+10 on 11.0.18+10 +indy +jit [linux-x86_64]"}
[2023-03-31T10:08:34,317][INFO ][logstash.runner ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djdk.io.File.enableADS=true, -Djruby.compile.invokedynamic=true, -Djruby.jit.threshold=0, -Djruby.regexp.interruptible=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true]
[2023-03-31T10:08:35,700][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2023-03-31T10:08:37,626][INFO ][org.reflections.Reflections] Reflections took 88 ms to scan 1 urls, producing 119 keys and 419 values
[2023-03-31T10:08:39,395][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2023-03-31T10:08:39,694][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2023-03-31T10:08:39,890][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2023-03-31T10:08:39,901][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (7.17.9) {:es_version=>7}
[2023-03-31T10:08:39,903][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2023-03-31T10:08:39,999][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2023-03-31T10:08:40,000][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2023-03-31T10:08:40,003][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//localhost:9200"]}
[2023-03-31T10:08:40,023][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://localhost:9200/]}}
[2023-03-31T10:08:40,038][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"http://localhost:9200/"}
[2023-03-31T10:08:40,043][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (7.17.9) {:es_version=>7}
[2023-03-31T10:08:40,044][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2023-03-31T10:08:40,052][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>7, :ecs_compatibility=>:disabled}
[2023-03-31T10:08:40,083][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2023-03-31T10:08:40,083][INFO ][logstash.outputs.elasticsearch][main] Config is not compliant with data streams. `data_stream => auto` resolved to `false`
[2023-03-31T10:08:40,093][INFO ][logstash.outputs.elasticsearch][main] Using a default mapping template {:es_version=>7, :ecs_compatibility=>:disabled}
[2023-03-31T10:08:40,278][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>12, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1500, "pipeline.sources"=>["/etc/logstash/conf.d/communigate-syslog.conf"], :thread=>"#<Thread:0x3c7d98eb run>"}
[2023-03-31T10:08:41,393][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>1.11}
[2023-03-31T10:08:41,619][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2023-03-31T10:08:41,624][INFO ][logstash.inputs.tcp ][main][8c1acdae7fa7e3e5188de6b8fd6590e6bf5015be55235c807722c5d9a9ed2849] Starting tcp input listener {:address=>"0.0.0.0:1601", :ssl_enable=>false}
[2023-03-31T10:08:41,641][INFO ][logstash.inputs.syslog ][main][337362fefa288f7e26c1c0ba3e17a7bfbf96b8754e07421ebc926164397e8595] Starting syslog tcp listener {:address=>"0.0.0.0:5514"}
[2023-03-31T10:08:41,654][INFO ][logstash.inputs.syslog ][main][337362fefa288f7e26c1c0ba3e17a7bfbf96b8754e07421ebc926164397e8595] Starting syslog udp listener {:address=>"0.0.0.0:5514"}
[2023-03-31T10:08:41,699][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2023-03-31T10:09:01,092][INFO ][logstash.inputs.syslog ][main][337362fefa288f7e26c1c0ba3e17a7bfbf96b8754e07421ebc926164397e8595] new connection {:client=>"172.20.111.149:46388"}
[2023-03-31T10:24:31,159][INFO ][logstash.inputs.syslog ][main][337362fefa288f7e26c1c0ba3e17a7bfbf96b8754e07421ebc926164397e8595] new connection {:client=>"172.20.111.149:35680"}
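With both listeners up and syslog connections already arriving from 172.20.111.149, a quick end-to-end check is to push a synthetic line that matches the communigate grok pattern into the TCP input and then list the daily indices. A rough sketch, assuming nc and curl are available on the host; the payload is invented purely to satisfy the pattern, and two lines are sent because the multiline codec only flushes a buffered line once a following line that starts with a timestamp arrives:

# Two synthetic lines shaped like "%{TIMESTAMP_ISO8601} %{LOGLEVEL} [thread] %{JAVACLASS} - message"
# (some netcat flavors need -q1 or -N to exit after EOF)
printf '%s\n' \
  '2023-03-31T11:00:00 INFO [test] com.example.Probe - first test event' \
  '2023-03-31T11:00:01 INFO [test] com.example.Probe - second test event' | nc localhost 1601

# Confirm the daily indices exist and are gaining documents
curl -s 'localhost:9200/_cat/indices/communigate-*,syslog-*?v'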