Hello experts,
I am setting up Filebeat and Logstash for log monitoring on a Linux system.
I am getting the error below (note: the stack trace shown comes from the Logstash log, even though the failing service is Filebeat):
--> systemctl status filebeat - getting error
--> systemctl status logstash - Running fine.
Error :
[2023-01-05T08:10:42,132][ERROR][logstash.agent] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", \"and\", \"or\", \"xor\", \"nand\", \"{\" at line 23, column 9 (byte 649) after filter\n{\n if [id] == \"Filebeat_AnalyticsMonitoringGrafanaLog\"\n {\n grok {\n match => { \"message\" => \"%{TIMESTAMP_ISO8601:logTime} %{LOGLEVEL:logLevel} %{GREEDYDATA:message}\" }\n }\n }\n else if [id] == \"Filebeat_AnalyticsMonitoringInfluxDBLog\"\n {\n grok {\n match => { \"message\" => \"%{MONTH} %{NUMBER} %{TIME} %{HOSTNAME:host} influxd-systemd-start.sh\\[%{NUMBER}\\]: ts=%{TIMESTAMP_ISO8601:logTime} lvl=%{LOGLEVEL:logLevel} %{GREEDYDATA:message} \" }\n }\n }\n else if [id] == \"Filebeat_AnalyticsMonitoringKomdoLog\"\n \n ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:210:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:48:in `initialize'", "org/jruby/RubyClass.java:911:in `new'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:50:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:381:in `block in converge_state'"]}
[2023-01-05T08:10:42,189][INFO ][logstash.runner ] Logstash shut down.
[2023-01-05T08:10:42,198][FATAL][org.logstash.Logstash ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:790) ~[jruby.jar:?]
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:753) ~[jruby.jar:?]
Logstash configuration (logs.conf):
Here I am configuring grok patterns for multiple log sources using chained if / else if conditionals, filtering on the log `id` defined in filebeat.yml.
# NOTE(review): the parse error in the log shows Logstash choking right after
# the Komdo "else if" with nothing following it — the file Logstash actually
# loaded (in /etc/logstash/conf.d/) appears to be TRUNCATED compared to this
# one. Redeploy this complete file and confirm no stale/partial .conf files
# remain in the pipeline directory (Logstash concatenates everything there).

input {
  beats {
    port => 5044
  }
}

filter {
  if [id] == "Filebeat_AnalyticsMonitoringGrafanaLog" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:logTime} %{LOGLEVEL:logLevel} %{GREEDYDATA:message}" }
      # Without overwrite, grok APPENDS the capture and "message" becomes an
      # array [original, parsed]; overwrite replaces it instead.
      overwrite => [ "message" ]
    }
  }
  else if [id] == "Filebeat_AnalyticsMonitoringInfluxDBLog" {
    grok {
      # FIX: removed the trailing space before the closing quote — it forced
      # every line to end with a literal space, failing otherwise-valid lines.
      match => { "message" => "%{MONTH} %{NUMBER} %{TIME} %{HOSTNAME:host} influxd-systemd-start.sh\[%{NUMBER}\]: ts=%{TIMESTAMP_ISO8601:logTime} lvl=%{LOGLEVEL:logLevel} %{GREEDYDATA:message}" }
      overwrite => [ "message" ]
    }
  }
  else if [id] == "Filebeat_AnalyticsMonitoringKomdoLog" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:logTime} - Komodo - %{LOGLEVEL:logLevel} - \[%{WORD:class}\] - %{GREEDYDATA:message}" }
      overwrite => [ "message" ]
    }
  }

  date {
    # FIX: grok captures the field as "logTime" (capital T); the original used
    # "logtime", so the date filter never matched and @timestamp stayed at
    # ingest time. Field references in Logstash are case-sensitive.
    match => ["logTime", "ISO8601", "yyyy-MM-dd HH:mm:ss"]
    timezone => "Europe/Dublin"
    target => "@timestamp"
  }
}

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "logs-%{+YYYY.MM.dd}"
  }
}
**Filebeat.yml**
########################### Filebeat Configuration #############################
# NOTE(review): the original file had keys broken across lines
# ("type" / ": log", "multiline." / "type" / ": pattern"), which is not valid
# YAML and is the most likely reason `systemctl status filebeat` reports an
# error. It also set multiline.type: pattern WITHOUT the mandatory
# multiline.pattern setting — Filebeat refuses to start in that case.

# ============================== Filebeat inputs ===============================
filebeat.inputs:
  # Grafana Log Settings
  - type: log
    id: Filebeat_AnalyticsMonitoringGrafanaLog
    enabled: true
    paths:
      # Log location:
      - "/var/log/grafana/*.log"
    # Pattern for Grafana log StackTrace.
    multiline.type: pattern
    # NOTE(review): '^\s' + negate: false + match: after appends
    # whitespace-indented (stack-trace) lines to the preceding event —
    # confirm this regex against the actual log format.
    multiline.pattern: '^\s'
    multiline.negate: false
    multiline.match: after
    # NOTE(review): the input `id` option is not shipped as an event field,
    # so the Logstash condition `[id] == ...` would never match. Publish the
    # id explicitly at the event root so Logstash can filter on it.
    fields:
      id: Filebeat_AnalyticsMonitoringGrafanaLog
    fields_under_root: true

  # InfluxDB Log Settings
  - type: log
    id: Filebeat_AnalyticsMonitoringInfluxDBLog
    enabled: true
    paths:
      # Log location:
      - "/var/log/influxdb/*.log"
    # Pattern for InfluxDB log StackTrace.
    multiline.type: pattern
    multiline.pattern: '^\s'
    multiline.negate: false
    multiline.match: after
    fields:
      id: Filebeat_AnalyticsMonitoringInfluxDBLog
    fields_under_root: true

  # Komodo Log Settings
  - type: log
    id: Filebeat_AnalyticsMonitoringKomdoLog
    enabled: true
    paths:
      # Log location:
      - "/opt/komodo/log/*.log"
    # Pattern for Komodo log StackTrace.
    multiline.type: pattern
    multiline.pattern: '^\s'
    multiline.negate: false
    multiline.match: after
    fields:
      id: Filebeat_AnalyticsMonitoringKomdoLog
    fields_under_root: true

# ======================= Elasticsearch template setting =======================
setup.template.settings:
  index.number_of_shards: 1

# ================================== Outputs ===================================
# ------------------------------ Logstash Output -------------------------------
output.logstash:
  # The Logstash hosts — include the port the beats input listens on (the
  # Logstash pipeline above binds the beats input to 5044).
  hosts: ["logstash:5044"]

  # Optional SSL. By default is off.
  # List of root certificates for HTTPS server verifications
  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
  # Certificate for SSL client authentication
  #ssl.certificate: "/etc/pki/client/cert.pem"
  # Client Certificate Key
  #ssl.key: "/etc/pki/client/cert.key"

# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~