I have configured Logstash (configuration below) to receive logs from Cisco devices via syslog. Although the output section sets the index (index => "network-%{+YYYY.MM.dd}"), on the Kibana side I see the wrong index: it shows %{[@metadata][beat]}-%{[@metadata][version]} instead (screenshot attached).
P.S. With Winlogbeat and Filebeat everything works fine.
Can anybody help me fix this issue?
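For comparison, the index name showing up in Kibana looks like the default Beats index pattern. The block below is only an illustrative sketch of a typical Beats-to-Elasticsearch output (the host and date suffix are assumptions, not my actual file); when an event has no [@metadata][beat]/[@metadata][version] fields, those placeholders are written literally into the index name:

output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    # Common Beats convention: the index name is built from event metadata.
    # Events without these metadata fields end up in an index whose name
    # contains the unresolved %{...} placeholders.
    index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
  }
}

Since Logstash merges all files in its config directory into one pipeline, I wonder whether the syslog events are also hitting an output like this from my Beats configuration, but I am not sure.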
# INPUT - Logstash listens on port 8514 for these logs.
#
input {
  syslog {
    port => "8514"
    type => "syslog"
  }
}
filter {
  if [type] == "syslog" {
    grok {
      patterns_dir => [ "/opt/logstash/patterns" ]
      match => [
        "message", "%{SYSLOG5424PRI}%{NUMBER:log_sequence#}: %{CISCOTIMESTAMP:log_date}: %%{CISCO_REASON:facility}-%{INT:severity_level}-%{CISCO_REASON:facility_mnemonic}: %{GREEDYDATA:message}",
        "message", "%{SYSLOG5424PRI}%{NUMBER:log_sequence#}: %{CISCOTIMESTAMP:log_date}: %%{CISCO_REASON:facility}-%{CISCO_REASON:facility_sub}-%{INT:severity_level}-%{CISCO_REASON:facility_mnemonic}: %{GREEDYDATA:message}"
      ]
      overwrite => [ "message" ]
      remove_field => [ "syslog5424_pri", "@version" ]
    }
    mutate {
      gsub => [
        "severity_level", "0", "0 - Emergency",
        "severity_level", "1", "1 - Alert",
        "severity_level", "2", "2 - Critical",
        "severity_level", "3", "3 - Error",
        "severity_level", "4", "4 - Warning",
        "severity_level", "5", "5 - Notification",
        "severity_level", "6", "6 - Informational"
      ]
    }
  }
}
output {
  # Something went wrong with the grok parsing; don't discard the messages though.
  if "_grokparsefailure" in [tags] {
    file {
      path => "/tmp/fail-%{type}-%{+YYYY.MM.dd}.log"
    }
  }
  # The message was parsed correctly and should be sent to elasticsearch.
  if "cisco" in [tags] {
    #file {
    #  path => "/tmp/%{type}-%{+YYYY.MM.dd}.log"
    #}
    elasticsearch {
      hosts => "localhost:9200"
      manage_template => false
      index => "network-%{+YYYY.MM.dd}"
      # document_id => "%{fingerprint}"
    }
  }
}