I am using following logstash files to filter all juniper SRX related logs and most of logs are tagged under _grokparsefailure tags. and they are stored in failed_syslogs file.
I am pretty new to ELK ,but i have used files available from internet as below :
/etc/logstash/conf.d/01-input.conf
input {
tcp {
port => 514
type => syslog
tags => [ juniper_old ]
}
udp {
port => 514
type => syslog
tags => [ juniper_old ]
}
tcp {
port => 514
type => syslog
tags => [ juniper_rfc5424 ]
}
udp {
port => 514
type => syslog
tags => [ juniper_rfc5424 ]
}
}
30-elasticsearch-output.conf
output {
elasticsearch {
hosts => ["localhost:9200"]
sniffing => true
manage_template => false
}
stdout { codec => rubydebug }
if [type] == "syslog" and "_grokparsefailure" in [tags] {
file { path => "/var/log/logstash/failed_syslog_events-%{+YYYY-MM-dd}" }
}
}
10-syslog.conf
filter {
if [type] == "syslog" and "juniper_old" in [tags] {
grok {
match => { "message" => "<%{POSINT:syslogold_pri}>%{SYSLOGTIMESTAMP:timestamp} %{SYSLOGPROG}: %{GREEDYDATA:message}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
if [syslogold_pri] {
# Calculate facility and severity from the syslog PRI value
ruby {
code => 'event.set("severity", event.get("syslogold_pri").to_i % 8)'
}
ruby {
code => 'event.set("facility", event.get("syslogold_pri").to_i / 8)'
}
}
}
if [type] == "syslog" and "juniper_rfc5424" in [tags] {
grok {
match => { "message" => "%{SYSLOG5424BASE} %{GREEDYDATA:syslog5424_msg}" }
add_field => [ "received_at", "%{@timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
if [syslog5424_pri] {
# Calculate facility and severity from the syslog PRI value
ruby {
code => 'event.set("severity", event.get("syslog5424_pri").to_i % 8)'
}
ruby {
code => 'event.set("facility", event.get("syslog5424_pri").to_i / 8)'
}
}
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}
}