First off, let me just say that I am new to the Elasticstack.
I have 2 different kind of syslog messages I am receiving:
<30>ddns[21535]: Dynamic DNS update for xxx (Duck DNS) successful
<4>kernel: DNAT IN=red0 OUT= MAC=00:0d:b9:42:65:fc:00:17:10:82:5f:00:08:00 SRC=98.144.28.251 DST=46.127.208.49 LEN=48 TOS=0x00 PREC=0x00 TTL=114 ID=1060 PROTO=UDP SPT=18291 DPT=6666 LEN=28
I am trying to match them seperately with conditionals, and then have fields generated, such as pid
or program
, but the fields aren't generating. I just get this as output:
{
"message" => "<4>kernel: DNAT IN=red0 OUT= MAC=00:0d:b9:42:65:fc:00:17:10:82:5f:00:08:00 SRC=98.144.28.251
DST=46.127.208.49 LEN=48 TOS=0x00 PREC=0x00 TTL=114 ID=1060 PROTO=UDP SPT=18291 DPT=6666 LEN=28",
"@version" => "1",
"@timestamp" => "2016-08-19T11:30:41.239Z",
"host" => "sflsv006.sarandasnet.local"
}
Here is my config:
filter {
################
# START IPFIRE #
################
if [host] =~ /172\.16\.0\.1/ {
if [message] =~ /(?:k|kernel)/ {
grok {
match => { "message" => "<%{PROG:syslog_pri}>%{DATA:program}: %{GREEDYDATA:syslog_message}" }
add_field => [ "received_at", "%{@timestamp}" ]
}
kv {
source => "@message"
}
}
if [message] =~ /ddns/ {
grok {
match => { "message" => "<%{PROG:syslog_pri}>%{DATA:program}\[%{INT:pid}\]: %{GREEDYDATA:syslog_messa$
add_field => [ "received_at", "%{@timestamp}" ]
}
}
syslog_pri { }
date {
match => [ "syslog_timestamp", "yyyy:MM:dd-HH:mm:ss" ]
}
mutate {
replace => [ "@source_host", "sflne01.sarandasnet.local" ]
replace => [ "@message", "%{syslog_message}" ]
remove_field => [ "syslog_message", "syslog_timestamp" ]
}
geoip {
source => "SRC"
target => "geoip"
database => "/etc/logstash/GeoLiteCity.dat"
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
}
}
I am not sure what is wrong.