pfSense 2.4 logs to Logstash error

Hi,

I'm trying to send pfSense logs to Logstash, but it keeps giving me the errors _geoip_lookup_failure and _grokparsefailure.

Because I'm a newbie to this, I was using a configuration I found on the web, and it worked until I upgraded the stack packages. Now I don't even know where to start, so I'm asking for help, or at least a pointer on where to begin.

Here's the output:

"evtid" => "134",
"@timestamp" => 2018-04-16T12:44:53.000Z,
"@version" => "1",
"prog" => "filterlog",
"type" => "syslog"
}
{
"message" => "121,,,1506957320,bge0,match,pass,in,4,0x0,,128,3213,0,DF,6,tcp,52,192.168.225.127,23.21.68.3,49988,443,0,S,1366643320,,65535,,mss;nop;wscale;nop;nop;sackOK",
"tags" => [
[0] "PFSense",
[1] "firewall",
[2] "_geoip_lookup_failure"
],
"proto" => "tcp",
"flags" => "DF",
"tos" => "0x0",
"host" => "192.168.78.50",
"ip_ver" => "4",
"evtid" => "134",
"dest_port" => "443",
"direction" => "in",
"id" => "3213",
"data_length" => "0",
"@version" => "1",
"action" => "pass",
"ttl" => "128",
"proto_id" => "6",
"src_ip" => "192.168.225.127",
"length" => "52",
"@timestamp" => 2018-04-16T12:44:53.000Z,
"rule" => "121",
"dest_ip" => "23.21.68.3",
"geoip" => {},
"prog" => "filterlog",
"type" => "syslog",
"src_port" => "49988",
"offset" => "0",
"iface" => "bge0",
"reason" => "match",
"tracker" => "1506957320"
}
{
"message" => "7,,,1000000105,bge0,match,block,in,6,0x00,0x00000,1,UDP,17,513,fe80::1166:a6b6:4b4b:254,ff02::c,1900,1900,513",
"host" => "192.168.224.18",
"tags" => [
[0] "PFSense",
[1] "firewall",
[2] "_grokparsefailure",
[3] "_geoip_lookup_failure"
],
"evtid" => "134",
"@timestamp" => 2018-04-16T12:44:53.000Z,
"@version" => "1",
"prog" => "filterlog",
"type" => "syslog"
}
{
"message" => "121,,,1506957320,bge0,match,pass,in,4,0x0,,128,3213,0,DF,6,tcp,52,192.168.225.127,23.21.68.3,49988,443,0,S,1366643320,,65535,,mss;nop;wscale;nop;nop;sackOK",
"tags" => [
[0] "PFSense",
[1] "firewall",
[2] "_geoip_lookup_failure"
],

###########################################################################

01-inputs.conf
# tcp syslog stream via 5140
input {
  tcp {
    type => "syslog"
    port => 5140
  }
}
# udp syslog stream via 5140
input {
  udp {
    type => "syslog"
    port => 5140
  }
}
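
If it helps to narrow things down, a stripped-down pipeline like the sketch below (just the UDP input from above plus a rubydebug stdout, port 5140 assumed unchanged) should show the raw pfSense lines as they arrive, before any of the filters below touch them:

# Minimal debug pipeline (sketch): UDP input only, raw events to stdout
input {
  udp {
    type => "syslog"
    port => 5140
  }
}
output {
  stdout { codec => rubydebug }
}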

10-syslog.conf
filter {
  if [type] == "syslog" {
    # change to pfSense ip address (see the note after this file)
    if [host] =~ /192.168.78.50/ {
      mutate {
        add_tag => ["PFSense", "Ready"]
      }
    }
    if "Ready" not in [tags] {
      mutate {
        add_tag => [ "syslog" ]
      }
    }
  }
}
filter {
  if [type] == "syslog" {
    mutate {
      remove_tag => "Ready"
    }
  }
}
filter {
  if "syslog" in [tags] {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
      locale => "en"
    }
    if !("_grokparsefailure" in [tags]) {
      mutate {
        replace => [ "@source_host", "%{syslog_hostname}" ]
        replace => [ "@message", "%{syslog_message}" ]
      }
    }
    mutate {
      remove_field => [ "syslog_hostname", "syslog_message", "syslog_timestamp" ]
    }
    if "_grokparsefailure" in [tags] {
      drop { }
    }
  }
}
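
One thing I am not sure about: the host check above uses a regex in which the dots match any character. I do not think that is what broke after the upgrade, but an exact comparison might be safer; a sketch, assuming the pfSense address is still 192.168.78.50:

    # Sketch: exact comparison instead of a regex for the pfSense source address
    if [host] == "192.168.78.50" {
      mutate {
        add_tag => ["PFSense", "Ready"]
      }
    }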

11-pfsense.conf
filter {
  if "PFSense" in [tags] {
    grok {
      add_tag => [ "firewall" ]
      match => [ "message", "<(?<evtid>.*)>(?<datetime>(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\s+(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9]) (?:2[0123]|[01]?[0-9]):(?:[0-5][0-9]):(?:[0-5][0-9])) (?<prog>.*?): (?<msg>.*)" ]
    }
    mutate {
      # collapse the double space pfSense puts before single-digit days
      gsub => ["datetime","  "," "]
    }
    date {
      match => [ "datetime", "MMM dd HH:mm:ss" ]
      timezone => "America/Sao_Paulo"
    }
    mutate {
      replace => [ "message", "%{msg}" ]
    }
    mutate {
      remove_field => [ "msg", "datetime" ]
    }
  }
  if [prog] =~ /^filterlog$/ {
    mutate {
      remove_field => [ "msg", "datetime" ]
    }
    grok {
      patterns_dir => "/etc/logstash/conf.d/patterns"
      match => [ "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IP_SPECIFIC_DATA}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}",
                 "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IPv4_SPECIFIC_DATA_ECN}%{PFSENSE_IP_DATA}%{PFSENSE_PROTOCOL_DATA}",
                 "message", "%{PFSENSE_LOG_DATA}%{PFSENSE_IPv6_SPECIFIC_DATA}"]
    }
    mutate {
      lowercase => [ 'proto' ]
    }
    geoip {
      add_tag => [ "GeoIP" ]
      source => "src_ip"
      # Optional GeoIP database (see the note after this file about private source addresses)
      # Comment out the below if you do not wish to use it, and omit the last three steps dealing with the (recommended) suffix
      database => "/etc/logstash/GeoLite2-City.mmdb"
    }
  }
}
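
About the _geoip_lookup_failure: in the output above, geoip is being fed private source addresses like 192.168.225.127, and those are not in GeoLite2, so the lookup fails by design. My guess is that the tag itself is harmless, but if I wanted to silence it I suppose the geoip block above could be wrapped in a condition so it only runs for public source addresses. A sketch, untested, assuming the RFC 1918 ranges are the only private ones in play:

    # Sketch: only attempt GeoIP for non-RFC1918 source addresses
    if [src_ip] and [src_ip] !~ /^(10\.|192\.168\.|172\.(1[6-9]|2[0-9]|3[01])\.)/ {
      geoip {
        add_tag => [ "GeoIP" ]
        source => "src_ip"
        database => "/etc/logstash/GeoLite2-City.mmdb"
      }
    }

The IPv6 link-local and multicast addresses in the second event (fe80::..., ff02::c) would presumably need a similar exclusion.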

30-outputs.conf
output {
  elasticsearch {
    hosts => ["http://localhost:9200"]
    index => "logstash-%{+YYYY.ww}"
  }
  stdout { codec => rubydebug }
}

Thanks in advance for any help.
