Hey, first post here!

I'm getting a grok parse failure: every event comes through with _grokparsefailure in its tags field:

tags: netsyslog, _grokparsefailure

I'm really not sure where I've gone wrong, though. The config looks fine to me and passes --configtest, and the fields are indexed properly when I browse them in Kibana (aside from the _grokparsefailure in the tags field). Here's the full config:
input {
  file {
    path => ["/var/log/network.log"]
    # sincedb_path => "/var/log/logstash/"
    start_position => "beginning"
    type => "syslog"
    tags => [ "netsyslog" ]
  }
  file {
    path => ["/var/log/threat.log"]
    # sincedb_path => "/var/log/logstash/"
    start_position => "beginning"
    type => "syslog"
    tags => [ "threats" ]
  }
} #end input
filter {
  if [type] == "syslog" {
    grok {
      # strips the timestamp and host off the front of the syslog message, leaving the
      # raw message generated by the syslog client, saved as "raw_message"
      #patterns_dir => "/opt/logstash/patterns"
      match => [ "message", "%{TIMESTAMP_ISO8601:@timestamp} %{HOST:syslog_host} %{GREEDYDATA:raw_message}" ]
    }
  }
if "netsyslog" in [tags]
{
csv
{
source => "raw_message"
columns => [ "PaloAltoDomain","ReceiveTime","SerialNum","Type","Threat-ContentType","ConfigVersion","GenerateTime","SourceAddress","DestinationAddress","NATSourceIP","NATDestinationIP","Rule","SourcUser","DestinationUser","$
}
    date {
      timezone => "America/Vancouver"
      match => [ "GenerateTime", "YYYY/MM/dd HH:mm:ss" ]
    }
    # convert fields to their proper types
    mutate {
      convert => [ "Bytes", "integer" ]
      convert => [ "BytesReceived", "integer" ]
      convert => [ "BytesSent", "integer" ]
      convert => [ "ElapsedTimeInSec", "integer" ]
      convert => [ "geoip.area_code", "integer" ]
      convert => [ "geoip.dma_code", "integer" ]
      convert => [ "geoip.latitude", "float" ]
      convert => [ "geoip.longitude", "float" ]
      convert => [ "NATDestinationPort", "integer" ]
      convert => [ "NATSourcePort", "integer" ]
      convert => [ "Packets", "integer" ]
      convert => [ "pkts_received", "integer" ]
      convert => [ "pkts_sent", "integer" ]
      convert => [ "seqno", "integer" ]
      gsub => [ "Rule", " ", "_",
                "Application", "( |-)", "_" ]
      remove_field => [ "message", "raw_message" ]
    } #end mutate
  } #end netsyslog
else if "threats" in [tags]
{
csv
{
source => "raw_message"
columns => [ "PaloAltoDomain","ReceiveTime","SerialNum","Type","Threat-ContentType","ConfigVersion","GenerateTime","SourceAddress","DestinationAddress","NATSourceIP","NATDestinationIP","Rule","SourcUser","DestinationUser","$
}
date
{
timezone => "America/Vancouver"
match => [ "GenerateTime", "YYYY/MM/dd HH:mm:ss" ]
}
#convert fields to proper format
mutate
{
convert => [ "geoip.area_code", "integer" ]
convert => [ "geoip.dma_code", "integer" ]
convert => [ "geoip.latitude", "float" ]
convert => [ "geoip.longitude", "float" ]
convert => [ "NATDestinationPort", "integer" ]
convert => [ "NATSourcePort", "integer" ]
convert => [ "seqno", "integer" ]
gsub => [ "Rule", " ", "_",
"Application", "( |-)", "_" ]
remove_field => [ "message", "raw_message" ]
}
} #end threats
  # geolocate events that have a SourceAddress, if that SourceAddress is a non-RFC1918 address
  if [SourceAddress] and [SourceAddress] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" {
    geoip {
      database => "/opt/logstash/GeoLiteCity.dat"
      source => "SourceAddress"
      target => "SourceGeo"
    }
    # blank out SourceGeo.location if GeoIP returned 0,0 (unknown coordinates)
    if [SourceGeo][location] and [SourceGeo][location] =~ "0,0" {
      mutate {
        # nested fields use the [parent][child] reference syntax
        replace => [ "[SourceGeo][location]", "" ]
      }
    }
  }
  # geolocate events that have a DestinationAddress, if that DestinationAddress is a non-RFC1918 address
  if [DestinationAddress] and [DestinationAddress] !~ "(^127\.0\.0\.1)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^192\.168\.)|(^169\.254\.)" {
    geoip {
      database => "/opt/logstash/GeoLiteCity.dat"
      source => "DestinationAddress"
      target => "DestinationGeo"
    }
    # blank out DestinationGeo.location if GeoIP returned 0,0 (unknown coordinates)
    if [DestinationGeo][location] and [DestinationGeo][location] =~ "0,0" {
      mutate {
        replace => [ "[DestinationGeo][location]", "" ]
      }
    }
  }
} #end filter block

output {
  elasticsearch {
    protocol => "node"
    node_name => "logstash"
    cluster => "elasticsearch"
    host => "127.0.0.1"
    template => "/opt/logstash/elasticsearch-template.json"
    template_overwrite => true
  }
} #end output block
Can a _grokparsefailure come from syntax problems in the config itself, or does it always mean the grok pattern simply didn't match the incoming line? This is my first ELK stack, so apologies for the noob question.
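In case it helps, here's a minimal config I've sketched for testing the grok pattern in isolation (stdin in, stdout with the rubydebug codec; the grok line is copied verbatim from above, and test.conf is just whatever name I save it under). The idea is to paste a single raw line from network.log and see whether the match itself fails:

input {
  stdin { }
}
filter {
  grok {
    match => [ "message", "%{TIMESTAMP_ISO8601:@timestamp} %{HOST:syslog_host} %{GREEDYDATA:raw_message}" ]
  }
}
output {
  stdout { codec => rubydebug }
}

Run it with something like bin/logstash -f test.conf, paste a line, and check whether the printed event carries _grokparsefailure in its tags.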