Let me say up front that I am a network guy, so take that for what it is worth. I have spent weeks googling, testing, and debugging before posting, because I try not to hassle anyone.
I am trying to grok the following data:
<134>May 08 2020 10:50:53: %ASA-6-734001: DAP: User xxxx,xxxx, Addr xx.xx.xx.xx, Connection AnyConnect: The following DAP records were selected for this connection: XXXX-XXX-XXX
Here is my grok filter:
# If ciscotag is ASA-6-734001
if [type] == "cisco-fw" and [ciscotag] == "%ASA-6-734001" {
  grok {
    match => ["cisco_message", "DAP: User %{DATA:user}, Addr %{IP:src_ip}, Connection %{DATA:protocol}: The following DAP records were selected for this connection: %{GREEDYDATA:policy_id1}"]
  }
}
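For what it is worth, a stripped-down config along these lines should exercise just this one pattern in isolation (same stock CISCO_TAGGED_SYSLOG first pass, then only the DAP grok, no conditionals). Note that %{IP:src_ip} will not match the masked xx.xx.xx.xx, so a real address is needed in the test line:

input { stdin {} }
filter {
  grok {
    match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
  }
  grok {
    match => ["cisco_message", "DAP: User %{DATA:user}, Addr %{IP:src_ip}, Connection %{DATA:protocol}: The following DAP records were selected for this connection: %{GREEDYDATA:policy_id1}"]
  }
}
output { stdout { codec => rubydebug } }

A sanitized sample can then be piped straight in (10.0.0.1 is just a placeholder for the real address):

echo '<134>May 08 2020 10:50:53: %ASA-6-734001: DAP: User xxxx,xxxx, Addr 10.0.0.1, Connection AnyConnect: The following DAP records were selected for this connection: XXXX-XXX-XXX' | /usr/share/logstash/bin/logstash -f test.conf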
Here is my full config file:
input { stdin {} }
filter {
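  # First pass: CISCO_TAGGED_SYSLOG extracts syslog_pri, the timestamp, and ciscotag;
  # everything after the tag ends up in cisco_message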
  grok {
    match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
  }
  # Extract fields from each of the detailed message types.
  # The patterns below are included in the core of Logstash 1.4.2.
  grok {
    match => [
      "cisco_message", "%{CISCOFW106001}",
      "cisco_message", "%{CISCOFW106006_106007_106010}",
      "cisco_message", "%{CISCOFW106014}",
      "cisco_message", "%{CISCOFW106015}",
      "cisco_message", "%{CISCOFW106021}",
      "cisco_message", "%{CISCOFW106023}",
      "cisco_message", "%{CISCOFW106100}",
      "cisco_message", "%{CISCOFW110002}",
      "cisco_message", "%{CISCOFW302010}",
      "cisco_message", "%{CISCOFW302013_302014_302015_302016}",
      "cisco_message", "%{CISCOFW302020_302021}",
      "cisco_message", "%{CISCOFW305011}",
      "cisco_message", "%{CISCOFW313001_313004_313008}",
      "cisco_message", "%{CISCOFW313005}",
      "cisco_message", "%{CISCOFW402117}",
      "cisco_message", "%{CISCOFW402119}",
      "cisco_message", "%{CISCOFW419001}",
      "cisco_message", "%{CISCOFW419002}",
      "cisco_message", "%{CISCOFW500004}",
      "cisco_message", "%{CISCOFW602303_602304}",
      "cisco_message", "%{CISCOFW710001_710002_710003_710005_710006}",
      "cisco_message", "%{CISCOFW713172}",
      "cisco_message", "%{CISCOFW733100}"
    ]
  }
  # If ciscotag is ASA-6-722051
  if [type] == "cisco-fw" and [ciscotag] == "ASA-6-722051" {
    grok {
      match => ["cisco_message", "%{DATA:Group}\s<%{DATA:Policy}> User\s<%{DATA:[event_data][TargetUserName]}> IP\s<%{IPV4:src_ip}> IPv4 Address <%{IPV4:assigned_ip}%{GREEDYDATA:extra_field}"]
    }
  }
  # If ciscotag is ASA-6-722055
  if [type] == "cisco-fw" and [ciscotag] == "ASA-6-722055" {
    grok {
      match => ["cisco_message", "%{DATA:Group}\s<%{DATA:Policy}> User\s<%{DATA:[event_data][TargetUserName]}> IP\s<%{IPV4:src_ip}> Client Type:%{GREEDYDATA:VPN_Client}"]
    }
  }
  # If ciscotag is ASA-6-113005 - Authentication Rejected
  if [type] == "cisco-fw" and [ciscotag] == "ASA-6-113005" {
    grok {
      match => ["cisco_message", "%{DATA:Group}\s%{GREEDYDATA:Policy} : reason =\s%{DATA:Reason} : server =\s%{IPV4:ServerIP} : user =\s%{DATA:[event_data][TargetUserName]} : user IP =\s%{IPV4:src_ip}"]
    }
  }
  # If ciscotag is ASA-6-113004
  if [type] == "cisco-fw" and [ciscotag] == "ASA-6-113004" {
    grok {
      match => ["cisco_message", "%{WORD:aaa_type} Successful : server = %{IP:aaa_server_ip} : user = %{GREEDYDATA:user}"]
    }
  }
  # If ciscotag is ASA-6-734001
  if [type] == "cisco-fw" and [ciscotag] == "%ASA-6-734001" {
    grok {
      match => ["cisco_message", "DAP: User %{DATA:user}, Addr %{IP:src_ip}, Connection %{DATA:protocol}: The following DAP records were selected for this connection: %{GREEDYDATA:policy_id1}"]
    }
  }
  # Parse the syslog severity and facility
  syslog_pri { }
  if [type] == "cisco-fw" {
    geoip {
      add_tag => [ "GeoIP" ]
      database => "/opt/logstash/databases/GeoLite2-City.mmdb"
      source => "src_ip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
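    # The two add_field calls append to the same field, so [geoip][coordinates]
    # becomes a [lon, lat] array (the order Kibana's map visualizations expect)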
    mutate {
      convert => [ "[geoip][coordinates]", "float"]
    }
    if [geoip][city_name] == "" { mutate { remove_field => "[geoip][city_name]" } }
    if [geoip][continent_code] == "" { mutate { remove_field => "[geoip][continent_code]" } }
    if [geoip][country_code2] == "" { mutate { remove_field => "[geoip][country_code2]" } }
    if [geoip][country_code3] == "" { mutate { remove_field => "[geoip][country_code3]" } }
    if [geoip][country_name] == "" { mutate { remove_field => "[geoip][country_name]" } }
    if [geoip][latitude] == "" { mutate { remove_field => "[geoip][latitude]" } }
    if [geoip][longitude] == "" { mutate { remove_field => "[geoip][longitude]" } }
    if [geoip][postal_code] == "" { mutate { remove_field => "[geoip][postal_code]" } }
    if [geoip][region_name] == "" { mutate { remove_field => "[geoip][region_name]" } }
    if [geoip][time_zone] == "" { mutate { remove_field => "[geoip][time_zone]" } }
  }
  # Get the source IP ASN/whois information from the GeoLite2-ASN database
  geoip {
    add_tag => [ "Whois" ]
    database => "/opt/logstash/databases/GeoLite2-ASN.mmdb"
    source => "src_ip"
  }
  # Parse the date
  date {
    match => ["timestamp",
      "MMM dd HH:mm:ss",
      "MMM d HH:mm:ss",
      "MMM dd yyyy HH:mm:ss",
      "MMM d yyyy HH:mm:ss"
    ]
  }
}
output { stdout { codec => rubydebug } }
When I run a test, this is what I get:
{
"syslog_severity_code" => 6,
"timestamp" => "May 08 2020 10:50:53",
"tags" => [
[0] "_grokparsefailure",
[1] "_geoip_lookup_failure"
],
"syslog_facility_code" => 16,
"message" => "<134>May 08 2020 10:50:53: %ASA-6-734001: DAP: User xxxx.xxxxx, Addr xxx.xxx.xxx.xxx, Connection AnyConnect: The following DAP records were selected for this connection: XXXXXXXX",
"@timestamp" => 2020-05-08T16:50:53.000Z,
"ciscotag" => "ASA-6-734001",
"syslog_pri" => "134",
"syslog_facility" => "local0",
"cisco_message" => "DAP: User xxxxxx.xxxx, Addr xxx.xx.x.x, Connection AnyConnect: The following DAP records were selected for this connection: XXXXXXXX",
"@version" => "1",
"host" => "elk",
"syslog_severity" => "informational"
}
I think the issue is either with the timestamp or with how "cisco_message" is referenced in the grok filter. One thing I notice in the output is that "ciscotag" comes out as "ASA-6-734001" with no leading "%", while my 734001 conditionals compare against "%ASA-6-734001", so maybe those blocks never fire?
I know there are other posts like this, but I have not been able to put the pieces together from them. Any help would be wonderful. Thanks for reading my post.
Also, here is the Logstash startup output in case it is needed:
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/test.conf --path.settings /etc/logstash/
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
WARNING: An illegal reflective access operation has occurred
WARNING: Illegal reflective access by com.headius.backport9.modules.Modules (file:/usr/share/logstash/logstash-core/lib/jars/jruby-complete-9.2.9.0.jar) to method sun.nio.ch.NativeThread.signal(long)
WARNING: Please consider reporting this to the maintainers of com.headius.backport9.modules.Modules
WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
WARNING: All illegal access operations will be denied in a future release
Sending Logstash logs to /usr/share/logstash/logs which is now configured via log4j2.properties
[2020-05-08T14:49:49,360][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2020-05-08T14:49:49,604][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.6.2"}
[2020-05-08T14:49:54,864][INFO ][org.reflections.Reflections] Reflections took 59 ms to scan 1 urls, producing 20 keys and 40 values
[2020-05-08T14:49:57,170][INFO ][logstash.filters.geoip ][main] Using geoip database {:path=>"/opt/logstash/databases/GeoLite2-City.mmdb"}
[2020-05-08T14:49:57,305][INFO ][logstash.filters.geoip ][main] Using geoip database {:path=>"/opt/logstash/databases/GeoLite2-ASN.mmdb"}
[2020-05-08T14:49:57,426][WARN ][org.logstash.instrument.metrics.gauge.LazyDelegatingGauge][main] A gauge metric of an unknown type (org.jruby.RubyArray) has been created for key: cluster_uuids. This may result in invalid serialization. It is recommended to log an issue to the responsible developer/development team.
[2020-05-08T14:49:57,434][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>500, "pipeline.sources"=>["/etc/logstash/conf.d/test.conf"], :thread=>"#<Thread:0x662e86ff run>"}
[2020-05-08T14:50:00,678][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
The stdin plugin is now waiting for input:
[2020-05-08T14:50:00,768][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2020-05-08T14:50:01,119][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}