Closer than Before ... WoW .. Cisco Log Processing

I got this in my logstash.stdout, but Kibana still shows no results. If anyone has a hint about what I'm still missing, I would really appreciate it.
Thanks in advance.

"message" => "<183>Apr 10 2017 09:41:01 ha-fw1 : %ASA-7-710005: TCP request discarded from 52.24.32.4/80 to outside:202.134.31.158/37391\n",
"@version" => "1",
"@timestamp" => "2017-04-09T20:41:01.000Z",
"type" => "syslog",
"host" => "10.254.36.252",
"syslog_pri" => "183",
"timestamp" => "Apr 10 2017 09:41:01",
"sysloghost" => "ha-fw1",
"ciscotag" => "ASA-7-710005",
"cisco_message" => "TCP request discarded from 52.24.32.4/80 to outside:202.134.31.158/37391\n",
"protocol" => "TCP",
"action" => "discarded",
"src_ip" => "52.24.32.4",
"src_port" => "80",
"dst_interface" => "outside",
"dst_ip" => "202.134.31.158",
"dst_port" => "37391",
"syslog_severity_code" => 7,
"syslog_facility_code" => 22,
"syslog_facility" => "local6",
"syslog_severity" => "debug",
"geoip" => {
"ip" => "52.24.32.4",
"country_code2" => "US",
"country_code3" => "USA",
"country_name" => "United States",
"continent_code" => "NA",
"region_name" => "OR",
"city_name" => "Boardman",
"postal_code" => "97818",
"latitude" => 45.86959999999999,
"longitude" => -119.688,
"dma_code" => 810,
"area_code" => 541,
"timezone" => "America/Los_Angeles",
"real_region_name" => "Oregon",
"location" => [
[0] -119.688,
[1] 45.86959999999999
],
"number" => "AS16509",
"asn" => "Amazon.com, Inc."
},
"tags" => [
[0] "GeoIP",
[1] "Whois"
]
}
{
"message" => "<182>Apr 10 2017 09:41:01 ha-fw1 : %ASA-6-305012: Teardown dynamic TCP translation from any:10.20.6.56/55043 to outside:202.134.31.158/52810 duration 0:00:00\n",
"@version" => "1",
"@timestamp" => "2017-04-09T20:41:01.000Z",
"type" => "syslog",
"host" => "10.254.36.252",
"syslog_pri" => "182",
"timestamp" => "Apr 10 2017 09:41:01",
"sysloghost" => "ha-fw1",
"ciscotag" => "ASA-6-305012",
"cisco_message" => "Teardown dynamic TCP translation from any:10.20.6.56/55043 to outside:202.134.31.158/52810 duration 0:00:00\n",
"action" => "Teardown",
"xlate_type" => "dynamic",
"protocol" => "TCP",
"src_interface" => "any",
"src_ip" => "10.20.6.56",
"src_port" => "55043",
"src_xlated_interface" => "outside",
"src_xlated_ip" => "202.134.31.158",
"syslog_severity_code" => 6,
"syslog_facility_code" => 22,
"syslog_facility" => "local6",
"syslog_severity" => "informational"
}
^C
root@fg-elk:/var/log/logstash#

FYI
This is what is shown when I set the output to
stdout { codec => rubydebug }

What does the rest of your config look like?

I have only one file, named logstash.conf. Here is its content:

# Receive Cisco ASA syslog messages over plain UDP.
input {
udp {
port => 5140
type => "cisco-fw"
# NOTE(review): the rubydebug output pasted above shows type => "syslog",
# not "cisco-fw" — confirm this config is actually the one the running
# Logstash instance loaded (or that another input is also active).
}
}

filter {
    # Split the raw syslog line into the Cisco header (pri, timestamp, host,
    # %ASA tag) and the free-text remainder, captured as cisco_message.
    grok {
        match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
    }

    # Extract fields from each of the detailed ASA message types.
    # The CISCOFW* patterns below ship with core Logstash (>= 1.4.2).
    # The first pattern that matches wins; unmatched messages are tagged
    # _grokparsefailure but still pass through.
    grok {
        match => [
            "cisco_message", "%{CISCOFW106001}",
            "cisco_message", "%{CISCOFW106006_106007_106010}",
            "cisco_message", "%{CISCOFW106014}",
            "cisco_message", "%{CISCOFW106015}",
            "cisco_message", "%{CISCOFW106021}",
            "cisco_message", "%{CISCOFW106023}",
            "cisco_message", "%{CISCOFW106100}",
            "cisco_message", "%{CISCOFW110002}",
            "cisco_message", "%{CISCOFW302010}",
            "cisco_message", "%{CISCOFW302013_302014_302015_302016}",
            "cisco_message", "%{CISCOFW302020_302021}",
            "cisco_message", "%{CISCOFW305011}",
            "cisco_message", "%{CISCOFW313001_313004_313008}",
            "cisco_message", "%{CISCOFW313005}",
            "cisco_message", "%{CISCOFW402117}",
            "cisco_message", "%{CISCOFW402119}",
            "cisco_message", "%{CISCOFW419001}",
            "cisco_message", "%{CISCOFW419002}",
            "cisco_message", "%{CISCOFW500004}",
            "cisco_message", "%{CISCOFW602303_602304}",
            "cisco_message", "%{CISCOFW710001_710002_710003_710005_710006}",
            "cisco_message", "%{CISCOFW713172}",
            "cisco_message", "%{CISCOFW733100}"
        ]
    }

    # Derive syslog_severity / syslog_facility (name + code) from syslog_pri.
    syslog_pri { }

    # City-level GeoIP lookup on the source address.
    geoip {
        add_tag => [ "GeoIP" ]
        database => "/opt/logstash/GeoLiteCity.dat"
        source => "src_ip"
    }

    # Drop geoip sub-fields the database returned as empty strings, so they
    # don't clutter the documents in Elasticsearch.
    if [geoip][city_name]      == "" { mutate { remove_field => "[geoip][city_name]" } }
    if [geoip][continent_code] == "" { mutate { remove_field => "[geoip][continent_code]" } }
    if [geoip][country_code2]  == "" { mutate { remove_field => "[geoip][country_code2]" } }
    if [geoip][country_code3]  == "" { mutate { remove_field => "[geoip][country_code3]" } }
    if [geoip][country_name]   == "" { mutate { remove_field => "[geoip][country_name]" } }
    if [geoip][latitude]       == "" { mutate { remove_field => "[geoip][latitude]" } }
    if [geoip][longitude]      == "" { mutate { remove_field => "[geoip][longitude]" } }
    if [geoip][postal_code]    == "" { mutate { remove_field => "[geoip][postal_code]" } }
    if [geoip][region_name]    == "" { mutate { remove_field => "[geoip][region_name]" } }
    # BUG FIX: the original checked [geoip][time_zone], but the rubydebug
    # output above shows this Logstash version emits the field as "timezone"
    # (no underscore), so the original guard could never match.
    if [geoip][timezone]       == "" { mutate { remove_field => "[geoip][timezone]" } }

    # Gets the source IP whois/ASN information from the GeoIPASNum.dat
    # flat-file database (adds "number" and "asn" under [geoip]).
    geoip {
        add_tag => [ "Whois" ]
        database => "/opt/logstash/GeoIPASNum.dat"
        source => "src_ip"
    }

    # Set @timestamp from the ASA's own timestamp. Both year-less and
    # year-bearing, single- and double-digit-day variants are accepted.
    # NOTE(review): the ASA stamps are device-local time; the pasted output
    # shows a 13 h offset to UTC, so Logstash is applying its host timezone —
    # add a "timezone" option here if the ASA and the Logstash host differ.
    date {
        match => ["timestamp",
            "MMM dd HH:mm:ss",
            "MMM  d HH:mm:ss",
            "MMM dd yyyy HH:mm:ss",
            "MMM  d yyyy HH:mm:ss"
        ]
    }

}
output {
    elasticsearch {
        hosts => ["localhost:9200"]
        sniffing => true
        manage_template => false
        # BUG FIX: the original used "%{[@metadata][beat]}-%{+YYYY.MM.dd}" and
        # "%{[@metadata][type]}". Those @metadata fields are only set by Beats
        # inputs — with a udp input they are empty, so every event was indexed
        # into a literal "%{[@metadata][beat]}-..." index that never matched
        # the Kibana index pattern (hence "No results found"). Use a static
        # index name and the event's own type field instead.
        index => "cisco-fw-%{+YYYY.MM.dd}"
        document_type => "%{type}"
        # flush_size => 1 pushes every single event in its own bulk request —
        # acceptable while debugging, far too slow for production; raise or
        # remove this once indexing works.
        flush_size => 1
    }
    # NOTE(review): two console outputs print every event twice; drop the
    # json one (or both) once debugging is finished.
    stdout {
        codec => json
    }
    stdout { codec => rubydebug }
}

So, just to be sure: this comes out of an ASA via syslog, yes? (i.e. the `logging` command in Cisco IOS/ASA OS)

That won't work because you aren't creating those fields.

Hi Boyan

Yes, you're right.

Thanks

1 Like

Can you propose a working setup?

Now I get this:
Field data loading is forbidden on [src_fwuser

1 Like

@mhalatuituia I don't have one; I figured some time ago that Splunk does a better job when it comes to ASA syslog and also ASA netflow exports; of course there's a price to pay there...

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.