Hi,
I'm trying to configure Logstash to ingest CEF logs that I receive via rsyslog and that are stored in a file in JSON format.
I configured Logstash, and my logstash.conf is below (I'm trying to implement the ECS schema, and I've redacted the sensitive information in the config file):
input {
  file {
    type => "syslog"
    codec => json { charset => "CP1252" }
    path => "/tmp/file.log"
  }
}
filter {
  # Filter only CEF logs here
  # Save the original message before it is split. This has to be its own
  # mutate block: add_field is applied after split within a single mutate,
  # so saving and splitting in the same block would store the split array.
  mutate {
    add_field => { "tmp_message" => "%{message}" }
  }
  mutate {
    # Split the message on "|" so the CEF header parts can be addressed by index
    split => ["message", "|"]
    # Generate fields for the CEF header; the sprintf syntax for an
    # array element is %{[field][N]}, not %{field[N]}
    add_field => { "cef.version" => "%{[message][0]}" }
    add_field => { "cef.device.vendor" => "%{[message][1]}" }
    add_field => { "cef.device.product" => "%{[message][2]}" }
    add_field => { "cef.device.version" => "%{[message][3]}" }
    add_field => { "cef.sig.id" => "%{[message][4]}" }
    add_field => { "cef.sig.name" => "%{[message][5]}" }
    add_field => { "cef.sig.severity" => "%{[message][6]}" }
    add_tag => [ "CEF Firewall" ]
  }
  # Parse the key=value pairs of the CEF extension for the firewall fields
  kv {
    # Note: values with spaces are lost (still getting there)
    field_split => " "
    trim_key => "<>\[\], "
    trim_value => "<>\[\],"
    # Only include the fields of interest (we don't need everything)
    include_keys => ["act","rt","spt","dpt","match_id","rule_action","ifname","dst","inzone","outzone","product","proto","service_id","src"]
  }
  mutate {
    # Rename the kv fields to CEF field names
    rename => {
      "act"         => "cef.action"
      "rt"          => "cef.time"
      "spt"         => "cef.source.port"
      "dpt"         => "cef.destination.port"
      "match_id"    => "cef.rule.number"
      "rule_action" => "cef.rule.action"
      "dst"         => "cef.ip.destination"
      "ifname"      => "cef.interface"
      "inzone"      => "cef.source.zone"
      "outzone"     => "cef.destination.zone"
      "product"     => "cef.product"
      "proto"       => "cef.protocol"
      "service_id"  => "cef.service"
      "src"         => "cef.ip.source"
    }
    # Restore the original message and drop the temporary field
    replace => { "message" => "%{tmp_message}" }
    remove_field => [ "tmp_message" ]
  }
  # rt is epoch milliseconds, so parse cef.time as UNIX_MS to set @timestamp
  date {
    match => ["cef.time", "UNIX_MS"]
  }
}
output {
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
    codec => "plain"
    workers => 1
    manage_template => true
    template_name => "logstash"
    template_overwrite => false
    ssl => true
    cacert => "xxxx/root-ca.crt"
    ssl_certificate_verification => true
    user => "logstash"
    password => "xxx"
  }
}
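For debugging, the elasticsearch output could temporarily be replaced with a stdout output to see what the filter chain actually produces (a minimal sketch using the standard rubydebug codec):

output {
  # Print each event to the console so the parsed fields
  # can be inspected before anything is sent to Elasticsearch.
  stdout { codec => rubydebug }
}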
A row of file.log looks like this:
{
  "@timestamp": "2019-09-16T00:00:00.119709+02:00",
  "host": "xx001.xx.xx",
  "severity": "NOTICE",
  "facility": "user",
  "syslog-tag": "CEF:",
  "source": "CEF",
  "message": "0|Check Point|VPN-1 & FireWall-1|Check Point|Log|http_|Unknown|act=Accept destinationTranslatedAddress=0.0.0.0 destinationTranslatedPort=0 deviceDirection=1 rt=1568584798000 sourceTranslatedAddress=xx.xx.xx.xxx sourceTranslatedPort=57141 spt=34551 dpt=80 cs2Label=Rule Name cs2=Implied Rule layer_name=BZ_MAIN_CLUSTER_NO_QOS Security layer_uuid=6407ecb5-7913-4682-99f4-9d269e2c7642 match_id=0 parent_rule=0 rule_action=Accept rule_uid=0E3B6801-8AB0-4b1e-A317-8BE33055FB43 ifname=eth1 logid=0 loguid={0x5d7eb45e,0x50010,0xf919c152,0xc0000001} origin=xx.xxx.xx.xxx originsicname=CN\\=fw1,O\\=xxxxxx1..egqdwt sequencenum=56 version=5 dst=104.83.118.99 inzone=Local nat_addtnl_rulenum=1 nat_rulenum=111 outzone=External product=VPN-1 & FireWall-1 proto=6 service_id=http_ src=xx.xxx.xx.xxx ",
  "hostgroups": []
}
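For reference, if the split and the %{[message][N]} references behave as intended, the CEF header fields for that row should come out roughly like this (derived by hand from the sample above, not actual Logstash output):

cef.version         => "0"
cef.device.vendor   => "Check Point"
cef.device.product  => "VPN-1 & FireWall-1"
cef.device.version  => "Check Point"
cef.sig.id          => "Log"
cef.sig.name        => "http_"
cef.sig.severity    => "Unknown"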
In logstash-plain.log I don't see any errors, but I also can't find any documents in Elasticsearch for the date matched by the date filter (rt=1568584798000 is 2019-09-15T21:59:58 UTC, so the event should land in the logstash-2019.09.15 index).
Why don't I get an error in the log?
Thank you,
Franco