I am trying to ingest Fortigate firewall logs and map them to the ECS schema. Here is my configuration:
input {
  udp {
    host => "0.0.0.0"
    port => 10514
    type => "log"
  }
}

filter {
  if [type] == "log" {
    kv {
      source => "message"
      exclude_keys => [ "type", "subtype" ]
    }
    geoip { source => "dst" }
    geoip { source => "dstip" }
    geoip { source => "src" }
    geoip { source => "srcip" }
    mutate {
      rename => [ "zone", "src_intf" ]
      rename => [ "devid", "[host][id]" ]
      rename => [ "device_id", "[host][name]" ]
      rename => [ "host", "[host][ip]" ]
      rename => [ "level", "[log][level]" ]
      convert => [ "[destination][bytes]", "integer" ]
      convert => [ "[source][bytes]", "integer" ]
      remove_field => [ "msg" ]
    }
  }
}

output {
  if [type] == "log" {
    elasticsearch {
      cloud_id => "XXXXX"
      cloud_auth => "YYYY"
      index => "logs-%{+YYYY.MM.dd}"
    }
  }
}
But when I run this, I see a message like this in the Logstash console:
[WARN ] 2020-07-10 12:22:55.572 [[main]>worker1] mutate - Exception caught while applying mutate filter {:exception=>"Could not set field 'id' on object '222.66.327.233' to value 'FGT3172132213'.This is probably due to trying to set a field like [foo][bar] = someValuewhen [foo] is not either a map or a string"}
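If I understand the warning correctly, the udp input already adds a plain string field called host (the address of the sender, which is why "host" shows up as "222.66.327.233" in the sample document below), so mutate cannot create [host][id] on top of that string. Here is a minimal sketch of the workaround I am considering, splitting the renames into two mutate blocks so the string host is moved out of the way before the nested host.* fields are created (tmp_host_ip is just a scratch field name I made up), though I am not sure it is the right ECS approach:

mutate {
  # move the plain string "host" set by the udp input out of the way first
  rename => [ "host", "tmp_host_ip" ]
}
mutate {
  # now [host] is free to become an object with nested fields
  rename => [ "devid", "[host][id]" ]
  rename => [ "device_id", "[host][name]" ]
  rename => [ "tmp_host_ip", "[host][ip]" ]
  rename => [ "level", "[log][level]" ]
}

I split this into two mutate blocks only to guarantee that the string host is gone before the nested renames run, since I am not sure the ordering of multiple rename directives inside a single mutate block is guaranteed.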
Also, here is a sample document. The field structure does not come out as expected from the Logstash configuration; for example, device_id is not renamed to host.name:
{
  "<189>date": "2020-07-09",
  "action": "deny",
  "app": "tcp/17167",
  "appcat": "unscanned",
  "bytes_recieved": 0,
  "bytes_sent": 0,
  "craction": "262144",
  "crlevel": "low",
  "crscore": "5",
  "device_id": "StarWars",
  "devid": "FGT3172132213",
  "dst_ip": "222.66.327.233",
  "dst_port": "12343",
  "dstcountry": "India",
  "dstintf": "root",
  "dstintfrole": "undefined",
  "duration": "0",
  "host": "222.66.327.233",
  "level": "notice",
  "logid": "0023000014",
  "logtime": "1594296011",
  "policyid": "0",
  "policytype": "local-in-policy",
  "proto": "6",
  "sentpkt": "0",
  "service": "tcp/17167",
  "sessionid": "34323342",
  "src_intf": "wan",
  "src_ip": "49.51.51.58"
}
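One more thing I noticed in the sample document: the first key comes out as "<189>date", so the raw syslog priority is still attached to the message when the kv filter parses it. Here is a small sketch of what I am thinking of adding before the kv filter to strip it (assuming a gsub on message is an acceptable way to do this):

mutate {
  # drop the leading syslog priority, e.g. "<189>", so kv sees "date=..." as the first pair
  gsub => [ "message", "^<\d+>", "" ]
}

Since filters run in the order they appear in the pipeline, this mutate would have to sit above the kv filter in my configuration.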