Using Logstash I was able to set up a filter to convert those codes into usable values stored in event_data.Accesses. It does a find/replace using data from one of the websites you gave, then it splits the result into an array.
# Receive events shipped from Beats agents (e.g. Winlogbeat) on TCP port 5044.
input {
beats {
port => 5044
}
}
filter {
# Only process events whose event_data contains an AccessList field
# (e.g. Windows security event 4663 object-access audits).
if "AccessList" in [event_data] {
# Replace each Windows "%%NNNN" access-right message code in AccessList
# with its human-readable name, writing the result to event_data.Accesses.
translate {
# Source: https://correlog.zendesk.com/entries/83901255-What-Do-NNNN-References-Mean-in-Windows-Agent-Messages-
dictionary => [ '%%1537', "Delete",
'%%1538', "ReadControl",
'%%1539', "ReadControl",
'%%1540', "ReadControl",
'%%1541', "Synchronize",
'%%1542', "Synchronize",
'%%4416', "ReadData",
'%%4417', "WriteData",
'%%4418', "AppendData",
'%%4419', "ReadEA",
'%%4420', "WriteEA",
'%%4423', "ReadAttrib",
'%%4424', "WriteAttrib",
'%%1801', "Granted",
'%%1805', "NotGranted" ]
field => "[event_data][AccessList]"
destination => "[event_data][Accesses]"
# Tag translated events so downstream stages / debugging can spot them.
add_tag => ['accesses_translate']
# exact => false does substring find/replace of every code in the field,
# rather than requiring the whole field to equal one dictionary key.
exact => false
}
mutate {
# Split the translated string into an array of access-right names.
split => { "[event_data][Accesses]" => " " }
}
}
}
# Print each event to stdout (with @metadata included) for debugging;
# in production this would normally be replaced by an elasticsearch output.
output {
stdout { codec => rubydebug { metadata => true } }
}
This is the output of the filter from Logstash. Normally you would set up Logstash to output to Elasticsearch following these directions.
{
"@timestamp" => "2016-05-26T22:05:09.552Z",
"beat" => {
"hostname" => "wrks-001",
"name" => "winlogbeat-002"
},
"computer_name" => "wrks-001.elastic.co",
"event_data" => {
"AccessList" => "%%4417\n\t\t\t\t%%4418\n\t\t\t\t",
"AccessMask" => "0x6",
"HandleId" => "0x26c",
"ObjectName" => "C:\\ProgramData\\winlogbeat\\.winlogbeat.yml.new",
"ObjectServer" => "Security",
"ObjectType" => "File",
"ProcessId" => "0x105c",
"ProcessName" => "C:\\Program Files\\winlogbeat-5.0.0-alpha2-windows\\winlogbeat.exe",
"ResourceAttributes" => "S:AI",
"SubjectDomainName" => "ELASTIC",
"SubjectLogonId" => "0x3e7",
"SubjectUserName" => "WRKS-001$",
"SubjectUserSid" => "S-1-5-18",
"Accesses" => [
[0] "WriteData",
[1] "AppendData"
]
},
"event_id" => 4663,
"keywords" => [
[0] "Audit Success"
],
. . . (truncated)