Hi,

I'm reading the CloudTrail logs from an S3 bucket using the config below, but I only get logs when I point the prefix at a single day's folder, like this:

prefix => "AWSLogs/xxxxxxxxx/CloudTrail/us-west-2/2016/03/18"

How do I read the logs recursively from the S3 subfolders?
input {
  s3 {
    access_key_id     => ""
    bucket            => "mohantestaws"
    secret_access_key => ""
    prefix            => "AWSLogs/xxxxxxxxx/CloudTrail/us-west-2/2016/03/18"
    sincedb_path      => "/tmp/s3.sincedb"
    region            => "us-west-2"
    codec             => "json"
    add_field         => { "source" => "gzfiles" }
  }
}
filter {
  if [type] == "cloudtrail" {
    mutate {
      # Strip the ".amazonaws.com" suffix from the event source
      gsub => [ "eventSource", "\.amazonaws\.com$", "" ]
      add_field => {
        "document_id" => "%{eventID}"
      }
    }
    # Stamp the event with its ingest time if it doesn't have one yet
    if ! [ingest_time] {
      ruby {
        code => "event['ingest_time'] = Time.now.utc.strftime '%FT%TZ'"
      }
    }
    # Cancel events older than one day
    ruby {
      code => "event.cancel if (Time.now.to_f - event['@timestamp'].to_f) > (60 * 60 * 24 * 1)"
    }
    # Record the ingest delay in hours and drop anything over 24h
    ruby {
      code => "event['ingest_delay_hours'] = (Time.now.to_f - event['@timestamp'].to_f) / 3600"
    }
    if [ingest_delay_hours] > 24 {
      drop {}
    }
    # Drop noisy ELB health-check events from the deploy user
    if [eventSource] == "elasticloadbalancing" and [eventName] == "describeInstanceHealth" and [userIdentity][userName] == "deploy-s3" {
      drop {}
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost"]
  }
  stdout { codec => json }
}
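
From what I understand, S3 object listing is prefix-based rather than folder-based, and the s3 input plugin simply lists every key that starts with the configured prefix. So I'm guessing that shortening the prefix to the region level would already pick up all of the year/month/day subfolders. A minimal sketch of what I mean, assuming that listing behavior (same bucket and masked account ID as above):

input {
  s3 {
    access_key_id     => ""
    bucket            => "mohantestaws"
    secret_access_key => ""
    # Region-level prefix: every dated subfolder under it shares this
    # prefix, so all of them should be listed and read.
    prefix            => "AWSLogs/xxxxxxxxx/CloudTrail/us-west-2/"
    sincedb_path      => "/tmp/s3.sincedb"
    region            => "us-west-2"
    codec             => "json"
    add_field         => { "source" => "gzfiles" }
  }
}

Is that the right approach, or is there a dedicated option for reading subfolders recursively?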