Need help parsing S3 logs into an ELK dashboard

Please let me know how I can configure Logstash for multiple buckets, and also how to parse logs from multiple directories (prefixes) within a bucket. A rough sketch of what I have in mind is included after my current configuration below.

Below is my configuration file for a particular bucket and a particular directory.

input {
  s3 {
    bucket => "file-service-access-logs"
    prefix => "access-logs/AWSLogs/227866648348/elasticloadbalancing/ap-south-1/2018/04/21/"
    region => "ap-south-1"
    type => "elblogs"
    codec => plain
    secret_access_key => "myaccesskey"
    access_key_id => "muid"
  }
}
filter {
  if [type] == "elblogs" {
    # Parse the ELB access-log line; the second pattern catches the
    # "... for ELB: ... at ..." informational messages. The quotes around
    # the request and user-agent fields must be escaped inside the
    # double-quoted pattern string, and both patterns go into one match hash.
    grok {
      match => {
        "message" => [
          "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE:elb_name} %{IP:elb_client_ip}:%{INT:elb_client_port:int} (?:%{IP:elb_backend_ip}:%{NUMBER:elb_backend_port:int}|-) %{NUMBER:request_processing_time:float} %{NUMBER:backend_processing_time:float} %{NUMBER:response_processing_time:float} (?:%{INT:elb_status_code:int}|-) (?:%{INT:backend_status_code:int}|-) %{INT:elb_received_bytes:int} %{INT:elb_sent_bytes:int} \"(?:%{GREEDYDATA:elb_request}|-)\" \"(?:%{GREEDYDATA:userAgent}|-)\" %{NOTSPACE:elb_sslcipher} %{NOTSPACE:elb_sslprotocol}",
          "%{GREEDYDATA:event_name} for ELB: %{NOTSPACE:elb_name} at %{TIMESTAMP_ISO8601:timestamp}"
        ]
      }
    }
    # Split the request line into method, path and HTTP version
    if [elb_request] =~ /.+/ {
      grok {
        match => ["elb_request", "(?:%{WORD:http_method}) (?:%{DATA:http_path})? (?:%{DATA:http_type}/%{NUMBER:http_version:float})?|%{GREEDYDATA:rawrequest}"]
      }
    }
    # Break the path down into protocol, host, port and URL
    if [http_path] =~ /.+/ {
      grok {
        match => ["http_path", "(?:%{WORD:http_path_protocol}://)?(%{NOTSPACE:http_path_site}:)?(?:%{NUMBER:http_path_port:int})?(?:%{GREEDYDATA:http_path_url})?"]
      }
    }
    geoip {
      source => "elb_client_ip"
    }
  }
  date {
    match => [ "timestamp", "ISO8601" ]
  }
}
output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "elb"
  }
}
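
For the multiple-bucket / multiple-directory part, the sketch below is roughly what I am aiming for. As far as I can tell the s3 input plugin takes a single bucket and a single prefix per input block, so the idea is one s3 block per bucket, with a shorter prefix (without the date directories) so everything underneath it is picked up. The second bucket name and its prefix are just placeholders, and each block would also carry the same credentials (or an IAM role) as in the config above:

input {
  s3 {
    bucket => "file-service-access-logs"
    # shorter prefix: picks up every dated sub-directory beneath it
    prefix => "access-logs/AWSLogs/227866648348/elasticloadbalancing/ap-south-1/"
    region => "ap-south-1"
    type => "elblogs"
    codec => plain
  }
  s3 {
    # hypothetical second bucket, only to illustrate repeating the block
    bucket => "another-service-access-logs"
    prefix => "access-logs/"
    region => "ap-south-1"
    type => "elblogs"
    codec => plain
  }
}

Since both inputs set type => "elblogs", the existing filter and output sections would apply unchanged to events from either bucket.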
