Below is my Logstash configuration file.
input {
  beats {
    port => 5044
  }
}

filter {
  json {
    source => "message"
  }
}

output {
  stdout {
    codec => json
  }
}

filter {
  mutate {
    add_field => {
      "ddsource" => "logstash"
      "service" => "%{[fields][type]}"
    }
  }
  mutate {
    replace => { "host" => "%{[agent][name]}" }
  }
  if [fields][type] == "laravel" {
    if [context] {
      mutate { add_field => { "Laravel-data" => "%{[context]}" } }
    } else {
      mutate { add_field => { "Laravel-data" => "%{[event]}" } }
    }
  }
}

output {
  datadog_logs {
    api_key => "example"
  }
}

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    ssl => false
    index => "logs-%{+YYYY.MM.dd}"
    ilm_enabled => false
  }
}

output {
  s3 {
    access_key_id => "test"
    secret_access_key => "test"
    bucket => "logstorage"
    region => "us-west-1"
    codec => "json_lines"
    prefix => "%{+YYYY}/%{+MM}/%{+dd}/1.log"
  }
}
I am sending the filtered data to Datadog and Elasticsearch, and that part works fine. But I also want to send the raw data to the S3 bucket, without any of the filters applied. With this configuration, however, the logs arrive in S3 already filtered, which is not what I need.
How can I achieve this?
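I was wondering whether duplicating each event with the clone filter and routing the copy with output conditionals could work, something along these lines (just a rough sketch, not tested; whether the cloned copy is marked with a type field or a "raw" tag seems to depend on the plugin version / ECS-compatibility mode, so I check both):

# input stays the same (beats on port 5044)

filter {
  # duplicate every event before any parsing; the copy is marked "raw"
  clone {
    clones => ["raw"]
  }

  # only parse/enrich the original event; leave the "raw" copy untouched
  if [type] != "raw" and "raw" not in [tags] {
    json {
      source => "message"
    }
    # ... the existing mutate / Laravel filters, unchanged ...
  }
}

output {
  if [type] == "raw" or "raw" in [tags] {
    # the untouched copy goes to S3
    s3 {
      access_key_id => "test"
      secret_access_key => "test"
      bucket => "logstorage"
      region => "us-west-1"
      codec => "json_lines"
      prefix => "%{+YYYY}/%{+MM}/%{+dd}/1.log"
    }
  } else {
    # the filtered original goes to Datadog and Elasticsearch
    datadog_logs {
      api_key => "example"
    }
    elasticsearch {
      hosts => ["localhost:9200"]
      ssl => false
      index => "logs-%{+YYYY.MM.dd}"
      ilm_enabled => false
    }
  }
}

Or would it be better to split this into separate pipelines in pipelines.yml (the pipeline-to-pipeline / forked-path pattern)? I am not sure which approach is recommended.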
Thank you