Hi All,
In my Logstash configuration I have two output plugins: one S3 and one Elasticsearch.
My goal is to send logs to S3 and then update those same log documents with "uploadStatus = finished". Everything works fine until there is a network issue: the S3 plugin fails to upload the logs, but the Elasticsearch plugin still marks them as finished.
Is there a way to run the Elasticsearch output plugin only after the S3 output plugin has uploaded successfully?
I should mention that the amazon_es output plugin gives exactly the behavior I want: if the amazon_es plugin fails, the elasticsearch plugin is not executed.
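For reference, this is roughly what the amazon_es variant looked like in my case (the endpoint and region values below are placeholders, not my real settings):

output {
  amazon_es {
    hosts => ["my-es-domain.us-east-1.es.amazonaws.com"]  # placeholder endpoint
    region => "us-east-1"                                 # placeholder region
    aws_access_key_id => "${Logstash__AwsAccessKey}"
    aws_secret_access_key => "${Logstash__AwsSecretKey}"
    index => "%{[@metadata][_index]}"
  }
  elasticsearch {
    hosts => "${Logstash__SourceEs_Url}"
    index => "%{[@metadata][_index]}"
    document_id => "%{[@metadata][_id]}"
    action => "update"
    script => "ctx._source.uploadStatus = 'finished'"
  }
}

With that setup, when amazon_es failed, the elasticsearch update did not run.

Here is my current configuration: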
output {
  stdout { codec => rubydebug { metadata => true } }

  s3 {
    access_key_id => "${Logstash__AwsAccessKey}"
    secret_access_key => "${Logstash__AwsSecretKey}"
    region => "${Logstash__AwsRegion}"
    bucket => "${Logstash__AwsS3TargetBucket}"
    size_file => "${Logstash__AwsS3Size_File}"
    time_file => "${Logstash__AwsS3Time_File}"
    codec => "json_lines"
    prefix => "${Logstash__AwsS3Prefix}/${Logstash__Device_Id}/%{[@metadata][_index]}"
    validate_credentials_on_root_bucket => "${Logstash__AwsValidate_Credentials_On_Root_Bucket}"
    proxy_uri => "${Logstash__Http_Proxy}"
    retry_count => 0   # no retries on a failed upload
    retry_delay => 0
    upload_queue_size => 1
  }

  elasticsearch {
    hosts => "${Logstash__SourceEs_Url}"
    index => "%{[@metadata][_index]}"
    document_id => "%{[@metadata][_id]}"
    action => "update"
    script => "ctx._source.uploadStatus = 'finished'"   # marks the same document as uploaded
  }
}