S3 plugin file collision

Having a small issue with pipelines on Logstash 6.0. I have a number of logs that I am reshaping and placing in an S3 bucket using the following config:

input {
  file {
    path => "/opt/folder/lib/billing/customer/"
    exclude => [ "*.idx", "*.properties" ]
  }
}

filter {
  grok {
    patterns_dir => "/etc/logstash/patterns"
    match => { "message" => "\+%{DATA:msisdn};%{DATA:imsi};%{DATA:imei};%{DATA:subscription};%{DATA:language};%{DATA:session_start};%{DATA:service_code};%{DATA:bearer};%{DATA:pages_sent:int};%{DATA:duration:int};%{DATA:vlr_number};%{DATA:unused2};%{CSVFIELD:action_url}(;%{CSVFIELD:extra})?" }
  }
  date {
    match => [ "session_start", "UNIX", "yyyyMMdd HH:mm:ss", "yyyy-MM-dd HH:mm:ss" ]
  }
  ruby {
    # Drop dirty data that treats the imsi as a timestamp (more than 1000 years
    # in the future), then split the action URLs into arrays.
    code => "
      event.cancel if (event.get('@timestamp').to_f - Time.now.to_f) > 31536000000
      unless event.get('action_url').nil?
        url_pairs = event.get('action_url').scan(/,?([^=]*=[^,]+)/).map { |codeAndUrl| codeAndUrl[0].split('=') }
        event.set('action_url_codes', url_pairs.map { |pair| pair[0].strip })
        event.set('action_urls', url_pairs.map { |pair| pair[1].strip })
        unless event.get('action_urls').length == 0
          url1 = event.get('action_urls')[0].split('/')
          event.set('product', url1[url1.length - 2])
        end
      end
    "
  }
  mutate {
    add_field => { "operator" => "customer" }
  }
}

output {
  # stdout {
  #   codec => rubydebug
  # }
  s3 {
    codec => "json_lines"
    access_key_id => "########################"
    secret_access_key => "########################"
    region => "##-####-#"
    bucket => "###-#########-##"
    prefix => "incoming/cellcube"
    canned_acl => "private"
    restore => true
    rotation_strategy => "size_and_time"
    time_file => 5
    size_file => 5242880
    encoding => "gzip"
    validate_credentials_on_root_bucket => false
  }
}
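
I run several pipelines built from this same template side by side. In pipelines.yml they are declared roughly like this (the pipeline ids and config paths below are placeholders, not my real ones):

- pipeline.id: billing_customer_a
  path.config: "/etc/logstash/conf.d/billing_customer_a.conf"
- pipeline.id: billing_customer_b
  path.config: "/etc/logstash/conf.d/billing_customer_b.conf"

Each one follows the same shape as the config above, just with its own input path and S3 prefix.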

My problem is that when I add multiple pipelines of the same type using pipelines.yml, as soon as the first file is uploaded (successfully) the other pipelines start reporting that they cannot upload the same file, and they fill the logs with entries like the following:

[ERROR][logstash.outputs.s3 ] Uploading failed, retrying {:exception=>Errno::ENOENT, :message=>"No such file or directory - No such file or directory - /tmp/logstash/41e79443-1df5-4c5e-876f-8a21a1d8b5b9/incoming/marker/ls.s3.5d118ec2-03f3-4ca0-8d40-b02ef9deb9cf.2017-12-08T15.44.part0.txt.gz", :path=>"/tmp/logstash/41e79443-1df5-4c5e-876f-8a21a1d8b5b9/incoming/marker/ls.s3.5d118ec2-03f3-4ca0-8d40-b02ef9deb9cf.2017-12-08T15.44.part0.txt.gz", :backtrace=>["org/jruby/RubyFileTest.java:240:in `size'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/aws-sdk-resources-2.3.22/lib/aws-sdk-resources/services/s3/file_uploader.rb:31:in `upload'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/aws-sdk-resources-2.3.22/lib/aws-sdk-resources/services/s3/object.rb:251:in `upload_file'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-output-s3-4.0.13/lib/logstash/outputs/s3/uploader.rb:38:in `upload'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-output-s3-4.0.13/lib/logstash/outputs/s3/uploader.rb:29:in `block in upload_async'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/concurrent-ruby-1.0.5-java/lib/concurrent/executor/java_executor_service.rb:94:in `run'"]}
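
None of these outputs set temporary_directory, so as far as I can tell every pipeline buffers its part files under the same default /tmp/logstash tree (which is where the path in the error above lives), and with restore => true each output seems to pick up files that actually belong to another pipeline. If that is the cause, I assume giving each output its own scratch directory along these lines would keep them apart (the directory name is just an example):

s3 {
  # ... same credentials, bucket, prefix, etc. as above ...
  restore => true
  temporary_directory => "/tmp/logstash/billing_customer_a"
}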

Have I missed something?
