Logstash: Cannot output to Elasticsearch when using the S3 input plugin

Can anyone help, please?

I am trying to analyse files in an S3 bucket, but the output does not seem to be processed, and Kibana shows nothing on the dashboard. The only output I can see is from Filebeat, which I have configured for other servers. I can say for sure that the files are being picked up, because when I start my Logstash server the files move from one bucket to the other, as set in my configuration.
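For reference, the index I expect to be created can also be checked directly against the cluster, which would rule out Kibana itself (same Elasticsearch endpoint placeholder as in my output config below; this is just a sketch of the kind of query, not exact commands from my setup):

# curl -s "https://<Elasticsearch-endpoint>/_cat/indices/akamai_logs-*?v"
# curl -s "https://<Elasticsearch-endpoint>/akamai_logs-*/_count?pretty"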
My configuration is:

# cat 03-s3-input.conf
input {
  s3 {
    bucket => "<bucket-containing-files>"
    backup_to_bucket => "<bucket-where-files-move-after-process>"
    delete => true
    interval => 120
    region => "ap-southeast-2"
    add_field => { "source" => "akamai-gzfiles" }
    type => "s3"
    additional_settings => {
      "force_path_style" => true
      "follow_redirects" => false
    }
  }
} 
# cat 11-akamai-filter.conf
filter {
  if [type] == "s3" {
    grok {
      match => { "message" => "%{NUMBER:epoch_timestamp}\t%{IP:ip_address}\t%{WORD:http_verb} -\t%{URIPATHPARAM:req_path}\t%{INT:http_status:int} %{INT:content_bytes:int}\t%{INT:total_bytes:int} %{INT:object_size:int}\t%{INT:uncompressed_object_size:int}\t%{INT:http_overhead_byte:int} \"%{URI:cs_Referer}\"\t\"%{GREEDYDATA:user_agent}\"\t\"-\" %{URIHOST:cs_host} %{GREEDYDATA:cs_content_type}\t-\t%{INT:time_turnaround_ms:int}\t%{INT:time_transfter_ms:int}\t\"%{WORD:request_id}\"\t%{INT:max_age:int}\t\"%{WORD:cache_status}\"\t\"%{WORD:cache_refresh_source}\" %{INT:last_byte_served_flag:int}\t%{INT:no_store_flag:int}" }
    }
    if [ip_address] {
      geoip {
        source => "ip_address"
        target => "geoip"
        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
      }
      mutate {
        convert => [ "[geoip][coordinates]", "float" ]
      }
    }
  }
}
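One thing I can also watch for is whether events from this input get tagged with _grokparsefailure (they would still be sent to the output, but it would tell me the pattern is off). On a systemd-based package install the rubydebug stdout ends up in the journal, so something like this should show it (unit name and setup are assumptions from my environment):

# journalctl -u logstash -f | grep -i "_grokparsefailure"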
# cat 31-elasticsearch-output.conf
output {
  if [type] == "s3" {
    stdout {
      codec => rubydebug
    }
    elasticsearch {
      hosts => [ "https://<Elasticsearch-endpoint>" ]
      sniffing => false
      manage_template => false
      index => "akamai_logs-%{+YYYY-MM-dd}"
    }
  }
}
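In case it matters, this is roughly how I validate the pipeline config and watch the Logstash log for output errors (paths are from a standard package install, so treat them as an example rather than exact):

# /usr/share/logstash/bin/logstash --path.settings /etc/logstash -f /etc/logstash/conf.d/ --config.test_and_exit
# tail -f /var/log/logstash/logstash-plain.log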
