Unable to retrieve logs when two inputs send to the same index

We have a Logstash setup that pulls ALB and CloudFront logs from S3 and sends them to Elasticsearch, writing both to the same index. Logstash runs in Docker, and we can see that it starts, registers the buckets, and processes the data, but the logs never appear in Kibana. When we push logs from a single bucket, everything works fine; the problem only shows up when two different inputs share the same index. Our Logstash configuration is below.

input {
  s3 {
    bucket       => "mybucket"
    region       => "us-east-1"
    prefix       => "AWSLogs/2719834979/elasticloadbalancing/"
    type         => "alb"
    interval     => 30
    sincedb_path => "/dev/sincedb1"
    add_field    => {
      "log_group" => "beta-dc-alb-logs"
    }
  }
  s3 {
    bucket       => "mybucket2"
    region       => "us-east-1"
    type         => "cloudfront"
    prefix       => "clientlogs/"
    interval     => 15
    sincedb_path => "/dev/sincedb2"
    add_field    => {
      "log_group" => "beta-dc-cloudfront-logs"
    }
  }
}
filter {
  if [type] == "alb" {
    grok {
      match => ["message", "%{NOTSPACE:request_type} %{TIMESTAMP_ISO8601:log_timestamp} %{NOTSPACE:alb-name} %{NOTSPACE:client} %{NOTSPACE:target} %{NOTSPACE:request_processing_time:float} %{NOTSPACE:target_processing_time:float} %{NOTSPACE:response_processing_time:float} %{NOTSPACE:elb_status_code} %{NOTSPACE:target_status_code} %{NOTSPACE:received_bytes:float} %{NOTSPACE:sent_bytes:float} %{QUOTEDSTRING:request} %{QUOTEDSTRING:user_agent} %{NOTSPACE:ssl_cipher} %{NOTSPACE:ssl_protocol} %{NOTSPACE:target_group_arn} %{QUOTEDSTRING:trace_id}"]
    }
    date {
      match => ["log_timestamp", "ISO8601"]
    }
    mutate {
      gsub => [
        "request", '"', "",
        "trace_id", '"', "",
        "user_agent", '"', ""
      ]
    }
    grok {
      match => ["request", "(%{NOTSPACE:http_method})? (%{NOTSPACE:http_uri})? (%{NOTSPACE:http_version})?"]
    }
    grok {
      match => ["http_uri", "(%{WORD:protocol})?(://)?(%{IPORHOST:domain})?(:)?(%{INT:http_port})?(%{GREEDYDATA:request_uri})?"]
    }
    grok {
      match => ["client", "(%{IPORHOST:c_ip})?"]
    }
    geoip {
      source => "c_ip"
    }
  }
  else if [type] == "cloudfront" {
    grok {
      # capture sc_bytes (the 4th field in CloudFront access logs) so the
      # convert below has a field to work on
      match => ["message", "%{YEAR:year}-%{MONTHNUM:month}-%{MONTHDAY:day}[ \t]%{TIME:time}[ \t]%{DATA:x_edge_location}[ \t]%{NUMBER:sc_bytes}[ \t]%{IP:c_ip}[ \t]%{WORD:cs_method}[ \t]%{HOSTNAME:cs_host}[ \t]%{NOTSPACE:cs_uri_stem}[ \t]%{NUMBER:sc_status}[ \t]%{GREEDYDATA:referrer}[ \t]%{NOTSPACE:user_agent}[ \t]%{GREEDYDATA:cs_uri_query}[ \t]%{NOTSPACE:cookie}[ \t]%{WORD:x_edge_result_type}[ \t]%{NOTSPACE:x_edge_request_id}[ \t]%{HOSTNAME:x_host_header}[ \t]%{URIPROTO:cs_protocol}[ \t]%{INT:cs_bytes}[ \t]%{NUMBER:time_taken}[ \t]%{NOTSPACE:x_forwarded_for}[ \t]%{NOTSPACE:ssl_protocol}[ \t]%{NOTSPACE:ssl_cipher}[ \t]%{NOTSPACE:x_edge_response_result_type}([ \t])?(%{NOTSPACE:cs_protocol_version})?"]
    }
    geoip {
      source => "c_ip"
    }
    mutate {
      add_field => ["listener_timestamp", "%{year}-%{month}-%{day} %{time}"]
      convert => {
        "[geoip][coordinates]" => "float"
        "sc_bytes"             => "integer"
        "cs_bytes"             => "integer"
        "time_taken"           => "float"
      }
    }
    date {
      match => ["listener_timestamp", "yyyy-MM-dd HH:mm:ss"]
    }
  }
  else {
    drop { }
  }
}
output {
  amazon_es {
    hosts  => ["vpc-dev-studio-search-98794194702r8-28rwfhaskjfbqf09830912ur12.us-east-1.es.amazonaws.com"]
    region => "us-east-1"
    index  => "cw-logs-%{+YYYY.MM.dd}"
  }
  stdout {
    codec => rubydebug
  }
}
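
One diagnostic we could try (a minimal sketch, not our running config) is to temporarily give each type its own index by putting the type into the index name. If documents then show up in both per-type indices, the shared index is presumably rejecting one of the two document shapes, for example because of a field mapping conflict between the ALB and CloudFront events:

output {
  amazon_es {
    hosts  => ["vpc-dev-studio-search-98794194702r8-28rwfhaskjfbqf09830912ur12.us-east-1.es.amazonaws.com"]
    region => "us-east-1"
    # diagnostic only: split the shared index into one index per input type
    index  => "cw-logs-%{type}-%{+YYYY.MM.dd}"
  }
}

The stdout/rubydebug output only proves that events reach the output stage; checking the Logstash logs or the Elasticsearch side for rejected bulk documents might tell us whether indexing is failing rather than ingestion.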


Any help is much appreciated.
