S3SNSSQS Plugin input

Hi

I've switched the input for my logs to use the logstash-input-s3-sns-sqs plugin instead of the S3 plugin, but I seem to be having an issue with the input for one of the logs.

Input:

 input {
 s3snssqs {
         region                     => "eu-west-1"
         s3_default_options         => { "endpoint_discovery" => true }
         queue                      => "queue-id"
         queue_owner_aws_account_id => "account-id"
         type                       => "sechub-findings"
         codec                      => json
         from_sns                   => false
         s3_options_by_bucket       => [
         {
             bucket_name => "s3-bucket-name"
             folders => [
             {
                 type => "sechub-findings"
             }
             ]                                                                    
         }
         ]
     }
 }

The input is getting the message from the SQS queue and then downloading the file from the correct S3 location. However, it then only reads the first JSON block and ignores the other JSON blocks in the downloaded S3 file.

A snippet of one of the S3 files is:

     {
   "version": "0",
   "detail-type": "Security Hub Findings - Imported",
   "source": "aws.securityhub",
   "time": "2021-04-16T09:22:57Z",
   "region": "eu-west-1",
   "resources": [
     "arn:aws:securityhub"
   ],
   "detail": {
     "findings": [
       {
         "GeneratorId": "dynamodb",
         "CreatedAt": "2020-07-27T10:22:49.767298+00:00",
         "RecordState": "ACTIVE",
         "Title": "dynamodb",
         "Workflow": {
           "Status": "NEW"
       }
     }
     ]
   }
 }{
   "version": "0",
   "detail-type": "Security Hub Findings - Imported",
   "source": "aws.securityhub",
   "time": "2021-04-16T09:23:03Z",
   "region": "eu-west-1",
   "resources": [
     "arn:aws:securityhub:eu-west-1::"
   ],
   "detail": {
     "findings": [
       {
         "GeneratorId": "dynamodb-autoscaling",
         "CreatedAt": "2020-07-27T10:23:00.237407+00:00",
         "RecordState": "ARCHIVED",
         "Title": "dynamodb",
         "Workflow": {
           "Status": "NEW"
         }
       }
     ]
   }
 }{
   "version": "0",
   "detail-type": "Security Hub Findings - Imported",
   "source": "aws.securityhub",
   "time": "2021-04-16T09:23:17Z",
   "region": "eu-west-1",
   "resources": [
     "arn:aws:securityhub:eu-west-1::product/"
   ],
   "detail": {
     "findings": [
       {
         "GeneratorId": "ec2",
         "CreatedAt": "2020-07-27T15:23:12.569548+00:00",
         "RecordState": "ACTIVE",
         "Title": "ec2",
         "Workflow": {
           "Status": "NEW"
         }
       }
     ]
   }
 }

I don't get any errors in the Logstash log, and the first JSON record is processed by the filter and output correctly to Elasticsearch.

Regards

Martynas

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.