How to parse multiple json files with nested json arrays in it

Hi there,
I am trying to parse a bunch of JSON files using Logstash.
Following is my logstash conf file

 input
    {
    file
    {
    # Accumulate the whole file into ONE event, keyed on the top-level "[".
    # NOTE: the original pattern '^\{' never matched here, because the array
    # elements are indented ("  {"), so no event boundary was ever found and
    # nothing was flushed -- which is why no documents appeared.
    codec => multiline
    {
                pattern => '^\['
                negate => true
                what => "previous"
                # With a single JSON document per file there is no "next" match
                # to close the event; flush it after 2s of inactivity instead.
                auto_flush_interval => 2
                # Default is 500 lines; raise it so large invoices are not truncated.
                max_lines => 20000
    }
    path => '/LogsData/*.json'
    start_position => "beginning"
    sincedb_path => "/dev/null"
    }
    }

    filter {
        # Parse the accumulated text as JSON. The document is an array, so it
        # must be parsed into a named target field (the json filter cannot put
        # a bare array at the event root).
        json {
            source => "message"
            target => "invoice"
        }
        # Fan the array out: one event per invoice item, with the nested
        # billingItem/provisionTransaction structure preserved under [invoice].
        split {
            field => "invoice"
        }
        # The raw multiline blob is no longer needed on each item event.
        mutate {
            remove_field => ["message"]
        }
    }

    output {
        elasticsearch {
              hosts => ["elasticsearch:9200"]
              manage_template => false
              index => "slinvoice-%{+YYYY.MM.dd}"
                       }
        stdout { codec => rubydebug }
      }

and my JSON files start with an array and contain subcomponents nested up to 8 levels deep.

[
  {
    "associatedInvoiceItemId": "",
    "billingItem": {
      "cancellationDate": "",
      "provisionTransaction": {
        "guestId": "",
        "hardwareId": 805645
      }
    },
    "billingItemId": 131730629,
    "categoryCode": "server",
    "createDate": "2017-08-01T06:26:32-06:00",
    "description": "Single Intel Xeon E3-1270 v3 (4 Cores, 3.50 GHz) (900hrs * .12495)",
    "domainName": "xxx.net",
    "hostName": "xxcc",
    "hourlyRecurringFee": ".309",
    "id": 527753721,
    "invoiceId": 12345523,
    "laborAfterTaxAmount": "0",
    "laborFee": "0",
    "laborFeeTaxRate": "0",
    "laborTaxAmount": "0",
    "oneTimeAfterTaxAmount": "0",
    "oneTimeFee": "0",
    "oneTimeFeeTaxRate": "0",
    "oneTimeTaxAmount": "0",
    "parentId": "",
    "productItemId": 4578,
    "recurringAfterTaxAmount": "93.84",
    "recurringFee": "93.84",
    "recurringFeeTaxRate": "0",
    "recurringTaxAmount": "0",
    "resourceTableId": 805645,
    "serviceProviderId": 1,
    "setupAfterTaxAmount": "0",
    "setupFee": "0",
    "setupFeeTaxRate": "0",
    "setupTaxAmount": "0",
    "totalOneTimeAmount": 0,
    "totalRecurringAmount": 414.29
  },
  {
    "associatedInvoiceItemId": "",
    "billingItem": {
      "cancellationDate": "",
      "provisionTransaction": {
        "guestId": "",
        "hardwareId": 1234534
      }
    }
  }
]

When I start my Docker container, I don't see any of the JSON files being parsed.

[root@k8s ~]# docker logs root_logstash_1 -f
ERROR StatusLogger No log4j2 configuration file found. Using default configuration: logging only errors to the console.
Sending Logstash's logs to /var/log/logstash which is now configured via log4j2.properties
10:34:16.178 [main] INFO  logstash.setting.writabledirectory - Creating directory {:setting=>"path.queue", :path=>"/var/lib/logstash/queue"}
10:34:16.182 [main] INFO  logstash.setting.writabledirectory - Creating directory {:setting=>"path.dead_letter_queue", :path=>"/var/lib/logstash/dead_letter_queue"}
10:34:16.207 [LogStash::Runner] INFO  logstash.agent - No persistent UUID file found. Generating new UUID {:uuid=>"7fc91118-9bec-44ba-a33d-edb3c9c45965", :path=>"/var/lib/logstash/uuid"}
10:34:16.810 [[main]-pipeline-manager] INFO  logstash.outputs.elasticsearch - Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
10:34:16.811 [[main]-pipeline-manager] INFO  logstash.outputs.elasticsearch - Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://elasticsearch:9200/, :path=>"/"}
10:34:16.923 [[main]-pipeline-manager] WARN  logstash.outputs.elasticsearch - Restored connection to ES instance {:url=>#<Java::JavaNet::URI:0x6e644f87>}
10:34:16.924 [[main]-pipeline-manager] INFO  logstash.outputs.elasticsearch - New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>[#<Java::JavaNet::URI:0x5850bd9c>]}
10:34:16.927 [[main]-pipeline-manager] INFO  logstash.pipeline - Starting pipeline {"id"=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>500}
10:34:17.077 [[main]-pipeline-manager] INFO  logstash.pipeline - Pipeline main started
10:34:17.199 [Api Webserver] INFO  logstash.agent - Successfully started Logstash API endpoint {:port=>9600}

Can someone please help me figure out where I am going wrong?

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.