Flatten the nested fields of any JSON file

I have the following JSON; it has nested fields, and I'm trying to flatten it, but I couldn't!
Here is the source JSON file:

{
    "body": {
        "BlockName": "SA",
        "input": {
            "type": "array",
            "identification": "iValue",
            "configs": {},
            "information": {
                "location": "Users\\files\\",
                "client_id": "abd"
            },
            "value": {
                "info": [
                    "Apple",
                    "MSFT",
                    "AVB"
                ],
                "domains": [],
                "uniqueId": "1108738857225224193",
                "restul_set": [
                    0.4112226366996765,
                    0.34098902344703674,
                    0.24778836965560913
                ],
                "timestamp": 1554940800000,
                "text": "Test Text"
            }
        }
    },
    "result": 0.06
}

Here is what I'm looking for:

{
    "body.BlockName": "SA",
    "body.input.type": "array",
    "body.input.identification": "iValue",
    "body.input.configs": {},
    "body.input.information.location": "Users\\files\\",
    "body.input.information.client_id": "abd",    
    "body.input.value.info": [
        "Apple",
        "MSFT",
        "AVB"
    ],
    "body.input.value.domains": [],
    "body.input.value.uniqueId": "1108738857225224193",
    "body.input.value.restul_set": [
        0.4112226366996765,
        0.34098902344703674,
        0.24778836965560913
    ],
    "body.input.value.timestamp": 1554940800000,
    "body.input.value.text": "Test Text",
    "result": 0.06
}

And here is the Logstash config file:

input {
  exec {
    command => "cat /usr/share/logstash/data/pipelineOutput.json"
    codec => json
    interval => 60
  }
}
filter {
  split {
    field => "[body]"
  }
}
output {
  stdout {
    codec => json
  }
}

Any ideas?

You will need a Ruby script. Create a file called flattenJSON.rb that contains:

# Called once at pipeline startup; 'field' names the event field to flatten.
def register(params)
    @field = params['field']
end

# Recursively walk the structure. Non-empty hashes recurse with a dotted
# name prefix; anything else (scalars, arrays, empty hashes) is written to
# the event under the accumulated dotted name.
def flatten(object, name, event)
    if object
        if object.kind_of?(Hash) and object != {}
            object.each { |k, v| flatten(v, "#{name}.#{k}", event) }
        else
            event.set(name, object)
        end
    end
end

# Called once per event: flatten the configured field, then remove its
# original nested version.
def filter(event)
    o = event.get(@field)
    if o
        flatten(o, @field, event)
    end
    event.remove(@field)
    [event]
end

and then call it using:

    ruby {
        path => "/home/user/flattenJSON.rb"
        script_params => { "field" => "body" }
    }
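
If you want to sanity-check the recursion outside of Logstash first, here is a minimal sketch. The FakeEvent class is hypothetical, just enough of the event API (get/set/remove) for the script's logic to run against a plain Ruby hash, and it assumes the source document above is saved locally as pipelineOutput.json:

require 'json'

# Hypothetical stand-in for the Logstash event API used by the script.
class FakeEvent
    def initialize(data)
        @data = data
    end

    def get(name)
        @data[name]
    end

    def set(name, value)
        @data[name] = value
    end

    def remove(name)
        @data.delete(name)
    end

    def to_h
        @data
    end
end

# Same recursion as in flattenJSON.rb.
def flatten(object, name, event)
    if object
        if object.kind_of?(Hash) and object != {}
            object.each { |k, v| flatten(v, "#{name}.#{k}", event) }
        else
            event.set(name, object)
        end
    end
end

doc = JSON.parse(File.read('pipelineOutput.json'))
event = FakeEvent.new(doc)
flatten(event.get('body'), 'body', event)
event.remove('body')
puts JSON.pretty_generate(event.to_h)

Running that prints the flattened keys with arrays and the empty configs hash left intact, which is what the filter does to the event inside the pipeline.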

@Badger Thanks for the reply. I'm getting this error:

[2019-08-08T15:45:20,855][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.queue", :path=>"/usr/share/logstash/data/queue"}
[2019-08-08T15:45:20,876][INFO ][logstash.setting.writabledirectory] Creating directory {:setting=>"path.dead_letter_queue", :path=>"/usr/share/logstash/data/dead_letter_queue"}
[2019-08-08T15:45:21,372][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.3.0"}
[2019-08-08T15:45:21,401][INFO ][logstash.agent ] No persistent UUID file found. Generating new UUID {:uuid=>"52940edb-23e4-429d-bbeb-ae1e2734c9c1", :path=>"/usr/share/logstash/data/uuid"}
[2019-08-08T15:45:23,063][INFO ][org.reflections.Reflections] Reflections took 45 ms to scan 1 urls, producing 19 keys and 39 values
[2019-08-08T15:45:23,495][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"Errno::EISDIR", :message=>"Is a directory - /usr/share/logstash/data/flattenJson.rb", :backtrace=>["org/jruby/RubyIO.java:3770:in `read'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.5/lib/logstash/filters/ruby/script.rb:8:in `initialize'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-filter-ruby-3.1.5/lib/logstash/filters/ruby.rb:50:in `initialize'", "org/logstash/plugins/PluginFactoryExt.java:81:in `filter_delegator'", "org/logstash/plugins/PluginFactoryExt.java:251:in `plugin'", "org/logstash/execution/JavaBasePipelineExt.java:50:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:24:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:36:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:325:in `block in converge_state'"]}
[2019-08-08T15:45:23,805][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2019-08-08T15:45:28,885][INFO ][logstash.runner ] Logstash shut down.

Here is what I have in the config file:

input {
  exec {
    command => "cat /usr/share/logstash/data/pipelineOutput.json"
    codec => json
    interval => 60
  }
}
filter {
  ruby {
    path => "/usr/share/logstash/data/flattenJson.rb"
    script_params => { "field" => "body" }
  }
}
output {
  stdout {
    codec => json
  }
}

It's saying that it is a directory, not a file.

@Badger it worked perfectly! Thanks a lot for the help. One last question, please:
How can I keep/detect the data types of the fields in the processed JSON file using ruby?

I do not understand the question. The types stay the same as they were in the original JSON. With a rubydebug codec on stdout, the flattened event looks like this:

{
            "body.input.value.domains" => [],
               "body.input.value.info" => [
        [0] "Apple",
        [1] "MSFT",
        [2] "AVB"
    ],
           "body.input.identification" => "iValue",
          "body.input.value.timestamp" => 1554940800000,
                      "body.BlockName" => "SA",
           "body.input.value.uniqueId" => "1108738857225224193",
                              "result" => 0.06,
     "body.input.information.location" => "Users\\files\\",
                  "body.input.configs" => {},
    "body.input.information.client_id" => "abd",
         "body.input.value.restul_set" => [
        [0] 0.4112226366996765,
        [1] 0.34098902344703674,
        [2] 0.24778836965560913
    ],
               "body.input.value.text" => "Test Text",
                     "body.input.type" => "array"
}
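
If you do want to inspect a field's type inside the pipeline itself, one option (a sketch, not something from this thread; the field and metadata names are just examples) is an inline ruby filter that records the Ruby class of a value:

    ruby {
        code => '
            # Record the Ruby class of one flattened field (example name).
            v = event.get("body.input.value.timestamp")
            event.set("[@metadata][timestamp_type]", v.class.to_s) unless v.nil?
        '
    }

With stdout { codec => rubydebug { metadata => true } } you would then see whether, for example, the timestamp came through as an Integer or a Float.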
