Can anyone help on nested json parsing with Logstash?

You can parse the JSON using a json filter

json {
    source => "message"
    target => "someField"
}

but I suspect you will not like the resulting format, which is an array of objects that looks like this:

    [ 9] {
        "MetricName" => "written_bytes",
             "Value" => 24.0,
        "Dimensions" => [
            [0] {
                "Value" => "i-123456",
                 "Name" => "instance"
            },
            [1] {
                "Value" => "1234234-1234-4ac2134c-12323-1234234",
                 "Name" => "session"
            },
            [2] {
                "Value" => "1",
                 "Name" => "connection"
            },
            [3] {
                "Value" => "filestorage",
                 "Name" => "channel"
            }
        ],
         "Timestamp" => "2021-10-25T14:07:23Z"
    },

although the rtt and session_count objects do not look like that. You could do something like

    # Parse the JSON into @metadata so the raw structure never appears in the output event.
    json { source => "message" target => "[@metadata][data]" remove_field => [ "message" ] }

    # Flatten each metric entry: keep the Timestamp, promote MetricName/Value to a
    # single key/value pair, preserve StatisticValues when present, and hoist each
    # Dimension into its own top-level field.
    ruby {
        code => '
            data = event.get("[@metadata][data]")
            if data.is_a?(Array)
                flattened = data.map { |metric|
                    item = { "Timestamp" => metric["Timestamp"] }
                    item[metric["MetricName"]] = metric["Value"]

                    # rtt-style entries carry aggregated stats instead of a single Value
                    item["StatisticValues"] = metric["StatisticValues"] if metric["StatisticValues"]

                    (metric["Dimensions"] || []).each { |dim|
                        item[dim["Name"]] = dim["Value"]
                    }
                    item
                }
                event.set("[@metadata][result]", flattened)
            end
        '
    }
    # Emit one event per metric entry in the array.
    split { field => "[@metadata][result]" }
    # Copy every key of the flattened hash onto the event itself.
    ruby { code => 'event.get("[@metadata][result]").each { |k, v| event.set(k, v) }' }

    # Use the metric timestamp as the event @timestamp.
    date { match => [ "Timestamp", "ISO8601" ] }

which would result in events like

{
     "@timestamp" => 2021-10-25T14:07:23.000Z,
            "rtt" => nil,
        "session" => "1234234-1234-4ac2134c-12323-1234234",
        "channel" => "main",
       "instance" => "i-123456",
      "Timestamp" => "2021-10-25T14:07:23Z",
"StatisticValues" => {
        "Minimum" => 3.5150000000000001,
        "Maximum" => 16.617999999999999,
    "SampleCount" => 23,
            "Sum" => 129.398
},
     "connection" => "1",
}

{
      "@timestamp" => 2021-10-25T12:29:45.000Z,
"connection_count" => 1.0,
        "instance" => "i-123456",
       "Timestamp" => "2021-10-25T12:29:45Z",
         "session" => "1234234-1234-4ac2134c-12323-1234234"
}

{
   "@timestamp" => 2021-10-25T12:29:44.000Z,
"session_count" => 1.0,
     "instance" => "i-123456",
    "Timestamp" => "2021-10-25T12:29:44Z"
}

That creates 13 events, one for each of the 13 entries in the JSON array. It may be that you only want to retain some of them, or that you want to keep them all in a single event. In that case you would not build an array (so you would not need the split filter); instead, just build one hash and add only the entries you care about to it.