I am trying to parse my CSV-formatted logs using Logstash. I am having trouble parsing a JSON value whose keys contain dots (.).
I followed the approach from this link ref, but it didn't work for me.
My sample input is:
2016-10-17 10:44:37,733|null|{"k1.k2.k3.k4":"val"}|{"requestId":"9s,ss4"}|{"status":"PG601"}|1732|{"testKey": "<?xml version="}
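What I would like to end up with, roughly, is each JSON column parsed into a nested field with the dots in the keys replaced by underscores, e.g. the HEADERS column becoming something like headers => { "k1_k2_k3_k4" => "val" } (just to illustrate the intent).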
My filter code is:
filter {
  ruby {
    code => "
      event.to_hash.keys.each { |k| event[ k.sub('.','_') ] = event.remove(k) if k.include?'.' }
    "
  }
  mutate { gsub => ["message", "\"", "'"] }
  csv { columns => ["TIMESTAMP","URI","HEADERS","REQUEST","RESPONSE","RESPONSE_TIME","INTER_RESPONSE"] separator => "|" }
  mutate { add_field => { "INDEX_NAME" => "pg_request_response" } }
  mutate { gsub => ["REQUEST", "'", '"'] }
  json { source => "REQUEST" target => "request" }
  mutate { gsub => ["RESPONSE", "'", '"'] }
  json { source => "RESPONSE" target => "response" }
  mutate { gsub => ["HEADERS", "'", '"'] }
  json { source => "HEADERS" target => "headers" }
  mutate { gsub => ["INTER_RESPONSE", "'", '"'] }
  json { source => "INTER_RESPONSE" }
  # date { match => ["TIMESTAMP", "YYYY-MM-dd HH:mm:ss,SSS"] }
}
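To make clearer what I am trying to achieve, here is the renaming written as plain Ruby on a standalone hash. This is only a sketch of the intent, not part of my Logstash config, and deep_rename is a name I made up for illustration:

# Sketch only: rename every dotted key to use underscores, at any nesting depth
def deep_rename(obj)
  case obj
  when Hash
    obj.each_with_object({}) { |(k, v), h| h[k.to_s.gsub('.', '_')] = deep_rename(v) }
  when Array
    obj.map { |e| deep_rename(e) }
  else
    obj
  end
end

headers = { "k1.k2.k3.k4" => "val" }
puts deep_rename(headers).inspect   # => {"k1_k2_k3_k4"=>"val"}

In other words, every dot in every key should become an underscore, including the keys inside the values produced by the json filters.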