Continuing the discussion from Split nested json array:
How to parse JSON message events:
- If any JSON key's value is passed as null, it is not processed by the split filter and will not be rendered. See the JSON line below and check the element named FilterValue.
json file name : testdata-1.1.json
============================
{"VectorList": [{"LocalizationId": 60,"ServiceId": 2,"Date": "2018-09-11T00:00:00","IsAvailable": true,"FreeTermCount": 86,"SegmentFreeTermCount": 87, "FilterValue": null}]}
Index output:-
{
"_index": "logstash-2019.02.23",
"_type": "doc",
"_id": "aisEG2kBN4-7njBKFM",
"_score": 1,
"_source": {
"ServiceId": "2",
"IsAvailable": "true",
"Date": "2018-09-11T00:00:00",
"@timestamp": "2019-02-23T15:40:43.820Z",
"@version": "1",
"path": "C:/Users/n487610/LogStash/logstash-6.2.3/input_json_files/testdata-1.1.json",
"type": "MyLog",
"FreeTermCount": "86",
"host": "Z507B-9D70-B00E",
"FilterValue": "%{[VectorList][FilterValue]}", # the value is not processed; the sprintf reference is left as a literal
"SegmentFreeTermCount": "87",
"VectorList": {
"ServiceId": 2,
"IsAvailable": true,
"Date": "2018-09-11T00:00:00",
"FreeTermCount": 86,
"FilterValue": null,
"SegmentFreeTermCount": 87,
"LocalizationId": 60
},
"LocalizationId": "60"
}
}
2. If any JSON key's value is passed as "null" (with double quotes), it is parsed successfully and the "null" value is rendered. See the JSON line below and check the element named FilterValue.
json file name : testdata-1.2.json :-
{"VectorList": [{"LocalizationId": 60,"ServiceId": 2,"Date": "2018-09-11T00:00:00","IsAvailable": true,"FreeTermCount": 86,"SegmentFreeTermCount": 87, "FilterValue": "null"}]}
Index output:-
{
"_index": "logstash-2019.02.23",
"_type": "doc",
"_id": "aysHG2kBN4-7njBKus_a",
"_score": 1,
"_source": {
"ServiceId": "2",
"IsAvailable": "true",
"Date": "2018-09-11T00:00:00",
"@timestamp": "2019-02-23T15:44:45.019Z",
"@version": "1",
"path": "C:/Users/n487610/LogStash/logstash-6.2.3/input_json_files/testdata-1.2.json",
"type": "MyLog",
"FreeTermCount": "86",
"host": "Z507B-9D70-B00E",
"FilterValue": "null", # value parsed successfully
"SegmentFreeTermCount": "87",
"VectorList": {
"ServiceId": 2,
"IsAvailable": true,
"Date": "2018-09-11T00:00:00",
"FreeTermCount": 86,
"FilterValue": "null",
"SegmentFreeTermCount": 87,
"LocalizationId": 60
},
"LocalizationId": "60"
}
}
- Use the Logstash config below to create the index in Elasticsearch.
Config details:
input {
file {
type => "MyLog"
path => ["C:/Users/n487610/LogStash/logstash-6.2.3/input_json_files/*.json"]
start_position => "beginning"
codec => "json"
}
}
filter {
json {
source => "message"
}
split {
field => "[VectorList]"
terminator=> ","
}
mutate {
add_field => {
"LocalizationId" => "%{[VectorList][LocalizationId]}"
"ServiceId" => "%{[VectorList][ServiceId]}"
"Date" => "%{[VectorList][Date]}"
"IsAvailable" => "%{[VectorList][IsAvailable]}"
"FreeTermCount" => "%{[VectorList][FreeTermCount]}"
"SegmentFreeTermCount" => "%{[VectorList][SegmentFreeTermCount]}"
"FilterValue" => "%{[VectorList][FilterValue]}"
}
remove_field => [ "[message]" ]
}
}
output {
elasticsearch {
index => "logstash-%{+yyyy.MM.dd}"
hosts => ["localhost:9200"]
}
file {
path => "C:\Users\n487610\LogstashOutput\testing-out-%{+YYYY.MM.dd}"
}
stdout {
codec => "rubydebug"
}
}