Hi, my script is as follows:
# Read events that earlier pipelines failed to deliver, from the
# dead letter queue written by the "main" pipeline.
input {
dead_letter_queue {
# NOTE(review): assumes LS_HOME is defined in the Logstash environment — confirm.
path => "${LS_HOME}/data/dead_letter_queue"
pipeline_id => "main"
}
}
filter {
  # Custom script that computes the target index name; presumably it writes
  # [@metadata][indexName] used by the elasticsearch output — verify in
  # failed-index-create.rb.
  ruby {
    path => "${LS_HOME}/config/failed-index-create.rb"
    script_params => {
      "indexNamePrefix" => "xxx"
    }
  }
  # Drop every field except failed_message. @metadata is not affected by prune,
  # so the dead_letter_queue metadata stays available below.
  prune {
    whitelist_names => [ "^failed_message$" ]
  }
  # NOTE(review): prune above already removed "timestamp", so this date filter
  # is a no-op unless it is moved before the prune — confirm intended order.
  date {
    match => [ "timestamp", "ISO8601" ]
  }
  json {
    source => "failed_message"
    target => "failed_message_json"
  }
  mutate {
    add_field => {
      "reason" => "%{[@metadata][dead_letter_queue][reason]}"
      # "plugin_id" => "%{[@metadata][dead_letter_queue][plugin_id]}"
      # "plugin_type" => "%{[@metadata][dead_letter_queue][plugin_type]}"
    }
  }
  # FIX 1: the closing braces of add_field/mutate were commented out, which
  # made the whole pipeline fail to load. They are restored above.
  #
  # FIX 2: the reason string is NOT valid JSON — it is Ruby hash notation
  # ("=>" instead of ":"), so the json filter can never parse it directly.
  # First pull out everything after "response", i.e. the hash the user wants:
  grok {
    match => { "reason" => "response.*?: %{GREEDYDATA:[@metadata][reason_response]}" }
    tag_on_failure => [ "_reason_response_not_found" ]
  }
  mutate {
    # Rewrite Ruby hash syntax as JSON syntax so the json filter can parse it.
    gsub => [ "[@metadata][reason_response]", "=>", ":" ]
  }
  # FIX 3: json "source" takes a field NAME; the original passed a %{...}
  # sprintf reference, which is treated as a literal (nonexistent) field name.
  json {
    source => "[@metadata][reason_response]"
    target => "reason_json"
  }
}
output {
# Elasticsearch output kept disabled while debugging; re-enable once
# reason_json looks correct. NOTE(review): [@metadata][indexName] is
# presumably set by failed-index-create.rb — verify before enabling.
#elasticsearch{
# hosts => [ "localhost:9200" ]
#index => "%{[@metadata][indexName]}"
#}
# Print events to the console, including @metadata, for inspection.
stdout {
codec => rubydebug { metadata => true }
}
}
In my case, the output in the reason field is:
```
reason" : """Could not index event to Elasticsearch. status: 400, action: ["update", {:_id=>"eOxcjn4/cA9RYi55c12BK0yEXHs=", :_index=>"xxx-x-201x", :_type=>"_doc", :routing=>nil, :retry_on_conflict=>1}, #<logstash::event:0x128db5a>], **response** : {"update"=>{"_index"=>"xxx-x-201x", "_type"=>"_doc", "_id"=>"eOxcjn4/cA9RYi55c12BK0yEXHs=", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [billDueDate] of type [date] in document with id 'eOxcjn4/cA9RYi55c12BK0yEXHs='", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"failed to parse date field [abc] with format [strict_date_optional_time||epoch_millis]", "caused_by"=>{"type"=>"date_time_parse_exception", "reason"=>"Failed to parse with all enclosed parsers"}}}}}"""
I want to extract the response object inside the reason field and index it into Elasticsearch. The expected output that needs to be extracted from the text above and indexed is:
{
"update": {
"_index": "xxx-x-201x",
"_type": "_doc",
"_id": "eOxcjn4/cA9RYi55c12BK0yEXHs=",
"status": 400,
"error": {
"type": "mapper_parsing_exception",
"reason": "failed to parse field [billDueDate] of type [date] in document with id 'eOxcjn4/cA9RYi55c12BK0yEXHs='",
"caused_by": {
"type": "illegal_argument_exception",
"reason": "failed to parse date field [abc] with format [strict_date_optional_time||epoch_millis]",
"caused_by": {
"type": "date_time_parse_exception",
"reason": "Failed to parse with all enclosed parsers"
}
}
}
}
I want to parse this in the same DLQ pipeline configuration.