Below is my Logstash configuration:
input {
  beats {
    port => 5001
  }
  kafka {
    bootstrap_servers => "kafka-xxxxxxxxxxxxxxxx:9092"
    topics => [ "java-prod", "java-prod-cron" ]
    codec => "json"
    tags => "java-prod"
    consumer_threads => "20"
    decorate_events => true
  }
}
filter {
  mutate {
    gsub => [
      "message", "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2} ", ""
    ]
  }
  grok {
    match => { "message" => "\[%{DATA:thread}\]\s+%{LOGLEVEL:severity}\s+%{DATA:class}\s+%{DATA:linenum}\s+-\s%{GREEDYDATA:log}" }
  }
  if [message] =~ /http.wire-log.writeResponse/ {
    mutate {
      add_field => { "http_type" => "response" }
    }
    json {
      source => "log"
      target => "response"
      skip_on_invalid_json => true
    }
    mutate {
      convert => { "response.body" => "string" }
    }
  }
  if [message] =~ /http.wire-log.writeRequest/ {
    mutate {
      add_field => { "http_type" => "request" }
    }
    json {
      source => "log"
      target => "request"
      skip_on_invalid_json => true
    }
  }
}
output {
  stdout {
    #codec => rubydebug
    codec => json
  }
  if [@metadata][kafka][topic] == "java-prod" {
    elasticsearch {
      hosts => "elasticsearch-xxxxxxxxxxxxxxxx:9200"
      index => "java-prod-%{+YYYY.MM.dd}"
      manage_template => true
      template => '/etc/logstash/templates/java-prod.json'
      template_name => 'java'
      template_overwrite => true
    }
  }
  else if [@metadata][kafka][topic] == "java-prod-cron" {
    elasticsearch {
      hosts => "elasticsearch-xxxxxxxxxxxxxxxx:9200"
      index => "java-cron-prod-%{+YYYY.MM.dd}"
      manage_template => true
      template => '/etc/logstash/templates/java-prod.json'
      template_name => 'java'
      template_overwrite => true
    }
  }
}
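For illustration, a log line that matches the gsub and grok patterns above looks roughly like this (the values here are made up, not from my real logs):

2023-06-01 12:34:56 [http-nio-8080-exec-1] DEBUG http.wire-log.writeResponse 142 - {"status":200,"headers":{"Content-Type":"application/json"},"body":{"id":1}}

The gsub strips the leading timestamp, grok captures the thread, level, class, and line number, and the remainder lands in the log field, which the json filter then parses into response or request.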
The issue I am facing is that response.body contains a JSON response whose nested JSON goes 100 levels deep and more. I don't want all of those nested objects to be indexed as fields. How do I avoid parsing of response.body?
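After the json filter runs, the event ends up looking something like this (structure illustrative and heavily truncated):

"response" => {
  "status" => 200,
  "body" => {
    "data" => {
      "items" => [
        { "child" => { "child" => { ... } } }
      ]
    }
  }
}

Every one of those nested keys under body becomes its own field in Elasticsearch, which is what I want to avoid.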
I tried converting it to a string:

mutate {
  convert => { "response.body" => "string" }
}
But it doesn't seem to work. I tried json_encode too (my attempt is sketched below), but that didn't work either. How can I make Logstash treat response.body as normal text instead of parsing it into fields?
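For reference, the json_encode attempt was roughly this, after installing the logstash-filter-json_encode plugin (the exact field reference I used is from memory):

filter {
  json_encode {
    source => "response.body"
  }
}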