I've got the following Logstash config, and I'm trying to send the RabbitMQ headers (which are stored in the @metadata field) to Elasticsearch.
input {
# Consume messages from a RabbitMQ queue.
rabbitmq {
auto_delete => false
durable => false
host => "my_host"
port => 5672
queue => "my_queue"
# "#" routing key: receive every message published to the exchange
key => "#"
threads => 1
codec => "plain"
user => "user"
password => "pass"
# This is what exposes the broker headers/properties on the event as
# [@metadata][rabbitmq_headers] and [@metadata][rabbitmq_properties]
# (visible in the rubydebug output below).
metadata_enabled => true
}
}
filter {
# @metadata is deliberately never sent to outputs, so anything you want
# indexed in Elasticsearch must first be copied/moved into a regular
# event field. `rename` moves the whole hash as structured data.
mutate {
rename => {
"[@metadata][rabbitmq_headers]" => "rabbitmq_headers"
"[@metadata][rabbitmq_properties]" => "rabbitmq_properties"
}
}
}
output {
# metadata => true makes rubydebug print @metadata for debugging only;
# it does not cause @metadata to be shipped to other outputs.
stdout { codec => rubydebug { metadata => true } }
# `hosts` expects a quoted string or an array of strings; an unquoted
# bareword like `localhost` is not a valid config value.
elasticsearch { hosts => ["localhost"] }
}
I can see the headers in the stdout (rubydebug) output:
{
"@timestamp" => 2017-07-11T15:53:28.629Z,
"@metadata" => {
"rabbitmq_headers" => { "My_Header" => "My_value"
},
"rabbitmq_properties" => {
"content-encoding" => "utf-8",
"correlation-id" => "785901df-e954-4735-a9cf-868088fdac87",
"content-type" => "application/json",
"exchange" => "My_Exchange",
"routing-key" => "123-456",
"consumer-tag" => "amq.ctag-ZtX3L_9Zsz96aakkSGYzGA"
}
},
"@version" => "1",
"message" => "{...}"
}
Is there some filter (grok, mutate, kv, etc.) which can copy these values to Tags in the message sent to ElasticSearch?