logstash consuming JSON messages from Kafka and indexing them into Elasticsearch. we found that logstash version 8.0 is adding the additional field event with the value of the actual message.
could you please let me know how to ignore the field event without using mutate filter?
actual message is
{
"name": "test"
}
Doc indexed into Elasticsearch from logstash version 8.0
{
"_index" : "test-2022.02.15-000001",
"_id" : "3-eHC38BJKmKsTb9eGMU",
"_score" : 1.0,
"_source" : {
"name" : "test",
"@version" : "1",
"event" : {
"original" : "{\"name\": \"test\"}"
},
"@timestamp" : "2022-02-18T06:31:41.467701Z"
}
Doc indexed into Elasticsearch from logstash version 7.16.2
{
"_index" : "test-2022.02.15-000001",
"_id" : "4OeKC38BJKmKsTb9xWPN",
"_score" : 1.0,
"_source" : {
"@timestamp" : "2022-02-18T06:35:18.018Z",
"name" : "test",
"@version" : "1"
}
}
configuration
input
{
kafka
{
bootstrap_servers => ["hostname:9093"]
topics => ["acl"]
ssl_keystore_password => "changeme"
ssl_truststore_password => "changeme"
ssl_keystore_location => "/opt/logstash/ssl/keystore.jks"
ssl_truststore_location => "/opt/logstash/ssl/truststore.jks"
security_protocol => "SSL"
client_id => "client"
group_id => "group"
enable_auto_commit => "true"
auto_offset_reset => "latest"
check_crcs => "false"
decorate_events => true
consumer_threads => "1"
codec => "json"
}
}
output {
elasticsearch {
hosts => ["https://hostname:9200"]
user => "user"
password => "password"
ilm_enabled => true
ilm_rollover_alias => "alias"
ilm_policy => "ilm"
ssl => true
cacert => "ca.pem"
ssl_certificate_verification => true
manage_template => false
}
}
logstash.yml
node.name: hostname
path.data: /data/logstash
config.reload.automatic: True
config.reload.interval: 30
queue.type: persisted
path.queue: /data/logstash/queue
queue.page_capacity: 500mb
queue.max_events: 0
queue.max_bytes: 1gb
queue.checkpoint.acks: 0
queue.checkpoint.writes: 0
queue.checkpoint.interval: 0
http.host: 127.0.0.1
http.port: 9600
log.level: info
path.logs: /data/logs/logstsah