I tried to do it like this:
filebeat.yml
filebeat.inputs:
- type: docker
  containers.ids:
    - "*"
  fields:
    tags: ["dev"]

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 3

output.logstash:
  hosts: ["logstash_server:5044"]
logstash.conf
input {
  beats {
    port => 5044
  }
  http {
    host => "0.0.0.0"
    port => 8010
  }
}

filter {
  if [fields.tags] == "dev" {
    grok {
      match => {
        "message" => "%{TIMESTAMP_ISO8601:timestamp}\s(-%{DATA:module}-)\s%{LOGLEVEL:loglevel}\s(-%{DATA:dev_index}-)\s\[%{DATA:env}\]\s(-%{GREEDYDATA:message})"
      }
    }
  }
  else {
    grok {
      match => {
        "message" => "%{LOGLEVEL:loglevel}"
      }
    }
  }
}

output {
  if [fields.tags] == "dev" {
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "%{dev_index}-%{+YYYY.MM.dd}"
    }
  }
  else {
    elasticsearch {
      hosts => "elasticsearch:9200"
    }
  }
}
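For reference, the first grok pattern only matches lines shaped like the following (a made-up example; the module, dev_index, env and message values are placeholders):

2019-03-15T10:23:45,123 -auth- INFO -myapp-dev- [dev] -user logged in

which would extract module=auth, loglevel=INFO, dev_index=myapp-dev, env=dev.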
The new index is not created. I do see fields.tags in Kibana, but the messages are broken into pieces.
What am I doing wrong?