Index for a single server

I tried to do it like this:
filebeat.yml

filebeat.inputs:
- type: docker
  containers.ids:
    - "*"
  fields:
    tags: ["dev"]

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

setup.template.settings:
  index.number_of_shards: 3

output.logstash:
  hosts: ["logstash_server:5044"]

logstash.conf

input {
    beats {
        port => 5044
    }
    http {
        host => "0.0.0.0"
        port => 8010
    }
}

filter {
    if [fields.tags] == "dev" {
      grok {
        match => {
            "message" => "%{TIMESTAMP_ISO8601:timestamp}\s(-%{DATA:module}-)\s%{LOGLEVEL:loglevel}\s(-%{DATA:dev_index}-)\s\[%{DATA:env}\]\s(-%{GREEDYDATA:message})"
        }
      }
    }
    else {
      grok {
        match => {
           "message" => "%{LOGLEVEL:loglevel}"
        }
      }
    }
}

output {
   if [fields.tags] == "dev" {
      elasticsearch {
        hosts => "elasticsearch:9200"
        index => "%{dev_index}-%{+YYYY.MM.dd}"
       }
   }
   else {
      elasticsearch {
        hosts => "elasticsearch:9200"
      }
   }
}
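
The idea is that for dev events the value grok captures into dev_index becomes part of the index name together with the date; for example, a hypothetical dev_index value of myservice should produce an index like:

myservice-2018.11.27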

The new index is not created. In Kibana I can see fields.tags, but the messages are broken up into pieces:
[screenshot from 2018-11-27 20-47-07]

What am I doing wrong?