After upgrading to 7.6, Filebeat doesn't work as expected with the config from 7.5.x

Hi there.

I'm using Filebeat 7.5.2 to read pod/container logs on GKE (k8s) using the official Elastic Helm charts. After upgrading to 7.6.0 it doesn't read pod/container logs anymore. There isn't any error in the Filebeat logs (it runs as a DaemonSet on all k8s nodes). No other config was changed in any environment, and I've double-checked that the log files from the pods are still mounted correctly inside the Filebeat pods.

My Filebeat config:
# Static inputs: four Google Pub/Sub subscriptions. Each input decodes the
# JSON payload carried in the "message" field into the event root, drops the
# raw message string, and tags the event with a topic_name that
# output.elasticsearch uses for index routing.
filebeat.inputs:

- type: google-pubsub
  project_id: xxxxxx
  topic: xxxxxxxx
  subscription.name: xxxxx
  subscription.create: false
  credentials_file: xxxx.json
  processors:
    # Parse the JSON body into top-level fields.
    - decode_json_fields:
        process_array: true
        max_depth: 20
        target: ""
        overwrite_keys: true
        fields: ["message"]
    # Remove the raw JSON string once it has been decoded.
    - drop_fields:
        fields: ["message"]
    # topic_name drives the per-topic index name in output.elasticsearch.
    - add_fields:
        target: ""  # normalized from '' to match decode_json_fields
        fields:
          topic_name: xxxxxx

- type: google-pubsub
  project_id: xxxxxx
  topic: xxx
  subscription.name: xxxx
  subscription.create: false
  credentials_file: xxxxxxx.json
  processors:
    # Parse the JSON body into top-level fields.
    - decode_json_fields:
        process_array: true
        max_depth: 20
        target: ""
        overwrite_keys: true
        fields: ["message"]
    # Remove the raw JSON string once it has been decoded.
    - drop_fields:
        fields: ["message"]
    # topic_name drives the per-topic index name in output.elasticsearch.
    - add_fields:
        target: ""  # normalized from '' to match decode_json_fields
        fields:
          topic_name: xxxxxx

- type: google-pubsub
  project_id: xxxxx
  topic: xxxxxx
  subscription.name: xxxxxx
  subscription.create: false
  credentials_file: xxxxxxx.json
  processors:
    # Parse the JSON body into top-level fields.
    - decode_json_fields:
        process_array: true
        max_depth: 20
        target: ""
        overwrite_keys: true
        fields: ["message"]
    # Remove the raw JSON string once it has been decoded.
    - drop_fields:
        fields: ["message"]
    # topic_name drives the per-topic index name in output.elasticsearch.
    - add_fields:
        target: ""  # normalized from '' to match decode_json_fields
        fields:
          topic_name: xxxxxxx

- type: google-pubsub
  project_id: xxxxxxx
  topic: xxxxxxxx
  subscription.name: xxxxxxxx
  subscription.create: false
  credentials_file: xxxx.json
  processors:
    # Parse the JSON body into top-level fields.
    - decode_json_fields:
        process_array: true
        max_depth: 20
        target: ""
        overwrite_keys: true
        fields: ["message"]
    # Remove the raw JSON string once it has been decoded.
    - drop_fields:
        fields: ["message"]
    # topic_name drives the per-topic index name in output.elasticsearch.
    - add_fields:
        target: ""  # normalized from '' to match decode_json_fields
        fields:
          topic_name: xxxxx

# Autodiscover: launch per-container inputs based on Kubernetes pod metadata.
# This is the stanza that stopped producing events after the 7.5.x -> 7.6.0
# upgrade, so the 7.6-sensitive settings are flagged below.
filebeat.autodiscover:
  providers:
    - type: kubernetes
      templates:
        # Template 1: only pods labelled elastic_logs/json="true".
        - condition:
            equals:
              kubernetes.labels.elastic_logs/json: "true"
            # regexp:
            #     kubernetes.container.name: "auth.*|upms.*"
          config:
            # stdout stream: JSON-decoded, no multiline joining.
            - type: container
              stream: stdout
              # NOTE(review): the container-input docs recommend globbing
              # /var/log/containers/*${data.kubernetes.container.id}.log
              # (symlinks resolved via symlinks: true). Verify that this
              # docker-root glob still matches files on 7.6 nodes — a path
              # mismatch here would produce exactly the "no logs, no errors"
              # symptom reported above.
              paths:
                - "/var/lib/docker/containers/${data.kubernetes.container.id}/*.log"
              encoding: utf-8
              symlinks: true
              scan_frequency: 1s
              # multiline.pattern: '^[[:space:]]+(\bat\b|\.{3})|^Caused by:'
              # multiline.negate: false
              # multiline.match: after
              processors:
                # Parse the JSON log line into top-level fields.
                - decode_json_fields:
                    process_array: true
                    max_depth: 10
                    target: ""
                    overwrite_keys: true
                    fields: ["message"]
                # - add_cloud_metadata:
                # - add_docker_metadata:
                #     labels.dedot: true
                # NOTE(review): autodiscover already enriches events with
                # kubernetes metadata; add_kubernetes_metadata here is likely
                # redundant — confirm it is still needed/harmless on 7.6.
                - add_kubernetes_metadata:
                    labels.dedot: true
                    annotations.dedot: true
            # stderr stream: same decoding, plus multiline joining for
            # Java-style stack traces ("at ...", "...", "Caused by:").
            - type: container
              stream: stderr
              # NOTE(review): same path concern as the stdout input above.
              paths:
                - "/var/lib/docker/containers/${data.kubernetes.container.id}/*.log"
              encoding: utf-8
              symlinks: true
              scan_frequency: 1s
              multiline.pattern: '^[[:space:]]+(\bat\b|\.{3})|^Caused by:'
              multiline.negate: false
              multiline.match: after
              processors:
                # Parse the JSON log line into top-level fields.
                - decode_json_fields:
                    process_array: true
                    max_depth: 10
                    target: ""
                    overwrite_keys: true
                    fields: ["message"]
                # - add_cloud_metadata:
                # - add_docker_metadata:
                #     labels.dedot: true
                # NOTE(review): see redundancy note on the stdout input.
                - add_kubernetes_metadata:
                    labels.dedot: true
                    annotations.dedot: true
        # Template 2: everything in the haproxy namespace goes through the
        # haproxy module instead of a raw container input.
        - condition:
            equals:
              kubernetes.namespace: haproxy
          config:
            - module: haproxy
              log:
                input:
                  type: container
                  # NOTE(review): same path concern as the inputs above.
                  paths:
                    - "/var/lib/docker/containers/${data.kubernetes.container.id}/*.log"
                  encoding: utf-8
                  symlinks: true
                  scan_frequency: 1s
                  # multiline.pattern: '^[[:space:]]+(\bat\b|\.{3})|^Caused by:'
                  # multiline.negate: false
                  # multiline.match: after
                  processors:
                    # - decode_json_fields:
                    #     process_array: true
                    #     max_depth: 10
                    #     target: ""
                    #     overwrite_keys: true
                    #     fields: ["message"]
                    # - add_cloud_metadata:
                    # - add_docker_metadata:
                    #     labels.dedot: true
                    - add_kubernetes_metadata:
                        labels.dedot: true
                        annotations.dedot: true

#logging.level: debug
#logging.selectors: ["*"]

# Ship Filebeat's own monitoring data to the monitoring cluster.
# Use a real YAML boolean rather than the string "true".
monitoring.enabled: true
monitoring.elasticsearch.username: ${beats-username}
monitoring.elasticsearch.password: ${beats-password}

# Internal memory queue: buffer up to 10k events; flush a batch once 2048
# events are ready or after 1s, whichever comes first. (Nested form is
# equivalent to the dotted flush.* keys.)
queue.mem:
  events: 10000
  flush:
    min_events: 2048
    timeout: 1s

# No Kibana dashboards are loaded; only the index template is managed.
setup.dashboards.enabled: false
# Custom template matching the flb-k8s-* indices written by the output.
# (Nested settings form is equivalent to the dotted settings.index keys.)
setup.template:
  enabled: true
  overwrite: false
  name: flb-k8s
  pattern: "flb-k8s-*"
  settings:
    index:
      number_of_shards: 3
      number_of_replicas: 0
      number_of_routing_shards: 30
      refresh_interval: "30s"
      translog.durability: "async"
      routing.allocation.require.node_type: "hot"


# ILM disabled: index naming is controlled manually via setup.template and
# the custom indices list in output.elasticsearch.
setup.ilm:
  enabled: false

#output.console.pretty: true

# Send events to Elasticsearch, routed to per-source indices.
output.elasticsearch:
  worker: 2
  # hosts is a list in the reference config; quoting the URL also keeps the
  # scalar unambiguous for any YAML tooling.
  hosts: ["http://xxxxxx:9200"]
  username: ${filebeat-elastic-username}
  password: ${filebeat-elastic-password}
  bulk_max_size: 5000
  # First matching rule wins: Pub/Sub events go to per-topic indices,
  # container logs to per-namespace indices.
  indices:
    - index: "flb-k8s-pubsub-%{[topic_name]}"
      when.contains:
        input.type: "google-pubsub"
    - index: "flb-k8s-%{[kubernetes.namespace]}"
      when.contains:
        input.type: "container"

# Kibana endpoint used by setup commands (dashboard loading is disabled
# above); credentials are currently commented out.
setup.kibana:
  host: "https://xxxxxx:443"
  #username: ${filebeat_kibana_user}
  #password: ${filebeat_kibana_pwd}

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.