I am using the following Filebeat config in OCP (OpenShift Container Platform):
    filebeat.autodiscover:
      providers:
        - type: kubernetes
          node: ${NODE_NAME}
          templates:
            - condition:
                equals:
                  kubernetes.namespace: dev
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  multiline.type: pattern
                  multiline.pattern: '^\]$'
                  multiline.negate: false
                  multiline.match: before
                  tags: ["finacle-dev-env"]
            - condition:
                equals:
                  kubernetes.namespace: st
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  multiline.type: pattern
                  multiline.pattern: '^\]$'
                  multiline.negate: false
                  multiline.match: before
                  tags: ["finacle-st-env"]
            - condition:
                equals:
                  kubernetes.namespace: sit
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  multiline.type: pattern
                  multiline.pattern: '^\]$'
                  multiline.negate: false
                  multiline.match: before
                  tags: ["finacle-sit-env"]
            - condition:
                and:
                  - equals:
                      kubernetes.namespace: cp4i
                  - contains:
                      kubernetes.pod.name: quickstart
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  tags: ["mq-dev-env"]
            - condition:
                and:
                  - equals:
                      kubernetes.namespace: cp4i-st
                  - contains:
                      kubernetes.pod.name: aceqmgrst
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  tags: ["mq-st-env"]
            - condition:
                and:
                  - equals:
                      kubernetes.namespace: cp4i-sit
                  - contains:
                      kubernetes.pod.name: aceqmgrsit
              config:
                - type: container
                  paths:
                    - /var/log/containers/*-${data.kubernetes.container.id}.log
                  tags: ["mq-sit-env"]
            
    processors:
      - add_kubernetes_metadata:
          host: ${NODE_NAME}
          matchers:
          - logs_path:
              logs_path: "/var/log/containers/"
                
    output.logstash:
      when:
        contains:
          tags: finacle-dev-env
      hosts: ["1.2.3.4:5044"]
      ssl.enabled: true
      ssl.supported_protocols: [TLSv1.2]
      ssl.verification_mode: certificate
      ssl.certificate_authorities: ["/etc/filebeat/ca.pem"]
      ssl.certificate: "/etc/filebeat/publicCert.pem"
      ssl.key: "/etc/filebeat/encrypted.key"
      ssl.key_passphrase: '****'
      when:
        contains:
          tags: mq-dev-env
      hosts: ["1.2.3.4:5046"]
      ssl.enabled: true
      ssl.supported_protocols: [TLSv1.2]
      ssl.verification_mode: certificate
      ssl.certificate_authorities: ["/etc/filebeat/ca.pem"]
      ssl.certificate: "/etc/filebeat/publicCert.pem"
      ssl.key: "/etc/filebeat/encrypted.key"
      ssl.key_passphrase: '***'
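To spell out the intent of the two when: blocks above: events tagged finacle-dev-env should go to 1.2.3.4:5044 and events tagged mq-dev-env to 1.2.3.4:5046, with the same TLS settings. Stripped down to a single destination with no condition at all, the output would be this sketch (same values as above):
    output.logstash:
      hosts: ["1.2.3.4:5046"]
      ssl.enabled: true
      ssl.supported_protocols: [TLSv1.2]
      ssl.verification_mode: certificate
      ssl.certificate_authorities: ["/etc/filebeat/ca.pem"]
      ssl.certificate: "/etc/filebeat/publicCert.pem"
      ssl.key: "/etc/filebeat/encrypted.key"
      ssl.key_passphrase: '***'
What I am unsure about is whether output.logstash accepts when: conditions like this at all, or whether the tag-based routing has to happen somewhere else (for example in the Logstash pipeline).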
And the following Logstash pipeline:
    input {
        beats {
            port => "5046"
            ssl => true
            ssl_key => "/etc/logstash/ssl/p8keyPass.pem"
            ssl_key_passphrase => "*****"
            ssl_certificate => "/etc/logstash/ssl/publicCert.pem"
            ssl_certificate_authorities => ["/etc/logstash/ssl/ca.pem"]
            ssl_verify_mode => "peer"
            tls_min_version => "1.2"
            tls_max_version => "1.2"
            ssl_peer_metadata => true
            tags => ["nbg-cosmos-mq-product-ocp-dev"]
            codec => 'json'
        }
    }
    filter {
      date {
        match => [ "ibm_datetime", "UNIX", "UNIX_MS", "yyyyMMdd'T'HH:mm:ss.SSSZ", "yyyyMMdd'T'HH:mm:ss.SSSZZ", "dd/MMM/yyyy:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss ZZ", "dd/MMM/yyyy:HH:mm:ss", "yyyy/MM/dd HH:mm:ss", "ISO8601" ]
        target => "@timestamp"
        locale => "en"
        timezone => "Europe/Athens"
        remove_field => [ "timestamp" ]
      }
    }
    output {
        stdout { codec => rubydebug }
    #    if "nbg-cosmos-mq-product-ocp-dev" in [tags] {
            elasticsearch {
                ecs_compatibility => "disabled"
                hosts => ["https://elk-es-dev-1:9200","https://elk-es-dev-2:9200"]
                user => "elastic"
                password => "*****"
                ssl_certificate_verification => false
                truststore => "/etc/logstash/http.p12"
                truststore_password => ''
                #index => "nbg-cosmos-mq-product-ocp-dev-%{+yyyy.MM.dd}"
                ilm_enabled => "true"
                ilm_rollover_alias => "nbg-cosmos-mq-product-ocp-dev"
                ilm_pattern => "000001"
                ilm_policy => "nbg-cosmos-mq-product-ocp-dev"
            }
    #    }
    }
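The commented-out lines in the output block are where I eventually want to route by tag on the Logstash side as well; uncommented, that conditional would look roughly like this (elasticsearch settings shortened, identical to the pipeline above):
    output {
        if "nbg-cosmos-mq-product-ocp-dev" in [tags] {
            elasticsearch {
                hosts => ["https://elk-es-dev-1:9200","https://elk-es-dev-2:9200"]
                # user/password, ssl_* and ilm_* settings as in the pipeline above
            }
        }
    }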
I get the following exception in Logstash:
[2021-06-03T09:29:01,901][DEBUG][org.logstash.beats.BeatsHandler][nbg-cosmos-mq-product-ocp-dev][0e79a8bc794515ffb7bba48b163dcf559df30ce9da6d3ae3524e97efc67de081] [local: 10.120.88.13:5046, remote: 10.120.90.15:58886] Handling exception: java.lang.NullPointerException (caused by: java.lang.NullPointerException)
java.lang.NullPointerException: null
        at java.util.concurrent.ConcurrentHashMap.get(java/util/concurrent/ConcurrentHashMap.java:936) ~[?:?]
        at org.logstash.FieldReference.from(org/logstash/FieldReference.java:117) ~[logstash-core.jar:?]
        at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
        at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(jdk/internal/reflect/NativeMethodAccessorImpl.java:62) ~[?:?]
        at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43) ~[?:?]
        at java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566) ~[?:?]
        at org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:456) ~[jruby-complete-9.2.16.0.jar:?]
        at org.jruby.javasupport.JavaMethod.invokeStaticDirect(org/jruby/javasupport/JavaMethod.java:368) ~[jruby-complete-9.2.16.0.jar:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.set_nested(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb:163) ~[?:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.RUBY$method$set_nested$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java/lib/logstash/inputs/beats//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb) ~[?:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.extract_tls_peer(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb:145) ~[?:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.RUBY$method$extract_tls_peer$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java/lib/logstash/inputs/beats//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb) ~[?:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.onNewMessage(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb:39) ~[?:?]
        at usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java.lib.logstash.inputs.beats.message_listener.RUBY$method$onNewMessage$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_5_dot_0/gems/logstash_minus_input_minus_beats_minus_6_dot_1_dot_3_minus_java/lib/logstash/inputs/beats//usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-beats-6.1.3-java/lib/logstash/inputs/beats/message_listener.rb) ~[?:?]
        at org.logstash.beats.BeatsHandler.channelRead0(org/logstash/beats/BeatsHandler.java:52) ~[logstash-input-beats-6.1.3.jar:?]
        at org.logstash.beats.BeatsHandler.channelRead0(org/logstash/beats/BeatsHandler.java:12) ~[logstash-input-beats-6.1.3.jar:?]
        at io.netty.channel.SimpleChannelInboundHandler.channelRead(io/netty/channel/SimpleChannelInboundHandler.java:99) ~[netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(io/netty/channel/AbstractChannelHandlerContext.java:379) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(io/netty/channel/AbstractChannelHandlerContext.java:365) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(io/netty/channel/AbstractChannelHandlerContext.java:357) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(io/netty/handler/codec/ByteToMessageDecoder.java:324) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.handler.codec.ByteToMessageDecoder.channelRead(io/netty/handler/codec/ByteToMessageDecoder.java:296) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(io/netty/channel/AbstractChannelHandlerContext.java:379) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext.access$600(io/netty/channel/AbstractChannelHandlerContext.java:61) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.channel.AbstractChannelHandlerContext$7.run(io/netty/channel/AbstractChannelHandlerContext.java:370) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.util.concurrent.DefaultEventExecutor.run(io/netty/util/concurrent/DefaultEventExecutor.java:66) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(io/netty/util/concurrent/SingleThreadEventExecutor.java:989) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.util.internal.ThreadExecutorMap$2.run(io/netty/util/internal/ThreadExecutorMap.java:74) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at io.netty.util.concurrent.FastThreadLocalRunnable.run(io/netty/util/concurrent/FastThreadLocalRunnable.java:30) [netty-all-4.1.49.Final.jar:4.1.49.Final]
        at java.lang.Thread.run(java/lang/Thread.java:834) [?:?]
[2021-06-03T09:29:01,905][WARN ][io.netty.channel.DefaultChannelPipeline][nbg-cosmos-mq-product-ocp-dev][0e79a8bc794515ffb7bba48b163dcf559df30ce9da6d3ae3524e97efc67de081] An exceptionCaught() event was fired, and it reached at the tail of the pipeline. It usually means the last handler in the pipeline did not handle the exception.
java.lang.NullPointerException: null
I use version 7.13.0 for Filebeat, Logstash, and Elasticsearch on Linux.
Is my syntax for conditional output in Filebeat (see output.logstash) correct?
Why is this exception thrown? From the stack trace the NullPointerException seems to come from extract_tls_peer / set_nested in the beats input's message_listener.rb, which I assume is only reached because ssl_peer_metadata => true is set, but I cannot tell what is actually null.