Multiline codec issue on Logstash

I get errors if I add the multiline codec under "filter", but it works if I add it under "input". Can anyone give me hints?

input {
  file {
    path => "/tmp/input.log"
    #codec => multiline {
    #  pattern => "^%{TIMESTAMP_ISO8601}"
    #  negate => true
    #  what => "previous"
    #}
  }
}

filter {
  codec => multiline {
    pattern => "^%{TIMESTAMP_ISO8601}"
    negate => true
    what => "previous"
  }

  grok {
    match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} \[%{DATA:app_name},%{DATA:trace_id},%{DATA:seq_no},%{DATA:reference},%{DATA:run_date},%{DATA:message_type},%{DATA:message_code},%{DATA:service_code}\] %{WORD:level}%{SPACE}+\[%{DATA:thread_id}\] %{DATA:class}\[%{DATA:line}\] - %{GREEDYDATA:message}" }
    overwrite => [ "message" ]
  }

}

output {
  file {
    path => "/tmp/output.log"
  }

  elasticsearch {
    hosts => ["http://localhost:9200"]
    user => "elastic"
    password => "XXXXXXXXXXXX"
  }
}

Logstash logs:

[2023-05-24T20:08:12,433][INFO ][logstash.runner ] Log4j configuration path used is: /etc/logstash/log4j2.properties
[2023-05-24T20:08:12,541][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"8.2.1", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.14.1+1 on 11.0.14.1+1 +indy +jit [linux-x86_64]"}
[2023-05-24T20:08:12,544][INFO ][logstash.runner ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -XX:+UseConcMarkSweepGC, -XX:CMSInitiatingOccupancyFraction=75, -XX:+UseCMSInitiatingOccupancyOnly, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -Djruby.jit.threshold=0, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, -Djruby.regexp.interruptible=true, -Djdk.io.File.enableADS=true, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
[2023-05-24T20:08:13,983][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2023-05-24T20:08:14,408][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \t\r\n], \"#\", \"{\" at line 8, column 9 (byte 73) after filter {\n  codec ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:189:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:48:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:50:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:381:in `block in converge_state'"]}
[2023-05-24T20:08:14,564][INFO ][logstash.runner ] Logstash shut down.
[2023-05-24T20:08:14,574][FATAL][org.logstash.Logstash ] Logstash stopped processing because of an error: (SystemExit) exit
org.jruby.exceptions.SystemExit: (SystemExit) exit
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:747) ~[jruby.jar:?]
at org.jruby.RubyKernel.exit(org/jruby/RubyKernel.java:710) ~[jruby.jar:?]
at usr.share.logstash.lib.bootstrap.environment.<main>(/usr/share/logstash/lib/bootstrap/environment.rb:91) ~[?:?]
^C

There is no codec option under filter; codecs do not exist there. You need to use the multiline codec in the input, not in the filter block.
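
For reference, a minimal sketch with the multiline codec moved into the file input — this is just your own commented-out block, uncommented:

input {
  file {
    path => "/tmp/input.log"
    codec => multiline {
      pattern => "^%{TIMESTAMP_ISO8601}"
      negate => true
      what => "previous"
    }
  }
}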

Is it possible to use the multiline codec in a kafka input, like below?

input {
    kafka {
          bootstrap_servers => "10.10.10.10:9093"
          client_id => "logs1-1"
          group_id => "logs1-1"
          auto_offset_reset => "latest"
          topics_pattern => "sit.*"
          codec => json { charset => "UTF-8" }
          decorate_events => true
          security_protocol => "SSL"
          ssl_keystore_location => "/data/ssl/server.keystore.jks"
          ssl_keystore_password => "XXXX"
          ssl_keystore_type => "JKS"
          ssl_truststore_location => "/data/ssl/server.truststore.jks"
          ssl_truststore_password => "XXXX"
          ssl_truststore_type => "JKS"
          ssl_key_password => "XXXX"
          ssl_endpoint_identification_algorithm => ""

          codec => multiline {
            pattern => "^%{TIMESTAMP_ISO8601}"
            negate => true
            what => "previous"
          }
    }
}

You can use a codec in almost every input, but only one per input. In the input you shared you are already using a json codec, so you cannot also use the multiline codec.
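
As a sketch, if the messages on a topic were plain text rather than json, you would keep only the multiline codec; everything except the codec is unchanged from your input:

input {
    kafka {
          bootstrap_servers => "10.10.10.10:9093"
          # ... same client, group, topic, and SSL settings as above ...
          codec => multiline {
            pattern => "^%{TIMESTAMP_ISO8601}"
            negate => true
            what => "previous"
          }
    }
}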

Also, if your messages are in json format it does not make much sense to use the multiline codec you shared; it would never match.
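
For example, a json event from Kafka arrives as a single line shaped roughly like this (a hypothetical sample, not your actual data):

{"timestamp":"2023-05-25 19:23:23,790","level":"DEBUG","message":"..."}

A line starting with { can never match ^%{TIMESTAMP_ISO8601}, so with negate => true and what => "previous" every json line would be treated as a continuation of the previous event.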

It would help if you shared a sample of your messages and what you are trying to achieve.

Sorry, I was being misleading. Below are part of my logstash.conf and a sample of the application logs for your reference. One of my applications prints multiple lines when it hits an error, and I would like to send the whole error to Kibana. It's a raw message, not json format, and it is published to the k8s-testing topic. It doesn't work even though I have configured the multiline codec on the kafka input.

2023-05-25 19:23:23,790 [k8s-service,,,,,,,] DEBUG [redisson-netty-5-8] i.n.resolver.dns.DnsNameResolver[1352] - [id: 0x79fb57f8] RECEIVED: UDP [58940: /172.22.0.10:53], DatagramDnsResponse(from: /172.22.0.10:53, to: /0.0.0.0:49753, 58940, QUERY(0), NoError(0), RD AA)
DefaultDnsQuestion(sentinel-2.sentinel..svc.cluster.local. IN A)
DefaultDnsRawRecord(sentinel-2.sentinel..svc.cluster.local. 1 IN A 4B)
DefaultDnsRawRecord(OPT flags:0 udp:4096 0B)

input {
    kafka {
          bootstrap_servers => "10.10.10.10:9093"
          client_id => "logs1-1"
          group_id => "logs1-1"
          auto_offset_reset => "latest"
          topics_pattern => "uat*"
          codec => json { charset => "UTF-8" }
          decorate_events => true
          security_protocol => "SSL"
          ssl_keystore_location => "/data/ssl/server.keystore.jks"
          ssl_keystore_password => "XXXXXX"
          ssl_keystore_type => "JKS"
          ssl_truststore_location => "/data/ssl/server.truststore.jks"
          ssl_truststore_password => "XXXXXX"
          ssl_truststore_type => "JKS"
          ssl_key_password => "XXXXXX"

          ssl_endpoint_identification_algorithm  => ""
    }
    kafka {
          bootstrap_servers => "10.10.10.10:9093"
          client_id => "logs1-2"
          group_id => "logs1-2"
          auto_offset_reset => "latest"
          topics_pattern => "k8s*"
          #codec => json { charset => "UTF-8" }
          decorate_events => true
          security_protocol => "SSL"
          ssl_keystore_location => "/data/ssl/server.keystore.jks"
          ssl_keystore_password => "XXXXXX"
          ssl_keystore_type => "JKS"
          ssl_truststore_location => "/data/ssl/server.truststore.jks"
          ssl_truststore_password => "XXXXXX"
          ssl_truststore_type => "JKS"
          ssl_key_password => "XXXXXX"
          ssl_endpoint_identification_algorithm  => ""
		  
          codec => multiline {
            pattern => "^%{TIMESTAMP_ISO8601}"
            negate => true
            what => "previous"
          }
    }
}

filter {
    if [@metadata][kafka][topic] == "k8s-testing" {
        grok {
            match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} \[%{DATA:app_name},%{DATA:trace_id},%{DATA:seq_no},%{DATA:reference},%{DATA:run_date},%{DATA:message_type},%{DATA:message_code},%{DATA:service_code}\] %{WORD:level}%{SPACE}+\[%{DATA:thread_id}\] %{DATA:class}\[%{DATA:line}\] - %{GREEDYDATA:message}" }
            overwrite => [ "message" ]
        }
    }
}

output {
		if [@metadata][kafka][topic] =~ "k8s*" {
			elasticsearch {
				hosts => ["https://10.10.10.250:9200"]
				ssl => true
				ssl_certificate_verification => false
				cacert => "/etc/logstash/conf.d/kirk.pem"
				index => "%{[@metadata][kafka][topic]}-2-%{+YYYY.MM.dd}"
				user => "elastic"
				password => "XXXXXXXXXX"
			}
		} else if [@metadata][kafka][topic] =~ "uat*" {
			elasticsearch {
				hosts => ["https://10.10.10.250:9200"]
				ssl => true
				ssl_certificate_verification => false
				cacert => "/etc/logstash/conf.d/kirk.pem"
				index => "%{[@metadata][kafka][topic]}-2-%{+YYYY.MM.dd}"
				user => "elastic"
				password => "XXXXXXXXXX"
			}
		}
	}
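
For reference, against the first line of the log sample above I expect the grok pattern to produce roughly these fields (the empty comma-separated slots become empty strings):

timestamp => 2023-05-25 19:23:23,790
app_name  => k8s-service
trace_id / seq_no / reference / run_date / message_type / message_code / service_code => ""
level     => DEBUG
thread_id => redisson-netty-5-8
class     => i.n.resolver.dns.DnsNameResolver
line      => 1352
message   => [id: 0x79fb57f8] RECEIVED: UDP [58940: /172.22.0.10:53], ...

One thing to note: GREEDYDATA is .*, which does not cross newlines, so even after the multiline codec joins the continuation lines into one event, the pattern may need a (?m) prefix for the message field to keep the joined lines.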

Can anyone give me hints, please?
