Sample input data:
[2016-09-21 09:59:51,577] ERROR fetching topic metadata for topics [Set(test)] from broker [ArrayBuffer(BrokerEndPoint(2,kafka.example.com,9092), BrokerEndPoint(3,kafka.example.com,9092), BrokerEndPoint(0,kafka.example.com,9092), BrokerEndPoint(1,kafka.example.com,9092))] failed (kafka.utils.CoreUtils$)
kafka.common.KafkaException: fetching topic metadata for topics [Set(test)] from broker [ArrayBuffer(BrokerEndPoint(2,kafka.example.com,9092), BrokerEndPoint(3,kafka.example.com,9092), BrokerEndPoint(0,kafka.example.com,9092), BrokerEndPoint(1,kafka.example.com9092))] failed
at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:73)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.async.DefaultEventHa
Caused by: java.nio.channels.ClosedChannelException
at kafka.network.BlockingChannel.send(BlockingChannel.scala:122)
at kafka.producer.SyncProducer.liftedTree1$1(SyncProducer.scala:77)
at kafka.producer.SyncProducer.kafka$producer$SyncProducer$$doSend(SyncProducer.scala:76)
I have created a few custom patterns with which I match ERROR|WARN|FATAL messages. Each FATAL log is followed by an exceptionclass:data line and a Caused by: class:data line.
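For reference, the custom patterns MYTIME and CAUSED used in the config below are assumed to be defined in /etc/logstash/patterns along these lines (shown only so the example is self-contained; the actual definitions may differ):

    # assumed contents of a file under /etc/logstash/patterns
    MYTIME %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}
    CAUSED Caused by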
From the above example I want to group the following lines:
- [2016-09-21 09:59:51,577] ERROR fetching topic metadata for topics [Set(test)] from broker [ArrayBuffer(BrokerEndPoint(2,kafka.example.com,9092),
- kafka.common.KafkaException: fetching topic metadata for topics [Set(test)] from broker [ArrayBuffer(BrokerEndPoint(2,kafka.example.com9092),
- Caused by: java.nio.channels.ClosedChannelException: connectionerror
My current Logstash config:

    input {
      file {
        path => "/var/log/ClosedChannelException.log"
        start_position => "beginning"
        ignore_older => 0
        type => "javaStackTrace"
        sincedb_path => "/dev/null"
      }
    }

    filter {
      if [type] == "javaStackTrace" {
        grok {
          patterns_dir => ["/etc/logstash/patterns"]
          match => { "message" => "\[%{MYTIME:errortime}] %{LOGLEVEL:level} %{DATA:errordata}$" }
          remove_tag => [ "_grokparsefailure" ]
          add_tag => [ "errortg" ]
          add_field => { "subtype" => "ErrorStatement" }
        }
        if "_grokparsefailure" in [tags] {
          grok {
            patterns_dir => ["/etc/logstash/patterns"]
            match => { "message" => "^%{JAVACLASS:exceptionclass}:%{DATA:exceptiondata}$" }
            add_field => { "subType" => "parentclass" }
            remove_tag => [ "_grokparsefailure" ]
            add_tag => [ "exceptiontg" ]
          }
          if "_grokparsefailure" in [tags] {
            grok {
              patterns_dir => ["/etc/logstash/patterns"]
              # check if the log line has 'Caused by'
              match => { "message" => "%{CAUSED:Causedby}: %{JAVACLASS:causedbyclass}: %{DATA:causedbydata}$" }
              add_field => { "subType" => "caused" }
              remove_tag => [ "_grokparsefailure" ]
              add_tag => [ "causedtg" ]
            }
          }
        }
      }
      mutate {
        remove_field => [ "message", "@version", "host", "@timestamp", "path" ]
      }
    }

    output {
      if "_grokparsefailure" in [tags] {
        csv {
          fields => [ "errortime", "level", "errordata", "exceptionclass", "exceptiondata", "causedbyclass", "causedbydata" ]
          path => "/tmp/grok_failures.csv"
        }
      } else {
        if "errortg" in [tags] {
          csv {
            fields => [ "errortime", "errordata", "level" ]
            path => "/etc/logstash/errortg.csv"
          }
        } else if "exceptiontg" in [tags] {
          csv {
            fields => [ "exceptionclass", "exceptiondata" ]
            path => "/etc/logstash/exceptiontg.csv"
          }
        } else {
          csv {
            fields => [ "causedbyclass", "causedbydata" ]
            path => "/etc/logstash/causedbytg.csv"
          }
        }
      }
    }
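(The config can be syntax-checked before running, e.g. logstash -f /etc/logstash/conf.d/stacktrace.conf --configtest on Logstash 2.x, or with --config.test_and_exit on 5.x and later; the config file path here is just an example.)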
I am thinking of using a global variable, e.g. id = 100, so that every time the patterns match in sequence, the same id = 100 is added to each line as a tag/field.
That way I can group the related lines by id.
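A minimal sketch of that idea with a ruby filter, placed after the grok blocks (this assumes a single pipeline worker, -w 1, so events keep their file order, and uses the newer event.get/event.set API; on older releases the event['field'] syntax applies instead):

    filter {
      ruby {
        # runs once at startup: initialise the shared counter
        init => "@group_id = 0"
        # runs per event: start a new group on each ERROR header line,
        # then stamp every line (header, exception, caused-by) with the current id
        code => "
          tags = event.get('tags') || []
          @group_id += 1 if tags.include?('errortg')
          event.set('group_id', @group_id)
        "
      }
    }

group_id would then be appended to each csv fields list so the rows can be joined back into one group.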
Any help or workaround would be appreciated.