Filter JSON message in kibana

Hi,
My JSON file looks like this. I want a filter on Kibana for the JSON object I am passing through logstash. For example

{
  "_index": "mule-logs",
  "_type": "_doc",
  "_id": "S_gavoIBvzaqpVvKmITM",
  "_score": 1,
  "_source": {
    "message": "{a=1, b=2, type=error}",
    "@version": "1",
    "threadId": 43,
    "host": "127.0.0.1",
    "threadPriority": 5,
    "instant": {
      "nanoOfSecond": 223331800,
      "epochSecond": 1661046790
    },
    "loggerFqcn": "org.apache.logging.slf4j.Log4jLogger",
    "loggerName": "org.mule.runtime.core.internal.processor.LoggerMessageProcessor",
    "port": 64719,
    "level": "INFO",
    "@timestamp": "2022-08-21T01:53:10.223Z",
    "thread": "[MuleRuntime].uber.01: [test].testFlow12.CPU_LITE @29903cf9",
    "endOfBatch": true
  },
  "fields": {
    "loggerFqcn": [
      "org.apache.logging.slf4j.Log4jLogger"
    ],
    "loggerFqcn.keyword": [
      "org.apache.logging.slf4j.Log4jLogger"
    ],
    "level": [
      "INFO"
    ],
    "instant.epochSecond": [
      1661046790
    ],
    "endOfBatch": [
      true
    ],
    "@version.keyword": [
      "1"
    ],
    "loggerName.keyword": [
      "org.mule.runtime.core.internal.processor.LoggerMessageProcessor"
    ],
    "thread": [
      "[MuleRuntime].uber.01: [test].testFlow12.CPU_LITE @29903cf9"
    ],
    "threadPriority": [
      5
    ],
    "message": [
      "{a=1, b=2, type=error}"
    ],
    "thread.keyword": [
      "[MuleRuntime].uber.01: [test].testFlow12.CPU_LITE @29903cf9"
    ],
    "threadId": [
      43
    ],
    "instant.nanoOfSecond": [
      223331800
    ],
    "@timestamp": [
      "2022-08-21T01:53:10.223Z"
    ],
    "level.keyword": [
      "INFO"
    ],
    "port": [
      64719
    ],
    "message.keyword": [
      "{a=1, b=2, type=error}"
    ],
    "host": [
      "127.0.0.1"
    ],
    "@version": [
      "1"
    ],
    "host.keyword": [
      "127.0.0.1"
    ],
    "loggerName": [
      "org.mule.runtime.core.internal.processor.LoggerMessageProcessor"
    ]
  }
}

In the above JSON object I want to filter on message.type = "error" and message.type = "success".

Currently I am using the configuration below in Logstash.

# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.

input {
  tcp {
    port => 4560
    codec => json
  }
}
filter {   
  date {
    match => [ "timeMillis", "UNIX_MS" ]
  }
}
output {
	elasticsearch {
		hosts => ["localhost:9200"]
		index => "mule-logs"	
	}	
}

You need to parse your message field into an actual object using the Logstash JSON filter:

The JSON filter takes an existing field which contains JSON and expands it into an actual data structure within the Logstash event. (Note: in your sample document the message value is "{a=1, b=2, type=error}", which is key=value pairs rather than strict JSON — if that is the real format, the json filter will report a parse failure and the kv filter is likely the better fit. Verify the exact format your application emits.)

Once the object is correctly parsed and with the appropriate mapping previously set up for your index, all content should be accessible from Kibana.

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.