Grok pattern for dynamic JSON

Hi,

My data comes in live from Spring Boot logs. On successful execution, the log message is a JSON payload (and each JSON payload has a different set of fields); when there is an error, the log message is a plain string.

Here is some sample data:

  • 2022-06-15 14:16:34 [39865] INFO c.t.T.S.Controller.ApiLoggingFilter - {"requestId":"3c761824-0fae-490c-9cdb-0c5a2f08a05d","type":"response","body":{"lastTransactionDateTime":"2022-06-15T09:11:05"},"contentType":"application/json","status":200}

  • 2022-06-15 14:20:47 [39865] INFO c.n.d.s.r.aws.ConfigClusterResolver - Resolving eureka endpoints via configuration

  • 2022-06-15 14:21:33 [39865] INFO c.t.T.S.Controller.ApiLoggingFilter - {"method":"POST","requestId":"ee325017-6a10-4821-a143-54d24b8c822f","requestURL":"http://localhost:8085/transaction/","type":"request","body":"{"transactionFrom": 71,"transactionTo": 1,"purchaseAmount": 25352,"purchaseUnits": 2,"propertyId": 12,"paymentMethodId": 1,"proofOfPurchaseId": 7,"proofOfDeliveryId": 9}"}

  • 2022-06-15 14:21:33 [39865] WARN com.zaxxer.hikari.pool.PoolBase - HikariPool-1 - Failed to validate connection com.mysql.cj.jdbc.ConnectionImpl@66888ec4 (No operations allowed after connection closed.). Possibly consider using a shorter maxLifetime value.
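One thing worth pointing out about the third sample: its payload is not valid JSON on its own, because the body object is wrapped in quotes without the inner quotes being escaped, so a JSON parser will reject that line regardless of what the grok pattern does. For those "request" lines to parse, the application would presumably need to log body as a nested object, roughly like this (a sketch of the intended shape, abridged, not actual output from the service; a Logstash-side workaround is sketched after the config below):

	{"method":"POST","requestId":"ee325017-6a10-4821-a143-54d24b8c822f","type":"request","body":{"transactionFrom":71,"transactionTo":1,"purchaseAmount":25352}}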

My Logstash config file is:

input {
	file {
		path => "/home/ubuntu/transaction_service.log"
		start_position => "beginning"
	}
}
filter {
	grok {
		# NOTSPACE matches the logger name (it never contains spaces);
		# result keeps its surrounding braces so the json filter below
		# receives a complete JSON object, and the same pattern still
		# matches the plain-text Eureka/Hikari lines.
		match => {
			"message" => "(?<timestamp>%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}) \[%{NUMBER:transactionId:int}\] %{LOGLEVEL:level} %{NOTSPACE:class} - %{GREEDYDATA:result}"
		}
	}
	# Only run the json filter when the payload actually looks like JSON,
	# so the plain-text lines are not tagged _jsonparsefailure.
	if [result] =~ /^\{/ {
		json {
			source => "result"
		}
	}
	date {
		# Single colon between hours and minutes, matching the captured field.
		match => ["timestamp", "yyyy-MM-dd HH:mm:ss"]
	}
}
output {
	elasticsearch {
		hosts => "http://localhost:9200"
		# Index names may not contain spaces, colons, or uppercase letters,
		# so index per day rather than embedding the full timestamp.
		index => "logindex-%{+yyyy.MM.dd}"
	}
}
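As noted under the samples, the "request" payloads are malformed because body is quoted without escaping. If the application's logging cannot be changed, a mutate/gsub step placed before the json filter might rescue those events by stripping the quotes that wrap the body object. This is only a sketch: it assumes body is always the last field in the payload and always a flat object, and fixing the serialization in ApiLoggingFilter is the more robust route.

filter {
	mutate {
		# Turn "body":"{...}" into "body":{...} so the payload becomes
		# valid JSON before the json filter runs.
		gsub => [
			"result", '"body":"\{', '"body":{',
			"result", '\}"\}$', '}}'
		]
	}
}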
