Hi, I'm trying to parse logs in Logstash. My logs are in JSON, for example:
{"timestamp":"2021-02-25T15:20:38.490Z","logLevel":"INFO","serviceName":"xxx","transactionId":"0000-0000-000","elapsedTime":820,"endTime":"","url":"","statusCode":"","statusMessage":"","body":{},"query":{"queryType":"ABCD","dataSource":"stp","dimensions":,"intervals":["2021-02-23/2021-02-26"],"granularity":"all","toInclude":{"type":"none"},"context":{"queryId":"0000-0000-0000-0000-124dfc23b124"}},"method":"","source":"stp","dataStore":"SQL-GRID","remoteAddress":"","remoteFamily":"","hostname":"https://hosturl:8005/abc","error":{},"dContextId":"0000-0000-0000-0000"}
{"timestamp":"2021-02-25T15:20:38.893Z","logLevel":"INFO","serviceName":"abc","transactionId":"0000-0000-000","elapsedTime":402,"endTime":"","url":"","statusCode":"","statusMessage":"","body":{},"query":{"queryType":"scan","dataSource":"stp","dimensions":,"intervals":["2021-02-24T00:00:00.000Z/2021-02-25T00:00:00.000Z"],"granularity":"all","limit":1,"filter":{"type":"selector","dimension":"mdn","value":"6060606060"},"metrics":,"context":{"queryId":"0000-0000-000"}},"method":"","source":"stp","dataStore":"sql-vgrid","remoteAddress":"","remoteFamily":"","hostname":"hosturl:8005/abc","error":{},"dContextId":"0000-0000-000-0000"}
{"timestamp":"2021-02-25T15:20:40.162Z","logLevel":"INFO","serviceName":"abc","transactionId":"0000-0000-000-0000","elapsedTime":1266,"endTime":"","url":"","statusCode":"","statusMessage":"","body":{},"query":{"queryType":"scan","dataSource":"stp","dimensions":null,"intervals":["2021-01-26T00:00:00.000/2021-02-26T00:00:00.000"],"granularity":"all","limit":1000,"filter":{"type":"selector","dimension":"SUBSCRIBER_ID","value":"6060606060"},"metrics":,"context":{"queryId":"0000-0000-0000-0000"}},"method":"","source":"stp","dataStore":"abc-grid","remoteAddress":"","remoteFamily":"","hostname":"https://hosturl:8005/abc","error":{},"dContextId":"0000-0000-0000"}
{"timestamp":"2021-02-25T15:20:40.251Z","logLevel":"INFO","serviceName":"abc","transactionId":"0000-00000-0000-0000","elapsedTime":2601,"endTime":"2021-02-25T15:20:40.251Z","url":"/xyz?from=abc","statusCode":200,"statusMessage":"OK","body":{"startdate":"02-25-2021 12:00","unitoftime":"Days","unitoftimerange":31,"mdn":"0808080808","min":"","imsi":""},"query":{},"method":"POST","source":"","dataStore":"","remoteAddress":"::ffff:0.0.0.0","remoteFamily":"IPv6","hostname":"34554553534532","error":{}}
and I'm trying to parse them with the following configuration:
input {
file {
type => "log"
path => "/usr/share/dockerlogs/data/*.log"
# sincedb_path => "/tmp/sincedb"
start_position => "beginning"
codec => "json"
}
}
json {
source => "message"
target => "message"
# target => "message"
remove_field => "message"
}
grok{
match => {
"message" => "%{GREEDYDATA:msg}"
}
}
I am getting the error below:
LogStash::Event:0x7b2b3d47>], :response=>{"index"=>{"_index"=>"Index-2021.02.25", "_type"=>"_doc", "_id"=>"68bs2XcBjWnLiHWK8C5c", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [query] of type [text] in document with id '68bs2XcBjWnLiHWK8C5c'. Preview of field's value: '{filter={type=selector, dimension=SUBSCRIBER_ID, value=606060606}, intervals=[2021-01-26T00:00:00.000/2021-02-26T00:00:00.000], granularity=all, limit=1000, context={queryId=0000-0000-0000-0000}, metrics=, dataSource=stp, dimensions=null, queryType=scan}'", "caused_by"=>{"type"=>"illegal_state_exception", "reason"=>"Can't get text on a START_OBJECT at 1:340"}}}}}
I tried deleting the index and reindexing so that the fields map to the correct types, but had no luck. Can someone please help? Thanks in advance.