I have a log similar to this and I need to get these lines and upload in kibana:
[03/07/2024 00:00:31.306][123456][newRequest][Request][Company|signature|L|id|123456789|0987654321]
[03/07/2024 00:00:31.634][123456][Company][Signature][Result][eyJhbGciOiJFUzI1NiIsInBwdCI6InNoYWtlb]
My logstash config:
input{
file{
path => ["L:/FileServer/*.log"]
start_position => "beginning"
stat_interval => "60000 s"
discover_interval => 1
tags => EXTRALOG
}
}
if "EXTRALOG" in [tags] {
grok {
match => ["message", "\[%{DATESTAMP:timestamp}\]\[%{WORD:taskid}\]\[%{WORD:req_process}\]\[%{WORD:req_type}\]\[%{GREEDYDATA:req_body}\]"]
}
grok {
match => ["message", "\[%{DATESTAMP:timestamp}\]\[%{WORD:taskid}\]\[%{WORD:res_company}\]\[%{WORD:res_process}\]\[%{WORD:res_type}\]\[%{GREEDYDATA:res_body}\]"]
}
if [req_process] == "newRequest" and [req_type] == "Request"{
aggregate {
task_id => "%{taskid}"
code => "map['req_body'] = event.get('req_body')"
map_action => "create"
add_field => {
"req_body" => "%{req_body}"
"res_body" => ""
}
}
} else if [res_process] == "Signature" and [res_type] == "Result"{
aggregate {
task_id => "%{taskid}"
code => "map['res_body'] = event.get('res_body')"
map_action => "update"
add_field => {
"res_body" => "%{res_body}"
}
}
}else{
drop { }
}
}
if "EXTRALOG" in [tags] {
elasticsearch {
ssl_certificate_verification => false
user => elastic
password => abc12345
hosts => [ "https://localhost:9200" ]
index => "service_log"
}
}
I'm trying to create three columns (taskid,request,response) and display them in kibana
![OqGQH|690x29](upload://v99QYNwnCjHlEfI4W32r1DLG6Df.png)
however I get a row with taskid and request and other with taskid and response.
![HxMLt|690x47](upload://iK0misTYRndvc2fbSLWXQBTKpiU.png)
What is the issue or I'm using a wrong plugin?
Thank you