Parsing failure: JMeter to Logstash

I'm trying to use Beats to send JMeter data to Logstash. I can get the data in OK, but not in the format I want: Logstash puts it all into one field.

My Logstash config file looks like this:

# Read input from filebeat by listening to port 5044, on which filebeat will send the data

input {
  beats {
    type => "Jmeter_test"
    port => "5044"
  }
}

filter {
  grok {
    patterns_dir => ["D:\logstash-7.4.0\Patterns"]
    match => { "message" => "^%{MYDATEPATTERN:RunTime}%{NUMBER:Elapsed}%{WORD:Label}%{NUMBER:responsecode}%{WORD:responseMessage}%{WORD:Success}%{NUMBER:Bytes}%{NUMBER:grpThreads}%{NUMBER:allThreads}%{PATH:URL}%{NUMBER:Latency}%{NUMBER:SampleCount}%{NUMBER:ErrorCount}%{WORD:Hostname}%{NUMBER:IdleTime}%{NUMBER:Connect}" }
  }
}

output {
  stdout {
    codec => rubydebug
  }

  # Sending properly parsed log events to elasticsearch

  elasticsearch {
    hosts => ["localhost:9200"]
  }
}

My input data file looks like this:

2019/10/24 08:30:37,244,PHI Private ,200,OK,xyzcom_bp08 General Browsing 2 20-28,true,38166,1,2,https://xyz.com.au/,239,1,0,ServerName,0,32
2019/10/24 08:30:37,68,GetQuote_ Pge Load,200,OK,xyzcom_bp08 General Browsing 2 20-28,true,4707,1,2,https://xyz.com.au/,68,1,0,ServerName,0,21
2019/10/24 08:30:45,197,HHP xyz.COM Home Page,200,OK,xyzcom_bp06 xyz.COM HP 18-25,true,52971,1,3,https://xyz.com.au/,191,1,0,ServerName,0,18

I need to be able to reference all the fields individually to build graphs and dashboards.

When Logstash is running I get this error (note the _grokparsefailure tag):
{
    "input" => {
        "type" => "log"
    },
    "ecs" => {
        "version" => "1.1.0"
    },
    "@version" => "1",
    "host" => {
        "name" => "ServerName"
    },
    "@timestamp" => 2019-10-29T04:20:44.427Z,
    "message" => "2019/10/29 08:41:57,103,MEMB Members,200,OK,xyzcom_bp10 General Browsing 4 22-7,true,50300,4,94,https://xyz.com.au/members,92,1,0,ServerName,0,15",
    "tags" => [
        [0] "beats_input_codec_plain_applied",
        [1] "_grokparsefailure"
    ],
    "type" => "Jmeter_test",
    "agent" => {
        "hostname" => "ServerName",
        "id" => "eb1a89a1-51dd-4342-a791-392e12f4fa4a",
        "version" => "7.4.1",
        "type" => "filebeat",
        "ephemeral_id" => "8fc7d8d1-e3e9-48d4-9ce5-f549fccce490"
    },
    "log" => {
        "offset" => 884395,
        "file" => {
            "path" => "D:\JmeterKibana7\Web_RLSE_20191029_0830_Baseline_240U.csv"
        }
    }
}

OK, so your grok pattern does not match your data. Why not use a csv filter instead of grok?
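A minimal csv filter along those lines might look like the sketch below; the column names are only an assumption, taken from the standard JMeter result file header, so adjust them to match your file:

filter {
  csv {
    separator => ","
    columns => ["timeStamp", "elapsed", "label", "responseCode", "responseMessage", "threadName", "success", "bytes", "grpThreads", "allThreads", "URL", "Latency", "SampleCount", "ErrorCount", "Hostname", "IdleTime", "Connect"]
  }
}

The csv filter splits the message field on commas and writes each value into the named fields, so there is no pattern to maintain.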

Badger,

I tried that before but it was not separating the message. I tried again with CSV, tweaked some settings, and it worked.

Here is the working JMeter CSV config:

# Read input from filebeat by listening to port 5044, on which filebeat will send the data

input {
  beats {
    port => 5044
    type => "log"
  }
}

filter {
  csv {
    columns => [
      "timeStamp",
      "elapsed",
      "label",
      "responseCode",
      "responseMessage",
      "threadName",
      "success",
      "bytes",
      "grpThreads",
      "allThreads",
      "pathurl",
      "Latency",
      "SampleCount",
      "ErrorCount",
      "Hostname",
      "IdleTime",
      "Connect"
    ]
  }

  #ruby {
  #  code => "event['testName'] = event['path'].split('/').last.split('.').first"
  #}

  #ruby {
  #  code => "event['groupId'] = event['threadName'].split(' ').last.split('-').first"
  #}

  #ruby {
  #  code => "event['vuserId'] = event['threadName'].split(' ').last.split('-').last"
  #}

  #ruby {
  #  code => "event['Hits'] = event['responseMessage'].split(',').first.split(' ').last"
  #}

  #ruby {
  #  code => "event['time'] = Time.at(event['timeStamp'].to_f/1000).to_datetime.iso8601(3)"
  #}
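
  # Note (an addition, not part of the original config): if any of the ruby
  # filters above are re-enabled, Logstash 5+ no longer accepts the
  # event['field'] syntax; the ruby filter has to use event.get / event.set.
  # A sketch of the groupId one, using the threadName column defined above:
  #ruby {
  #  code => "event.set('groupId', event.get('threadName').split(' ').last.split('-').first)"
  #}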

  mutate { convert => { "elapsed" => "integer" } }
  mutate { convert => { "bytes" => "integer" } }
  mutate { convert => { "grpThreads" => "integer" } }
  mutate { convert => { "allThreads" => "integer" } }
  mutate { convert => { "Latency" => "integer" } }
  mutate { convert => { "SampleCount" => "integer" } }
  mutate { convert => { "ErrorCount" => "integer" } }
  mutate { convert => { "groupId" => "integer" } }
  mutate { convert => { "vuserId" => "integer" } }
  mutate { convert => { "Hits" => "integer" } }
}

output {

  # Sending properly parsed log events to elasticsearch

  elasticsearch {
    hosts => ["localhost:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{+yyyy.ww}"
    document_type => "%{[@metadata][type]}"
  }
}
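
If the graphs should be plotted against the JMeter sample time rather than the time filebeat shipped the line, a date filter could be added to the filter block above. This is only a sketch, assuming the timeStamp column keeps the yyyy/MM/dd HH:mm:ss format shown in the sample data:

filter {
  date {
    match => ["timeStamp", "yyyy/MM/dd HH:mm:ss"]
    target => "@timestamp"
  }
}

Without something like this, @timestamp reflects when filebeat read the line (as in the rubydebug output above), not when the sample actually ran.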

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.