Erros Logstash fields Kibana

Hello,

I'm try start with ELK, but very dificult for me.

I try read log access.log for weblogic , output exemple is :

172.16.27.211 - - [30/Ago/2018:10:05:41 -0300] "GET /teste/rest/currentUser?noCache=1535634341625 HTTP/1.1" 200 211

in my filter. i try any option ,
COMMONAPACHELOG, COMBINEDAPACHELOG,HTTPD_COMBINEDLOG and specific message

match => { "message" => "%{IP:client} %{USERNAME} %{USERNAME} [%{HTTPDATE:timestamp}] %{WORD:request} %{URIPATHPARAM:path} HTTP/%{NUMBER:version} %{NUMBER:response} %{GREEDYDATA:responseMessage}" }
}

both always failure in my output

{
"tags" => [
[0] "_grokparsefailure"
],
"@timestamp" => 2018-08-31T18:56:52.725Z,
"message" => "172.16.27.211 - - [31/Ago/2018:15:56:51 -0300] "POST /lmsa/swtbrokerziped HTTP/1.1" 200 312 ",
"path" => "/var/log/weblogic/producao/lms01/access.log",
"host" => "brpoaelk01.mercurio.local",
"@version" => "1",
"type" => "weblogic-access-lms01"
}

I do not know what to do anymore, none of the alternatives work. with this I can not create the index with the fields that I need

Hello,

try below changes

filter {

grok {
match => { message => "%{IP:client} %{DATA:USERNAME} %{DATA:USERNAME} \[%{DATA:logtime}\] "%{WORD:request} %{URIPATHPARAM:path} HTTP/%{NUMBER:version}" %{NUMBER:response} %{GREEDYDATA:responseMessage}" }
}
mutate{
gsub => ["logtime"," -0300",""]
}

date{
match => ["logtime","dd/MMM/yyy:HH:mm:ss"]
}

}

in this case logtime is time field

Hello Harshad

I try test y script . but I receive this error:

[2018-09-03T16:03:28,842][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-09-03T16:03:29,469][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.3.2"}
[2018-09-03T16:03:30,221][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, {, } at line 14, column 91 (byte 262) after filter {\n\ngrok {\nmatch => { message => "%{IP:client} %{DATA:USERNAME} %{DATA:USERNAME} \[%{DATA:logtime}\]"", :backtrace=>["/app/elk/logstash-6.3.2/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'", "org/jruby/RubyArray.java:2486:in `map'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/pipeline.rb:49:in `initialize'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/pipeline.rb:167:in `initialize'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'", "/app/elk/logstash-6.3.2/logstash-core/lib/logstash/agent.rb:305:in `block in converge_state'"]}

I found error , and the same error occur.

{
"tags" => [
[0] "_grokparsefailure"
],
"message" => "172.16.2.211 - - [03/Set/2018:16:31:53 -0300] "POST /lmsa/swtbrokerziped HTTP/1.1" 200 475 ",
"@version" => "1",
"host" => "brpoaelk01.mercurio.local",
"type" => "weblogic-access-lms01",
"path" => "/var/log/weblogic/producao/lms01/access.log",
"@timestamp" => 2018-09-03T19:48:20.647Z
}

match => { message => "%{IP:client} %{DATA:USERNAME} %{DATA:USERNAME} [%{DATA:logtime}] %{WORD:request} %{URIPATHPARAM:path} HTTP/%{NUMBER:version} %{NUMBER:response} %{GREEDYDATA:responseMessage}" }
}

Harshad_Velapure,

I Change code y send me :
match => { message => "%{IP:client} %{DATA:USERNAME} %{DATA:USERNAME} [%{DATA:logtime}] "%{WORD:request} %{URIPATHPARAM:path} HTTP/%{NUMBER:versioni}" %{NUMBER:response} %{GREEDYDATA:responseMessage}" }

And before error change too.

{
"host" => "brpoaelk01.mercurio.local",
"path" => [
[0] "/var/log/weblogic/producao/lms01/access.log",
[1] "/lmsa/jsonbroker"
],
"versioni" => "1.1",
"USERNAME" => [
[0] "-",
[1] "-"
],
"client" => "172.16.2.211",
"@timestamp" => 2018-09-03T20:14:33.209Z,
"@version" => "1",
"request" => "POST",
"response" => "200",
"tags" => [
[0] "_dateparsefailure"
],
"message" => "172.16.2.211 - - [03/Set/2018:16:57:30 -0300] "POST /lmsa/jsonbroker HTTP/1.1" 200 958 ",
"type" => "weblogic-access-lms01",
"responseMessage" => "958 ",
"logtime" => "03/Set/2018:16:57:30"
}

Now , y help me again ?

what's the error now ?
can you share full updated configuration along with sample log?

Hi,

input {
file {
path => "/var/log/weblogic/producao/lms01/access.log"
type => "weblogic-access-lms01"
start_position => "beginning"
}

}

filter {

grok {
match => { message => "%{IP:client} %{DATA:USERNAME} %{DATA:USERNAME} [%{DATA:logtime}] "%{WORD:request} %{NOTSPACE:path} HTTP/%{NUMBER:versioni}" %{NUMBER:response} %{GREEDYDATA:responseMessage}" }

}
mutate{
remove_field => [ "message" ]
}

date{
match => ["logtime","dd/MMM/yyy:HH:mm:ss Z"]
}

}
output {
elasticsearch { hosts => "lx-swelk01:9800"
index => "access_weblogic_index"

}

stdout { codec => rubydebug }

}

Error:

{
"logtime" => "14/Set/2018:12:08:35 -0300",
"client" => "172.16.2.211",
"tags" => [
[0] "_dateparsefailure"
],
"USERNAME" => [
[0] "-",
[1] "-"
],
"versioni" => "1.1",
"responseMessage" => "83 ",
"host" => "brpoaelk01.mercurio.local",
"request" => "POST",
"@version" => "1",
"type" => "weblogic-access-lms01",
"@timestamp" => 2018-09-14T15:08:41.563Z,
"response" => "200",
"path" => [
[0] "/var/log/weblogic/producao/lms01/access.log",
[1] "/lmsa/rest/coleta/programacaoColetasVeiculos/findColetasRealizadas"
]
}

Line log :
172.16.27.211 - - [14/Set/2018:12:08:35 -0300] "GET /lmsa/rest/coleta/programacaoColetasVeiculos/findColetasRealizadas HTTP/1.1" 200 211

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.