Hi,
For some time now I have had this stack: Filebeat ships logs from several application servers to Elasticsearch via Logstash, with the Logstash filtering split over 4 pipelines. Recently our supplier changed a field in the logs, and ever since then the stack seems cursed. At first I got _grokparsefailure tags. After making slight adjustments to the grok patterns, no logs appeared in Elasticsearch at all. Someone then suggested using dissect instead of grok. So far, so good. Or not: I still get no data in Elasticsearch, and I simply do not understand why.
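(For reference, the dissect mapping can be tested on its own with a minimal throwaway pipeline like the sketch below; this is just a debugging aid, assuming stdin input and a rubydebug stdout output, with the mapping copied verbatim from the real config further down. Running it with bin/logstash -f and pasting one raw log line should show either the parsed fields or a _dissectfailure tag.)

input {
  stdin { }
}
filter {
  dissect {
    mapping => {
      "message" => "ts: %{ts} %{+ts} | logLevel: %{log-level} | appId: %{app-id} | %{} | SID: %{session-id} | TN: %{transaction-id} | clientIp: %{client-ip} | userId: %{user-id} | apiType: %{} | api: %{api} | platform: %{platform} | %{additional-data}"
    }
  }
}
output {
  # Print the full parsed event so any mismatch is visible immediately.
  stdout { codec => rubydebug }
}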
My Logstash config:
input {
  beats {
    port => 5047
  }
}

###
# ConcurrentStreams API Log importer
# Log events can vary, so lots of logic is required to make sense of it all.
# The following custom failure tags are used:
# _noeventtype: The log event contains no event-type.
# _unknowneventtype: When the event-type of the log event is not known (but exists).
###
filter {
  ### Handle Concurrent Streams API log.
  if "api-log" in [tags] and "concurrentstreams" in [tags] {
    ### Trim the message
    mutate {
      strip => ["message"]
    }
    dissect {
      mapping => {
        "message" => "ts: %{ts} %{+ts} | logLevel: %{log-level} | appId: %{app-id} | %{} | SID: %{session-id} | TN: %{transaction-id} | clientIp: %{client-ip} | userId: %{user-id} | apiType: %{} | api: %{api} | platform: %{platform} | %{additional-data}"
      }
    }
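    # Note: if a log line no longer matches the mapping above, dissect does not
    # produce a _grokparsefailure; it adds a "_dissectfailure" tag instead, so
    # that is the tag to check for when verifying the new log format.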
    mutate {
      strip => ["app-id", "api", "session-id", "transaction-id", "additional-data"]
    }
    # Try to get the eventType field. Not all log events have it...
    grok {
      keep_empty_captures => true
      named_captures_only => true
      tag_on_failure => ["_noeventtype"]
      match => { "additional-data" => "eventType:\s*(?<event-type>[A-Z_]+)" }
    }
    if [log-level] == "DEBUG" {
      # Drop all debug messages.
      drop { }
    }
    else {
      if ![event-type] {
        # This is a log event without an event type.
# Example: ts: 2019-03-11 21:23:20,695 | logLevel: INFO | appId: concurrentstream-ms | thread: (HTTP-1743) | SID: cf101ef0-7234-df6b-b923-658780cf3bfa | TN: ffc757c3-759e-043f-ff11-0e46b77b541d | clientIp: 86.84.33.51 | username: 3207382 | apiType: B2B | api: increaseConcurrentStreams | platform: smarttv | {"eventId":"9e5e5af2-0657-4940-9a04-79247af5f4ea","eventSource":"Increase Concurrent Streams","eventType":"StreamConcurrencyLimitReached","timestamp":"1552335800695","headers":{},"payload":"{\"action\":\"StreamConcurrencyLimitReached\",\"platform\":\"smarttv\",\"username\":\"20000001827463\",\"crmAccountId\":\"20000001827463\",\"sessionId\":\"GQd+lIar3VvXG6OQGnOx8Bf1\",\"contentId\":\"30\",\"contentType\":\"Channel\",\"timestamp\":1552335800694,\"ruleId\":\"a4403d7c-a43b-4c5b-b289-caf63f50c6ad\",\"ruleType\":\"Subscriber\",\"streamLimit\":\"30\"}","payloadType":"JSON","payloadEncoding":"UTF-8"}
        # Everything after the platform is a status-message.
        mutate {
          copy => { "additional-data" => "status-message" }
        }
      }
      else if [event-type] == "APISTART" {
# ts: 2019-01-28 00:05:29,297 | logLevel: INFO | appId: concurrentstream-ms | thread: (HTTP-1847) | SID: | TN: 223c7343-9ed5-4615-9051-2f4fe96e10dc | clientIp: 10.31.205.92 | username: | apiType: B2B | api: getConcurrentStreamsPoliciesByRuleType | platform: | className: com.accenture.avs.be.concurrentstream.web.LoggingInterceptor | methodName: preHandle | eventType: APISTART
        # Nothing left to do!
      }
      else if [event-type] == "REQBODY" {
# ts: 2019-01-28 09:32:05,817 | logLevel: INFO | appId: concurrentstream-ms | thread: (HTTP-1069) | SID: f0d65087-dc3b-1ad9-a26d-1904b95581b1 | TN: 38c8611b-9603-e6fc-85a5-c6a02d860b27 | clientIp: 62.140.132.83 | username: 4 | apiType: B2B | api: increaseConcurrentStreams | platform: android | className: com.accenture.avs.be.concurrentstream.web.LoggingInterceptor | methodName: logRequest | eventType: REQBODY | body: {"crmAccountId":"00012314150010","ruleIds":["23606456-13f8-4f23-b854-74db05dccc51","e6452e95-28ec-4443-b280-1d752a5ad605"],"tags":[{"type":"Platform","value":"android"},{"type":"channel","value":"19"},{"type":"channelOutOfHome","value":"19"},{"type":"outOfHome"}]}
        # For now we are dropping all REQBODY messages. They take a lot of space and don't contain data we use frequently.
        drop { }
        # grok {
        #   keep_empty_captures => true
        #   named_captures_only => true
        #   match => { "additional-data" => "body:\s*(%{GREEDYDATA:outgoing-request-body})?" }
        # }
      }
      else if [event-type] == "APIEND" {
## ts: 2019-01-28 00:05:29,312 | logLevel: INFO | appId: concurrentstream-ms | thread: (HTTP-1847) | SID: | TN: 223c7343-9ed5-4615-9051-2f4fe96e10dc | clientIp: 10.31.205.92 | username: | apiType: B2B | api: getConcurrentStreamsPoliciesByRuleType | platform: | className: com.accenture.avs.be.concurrentstream.web.LoggingInterceptor | methodName: beforeBodyWrite | eventType: APIEND | userAgent: Apache-HttpClient/4.5.3 (Java/1.8.0_162) | request: | response: result=OK, resultCode=ACN_200, resultDescription=OK | executionTime(ms): 15
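        # Pull userAgent, request, response and executionTime(ms) out of the
        # pipe-delimited additional-data.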
        grok {
          pattern_definitions => {
            "DELIM" => "\s*\|\s*"
            "STRING" => "[^|]+"
          }
          keep_empty_captures => true
          named_captures_only => true
          match => { "additional-data" => "userAgent:\s*(%{STRING:user-agent})%{DELIM}request:\s*(%{STRING:incoming-request-data})%{DELIM}response:\s*(%{STRING:outgoing-response-data})%{DELIM}executionTime\(ms\):\s*%{NUMBER:execution-time}$" }
        }
        mutate {
          strip => ["outgoing-response-data", "user-agent", "incoming-request-data"]
        }
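        # Split the response string (e.g. "result=OK, resultCode=ACN_200, resultDescription=OK")
        # into key/value pairs under [outgoing-response-data].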
        kv {
          field_split => ","
          trim_key => " "
          trim_value => " "
          source => "outgoing-response-data"
          target => "outgoing-response-data"
        }
        if [outgoing-response-data] {
          if [outgoing-response-data][result] {
            mutate {
              rename => { "[outgoing-response-data][result]" => "outgoing-response-result" }
            }
          }
          if [outgoing-response-data][resultCode] {
            mutate {
              rename => { "[outgoing-response-data][resultCode]" => "outgoing-response-code" }
            }
          }
          if [outgoing-response-data][resultDescription] {
            mutate {
              rename => { "[outgoing-response-data][resultDescription]" => "outgoing-response-description" }
            }
          }
          mutate {
            remove_field => [ "outgoing-response-data" ]
          }
        }
      }
      else if [event-type] == "SYSERROR" {
        grok {
          pattern_definitions => {
            "DELIM" => "\s*\|\s*"
            "STRING" => "[^|]+"
          }
          keep_empty_captures => true
          named_captures_only => true
          match => { "additional-data" => "errorType:\s*%{STRING:error-type}%{DELIM}Exception:\s*%{STRING:error-exception}%{DELIM}message:\s*(%{STRING:error-message})%{DELIM}stacktrace:\s*(?<error-stacktrace>.*)$" }
        }
        mutate {
          strip => ["error-type", "error-message", "error-exception", "error-stacktrace"]
        }
      }
      else {
        mutate {
          add_tag => [ "_unknowneventtype" ]
        }
      }
Continued in the next post due to the body size limit.