Hello all:
I'm working with a logstash docker image (https://registry.hub.docker.com/u/helder/logstash/) linked to elasticsearch and kibana, all via docker.
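For reference, the containers are started roughly like this (reconstructing from memory, so image names and flags are approximate):
docker run -d --name elasticsearch elasticsearch
docker run -d --name kibana --link elasticsearch:elasticsearch kibana
docker run -d --name logstash --link elasticsearch:elasticsearch -p 5514:5514 helder/logstash
The --link is also why the output section below can refer to the host simply as "elasticsearch".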
My problem is that logstash randomly stops processing. I only have two Windows hosts sending messages via nxlog, covering a narrow subset of application text logs.
My questions are as follows; any help would be greatly appreciated:
- Where does logstash log its own events? I cannot find any logfiles related to logstash in the container under /var/log, and I do not see any *.log files in /opt/logstash.
- Could a single message cause logstash to stop processing altogether? (I've listed what I was planning to try for both questions just below.)
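In case it helps, here is roughly what I was planning to try (the container name, config path, and sample message are placeholders):
# 1) Check logstash's own output - I assume it goes to the container's stdout:
docker logs -f logstash
# 2) Restart logstash with an explicit log file and more verbose output:
bin/logstash agent -f /etc/logstash/logstash.conf --log /var/log/logstash/logstash.log --verbose
# 3) Replay a single suspect message into the tcp input to see if it wedges processing:
echo '{"type":"iseapps","message":"<suspect line here>"}' | nc localhost 5514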
My logstash.conf file is as follows:
input {
tcp {
type => "iseapps"
codec => json
port => 5514
}
}
filter {
if [type] == "iseapps" {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}SocketEngine WriteSock%{SPACE}%{GREEDYDATA:infomsg}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}GTS_ORA start processing%{SPACE}%{GREEDYDATA:infomsg}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTYDATA:infomsg}"]}\)%{SPACE}\(\)%{SPACE}GTS_ORA -----> POB%{SPACE}%{GREED--More--(12%)
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}GTS_ORA processing%{SPACE}%{GREEDYDATA:infomsg}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}CustomRule Tag%{GREEDYDATA:infomsg}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}GTS_ORA%{GREEDYDATA:infomsg}, gtsSeqNum:%{SPACE}%{NOTSPACE:gtsSeqNum} fixSeqNum: %{NOTSPACE:fixSeqNum} msgType: %{NOTSPACE:msgType} requestId: %{NOTSPACE:requestId} dtiSendingTime: %{NOTSPACE:dtiSendingTime} %{SPACE} GWSession \[%{NOTSPACE:GWSession}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}GTS_ORA%{GREEDYDATA:infomsg}, clOrdId: %{NOTSPACE:clOrdId} securityId: %{NOTSPACE:securityId} lastQty: %{NOTSPACE:lastQty} lastPx: %{NOTSPACE:lastPx} trdRptId: %{NOTSPACE:trdRptId} trdLinkId: %{NOTSPACE:trdLinkId} trdTransType: %{NOTSPACE:trdTransType} msgEventSrc: %{NOTSPACE:msgEventSrc}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPA--More--CE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}GTS_ORA%{GREEDYDATA:infomsg}, seqNum: %{NOTSPACE:seqNum} securityId: %{NOTSPACE:securityId}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}%{GREEDYDATA:infomsg}%{SPACE}gtsSeqNum: %{NOTSPACE:gtsSeqNum} execId: %{NOTSPACE:execId} requestMsgId: %{NOTSPACE:requestMsgId} clOrdId: %{NOTSPACE:clOrdId}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}%{GREEDYDATA:infomsg}%{SPACE}ClOrdId: %{NOTSPACE:clOrdId}%{SPACE}OrigClOrdId: %{NOTSPACE:OrigClOrdId}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\)%{SPACE}%{GREEDYDATA:infomsg}%{SPACE}ClOrdId: %{NOTSPACE:clOrdId}%{SPACE}"]
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}\(%{NOTSPACE:session_id}\)%{SPACE}\(\) %{GREEDYDATA} -- SecurityId %{NOTSPACE:SecurityId}, %{GREEDYDATA} SecurityId %{NOTSPACE:SecurityId}"]
add_tag => ["info"]
remove_tag => ["_grokparsefailure"]
}
# ReferenceData Exception
if "_grokparsefailure" in [tags] {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}%{NOTSPACE:id_3}%{SPACE}%{NOTSPACE:filler}%{SPACE}%{NOTSPACE:ISEApps_type}%{SPACE}Exception while initializing ReferenceData.%{SPACE}%{NOTSPACE:Library}%{SPACE}Message request timed out:%{SPACE}sessId=%{NOTSPACE:session_id}"]
add_tag => ["ReferenceData_Exception"]
remove_tag => ["_grokparsefailure"]
}
}
# ReferenceData Service Failed
if "_grokparsefailure" in [tags] {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}%{
NOTSPACE:id_3}%{SPACE}%{NOTSPACE:filler}%{SPACE}%{NOTSPACE:ISEApps_type}%{SPACE}ReferenceData Service failed or unavailable."]
add_tag => ["ReferenceData_Service_Failed"]
remove_tag => ["_grokparsefailure"]
}
}
# Process Socket Closed
if "_grokparsefailure" in [tags] {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}(%{NOTSPACE:session_id})%{SPACE}%{NOTSPACE:filler}%{SPACE}SocketEngine ProcessSocketClosed - WickSock exception caught%{SPACE}%{NOTSPACE:Exception}%{SPACE}%{NOTSPACE:excep_id}%{SPACE}An existing connection was forcibly closed by the remote host"]
add_tag => ["Process_Socket_Closed"]
remove_tag => ["_grokparsefailure"]
}
}
# Socket Write Error
if "_grokparsefailure" in [tags] {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%--More--{SPACE}(%{NOTSPACE:session_id})%{SPACE}%{NOTSPACE:filler}%{SPACE}SocketEngine WriteSock - error%{SPACE}%{NOTSPACE:Exception}%{SPACE}%{NOTSPACE:excep_id}%{SPACE}A request to send or receive data was disallowed because the socket is not connected"]
add_tag => ["Socket_Write_Error"]
remove_tag => ["_grokparsefailure"]
}
}
# Socket Logout Exception
if "_grokparsefailure" in [tags] {
grok {
break_on_match => false
match => ["message", "%{NOTSPACE:errorlevel}%{SPACE}%{NOTSPACE:date}%{SPACE}%{HAPROXYTIME:log_time}%{SPACE}%{NOTSPACE:id_1}%{SPACE}%{NOTSPACE:id_2}%{SPACE}(%{NOTSPACE:session_id})%{SPACE}%{NOTSPACE:filler}%{SPACE}SocketEngine Send - Logout exception%{SPACE}%{NOTSPACE:Exception}%{SPACE}%{NOTSPACE:Exception}%{SPACE}%{NOTSPACE:excep_id}%{SPACE}A request to send or receive data was disallowed because the socket is not connected"]
add_tag => ["Socket_Logout_Exception"]
remove_tag => ["_grokparsefailure"]
}
}
}
}
output {
elasticsearch { host => elasticsearch }
}
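To test my second question directly, whether one bad message can wedge the pipeline, I was thinking of replaying captured messages through the same filters with a minimal config like this (file names are placeholders):
input { stdin { codec => json } }
# (paste the same filter { ... } block from above here)
output { stdout { codec => rubydebug } }
and then running something like: cat captured-messages.json | bin/logstash agent -f test.conf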
I'm relatively new at this, so I'm trying to understand how best to troubleshoot what could be causing processing to stop with no warnings or errors being generated.
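One other idea, if it makes sense: temporarily add a stdout output alongside elasticsearch in the running config, so I can watch events go by in docker logs and rule out the elasticsearch output as the place things stall:
output {
elasticsearch { host => elasticsearch }
# temporary, for troubleshooting only
stdout { codec => rubydebug }
}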
Many thanks again for any help!