I am creating a grok filter in Logstash in order to capture this information from a syslog message.
The syslog message looks like this:
"Logon Attempt";"4624";"Audit Success";"NetApp-Security-Auditing";"CIFS";"Security";"ITF11000/svm-M-cifs";"2/1/2019 4:38:41";"10.8.38.80";"61035";"S-1-5-21-2976626676-2712618565-3739747982-140608";"IT01896";"false";"GRTGROUP";"3";"KRB5";"";"";""
Using the Grok Debugger site I found that the following grok pattern works:
-|"(%{GREEDYDATA:eventname})");(-|"(%{NUMBER:event_id})");(-|"(%{GREEDYDATA:audit_result})");(-|"(%{GREEDYDATA:provider})");(-|"(%{GREEDYDATA:source_netapp})");(-|"(%{GREEDYDATA:security})");(-|"(%{GREEDYDATA:computer})");(-|"(%{DATESTAMP:created_date})");(-|"(%{IP:ip})");(-|"(%{GREEDYDATA:ip_port})");(-|"(%{GREEDYDATA:target_userid})");(-|"(%{USERNAME:username})");(-|"(%{GREEDYDATA:target_user_is_local})");(-|"(%{GREEDYDATA:domain})");(-|"(%{GREEDYDATA:logon_type})");(-|"(%{GREEDYDATA:authentication_package_name})");(-|"(%{GREEDYDATA:error_code})");(-|"(%{GREEDYDATA:error_code_reason})");(-|"(%{GREEDYDATA:error_code_reason_string})")
And it extracts the data I expect.
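For reference, applied to the sample line above the captures should come out roughly like this (abridged; the values are read straight off the sample message and the field names come from the pattern):

eventname => Logon Attempt
event_id => 4624
audit_result => Audit Success
provider => NetApp-Security-Auditing
source_netapp => CIFS
computer => ITF11000/svm-M-cifs
created_date => 2/1/2019 4:38:41
ip => 10.8.38.80
ip_port => 61035
target_userid => S-1-5-21-2976626676-2712618565-3739747982-140608
username => IT01896
target_user_is_local => false
domain => GRTGROUP
logon_type => 3
authentication_package_name => KRB5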
So I created a filter in order to extract the information and then send it to Elasticsearch.
My filter is the following:
filter {
  if [message] =~ /^\s*Safed\[/ {
    mutate {
      replace => [ "source", "Safed" ]
    }
  }
  if [source] == "Safed" {
    # Safed without stripped syslog-tag "Safed[...][...]"
    grok {
      patterns_dir => "/neteye/shared/logstash/conf/pattern.d"
      match => [ "message", "(-|"(%{GREEDYDATA:eventname})");(-|"(%{NUMBER:event_id})");(-|"(%{GREEDYDATA:audit_result})");(-|"(%{GREEDYDATA:provider})");(-|"(%{GREEDYDATA:source_netapp})");(-|"(%{GREEDYDATA:security})");(-|"(%{GREEDYDATA:computer})");(-|"(%{DATESTAMP:created_date})");(-|"(%{IP:ip})");(-|"(%{GREEDYDATA:ip_port})");(-|"(%{GREEDYDATA:target_userid})");(-|"(%{USERNAME:username})");(-|"(%{GREEDYDATA:target_user_is_local})");(-|"(%{GREEDYDATA:domain})");(-|"(%{GREEDYDATA:logon_type})");(-|"(%{GREEDYDATA:authentication_package_name})");(-|"(%{GREEDYDATA:error_code})");(-|"(%{GREEDYDATA:error_code_reason})");(-|"(%{GREEDYDATA:error_code_reason_string})")" ]
      overwrite => [ "message" ]
      remove_tag => "_grokparsefailure"
      break_on_match => false
    }
    mutate {
      convert => { "event_id" => "integer" }
    }
    if [event_id] in [ 4634, 4647 ] {
      mutate {
        add_field => [ "audit_type", "LOGOFF" ]
        add_tag => "AUDIT_LOGOFF"
      }
    }
    if [event_id] in [ 4624, 4647 ] {
      mutate {
        add_field => [ "audit_type", "LOGON" ]
        add_tag => "AUDIT_LOGON"
      }
    }
    if [event_id] in [ 4625, 4647 ] {
      mutate {
        add_field => [ "audit_type", "FAILURE" ]
        add_tag => "AUDIT_FAILURE"
      }
    }
  }
}
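As a side note, the configuration can be syntax-checked from the command line before restarting the service. This is only a sketch: the binary path is taken from the backtrace below, and the config file path is my assumption, so adjust both to your installation.

# Parse the pipeline configuration, report any errors, and exit without starting Logstash
/usr/share/logstash/bin/logstash --config.test_and_exit -f /neteye/shared/logstash/conf/logstash.conf

This should report the same "Expected one of #, {, ,, ]" error as the restart does.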
And when Logstash restarts I get the error Expected one of #, {, ,, ] at line XX. Here is the entire error:
[2019-04-30T13:12:16,552][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, {, ,, ] at line 59, column 34 (byte 1239) after filter {\n if [message] =~ /^\s*Safed\[/ {\n mutate {\n replace => [ "source", "Safed" ]\n }\n }\n if [source] == "Safed"{\n # Safed without stripped syslog-tag "Safed[...][...]"\n grok {\n patterns_dir => "/neteye/shared/logstash/conf/pattern.d"\n match => [ "message", "(-|"", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'", "org/jruby/RubyArray.java:2486:in `map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:51:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:169:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:315:in `block in converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:312:in `block in converge_state'", "org/jruby/RubyArray.java:1734:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:299:in `converge_state'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:166:in `block in converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:141:in `with_pipelines'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:164:in `converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:105:in `block in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/interval.rb:18:in `interval'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:94:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:348:in `block in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"]}
Searching in logstash.conf, I found that the error points to this line:
match => [ "message", "(-|"(%{GREEDYDATA:eventname})");(-|"(%{NUMBER:event_id})");(-|"(%{GREEDYDATA:audit_result})");(-|"(%{GREEDYDATA:provider})");(-|"(%{GREEDYDATA:source_netapp})");(-|"(%{GREEDYDATA:security})");(-|"(%{GREEDYDATA:computer})");(-|"(%{DATESTAMP:created_date})");(-|"(%{IP:ip})");(-|"(%{GREEDYDATA:ip_port})");(-|"(%{GREEDYDATA:target_userid})");(-|"(%{USERNAME:username})");(-|"(%{GREEDYDATA:target_user_is_local})");(-|"(%{GREEDYDATA:domain})");(-|"(%{GREEDYDATA:logon_type})");(-|"(%{GREEDYDATA:authentication_package_name})");(-|"(%{GREEDYDATA:error_code})");(-|"(%{GREEDYDATA:error_code_reason})");(-|"(%{GREEDYDATA:error_code_reason_string})")" ]
Why? In the Grok Debugger the pattern works fine.
Thank you
franco