After that sens it to Logstash.
The case name can be found in Nuix.log from the main log.
Here is a snippet from the log
2021-12-30 13:15:08.614 +0100 [AWT-EventQueue-0] 51818 INFO com.nuix.data.keystore.b - Read 0 passwords from C:\CASE\Test Log 1-1\Stores\DecryptionKeys\pgpkeylist.dat
so the case name is "Test Log 1-1"
So in logstach a can get it by using
else if "NUIX-RUNNER-START-WS" in [tags] and "NUIX-LOG" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp} %{NUMBER:nuix_timezone} [%{DATA:nuix_source}] %{NUMBER:nuix_thread} %{LOGLEVEL:nuix_loglevel}%{SPACE}%{DATA:nuix_class} - (?<case_name>.+?(?=\Stores\DecryptionKeys\pgpkeylist.dat))"}
}
mutate {
gsub => [
# replace backslashes and forward slashes with a hyphen
"case_name", "[\/]", "€ "
]
}
grok {
match => {"case_name" => " (?<nuix_case>([^€]+$))"}
So what I'm doing here is within the aggregate loop I sent here yestoday I want to tag every event with case name or add a filed with the case name to every event.
I post my whole lostasch conf here so maybe you can se what I'm doing wrong .
The Logstash do alto more but my main problem is to tag it with case name.
input {
beats {
port => 5044
}
}
filter {
if "AUTOMATION" in [tags] {
grok {
match => { "ts" => "(?^.{0,23})" }
}
date {
match => [ "ts1", "ISO8601", "YYYY-MM-dd HH:mm:ss", "YYYY-MM-dd HH:mm:ss.ZZZ", "UNIX" ]
target => "@timestamp"
locale => "en"
}
ruby {
code => '
t = event.get("@timestamp")
event.set("automation_timestamp_fixed", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
}
filter {
if "MULTILINE" in [tags] and "NUIX-LOG" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp} %{NUMBER:nuix_timezone} [%{DATA:nuix_source}] %{NUMBER:nuix_thread} %{LOGLEVEL:nuix_loglevel}%{SPACE}%{DATA:nuix_class} - (?<nuix_short_message>[\s\S]{0,50})%{DATA:nuix_second_short_message}\n(?m)%{GREEDYDATA:nuix_multilines}"}
}
} else if "AUTOMATION" in [tags] {
grok {
match => {"message" => "%{LOGLEVEL:level}%{GREEDYDATA:log_message}"}
}
} else if "INVESTIGATE" in [tags] and "MULTILINE" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:investigate_timestamp} %{DATA:message}\n(?m)%{GREEDYDATA:message_multilines}"}
}
} else if "METRICBEAT" in [tags] {
grok {
match => {"message" => "%{GREEDYDATA:log_message}"}
}
} else if "INVESTIGATE" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:investigate_timestamp}%{GREEDYDATA:log_message}"}
}
} else if "_grokparsefailure" in [tags] {
grok {
match => {"message" => "%{GREEDYDATA:log_message}"}
}
} else if "NUIX-RUNNER-START-WS" in [tags] and "NUIX-LOG" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp} %{NUMBER:nuix_timezone} [%{DATA:nuix_source}] %{NUMBER:nuix_thread} %{LOGLEVEL:nuix_loglevel}%{SPACE}%{DATA:nuix_class} - (?<case_name>.+?(?=\Stores\DecryptionKeys\pgpkeylist.dat))"}
}
mutate {
gsub => [
# replace backslashes and forward slashes with a hyphen
"case_name", "[\/]", "€ "
]
}
grok {
match => {"case_name" => " (?<nuix_case>([^€]+$))"}
}
} else if "OCR-LOG" in [tags] {
grok {
match => {"message" => "%{GREEDYDATA:log_message}"}
}
} else if "ABBY-WORKER-LOG" in [tags] {
grok {
match => {"message" => "%{GREEDYDATA:log_message}"}
}
} else {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp} %{NUMBER:nuix_timezone} [%{DATA:nuix_source}] %{NUMBER:nuix_thread} %{LOGLEVEL:nuix_loglevel}%{SPACE}%{DATA:nuix_class} - %{GREEDYDATA:automation_status}"}
add_tag => [ "SINGLE-LINE" ]
}
}
if "NUIX-RUNNER-START" in [tags] {
ruby {
code => '
event.set("[nuix_log]", {
"runner" => event.get("nuix_class").delete_prefix("SCRIPT.").delete_suffix(".gen"),
"path" => File.dirname(event.get("[log][file][path]")),
"host" => event.get("[agent][hostname]"),
})
'
}
aggregate {
task_id => "%{[nuix_log][host]}"
code => "map[event.get('[nuix_log][path]')] = event.get('[nuix_log]')"
map_action => "create"
}
} else if "NUIX-RUNNER-END" in [tags] {
ruby {
code => 'event.set("[nuix_log][host]", event.get("[agent][hostname]"))'
}
aggregate {
task_id => "%{[nuix_log][host]}"
code => '
log_dir = File.dirname(event.get("[log][file][path]"))
if map.key?(log_dir)
event.set("[nuix_log]", map[log_dir])
end
'
timeout => 2629743
inactivity_timeout => 300
map_action => "update"
end_of_task => true
}
} else {
ruby {
code => 'event.set("[nuix_log][host]", event.get("[agent][hostname]"))'
}
aggregate {
task_id => "%{[nuix_log][host]}"
code => '
log_dir = File.dirname(event.get("[log][file][path]"))
(0..6).each do |i|
if map.key?(log_dir)
event.set("[nuix_log]", map[log_dir])
break
end
log_dir = File.dirname(log_dir)
end
'
map_action => "update"
}
grok { match => { "[nuix_log][path]" => "/\w+/%{WORD:[@metadata][appname]}-" } add_tag => [ "%{[@metadata][appname]}" ] }
}
}
##########################################################
filter {
if "MULTILINE" in [tags] and "NUIX-LOG" in [tags] and "NUIX-MAIN-LOG-ERROR" in [tags] {
mutate {
add_field => { "nuix_error_message" => "%{nuix_short_message}%{nuix_second_short_message}" }
}
}
}
filter {
if "MULTILINE" in [tags] and "NUIX-LOG" in [tags] and "NUIX-WORKER-LOG-ERROR" in [tags] {
mutate {
add_field => { "nuix_error_message" => "%{nuix_short_message}%{nuix_second_short_message}" }
}
}
}
filter {
if "SINGLE-LINE" in [tags] and "NUIX-LOG" in [tags] and "NUIX-WORKER-LOG-ERROR" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp_blaj} %{NUMBER:nuix_timezone_blaj} [%{DATA:nuix_source_blaj}] %{NUMBER:nuix_thread-blaj} %{GREEDYDATA:nuix_error_message}"}
}
mutate {
remove_field => [ "nuix_timestamp_blaj", "nuix_timezone_blaj", "nuix_source_blaj", "uix_thread-blaj" ]
}
}
}
filter {
if "SINGLE-LINE" in [tags] and "NUIX-LOG" in [tags] and "NUIX-MAIN-LOG-ERROR" in [tags] {
grok {
match => {"message" => "%{TIMESTAMP_ISO8601:nuix_timestamp-blaj} %{NUMBER:nuix_timezone_blaj} [%{DATA:nuix_source_blaj}] %{NUMBER:nuix_thread_blaj} %{GREEDYDATA:nuix_error_message}"}
}
mutate {
remove_field => [ "nuix_timestamp_blaj", "nuix_timezone_blaj", "nuix_source_blaj", "uix_thread-blaj" ]
}
}
}
###########################################################
filter {
if "NUIX-SINGLE-WORKER" in [tags] {
mutate { convert => ["process_pid", "string"] }
ruby {
code => '
event.set("nuix_running_worker", event.get("[process][name]") + "-" + event.get("process_pid"))
'
}
}
}
#filter {
if "DISK-SPACE-UTIL" in [tags] {
mutate { convert => ["[system][filesystem][used][bytes]", "string"] }
mutate { convert => ["[system][filesystem][total]", "string"] }
mutate {
add_field => {
"nuix_disc_event" => " Drive %{[system][filesystem][device_name]} %{[system][filesystem][used][bytes]} of %{[system][filesystem][total]}"
}
}
}
#}
#filter {
if "DISK-SPACE-UTIL" in [tags] {
mutate { convert => ["system_filesystem_free", "string"] }
mutate { convert => ["system_filesystem_total", "string"] }
mutate {
add_field => {
"nuix_disc_event" => " Drive %{[system][filesystem][device_name]} %{system_filesystem_free} of %{system_filesystem_total}"
}
}
#}
############ From here Metricbeat
#event.set("nuix_running_worker", "Host=" + event.get("[agent][hostname]") + event.get("[process][name]") + event.get("process_pid"))
filter {
if "NUIX-LOG" in [tags] {
date {
match => [ "nuix_timestamp", "ISO8601", "YYYY-MM-dd HH:mm:ss", "YYYY-MM-dd HH:mm:ss.ZZZ" ]
target => "@timestamp"
locale => "en"
}
}
}
filter {
if "INVESTIGATE" in [tags] {
date {
match => [ "investigate_timestamp", "ISO8601", "YYYY-MM-dd HH:mm:ss", "YYYY-MM-dd HH:mm:ss.ZZZ" ]
target => "@timestamp"
locale => "en"
}
}
}
filter {
if "RU" in [tags] {
elapsed {
start_tag => "STARTING-RUNNER"
end_tag => "ENDING-RUNNER"
periodic_flush => false
unique_id_field => "runner"
timeout => 2629743
new_event_on_match => false
}
} else if "ST" in [tags] {
elapsed {
start_tag => "STARTING-STAGE"
end_tag => "ENDING-STAGE"
periodic_flush => false
unique_id_field => "[automation][stage_id]"
timeout => 2629743
new_event_on_match => false
}
}
}
filter {
if [elapsed_time] {
ruby {
code => '
event.set("[elapsed_hr]", Time.at(event.get("[elapsed_time]")).utc.strftime("%T.%L"))
'
}
}
}
filter {
if "STARTING-RUNNER" in [tags] {
ruby {
code => '
t = event.get("@timestamp")
event.set("elapsed_timestamp_event_start_runner", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
mutate {
add_field => { "elapsed_timestamp_event_end_runner" => "--------------->" }
}
mutate {
add_field => { "elapsed_hr" => "Runner Started" }
}
}
}
filter {
if "STARTING-STAGE" in [tags] {
ruby {
code => '
t = event.get("@timestamp")
event.set("elapsed_timestamp_event_start_stage", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
mutate {
add_field => { "elapsed_timestamp_event_end_stage" => "--------------->" }
}
mutate {
add_field => { "elapsed_hr" => "Stage Started" }
}
}
}
filter {
if "ENDING-STAGE" in [tags] {
ruby {
code => '
t = event.get("elapsed_timestamp_start")
event.set("elapsed_timestamp_event_start_stage", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
}
filter {
if "ENDING-RUNNER" in [tags] {
ruby {
code => '
t = event.get("elapsed_timestamp_start")
event.set("elapsed_timestamp_event_start_runner", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
}
filter {
if "AUTOMATION-FINISHED-STAGE" in [tags] {
ruby {
code => '
t = event.get("@timestamp")
event.set("elapsed_timestamp_event_end_stage", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
}
filter {
if "AUTOMATION-FINISHED-RUNNER" in [tags] {
ruby {
code => '
t = event.get("@timestamp")
event.set("elapsed_timestamp_event_end_runner", Time.at(t.to_f).strftime("%Y-%m-%d %H:%M:%S"))
'
}
}
}
filter {
mutate {
remove_tag => [ "beats_input_codec_plain_applied", "beats_input_raw_event" ]
}
}
filter {
if "_grokparsefailure" in [tags] {
drop{}
}
}
#filter {
if "NUIX-LOG" in [tags] {
mutate {
copy => { "[nuix_log][runner]" => "nuix_case" }
}
}
#}
output {
Elasticsearch {
hosts => ["http://automation.avian.dk:9200", "http://automation.avian.dk:9201"]
index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
}
}