Log event
"2024-11-21 17:12:48|INFO|endpoint|ar-endpoint|AWS||null|null||Tenant GUID [ujwujqllkll-232-11919191], event GUID [*******************]: Consumed record: thread[0], offset[615049], key[null]"
Logstash config (with the grok pattern)
input {
  file {
    path => "/western2/log/**/*.log"
    start_position => "beginning"
    sincedb_path => "/western2/log/.sincedb"
    type => "default"
    codec => multiline {
      # Grok pattern names are valid! :)
      pattern => "^%{TIMESTAMP_ISO8601}\|"
      negate => true
      what => "previous"
      max_lines => 1000
    }
  }
}
filter {
  if [type] =~ "default" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601}\|%{LOGLEVEL:level}\|%{DATA:service}\|%{DATA:env}\|%{DATA:dc}\|%{HOSTNAME:host}\|%{DATA:class}\|%{DATA:tenant}\|%{IP:ip}\|%{GREEDYDATA:msg}" }
    }
    mutate {
      lowercase => ["service"]
    }
  }
}
output {
  if [type] =~ "default" {
    elasticsearch {
      hosts => ["{{.Values.endpoint}}:{{.Values.port}}"]
      ssl => true
      manage_template => false
      index => "%{service}-%{+YYYY.MM.dd}"
    }
  }
}
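To check whether the grok filter is matching these events at all, I am planning to temporarily add a debug output next to the elasticsearch one (sketch only, not part of the setup above; _grokparsefailure is the default tag the grok filter adds when a match fails):

output {
  # temporary debug: print every event where the grok match failed
  if "_grokparsefailure" in [tags] {
    stdout { codec => rubydebug }
  }
}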
The service field is not getting parsed. My index name is based on service, which is picked from the log event, but in Elasticsearch the index name shows up literally as %{service}-2024.11.21. This setup was working before.
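My guess, which I have not confirmed, is that the two empty columns in the sample line are the problem: the host position (between AWS and the first null) and the ip position (right before the message) are empty, and as far as I understand %{HOSTNAME} and %{IP} cannot match an empty string, so the whole grok match fails, service never gets set, and the unresolved sprintf reference is left as literal text in the index name. If that is the case, making those two fields optional is what I would try, something like this (untested sketch of the filter section only):

filter {
  if [type] =~ "default" {
    grok {
      # (?:...)? makes the host and ip columns optional so an empty column still matches
      match => { "message" => "%{TIMESTAMP_ISO8601}\|%{LOGLEVEL:level}\|%{DATA:service}\|%{DATA:env}\|%{DATA:dc}\|(?:%{HOSTNAME:host})?\|%{DATA:class}\|%{DATA:tenant}\|(?:%{IP:ip})?\|%{GREEDYDATA:msg}" }
    }
  }
}

Is that the right explanation, or is something else going on here?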