I am passing logs from a server to Elasticsearch and they are indexed properly, but the grok filter I have applied is not extracting the information. The grok pattern works perfectly when I use it in the Grok Debugger against the same log line — just not in my pipeline.conf file. Why?
input {
# Receive events from Beats shippers (e.g. Filebeat) on TCP port 5044.
beats {
port => 5044
}
}
filter {
# All three application streams used byte-identical grok patterns, so the
# three duplicated branches collapse into one conditional. `in` against a
# list of strings performs exact membership, preserving the original
# per-value equality semantics.
#
# NOTE(review): if Filebeat is configured with the top-level `tags` option,
# the event field is an ARRAY and a string comparison like
# `[fields][tags] == "ngta-web"` never matches, so grok is silently skipped
# even though the pattern itself is fine -- the classic reason a pattern
# "works in the Grok Debugger but not in the pipeline". Verify in the
# rubydebug output whether [fields][tags] is a scalar or an array.
if [fields][tags] in ["ngta-web", "ngta-app", "monitoring-app"] {
grok {
# break_on_match only matters when several patterns are listed; kept for
# parity with the original configuration.
break_on_match => false
match => {
"message" => [
# NOTE(review): the unnamed %{GREEDYDATA} before %{LOGLEVEL} matches but
# does not capture the text between the code and the level, and the level
# itself is uncaptured -- use %{LOGLEVEL:level} if it is needed downstream
# (TODO confirm intent). Pattern kept byte-identical here.
"%{DATESTAMP:timestamp}%{SPACE}%{NONNEGINT:code}%{GREEDYDATA}%{LOGLEVEL}%{SPACE}%{NONNEGINT:anum}%{SPACE}%{GREEDYDATA:logmessage}"
]
}
}
}
}
output {
# Ship events to a local Elasticsearch node.
elasticsearch {
hosts => ["localhost:9200"]
# Discover the rest of the cluster from the seed host above.
sniffing => true
# Index templates are managed outside this pipeline.
manage_template => false
ilm_enabled => false
# Index name is taken from the event itself via sprintf.
# NOTE(review): Elasticsearch index names must be lowercase, and if
# [fields][tags] is absent the literal string "%{[fields][tags]}" becomes
# the index name -- verify the field is always present on every event.
index => "%{[fields][tags]}"
}
# Mirror every event to stdout for debugging; shows whether grok added
# fields or tagged the event with _grokparsefailure.
stdout {
codec => rubydebug
}
}