I have used Filebeat to capture IIS logs, and the following grok pattern in Logstash to parse them:
match => ["message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:browser}/%{NOTSPACE:agent} %{NOTSPACE:referrer} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}"]
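For reference, here is a hypothetical log line in the field order the pattern expects (the actual order depends on the #Fields header of your IIS logs):

2017-05-10 13:45:30 172.24.80.86 GET /default.aspx - 80 - 10.0.0.5 Mozilla/5.0+(Windows+NT+10.0;+Win64;+x64) - 200 0 0 123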
However, in Kibana I don't see the fields parsed according to the grok filter.
I tried refreshing the index, and removing and re-adding it, but no luck.
Please help.
My logstash.conf is as follows:
input {
beats {
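# the port here must match output.logstash in filebeat.yml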
port => 5000
}
}
# First filter
filter {
# drop IIS header lines (W3C logs begin with comments such as "#Software:" and "#Fields:")
if [message] =~ "^#" {
drop {}
}
grok {
#patterns_dir => "./patterns"
match => ["message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:browser}/%{NOTSPACE:agent} %{NOTSPACE:referrer} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}"]
}
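# use the parsed timestamp as the event's @timestamp
# (note: IIS W3C logs are typically written in UTC)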
date {
match => [ "timestamp", "yyyy-MM-dd HH:mm:ss" ]
locale => "en"
}
}
# Second filter
filter {
if "_grokparsefailure" not in [tags] {
# on success, remove the raw message and the original timestamp field to save space
mutate {
remove_field => ["message", "timestamp"]
}
}
}
output {
elasticsearch {
hosts => ["172.24.80.86:9200"]
manage_template => false
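# resolves to a daily index such as filebeat-2017.05.10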
index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
}
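To check whether the grok itself matches, here is a minimal sketch of a test pipeline (assuming a local Logstash install; grok-test.conf is a hypothetical file name):

input { stdin {} }
filter {
grok {
match => ["message", "%{TIMESTAMP_ISO8601:timestamp} %{IPORHOST:serverip} %{WORD:verb} %{NOTSPACE:request} %{NOTSPACE:querystring} %{NUMBER:port} %{NOTSPACE:auth} %{IPORHOST:clientip} %{NOTSPACE:browser}/%{NOTSPACE:agent} %{NOTSPACE:referrer} %{NUMBER:response} %{NUMBER:sub_response} %{NUMBER:sc_status} %{NUMBER:responsetime}"]
}
}
output { stdout { codec => rubydebug } }

Run it with bin/logstash -f grok-test.conf and paste a log line; rubydebug prints the parsed event, including a _grokparsefailure tag if the pattern does not match.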