Hi
I have 2 types of log inputs, nginx and iptabes.
In filter section of logstash config file, I filter these messages and put a tag on them.
my issue is that all of logstash messages ( input from filebeats ) forwards to elasticsearch. But I want index messages with my custom tags in elasticsearch.
This is my Config for iptables:
filter {
if [fields][log_type] == "firewall" {
grok {
match => [ "message", "%{IPTABLES}"]
patterns_dir => "/etc/logstash/grok/iptables.pattern"
add_tag => [ "Blocked" ]
}
geoip {
source => "src_ip"
}
date {
match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
remove_field => [ "timestamp" ]
}
}
}
This is my config for nginx:
filter {
if [fields][log_type] == "nginx" {
grok {
match => [ "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}"]
overwrite => [ "message" ]
add_tag => [ "Nginx" ]
}
mutate {
convert => ["response", "integer"]
convert => ["bytes", "integer"]
convert => ["responsetime", "float"]
}
geoip {
source => "clientip"
}
date {
match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
remove_field => [ "timestamp" ]
}
useragent {
source => "agent"
}
}
}
and this is output config:
output {
elasticsearch {
hosts => ["localhost:9200"]
index => "logstash-%{+YYYY.MM.dd}"
}
stdout { codec => rubydebug }
}
where should I add something like this ?
if ("nginx" not in [tags]) or ("Blocked" not in [tags])
{ drop{} }
thanx