Hey folks, I have Logstash running on my server collecting nginx and Magento logs. Here are my configs:
logstash-magento-es.conf
input {
  file {
    type => "magento2"
    path => [ "/var/www/html/current/var/log/system.log" ]
  }
}

filter {
  if [type] == "magento2" {
    grok {
      # lines look like: [2024-05-01T12:00:00+00:00] main.ERROR: some message
      match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] %{DATA:log_level}: %{GREEDYDATA:message}" }
      overwrite => [ "message" ]
      add_field => { "received_at" => "%{@timestamp}" }
    }
  }
  if [type] == "magento1" {
    grok {
      # lines look like: 2024-05-01T12:00:00+00:00 ERR (3): some message
      match => { "message" => "%{TIMESTAMP_ISO8601:date} %{DATA:log_level} \([0-9]+\): %{GREEDYDATA:message}" }
      overwrite => [ "message" ]
      add_field => { "received_at" => "%{@timestamp}" }
    }
  }
}

output {
  elasticsearch {
    hosts => ["127.0.0.1:9200"]
    index => "magento-syslogs-frontend-%{+YYYY.MM.dd}"
    document_type => "magento_logs"
  }
}
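
In case it's useful, I sanity-checked the magento2 grok on its own with a throwaway stdin/stdout config like the one below (the sample line is made up, not from my real logs):

input { stdin { } }
filter {
  grok {
    match => { "message" => "\[%{TIMESTAMP_ISO8601:timestamp}\] %{DATA:log_level}: %{GREEDYDATA:message}" }
    overwrite => [ "message" ]
  }
}
output { stdout { codec => rubydebug } }

Pasting a line like [2024-05-01T12:00:00+00:00] main.ERROR: Something went wrong prints the parsed fields, so the pattern itself seems fine.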
logstash-nginx-es.conf
input {
  beats {
    host => "0.0.0.0"
    port => 5400
  }
}

filter {
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}%{GREEDYDATA:extra_fields}" }
    overwrite => [ "message" ]
  }
  mutate {
    remove_field => [ "[host]" ]
  }
  mutate {
    convert => {
      "response"     => "integer"
      "bytes"        => "integer"
      "responsetime" => "float"
    }
  }
  geoip {
    source => "clientip"
    target => "geoip"
    add_tag => [ "nginx-geoip" ]
  }
  date {
    # nginx combined-format timestamps, e.g. 01/May/2024:12:00:00 +0000
    match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
    remove_field => [ "timestamp" ]
  }
  useragent {
    source => "agent"
  }
}

output {
  elasticsearch {
    hosts => ["127.0.0.1:9200"]
    index => "weblogs-frontend-%{+YYYY.MM.dd}"
    document_type => "nginx_logs"
  }
}
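
Both files live in /etc/logstash/conf.d and get picked up by the same Logstash service. The nginx grok also parses cleanly when I test it in isolation the same way, with a made-up combined-format line like:

127.0.0.1 - - [01/May/2024:12:00:00 +0000] "GET /index.php HTTP/1.1" 200 612 "-" "Mozilla/5.0"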
So individually everything works fine. But when I create two index patterns in Kibana for those two logs, I get the same data in both indices. I can't seem to find out why. What would be the reason?