Good morning. I set up a log server with the ELK stack on one machine. I need to ship logs to it from 16 other machines, and I'd like to have 16 named indexes. So I tried this (4 machines for now):
input {
  # One Beats listener per shipper, each on its own port, each tagging its
  # events so the output section can route them to a per-machine index.
  # NOTE: tags are case-sensitive in Logstash conditionals — the tag here
  # must match the string tested in the output section exactly.
  # (Was 'Jira' with a capital J, which never matched `'jira' in [tags]`,
  # so the jira index was never created.)
  beats {
    type                      => syslog
    client_inactivity_timeout => 1200
    port                      => 5047
    ssl                       => true
    ssl_certificate           => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key                   => "/etc/pki/tls/private/logstash-forwarder.key"
    tags                      => [ 'jira' ]
  }
  beats {
    type                      => syslog
    client_inactivity_timeout => 1200
    port                      => 5044
    ssl                       => true
    ssl_certificate           => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key                   => "/etc/pki/tls/private/logstash-forwarder.key"
    tags                      => [ 'polluce' ]
  }
  beats {
    type                      => syslog
    client_inactivity_timeout => 1200
    port                      => 5045
    ssl                       => true
    ssl_certificate           => "/etc/pki/tls/certs/logstash-forwarder.crt"
    ssl_key                   => "/etc/pki/tls/private/logstash-forwarder.key"
    tags                      => [ 'commserve' ]
  }
  # rsyslog (omelasticsearch/omfwd) shipper — no Beats available on that host.
  syslog {
    type  => omelasticsearch
    port  => 5001
    codec => json
    tags  => [ 'nagios' ]
  }
}
filter {
  # The two original branches ("syslog" and "omelasticsearch") were
  # byte-identical, so they are merged into a single conditional.
  if [type] == "syslog" or [type] == "omelasticsearch" {
    grok {
      # Canonical syslog line: "Mon dd HH:mm:ss host program[pid]: message".
      # The PID group is optional (`?`) and the literal ": " separator is
      # required — the original pattern lacked both, so every standard
      # syslog line ended up with a _grokparsefailure tag.
      match     => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    date {
      # Parse the syslog timestamp (day-of-month may be space- or zero-padded).
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}
output {
  # Route each event to a per-machine, per-day index based on the tag set
  # by its input. Tag comparisons are case-sensitive; 'Jira' is accepted
  # here as well so events tagged with the original capitalized tag are
  # still routed instead of being silently dropped.
  if 'jira' in [tags] or 'Jira' in [tags] {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "jira-%{+YYYY.MM.dd}"
    }
    stdout {}
  }
  if 'polluce' in [tags] {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "polluce-%{+YYYY.MM.dd}"
    }
    stdout {}
  }
  if 'commserve' in [tags] {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "commserve-%{+YYYY.MM.dd}"
    }
    stdout {}
  }
  if 'nagios' in [tags] {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "nagios-%{+YYYY.MM.dd}"
    }
    stdout {}
  }
}
Right now I'm only seeing 2 indexes: one from the Nagios machine, which ships via rsyslog (I couldn't install Filebeat there), and one from the commserve machine, which ships via the Windows version of Filebeat.
What am I doing wrong? Should I change my approach and set up a multiple-pipeline structure? Thank you for your support.