Hello,
I've got a strange problem.
I'm using Logstash to get logs from syslog and OSSEC into Elasticsearch, so I can search them either through Kibana or with queries run directly against Elasticsearch.
I have different index patterns for events from syslog and OSSEC: the syslog index pattern is logstash-%{+YYYY.MM.dd} and the OSSEC one is ossec-%{+YYYY.MM.dd}.
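(The date part of the pattern is filled in from each event's @timestamp, so I end up with one index per day and source. A quick way to list them on the Elasticsearch node, bcksrv16 as in the configs below, is something like:

curl -s 'bcksrv16:9200/_cat/indices/logstash-*,ossec-*?v'
)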
Now the interesting part: some logs that should go into the syslog indexes end up in the OSSEC indexes instead. I have no idea how this can happen, because these are the two Logstash config files:
Ossec config:
input {
  lumberjack {
    # OSSEC alerts come in from logstash-forwarder over TLS on port 5003
    port => 5003
    type => "lumberjack"
    ssl_certificate => "/etc/logstash/logstash-forwarder.crt"
    ssl_key => "/etc/logstash/logstash-forwarder.key"
    codec => json
  }
}

filter {
  geoip {
    # look up the alert source IP and build a [lon, lat] location field
    source => "srcip"
    target => "geoip"
    database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
    add_field => [ "[geoip][location]", "%{[geoip][longitude]}" ]
    add_field => [ "[geoip][location]", "%{[geoip][latitude]}" ]
  }
  date {
    # use the OSSEC alert timestamp as @timestamp
    match => ["timestamp", "YYYY MMM dd HH:mm:ss"]
    target => "@timestamp"
  }
  mutate {
    convert => [ "[geoip][location]", "float" ]
    rename => [ "hostname", "AgentName" ]
    rename => [ "geoip", "GeoLocation" ]
    rename => [ "file", "AlertsFile" ]
    rename => [ "agentip", "AgentIP" ]
    rename => [ "[rule][comment]", "[rule][description]" ]
    rename => [ "[rule][level]", "[rule][AlertLevel]" ]
    remove_field => [ "timestamp" ]
  }
}

output {
  #stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["bcksrv16:9200"]
    index => "ossec-%{+YYYY.MM.dd}"
    document_type => "ossec"
    template => "/etc/logstash/elastic-ossec-template.json"
    template_name => "ossec"
    template_overwrite => true
  }
}
And the syslog config:
input {
  udp {
    # plain syslog over UDP on port 5001
    port => 5001
    type => "syslog"
  }
}
filter {
  if [type] == "syslog" {
    grok {
      # split the syslog header into timestamp, host, program, pid and message
      break_on_match => true
      match => ["message", "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(: %{POSINT:win_eventid})?(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}"]
      add_field => ["received_at", "%{@timestamp}"]
      remove_field => ["host"]
    }
    syslog_pri {}
    date {
      match => ["syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "MM/dd/yy HH:mm:ss"]
    }
  }
}
filter {
  grok {
    break_on_match => true
    match => [
      "message", "%{HAPROXYHTTP}"
    ]
    add_tag => ["HAProxy"]
  }
  geoip {
    database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
    source => "client_ip"
    target => "geoip"
  }
}
output {
  elasticsearch {
    hosts => ["bcksrv16:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
    document_type => "syslog"
  }
}
I've checked, and there is no way the syslog events are being sent to the Logstash OSSEC port (5003).
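For reference, this is roughly the kind of query I use against the ossec-* indexes to find the stray events: I check for a field that only the syslog grok filter above adds (syslog_program here, but any of those fields would do):

curl -s 'bcksrv16:9200/ossec-*/_search?q=_exists_:syslog_program&size=3&pretty'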
If anyone has an idea, please let me know.
Greetings, Richard.