Hi,
My host shipping logs via filebeat 6.1.1 to our logstash 6.1.1 server, and i can see the parsed logs on the screen by using stdout { codec => rubydebug }. But logs data not sending to Elasticsearch 6.1.1
In my logstash pipeline i have one beats input with multiple filters and outputs (to create several indexes based on log tags).
Please if you can check my logstash.conf where is the problem:
input {
beats {
port => 5044
tags => [ "ApacheAccessLogs", "ApacheErrorLogs", "MysqlErrorLogs" ]
}
}
filter {
if "ApacheAccessLogs" in [tags] {
grok {
match => [
"message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}",
"message" , "%{COMMONAPACHELOG}+%{GREEDYDATA:extra_fields}"
]
overwrite => [ "message" ]
}
mutate {
convert => ["response", "integer"]
convert => ["bytes", "integer"]
convert => ["responsetime", "float"]
}
geoip {
source => "clientip"
target => "geoip"
add_tag => [ "apache-geoip" ]
}
date {
match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
remove_field => [ "timestamp" ]
}
useragent {
source => "agent"
}
}
if "ApacheErrorLogs" in [tags] {
grok {
match => { "message" => ["\[%{APACHE_TIME:[apache2][error][timestamp]}\] \[%{LOGLEVEL:[apache2][error][level]}\]( \[client %{IPORHOST:[apache2][error][client]}\])? %{GREEDYDATA:[apache2][error][message]}",
"\[%{APACHE_TIME:[apache2][error][timestamp]}\] \[%{DATA:[apache2][error][module]}:%{LOGLEVEL:[apache2][error][level]}\] \[pid %{NUMBER:[apache2][error][pid]}(:tid %{NUMBER:[apache2][error][tid]})?\]( \[client %{IPORHOST:[apache2][error][client]}\])? %{GREEDYDATA:[apache2][error][message1]}" ] }
pattern_definitions => {
"APACHE_TIME" => "%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}"
}
remove_field => "message"
}
mutate {
rename => { "[apache2][error][message1]" => "[apache2][error][message]" }
}
date {
match => [ "[apache2][error][timestamp]", "EEE MMM dd H:m:s YYYY", "EEE MMM dd H:m:s.SSSSSS YYYY" ]
remove_field => "[apache2][error][timestamp]"
}
}
if "MysqlErrorLogs" in [tags] {
grok {
match => { "message" => ["%{LOCALDATETIME:[mysql][error][timestamp]} (\[%{DATA:[mysql][error][level]}\] )?%{GREEDYDATA:[mysql][error][message]}",
"%{TIMESTAMP_ISO8601:[mysql][error][timestamp]} %{NUMBER:[mysql][error][thread_id]} \[%{DATA:[mysql][error][level]}\] %{GREEDYDATA:[mysql][error][message1]}",
"%{GREEDYDATA:[mysql][error][message2]}"] }
pattern_definitions => {
"LOCALDATETIME" => "[0-9]+ %{TIME}"
}
remove_field => "message"
}
mutate {
rename => { "[mysql][error][message1]" => "[mysql][error][message]" }
}
mutate {
rename => { "[mysql][error][message2]" => "[mysql][error][message]" }
}
date {
match => [ "[mysql][error][timestamp]", "ISO8601", "YYMMdd H:m:s" ]
remove_field => "[apache2][access][time]"
}
}
}
output {
if "ApacheAccessLogs" in [tags] {
elasticsearch { hosts => ["localhost:9200"]
index => "apacheAccess-%{+YYYY.MM.dd}"
document_type => "doc"
}
}
if "ApacheErrorLogs" in [tags] {
elasticsearch { hosts => ["localhost:9200"]
index => "apacheError-%{+YYYY.MM.dd}"
document_type => "doc"
}
}
if "MysqlErrorLogs" in [tags] {
elasticsearch { hosts => ["localhost:9200"]
index => "mySql-%{+YYYY.MM.dd}"
document_type => "doc"
}
}
stdout { codec => rubydebug }
}
My Filebeat.yml - I am posting the filebeat configuration just to show why i used tags and fields.type:
filebeat.prospectors:
- type: log
paths:
- C:\xampp\apache\logs\access.log
fields:
type: apache_access
tags: ["ApacheAccessLogs"]
- type: log
paths:
- C:\xampp\apache\logs\error.log
fields:
type: apache_error
tags: ["ApacheErrorLogs"]
- type: log
paths:
- C:\xampp\mysql\data\mysql_error.log
#- C:\Program Files\Microsoft SQL Server\MSSQL12.SQLEXPRESS\MSSQL\Log\ERRORLOG
fields:
type: mysql_error
tags: ["MysqlErrorLogs"]
output.logstash:
# The Logstash hosts
hosts: ["logstashdomain:5044"]