I made these changes based on the above. Should this logstash.conf do it? Kibana's Discover tab isn't showing any records at all for logstash-*.
input {
  beats {
    port => 5044
  }
  udp {
    host => "127.0.0.1"
    port => 10514
    codec => "json"
    type => "logstash"
  }
  beats {
    port => 5045
    type => "ex_msg_trk"
  }
  file {
    path => "/var/log/remote-hosts/BRT1-VHOST1.RETREATHEALTHCARE.ORG/BRT1-VHOST1.RETREATHEALTHCARE.ORG-20170127.log"
    start_position => "beginning"
  }
}
filter {
  if [type] == "ex_msg_trk" {
    grok {
      match => { "message" => "(%{TIMESTAMP_ISO8601:date-time})?,(%{IPORHOST:client-ip})?,(%{IPORHOST:client-hostname})?,(%{IPORHOST:server-ip})?,(%{IPORHOST:server-hostname})?,(%{GREEDYDATA:source-context})?,(%{GREEDYDATA:connector-id})?,(%{WORD:source})?,(%{WORD:event-id})?,(%{NUMBER:internal-message-id})?,(%{GREEDYDATA:message-id})?,(%{GREEDYDATA:recipient-address})?,(%{GREEDYDATA:recipient-status})?,(%{NUMBER:total-bytes})?,(%{NUMBER:recipient-count})?,(%{GREEDYDATA:related-recipient-address})?,(%{GREEDYDATA:reference})?,(%{GREEDYDATA:message-subject})?,(%{GREEDYDATA:sender-address})?,(%{GREEDYDATA:return-path})?,(%{GREEDYDATA:message-info})?,(%{WORD:directionality})?,(%{GREEDYDATA:tenant-id})?,(%{IPORHOST:original-client-ip})?,(%{IPORHOST:original-server-ip})?,(%{GREEDYDATA:custom-data})?" }
    }
    mutate {
      convert => [ "total-bytes", "integer" ]
      convert => [ "recipient-count", "integer" ]
      split => [ "recipient-address", ";" ]
      split => [ "source-context", ";" ]
      split => [ "custom-data", ";" ]
    }
  }
  if [type] == "logstash" {
    # Extract fields from each of the detailed message types.
    # The patterns provided below are included in the core of Logstash 1.4.2.
    grok {
      match => [
        "message", "%{CISCOFW106001}",
        "message", "%{CISCOFW106006_106007_106010}",
        "message", "%{CISCOFW106014}",
        "message", "%{CISCOFW106015}",
        "message", "%{CISCOFW106021}",
        "message", "%{CISCOFW106023}",
        "message", "%{CISCOFW106100}",
        "message", "%{CISCOFW110002}",
        "message", "%{CISCOFW302010}",
        "message", "%{CISCOFW302013_302014_302015_302016}",
        "message", "%{CISCOFW302020_302021}",
        "message", "%{CISCOFW305011}",
        "message", "%{CISCOFW313001_313004_313008}",
        "message", "%{CISCOFW313005}",
        "message", "%{CISCOFW402117}",
        "message", "%{CISCOFW402119}",
        "message", "%{CISCOFW419001}",
        "message", "%{CISCOFW419002}",
        "message", "%{CISCOFW500004}",
        "message", "%{CISCOFW602303_602304}",
        "message", "%{CISCOFW710001_710002_710003_710005_710006}",
        "message", "%{CISCOFW713172}",
        "message", "%{CISCOFW733100}"
      ]
    }
    # Parse the syslog severity and facility
    syslog_pri { }
    # Do a DNS lookup for the sending host; otherwise the
    # host field will contain an IP address instead of a hostname.
    dns {
      reverse => [ "host" ]
      action => "replace"
    }
    geoip {
      source => "src_ip"
      target => "geoip"
      database => "/opt/logstash/databases/GeoLiteCity.dat"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      convert => [ "[geoip][coordinates]", "float" ]
    }
    # Do a GeoIP lookup for the ASN/ISP information.
    geoip {
      database => "/opt/logstash/databases/GeoIPASNum.dat"
      source => "src_ip"
    }
  }
  if [path] =~ "VHOST" {
    mutate { replace => { "type" => "esxi_host" } }
  }
}
output {
  if [type] == "logstash" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "%{type}-%{+YYYY.MM.dd}"
      manage_template => false
      document_type => "logstash"
    }
  } else if [type] == "ex_msg_trk" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "logstash_exch-%{+YYYY.MM.dd}"
    }
  } else if [type] == "esxi_host" {
    elasticsearch {
      hosts => ["127.0.0.1:9200"]
      index => "%{type}-%{+YYYY.MM.dd}"
    }
  } else {
    elasticsearch {
      hosts => ["localhost:9200"]
      sniffing => true
      manage_template => false
      index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
      document_type => "%{[@metadata][type]}"
    }
  }
}
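
To narrow down why logstash-* stays empty, I'm thinking of temporarily dropping a stdout output into the output block so I can see whether the UDP events are arriving at all and what [type] they end up with. This is only a debugging sketch using the stock stdout plugin and rubydebug codec, not something I'd keep in the final config:

  # Debug only: print every event so it's obvious whether anything
  # arrives and which [type] it carries before hitting the conditionals.
  stdout { codec => rubydebug }

A config test against this file first (logstash -t -f <path to this file> on 2.x, or --config.test_and_exit on 5.x, if I have the flags right) should catch any syntax problems, and looking at Elasticsearch's _cat/indices output would show whether the documents are actually landing under a different index name than logstash-*.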