Here you go...

input {
file {
path => ["/var/log/adc/2018///adc.log",
"/var/log/adc/2018///asdi.log",
"/var/log/adc/2018///edct_cdm_flight_data.log",
"/var/log/adc/2018///flightaware.log",
"/var/log/adc/2018///flight_manager.log",
"/var/log/adc/2018///fp.log",
"/var/log/adc/2018///invalid_outgoing.log",
"/var/log/adc/2018///iridium.log",
"/var/log/adc/2018///met_error.log",
"/var/log/adc/2018///microservice.log",
"/var/log/adc/2018///mq_output.log",
"/var/log/adc/2018///performance.log",
"/var/log/adc/2018///position_data.log",
"/var/log/adc/2018///rmqapps.log",
"/var/log/adc/2018///sbbtraffic.log",
"/var/log/adc/2018///schneider.log",
"/var/log/adc/2018///skyguide_notams.log",
"/var/log/adc/2018///sql.log",
"/var/log/adc/2018///unparsed.log",
"/var/log/adc/2018///wx.log"
]
tags => [ "standard_adc_format" ]
# default discover_interval is 15 sec
discover_interval => 60
# file where the current read position in each log file is stored
sincedb_path => "/tmp/logstash-sincedb.db"
# when a new log is first found, begin reading from the first line
start_position => "beginning"
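# NOTE: /tmp is often cleared on reboot; if these sincedb files are lost,
# every log is re-read from the beginning. A persistent path such as
# /var/lib/logstash (assuming it exists on your install) may be safer.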
}
file {
path => ["/var/log/adc/2018///api.log",
"/var/log/adc/2018///dashboard.log"
]
tags => [ "alt_adc_format" ]
# default discover_interval is 15 sec
discover_interval => 60
# file where the current read position in each log file is stored
sincedb_path => "/tmp/logstash-sincedb2.db"
# when a new log is first found, begin reading from the first line
start_position => "beginning"
}
file {
path => ["/var/log/sys/2018///maillog"
]
tags => [ "syslog_format" ]
# default discover_interval is 15 sec
discover_interval => 60
# file where the current read position in each log file is stored
sincedb_path => "/tmp/logstash-sincedb3.db"
# when a new log is first found, begin reading from the first line
start_position => "beginning"
}
}
filter {
if "standard_adc_format" in [tags] {
if ".py" in [message] {
# it's a log line from a python app with extra info
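# hypothetical sample line (format inferred from the pattern below):
# 2018-06-02T08:15:30.123Z <local0.info> adc01 flight_manager.py[2214] 2018-06-02 08:15:30,122 INFO 140254339123968 flight_manager.py 88 scheduled update for flight N123AB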
grok {
match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname}[%{USERNAME:process_id}] %{NOTSPACE:serverdate} %{NOTSPACE:servertime} %{WORD:loglevel} %{NUMBER:thread_id} %{NOTSPACE:source_file} %{POSINT:source_line} %{GREEDYDATA:message}" ]
overwrite => [ "message" ]
}
} else {
# it's a standard syslog format not generated by our python logging libs
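# hypothetical sample line:
# 2018-06-02T08:15:31.456Z <local0.notice> adc01 iridium[1988] modem heartbeat OK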
grok {
match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname}[%{USERNAME:process_id}] %{GREEDYDATA:message}" ]
}
}
mutate {
gsub => [ "message", "", "
" ]
}
}
if "alt_adc_format" in [tags] {
grok {
match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} #|%{NOTSPACE:date2} %{NOTSPACE:time2} %{WORD:loglevel} %{NUMBER:thread_id} %{NOTSPACE:source_file} %{POSINT:source_line} %{GREEDYDATA:message}" ]
overwrite => [ "message" ]
}
mutate {
gsub => [ "message", "", "
" ]
}
}
if "syslog_format" in [tags] {
grok {
match => [ "message", "^%{TIMESTAMP_ISO8601:logdate} <%{NOTSPACE:syslog}> %{NOTSPACE:hostname} %{NOTSPACE:appname} %{GREEDYDATA:message}" ]
overwrite => [ "message" ]
}
}
}
output {
if "_grokparsefailure" in [tags] {
# write events that didn't match to a file
file { path => "/tmp/grok_failures.txt" }
} else {
elasticsearch { hosts => ["localhost:9200"] }
}
# for debugging:
stdout { codec => rubydebug }
}
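To sanity-check the syntax before restarting the service, you can run something like the following (the binary path and the adc.conf filename are just examples; adjust them for your install):

/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/adc.conf --config.test_and_exit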