Hi all,
I have an Apache log file that I want to ingest into Logstash and then send the parsed data on to Elasticsearch.
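The file contains standard Apache combined-format entries. For illustration (this is the stock example line from the Apache docs, not my actual data), each entry looks like:

127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 "http://www.example.com/start.html" "Mozilla/4.08 [en] (Win98; I ;Nav)"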
I run the logstash command like this:
.\logstash -f logstash.conf
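For what it's worth, Logstash's standard --config.test_and_exit flag can verify that a config parses without starting the pipeline, which should rule out a syntax problem:

.\logstash -f logstash.conf --config.test_and_exit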
After running the command, the last line of the output is "Successfully started Logstash API endpoint". Ideally I should then see all the logs being uploaded to Elasticsearch, but that isn't happening.
Here is my logstash.conf
input {
  file {
    path => "E:\elk\logstash\apache_logs"
    type => "apache_access"
    start_position => "beginning"
  }
}
filter {
  if [type] in ["apache", "apache_access", "apache-access"] {
    grok {
      match => [
        "message", "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}",
        "message", "%{COMMONAPACHELOG}+%{GREEDYDATA:extra_fields}"
      ]
      overwrite => [ "message" ]
    }
    mutate {
      convert => ["response", "integer"]
      convert => ["bytes", "integer"]
      convert => ["responsetime", "float"]
    }
    geoip {
      source => "clientip"
      target => "geoip"
      add_tag => [ "apache-geoip" ]
    }
    date {
      match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ]
      remove_field => [ "timestamp" ]
    }
    useragent {
      source => "agent"
    }
  }
  if [type] in ["apache_error", "apache-error"] {
    grok {
      match => ["message", "\[%{WORD:dayname} %{WORD:month} %{DATA:day} %{DATA:hour}:%{DATA:minute}:%{DATA:second} %{YEAR:year}\] \[%{NOTSPACE:loglevel}\] (?:\[client %{IPORHOST:clientip}\] ){0,1}%{GREEDYDATA:message}"]
      overwrite => [ "message" ]
    }
    mutate {
      add_field => {
        "time_stamp" => "%{day}/%{month}/%{year}:%{hour}:%{minute}:%{second}"
      }
    }
    date {
      match => ["time_stamp", "dd/MMM/YYYY:HH:mm:ss"]
      remove_field => [ "time_stamp", "day", "dayname", "month", "hour", "minute", "second", "year" ]
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "apache-%{+YYYY.MM.dd}"
    document_type => "system_logs"
  }
  stdout { codec => rubydebug }
}
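If it helps with diagnosis, I can also rerun Logstash with its standard debug flag to see what the file input is doing (e.g. which paths it discovers):

.\logstash -f logstash.conf --log.level=debug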
My Elasticsearch indices look like this:
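(I'm listing the indices with Elasticsearch's standard cat API:

curl -X GET "http://localhost:9200/_cat/indices?v"
)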
I have tried running the above command several times, but the logs never get stashed. What am I doing wrong? Can someone please help me out?