I am new to ELK and am experimenting to learn how to use the tools. I have a Logstash config file set up and have gotten data into Elasticsearch. I noticed that it ignores the date and time in the log entries and instead uses the current clock time for the events — I want the opposite: the timestamp from the log entry should be used in Elasticsearch.
Config file:
input {
  file {
    # Apache access log to tail.
    path => "/opt/bitnami/apache2/logs/access_log"
    # Read the file from the top when it is first discovered.
    # String values in Logstash config must be quoted; the bareword
    # `beginning` is not valid config syntax.
    start_position => "beginning"
    # Throw away sincedb state so the whole file is re-read on every
    # restart — useful while experimenting, not for production.
    sincedb_path => "/dev/null"
    # NOTE(review): in current Logstash versions ignore_older => 0 means
    # "ignore files older than 0 seconds", i.e. skip everything not being
    # actively written — consider removing this setting or raising the
    # value if old log files stop being picked up.
    ignore_older => 0
  }
}
filter {
  grok {
    # Parse the Apache access-log line. The timestamp must be captured
    # into a NAMED field — %{HTTPDATE:timestamp} — otherwise the match
    # succeeds but no `timestamp` field is created, and the date filter
    # below finds nothing to parse (which is why @timestamp kept the
    # ingest clock time instead of the time from the log line).
    match => { "message" => "%{IP:client}%{SPACE}%{NOTSPACE}%{SPACE}%{DATA:who}%{SPACE}\[%{HTTPDATE:timestamp}\]%{SPACE}%{NOTSPACE}%{WORD:verb}%{SPACE}%{URIPATHPARAM}%{SPACE}%{WORD:protocol}%{NOTSPACE}%{NUMBER:protoversion}%{NOTSPACE}%{SPACE}%{WORD:status}%{SPACE}%{GREEDYDATA:size}" }
  }
  date {
    # Overwrite @timestamp (which defaults to event ingest time) with the
    # time parsed from the log entry, e.g. "15/Dec/2017:16:43:04 +0000".
    match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
    target => "@timestamp"
  }
  mutate {
    # Store the response size as an integer rather than a string.
    convert => { "size" => "integer" }
  }
}
output {
# Ship parsed events to the local Elasticsearch node. No index is set, so
# the plugin's default daily index naming is used (the sample event below
# landed in "logstash-2018.01.19").
elasticsearch {
hosts => [ "127.0.0.1:9200" ]
}
}
Here is one event from Elasticsearch in JSON format:
{
"_index": "logstash-2018.01.19",
"_type": "doc",
"_id": "mA0aD2EBgQZkMV75rhqq",
"_version": 1,
"_score": null,
"_source": {
"path": "/opt/bitnami/apache2/logs/access_log",
"protoversion": "1.1",
"protocol": "HTTP",
"@timestamp": "2018-01-19T15:47:42.041Z",
"size": 212,
"@version": "1",
"host": "debian",
"verb": "GET",
"client": "192.168.1.56",
"message": "192.168.1.56 - - [15/Dec/2017:16:43:04 +0000] \"GET / HTTP/1.1\" 302 212",
"who": "-",
"status": "302"
},
"fields": {
"@timestamp": [
"2018-01-19T15:47:42.041Z"
]
},
"sort": [
1516376862041
]
}