I am using logstash-file-mongodb plugin for indexing data to Elasticsearch coming from MongoDB.
Here in my input file:
input {
mongodb {
uri => 'mongodb://myuser:mypwd@myhost/TestDB'
placeholder_db_dir => 'E:/logstash-mongodb/'
placeholder_db_name => 'logstash_sqlite.db'
collection => 'MediaSummary'
batch_size => 5000
generateId => false
parse_method => "simple"
}
}
filter {
mutate {
rename => { "_id" => "mongo_id" }
}
date {
match => ["StartDate", "YYYY-MM-dd HH:mm:ss.SSSZ"]
target => "StartDate"
timezone => "UTC"
}
date {
match => ["EndDate", "YYYY-MM-dd HH:mm:ss.SSSZ"]
target => "EndDate"
timezone => "UTC"
}
date {
match => ["ClientExpiryDate", "YYYY-MM-dd HH:mm:ss.SSSZ"]
target => "ClientExpiryDate"
timezone => "UTC"
}
mutate {
remove_field => ["path","tags"]
}
}
output {
stdout {
codec => rubydebug
}
elasticsearch {
hosts => ["127.0.0.1:9200"]
index => "mediasummary"
doc_as_upsert => true
document_id => "%{mongo_id}"
}
}
My indexed data in Elasticsearch shows up somewhat like this:
"hits" : [
{
"_index" : "mediasummary",
"_type" : "doc",
"_id" : "59a41f63dba6488b332683ee",
"_score" : 1.0,
"_source" : {
"@version" : "1",
"StartDate" : "2017-03-13 11:48:53 UTC",
"@timestamp" : "2019-01-23T14:28:16.877Z",
"logdate" : "2017-08-28T13:49:23+00:00",
"mongo_id" : "59a41f63dba6488b332683ee",
"EndDate" : "2017-03-13 12:48:53 UTC"
}
}]
You can see its not converting StartDate, EndDate fields correctly to Elasticsearch recognized date type. Only logdate and @timestamp fields showing in correct format.
So how I can convert date fields from input to Elasticsearch recognized date type.