Hi — I just ran all of this in 7.2.0, and it all ran fine:
PUT my_index/_doc/1
{
"value_at": "2019-07-10T17:01:01.500Z"
}
GET my_index
{
"my_index" : {
"aliases" : { },
"mappings" : {
"properties" : {
"value_at" : {
"type" : "date"
}
}
},
"settings" : {
"index" : {
"creation_date" : "1563048120913",
"number_of_shards" : "1",
"number_of_replicas" : "1",
"uuid" : "2WuVHjLyTXSHx2fzeQDRCg",
"version" : {
"created" : "7020099"
},
"provided_name" : "my_index"
}
}
}
}
PUT my_index/_doc/2
{
"value_at": "2019-07-10T17:01:01.50Z"
}
PUT my_index/_doc/3
{
"value_at": "2019-07-10T17:01:01.5Z"
}
GET my_index/_search
{
"took" : 0,
"timed_out" : false,
"_shards" : {
"total" : 1,
"successful" : 1,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 3,
"relation" : "eq"
},
"max_score" : 1.0,
"hits" : [
{
"_index" : "my_index",
"_type" : "_doc",
"_id" : "1",
"_score" : 1.0,
"_source" : {
"value_at" : "2019-07-10T17:01:01.500Z"
}
},
{
"_index" : "my_index",
"_type" : "_doc",
"_id" : "2",
"_score" : 1.0,
"_source" : {
"value_at" : "2019-07-10T17:01:01.50Z"
}
},
{
"_index" : "my_index",
"_type" : "_doc",
"_id" : "3",
"_score" : 1.0,
"_source" : {
"value_at" : "2019-07-10T17:01:01.5Z"
}
}
]
}
}
If you are tied to that version, you could probably use Logstash with some parsing logic to normalize the timestamps before sending them to Elasticsearch.