My mappings:
config = {
    "mappings": {
        my_doc_type: {
            "dynamic": False,
            "properties": {
                "timestamp_start": {
                    "type": "date"
                },
                "timestamp_end": {
                    "type": "date"
                },
                "entity_id": {
                    "type": "string",
                    "index": "not_analyzed"
                },
                "version": {
                    "type": "string",
                    "index": "not_analyzed"
                }
            }
        }
    },
    ...
}
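
For context, I create the index roughly like this (a sketch using the official elasticsearch-py client; the connection details, index name, and doc type below are placeholders, not my real values):

from elasticsearch import Elasticsearch

es = Elasticsearch()           # placeholder: local single-node cluster
index_name = "my_index"        # placeholder index name
my_doc_type = "my_doc_type"    # placeholder; defined before building config above

# The mapping is only applied when the index is created with it;
# the create call is skipped if the index already exists.
if not es.indices.exists(index=index_name):
    es.indices.create(index=index_name, body=config)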
If I perform this query:
{
    "aggregations": {
        "by_entity_id": {
            "terms": {
                "field": "entity_id"
            }
        }
    },
    "from": 0,
    "query": {
        "filtered": {
            "filter": {
                "range": {
                    "timestamp_end": {
                        "from": "2018-04-07T18:00:17Z",
                        "to": "2018-06-06T18:00:17Z"
                    }
                }
            }
        }
    },
    "size": 0
}
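
I send it with the same client (again a sketch; `query` here stands for the JSON body above as a Python dict):

# size=0 suppresses hits, so only the aggregation comes back.
resp = es.search(index=index_name, doc_type=my_doc_type, body=query)
buckets = resp["aggregations"]["by_entity_id"]["buckets"]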
My entity_id field still seems to be analyzed and tokenized. This data set contains exactly one entity_id, so I expect a single bucket, but I get five: one per hyphen-delimited segment of the UUID-style value, each with the full doc_count of 480:
{
    "aggregations": {
        "by_entity_id": {
            "buckets": [
                {
                    "doc_count": 480,
                    "key": "4bcf"
                },
                {
                    "doc_count": 480,
                    "key": "60965392"
                },
                {
                    "doc_count": 480,
                    "key": "73ba"
                },
                {
                    "doc_count": 480,
                    "key": "bb1f"
                },
                {
                    "doc_count": 480,
                    "key": "ff0cf25f5480"
                }
            ],
            "doc_count_error_upper_bound": 0,
            "sum_other_doc_count": 0
        }
    },
    "items": [],
    "total": 480
}
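
In case it's relevant, this is how I'd double-check the mapping the index actually holds (same sketch assumptions as above):

# Inspect what the cluster actually stored for entity_id; the live
# mapping may differ from `config` if it was never (re)applied.
print(es.indices.get_mapping(index=index_name, doc_type=my_doc_type))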
What am I doing wrong?