I got another insight from another thread here: Search by digits doesn't work with edge_ngram - #2 by dadoonet
I tried _analyze and saw that it was removing the numbers from my query, so I replaced the search tokenizer with "whitespace" instead.
I also decided to use two fields, original and term, keeping term.autocomplete as in the example. Both fields are searched with no boosting, and it seems to work as expected now.
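For anyone checking their own setup: if I recall correctly, the edge_ngram docs example uses the "lowercase" tokenizer for the search analyzer, and that tokenizer splits on non-letters, so digits get dropped. A quick standalone _analyze call (no index needed) shows it:

POST /_analyze
{
  "tokenizer": "lowercase",
  "text": "pentium3"
}

This returns only the token "pentium", while the whitespace tokenizer keeps "pentium3" intact.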
PUT /searches
{
  "settings": {
    "analysis": {
      "analyzer": {
        "autocomplete": {
          "type": "custom",
          "tokenizer": "autocomplete",
          "filter": [
            "lowercase"
          ]
        },
        "autocomplete_search": {
          "type": "custom",
          "tokenizer": "whitespace",
          "filter": [
            "lowercase"
          ]
        }
      },
      "tokenizer": {
        "autocomplete": {
          "type": "edge_ngram",
          "min_gram": 1,
          "max_gram": 10,
          "token_chars": []
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "original": {
        "type": "text"
      },
      "term": {
        "type": "text",
        "fields": {
          "autocomplete": {
            "type": "text",
            "analyzer": "autocomplete",
            "search_analyzer": "autocomplete_search"
          }
        }
      }
    }
  }
}
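To double-check the index-time side after creating the index, _analyze can also be pointed at the custom analyzer (just a verification step, not part of the setup itself):

GET /searches/_analyze
{
  "analyzer": "autocomplete",
  "text": "pentium4"
}

This should return the edge n-grams p, pe, pen, pent, penti, pentiu, pentium, pentium4.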
PUT /searches/_doc/1
{
  "original": "Pentium 4",
  "term": "pentium4"
}

PUT /searches/_doc/2
{
  "original": "Pentium 3 Intel Processor",
  "term": "pentium3intelprocessor"
}
GET /searches/_search
{
  "query": {
    "multi_match": {
      "query": "pentium3",
      "fields": ["term.autocomplete", "original"],
      "operator": "and"
    }
  }
}
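With these docs, "pentium3" matches document 2 only through term.autocomplete ("pentium3" is one of the edge n-grams of "pentium3intelprocessor"), since the standard analyzer on original breaks that title into pentium / 3 / intel / processor. A spaced query should be picked up by original instead, which is why both fields are queried without boosting. For example, the same search with a space:

GET /searches/_search
{
  "query": {
    "multi_match": {
      "query": "pentium 3",
      "fields": ["term.autocomplete", "original"],
      "operator": "and"
    }
  }
}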