After performing a large migration, I realized that our edge_ngram tokenizer's maximum token size (max_gram) is too small. Am I able to increase it without reindexing the entire index? Also, how does increasing the max size impact performance and hardware requirements? The current index settings are below, followed by a sketch of the change I was hoping to apply.
"settings": {
"index": {
"number_of_shards": "4",
"provided_name": "568fe5d50965ef83899c5ca6-breadcrumbs_v1",
"creation_date": "1522082871152",
"analysis": {
"analyzer": {
"edgegram_analyzer": {
"type": "custom",
"tokenizer": "edgegram_tokenizer"
}
},
"tokenizer": {
"edgegram_tokenizer": {
"type": "edge_ngram",
"min_gram": "3",
"max_gram": "10"
}
}
},
"number_of_replicas": "1",
"uuid": "nVVehVmMTum7FpH_XPjWaw",
"version": {
"created": "6020399"
}
}
}
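For context, this is roughly the update I was planning to try, assuming analysis settings can only be changed while the index is closed. The new max_gram of 20 is just a placeholder value, and I'm not sure whether documents indexed before the change would pick up the longer grams without a reindex (hence the question):

# close the index first, since analysis settings can't be updated on an open index
POST /568fe5d50965ef83899c5ca6-breadcrumbs_v1/_close

# update only the existing tokenizer definition, raising max_gram
PUT /568fe5d50965ef83899c5ca6-breadcrumbs_v1/_settings
{
  "analysis": {
    "tokenizer": {
      "edgegram_tokenizer": {
        "type": "edge_ngram",
        "min_gram": "3",
        "max_gram": "20"
      }
    }
  }
}

# reopen the index so it can serve searches again
POST /568fe5d50965ef83899c5ca6-breadcrumbs_v1/_open

I would then run a quick check with the _analyze API against edgegram_tokenizer to confirm that longer tokens are produced, before deciding whether a full reindex is still unavoidable.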