Hello, I have a problem: I can't get Elasticsearch to index special characters. I'm trying to do it like this:
PUT my-index-000001
{
  "settings": {
    "index": {
      "max_result_window": 100000,
      "number_of_replicas": 0
    },
    "analysis": {
      "filter": {
        "autocomplete_filter": {
          "type": "edge_ngram",
          "min_gram": 1,
          "max_gram": 35
        }
      },
      "analyzer": {
        "autocomplete": {
          "type": "custom",
          "tokenizer": "my_tokenizer",
          "filter": [
            "lowercase",
            "autocomplete_filter"
          ]
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "edge_ngram",
          "min_gram": 1,
          "max_gram": 35,
          "tokenize_on_chars": [
            "%"
          ]
        }
      }
    }
  },
  "mappings": {
    "properties": {}
  }
}
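
As a sanity check, I assume the analyzer itself can be exercised directly with the _analyze API (I have not verified which tokens it actually emits for the % character):

GET my-index-000001/_analyze
{
  "analyzer": "autocomplete",
  "text": "kimchy % D"
}

Then I index a document: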
POST my-index-000001/_doc/
{
  "@timestamp": "2099-11-15T13:12:00",
  "message": "GET /search HTTP/1.1 200 1070000",
  "user": {
    "id": "kimchy % D"
  }
}
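
A plain match_all search should at least confirm the document was indexed (assuming the index has refreshed since the POST):

GET my-index-000001/_search
{
  "query": {
    "match_all": {}
  }
}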
But the following search request returns nothing:
GET my-index-000001/_search
{
  "size": 10000,
  "query": {
    "term": {
      "user.id": "%"
    }
  }
}
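
I would also expect (though I have not confirmed it) that an analyzed match query on the same field behaves no better here:

GET my-index-000001/_search
{
  "query": {
    "match": {
      "user.id": "%"
    }
  }
}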
Could you please help me?