Getting a "Moving to ERROR step" error during rollover from an alias when the "max_age" parameter is met
Steps to reproduce
- Create an Index Template
{
"name": "temp-test_idx_template",
"index_template": {
"index_patterns": [
"temp-test*"
],
"template": {
"settings": {
"index": {
"lifecycle": {
"name": "temp-test_policy"
},
"analysis": {
"analyzer": {
"domain_name_analyzer": {
"filter": "lowercase",
"type": "custom",
"tokenizer": "domain_name_tokenizer"
}
},
"tokenizer": {
"domain_name_tokenizer": {
"type": "char_group",
"tokenize_on_chars": [
".",
"$"
]
}
}
},
"number_of_shards": "1",
"number_of_replicas": "1"
}
},
"mappings": {
"properties": {
"field_1": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"field_2": {
"type": "text",
"fields": {
"keyword": {
"type": "keyword"
}
}
}
}
}
},
"composed_of": []
}
}
- The ILM policy "temp-test_policy" (GET _ilm/policy/temp-test_policy)
{
"temp-test_policy": {
"version": 12,
"modified_date": "2023-06-16T12:37:31.561Z",
"policy": {
"phases": {
"hot": {
"min_age": "0ms",
"actions": {
"set_priority": {
"priority": 26
},
"rollover": {
"max_primary_shard_size": "50gb",
"max_age": "1m"
}
}
},
"delete": {
"min_age": "10h",
"actions": {
"delete": {
"delete_searchable_snapshot": true
}
}
}
}
},
"in_use_by": {
"indices": [
"temp-test-000001",
"temp-test-000002"
],
"data_streams": [],
"composable_templates": [
"temp-test_idx_template"
]
}
}
}
- Create the bootstrap index with the write alias
PUT temp-test-000001
{
"settings": {
"index.lifecycle.name": "temp-test_policy",
"index.lifecycle.rollover_alias": "temp-test"
},
"aliases": {
"temp-test":{
"is_write_index": true
}
}
}
- Ingest some sample data
- After ingestion, the ILM "max_age" condition was met and a new index "temp-test-000002" was created by rollover. But I started receiving ERRORs in the logs
{"@timestamp":"2023-06-16T14:01:52.471Z", "log.level":"ERROR", "message":"policy [temp-test_policy] for index [temp-test-000002] failed on step [{\"phase\":\"hot\",\"action\":\"rollover\",\"name\":\"check-rollover-ready\"}]. Moving to ERROR step", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"elasticsearch[elasticsearch-master-2][trigger_engine_scheduler][T#1]","log.logger":"org.elasticsearch.xpack.ilm.IndexLifecycleRunner","elasticsearch.cluster.uuid":"ibThozLaQAmiKpvog933SA","elasticsearch.node.id":"rw4LuAM8S-2uuETb14g0hw","elasticsearch.node.name":"elasticsearch-master-2","elasticsearch.cluster.name":"elasticsearch","error.type":"java.lang.IllegalArgumentException","error.message":"setting [index.lifecycle.rollover_alias] for index [temp-test-000002] is empty or not defined","error.stack_trace":"java.lang.IllegalArgumentException: setting [index.lifecycle.rollover_alias] for index [temp-test-000002] is empty or not defined\n\tat org.elasticsearch.xcore@8.7.0/org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep.evaluateCondition(WaitForRolloverReadyStep.java:107)\n\tat org.elasticsearch.ilm@8.7.0/org.elasticsearch.xpack.ilm.IndexLifecycleRunner.runPeriodicStep(IndexLifecycleRunner.java:233)\n\tat org.elasticsearch.ilm@8.7.0/org.elasticsearch.xpack.ilm.IndexLifecycleService.triggerPolicies(IndexLifecycleService.java:427)\n\tat org.elasticsearch.ilm@8.7.0/org.elasticsearch.xpack.ilm.IndexLifecycleService.triggered(IndexLifecycleService.java:355)\n\tat org.elasticsearch.xcore@8.7.0/org.elasticsearch.xpack.core.scheduler.SchedulerEngine.notifyListeners(SchedulerEngine.java:185)\n\tat org.elasticsearch.xcore@8.7.0/org.elasticsearch.xpack.core.scheduler.SchedulerEngine$ActiveSchedule.run(SchedulerEngine.java:219)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:577)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317)\n\tat 
java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)\n\tat java.base/java.lang.Thread.run(Thread.java:1589)\n"}
{"@timestamp":"2023-06-16T14:11:52.470Z", "log.level": "INFO", "message":"policy [temp-test_policy] for index [temp-test-000002] on an error step due to a transient error, moving back to the failed step [check-rollover-ready] for execution. retry attempt [32]", "ecs.version": "1.2.0","service.name":"ES_ECS","event.dataset":"elasticsearch.server","process.thread.name":"elasticsearch[elasticsearch-master-2][trigger_engine_scheduler][T#1]","log.logger":"org.elasticsearch.xpack.ilm.IndexLifecycleRunner","elasticsearch.cluster.uuid":"ibThozLaQAmiKpvog933SA","elasticsearch.node.id":"rw4LuAM8S-2uuETb14g0hw","elasticsearch.node.name":"elasticsearch-master-2","elasticsearch.cluster.name":"elasticsearch"}
- Now, verify the settings of temp-test-000001 and temp-test-000002. After verifying, I see that "rollover_alias": "temp-test" is missing for temp-test-000002
GET temp-test-000001/_settings
{
"temp-test-000001": {
"settings": {
"index": {
"lifecycle": {
"name": "temp-test_policy",
"rollover_alias": "temp-test",
"indexing_complete": "true"
},
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_content"
}
}
},
"number_of_shards": "1",
"provided_name": "temp-test-000001",
"creation_date": "1686919464827",
"analysis": {
"analyzer": {
"domain_name_analyzer": {
"filter": "lowercase",
"type": "custom",
"tokenizer": "domain_name_tokenizer"
}
},
"tokenizer": {
"domain_name_tokenizer": {
"type": "char_group",
"tokenize_on_chars": [
".",
"$"
]
}
}
},
"priority": "26",
"number_of_replicas": "1",
"uuid": "t_AJ7kEiSG6dzBxR-ckzPQ",
"version": {
"created": "8070099"
}
}
}
}
}
GET temp-test-000002/_settings
{
"temp-test-000002": {
"settings": {
"index": {
"lifecycle": {
"name": "temp-test_policy"
},
"routing": {
"allocation": {
"include": {
"_tier_preference": "data_content"
}
}
},
"number_of_shards": "1",
"provided_name": "temp-test-000002",
"creation_date": "1686922884639",
"analysis": {
"analyzer": {
"domain_name_analyzer": {
"filter": "lowercase",
"type": "custom",
"tokenizer": "domain_name_tokenizer"
}
},
"tokenizer": {
"domain_name_tokenizer": {
"type": "char_group",
"tokenize_on_chars": [
".",
"$"
]
}
}
},
"priority": "26",
"number_of_replicas": "1",
"uuid": "oN7F7GeDSLaxUNfGTizirg",
"version": {
"created": "8070099"
}
}
}
}
}
Right now, I am ending up creating duplicate index templates as a workaround.
Please help me out with your valuable inputs. Thank you