Hi Martijn,
After upgrading to version 1.0.0 and deleting all of the old watcher indices, the thread pool error is gone.
Now we're facing another exception with the watch below. The error repeats every minute.
Error:
failed to execute action [node_event/send_email]
org.elasticsearch.common.mustache.MustacheException: Index: 0, Size: 0@[query-template:1]
at org.elasticsearch.common.mustache.codes.DefaultCode.get(DefaultCode.java:107)
at org.elasticsearch.common.mustache.codes.ValueCode.execute(ValueCode.java:55)
at org.elasticsearch.common.mustache.codes.DefaultMustache.run(DefaultMustache.java:30)
at org.elasticsearch.common.mustache.codes.DefaultCode.execute(DefaultCode.java:126)
at org.elasticsearch.common.mustache.codes.DefaultCode.execute(DefaultCode.java:115)
at org.elasticsearch.watcher.support.template.xmustache.XMustacheScriptEngineService$MustacheExecutableScript.run(XMustacheScriptEngineService.java:183)
at org.elasticsearch.watcher.support.template.xmustache.XMustacheTemplateEngine.render(XMustacheTemplateEngine.java:51)
at org.elasticsearch.watcher.actions.email.service.EmailTemplate.render(EmailTemplate.java:121)
at org.elasticsearch.watcher.actions.email.ExecutableEmailAction.execute(ExecutableEmailAction.java:60)
at org.elasticsearch.watcher.actions.ActionWrapper.execute(ActionWrapper.java:104)
at org.elasticsearch.watcher.execution.ExecutionService.executeInner(ExecutionService.java:379)
at org.elasticsearch.watcher.execution.ExecutionService.execute(ExecutionService.java:271)
at org.elasticsearch.watcher.execution.ExecutionService$WatchExecutionTask.run(ExecutionService.java:417)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IndexOutOfBoundsException: Index: 0, Size: 0
at java.util.ArrayList.rangeCheck(ArrayList.java:653)
at java.util.ArrayList.get(ArrayList.java:429)
at org.elasticsearch.common.collect.Iterables.get(Iterables.java:728)
at org.elasticsearch.watcher.support.template.xmustache.XMustacheFactory$CollectionMap.get(XMustacheFactory.java:137)
at org.elasticsearch.watcher.support.template.xmustache.XMustacheFactory$CollectionMap.containsKey(XMustacheFactory.java:146)
at org.elasticsearch.common.mustache.reflect.ReflectionObjectHandler.findWrapper(ReflectionObjectHandler.java:125)
at org.elasticsearch.common.mustache.reflect.ReflectionObjectHandler.find(ReflectionObjectHandler.java:72)
at org.elasticsearch.common.mustache.reflect.GuardedBinding.getWrapper(GuardedBinding.java:94)
at org.elasticsearch.common.mustache.reflect.GuardedBinding.createAndGet(GuardedBinding.java:79)
at org.elasticsearch.common.mustache.reflect.GuardedBinding.get(GuardedBinding.java:74)
at org.elasticsearch.common.mustache.codes.DefaultCode.get(DefaultCode.java:105)
... 15 more
Output from the stats API:
GET /_watcher/stats?pretty
{
  "watcher_state": "started",
  "watch_count": 6,
  "execution_thread_pool": {
    "queue_size": 0,
    "max_size": 40
  }
}
Ack call and its output:
PUT _watcher/watch/node_event/execute/_ack
{
  "_status": {
    "last_checked": "2015-07-14T23:06:43.406Z",
    "last_met_condition": "2015-07-14T23:06:43.406Z",
    "actions": {
      "send_email": {
        "ack": {
          "timestamp": "2015-07-14T23:05:43.039Z",
          "state": "awaits_successful_execution"
        },
        "last_execution": {
          "timestamp": "2015-07-14T23:06:43.406Z",
          "successful": false,
          "reason": "MustacheException[Index: 0, Size: 0@[query-template:1]]; nested: IndexOutOfBoundsException[Index: 0, Size: 0]; "
        }
      }
    }
  }
}
The watch is taken from the Elasticsearch Watcher use-case example:
PUT _watcher/watch/node_event
{
  "trigger": {
    "schedule": {
      "interval": "1m"
    }
  },
  "input": {
    "search": {
      "request": {
        "indices": [
          ".marvel-*"
        ],
        "search_type": "query_then_fetch",
        "body": {
          "query": {
            "filtered": {
              "query": {
                "bool": {
                  "should": [
                    {
                      "match": {
                        "event": "node_left"
                      }
                    },
                    {
                      "match": {
                        "event": "node_joined"
                      }
                    }
                  ]
                }
              },
              "filter": {
                "range": {
                  "@timestamp": {
                    "from": "{{ctx.trigger.scheduled_time}}||-60s",
                    "to": "{{ctx.trigger.triggered_time}}"
                  }
                }
              }
            }
          },
          "fields": [
            "event",
            "message",
            "cluster_name"
          ],
          "sort": [
            {
              "@timestamp": {
                "order": "desc"
              }
            }
          ]
        }
      }
    }
  },
  "throttle_period": "1m",
  "condition": {
    "script": {
      "inline": "ctx.payload.hits.size() > 0 "
    }
  },
  "actions": {
    "send_email": {
      "email": {
        "to": "team@abc.com",
        "subject": "{{ctx.payload.hits.hits.0.fields.event}} the cluster",
        "body": "DEMO CLUSTER {{ctx.payload.hits.hits.0.fields.message}} the cluster {{ctx.payload.hits.hits.0.fields.cluster_name}} "
      }
    }
  }
}
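If it would help, we can also run the watch on demand so the rendered email and the failure show up in a single response. The request below is only a sketch: I'm assuming the Watcher 1.0.0 Execute Watch API is reachable at _execute and accepts the ignore_condition and record_execution options; please correct me if the syntax differs in this release.
POST _watcher/watch/node_event/_execute
{
  "ignore_condition": true,
  "record_execution": false
}
With the condition ignored, the send_email action should try to render even when the search returns no hits, so the response should contain the same MustacheException together with the payload the template was rendered against.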
I hope the information above helps debug the issue.
Thanks,
Sumit