Hi elastic community,
I want to send an alert message to Telegram when the ML job detects anomalies — for example, values 2 times higher or lower than normal.
Currently I have created an ML job and run it at a regular frequency.
Then I use a watcher to search the ML job results and send a message to Telegram.
It works fine when I run a simulation, but when the watcher job actually executes, I get:
"type": "s_s_l_handshake_exception",
"reason": "Received fatal alert: handshake_failure"
What could be causing this?
Please forgive my broken English.
my environment is:
Docker version 18.06.0-ce, build 0ffa825
docker image: elasticsearch:7.0.0
my watcher config:
{
"trigger": {
"schedule": {
"interval": "1m"
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
".ml-anomalies-.write-ml4"
],
"rest_total_hits_as_int": true,
"body": {
"size": 0,
"query": {
"bool": {
"filter": {
"range": {
"timestamp": {
"gte": "{{ctx.trigger.scheduled_time}}||-5m",
"lte": "{{ctx.trigger.scheduled_time}}",
"format": "strict_date_optional_time||epoch_millis"
}
}
}
}
}
}
}
}
},
"condition": {
"script": {
"source": "if (ctx.payload.hits.total > params.threshold) { return true; } return false;",
"lang": "painless",
"params": {
"threshold": 10
}
}
},
"actions": {
"my_webhook": {
"webhook": {
"scheme": "https",
"host": "api.telegram.org",
"port": 443,
"method": "get",
"path": "/bot*********:***********************************/sendMessage",
"params": {
"text": "ML schedule is higher then threshold {{ctx.payload.hits.total}}, threshold is 10 (this is text message)",
"chat_id": "-*********"
},
"headers": {},
"proxy": {
"host": "10.33.66.222",
"port": 3128
}
}
}
}
}
Simulation Results
"watch_id": "_inlined_",
"node": "57m4ePiOR1W2iQda63yBAg",
"state": "executed",
"status": {
"state": {
"active": true,
"timestamp": "2020-04-21T09:31:54.554Z"
},
"last_checked": "2020-04-21T09:31:54.554Z",
"last_met_condition": "2020-04-21T09:31:54.554Z",
"actions": {
"my_webhook": {
"ack": {
"timestamp": "2020-04-21T09:31:54.554Z",
"state": "awaits_successful_execution"
},
"last_execution": {
"timestamp": "2020-04-21T09:31:54.554Z",
"successful": false,
"reason": ""
}
}
},
"execution_state": "executed",
"version": -1
},
"trigger_event": {
"type": "manual",
"triggered_time": "2020-04-21T09:31:54.554Z",
"manual": {
"schedule": {
"scheduled_time": "2020-04-21T09:31:54.554Z"
}
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
".ml-anomalies-.write-ml4"
],
"rest_total_hits_as_int": true,
"body": {
"size": 0,
"query": {
"bool": {
"filter": {
"range": {
"timestamp": {
"gte": "{{ctx.trigger.scheduled_time}}||-5m",
"lte": "{{ctx.trigger.scheduled_time}}",
"format": "strict_date_optional_time||epoch_millis"
}
}
}
}
}
}
}
}
},
"condition": {
"script": {
"source": "if (ctx.payload.hits.total > params.threshold) { return true; } return false;",
"lang": "painless",
"params": {
"threshold": 10
}
}
},
"metadata": {
"name": "getML_toTelegram",
"xpack": {
"type": "json"
}
},
"result": {
"execution_time": "2020-04-21T09:31:54.554Z",
"execution_duration": 489,
"input": {
"type": "search",
"status": "success",
"payload": {
"_shards": {
"total": 1,
"failed": 0,
"successful": 1,
"skipped": 0
},
"hits": {
"hits": [],
"total": 22,
"max_score": null
},
"took": 0,
"timed_out": false
},
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
".ml-anomalies-.write-ml4"
],
"rest_total_hits_as_int": true,
"body": {
"size": 0,
"query": {
"bool": {
"filter": {
"range": {
"timestamp": {
"gte": "2020-04-21T09:31:54.55457Z||-5m",
"lte": "2020-04-21T09:31:54.55457Z",
"format": "strict_date_optional_time||epoch_millis"
}
}
}
}
}
}
}
}
},
"condition": {
"type": "script",
"status": "success",
"met": true
},
"actions": [
{
"id": "my_webhook",
"type": "webhook",
"status": "failure",
"error": {
"root_cause": [
{
"type": "s_s_l_handshake_exception",
"reason": "Received fatal alert: handshake_failure"
}
],
"type": "s_s_l_handshake_exception",
"reason": "Received fatal alert: handshake_failure"
}
}
]
},
"messages": []
}