You could use two filter
aggregations to get the number of successes and the number of attempts. Using a bucket_script
aggregation, you can then calculate the ratio between these two numbers.
So, given this index definition:
PUT test
{
  "settings": {
    "number_of_shards": 1
  },
  "mappings": {
    "doc": {
      "properties": {
        "name": {
          "type": "keyword"
        },
        "timestamp": {
          "type": "date"
        }
      }
    }
  }
}
And these documents:
PUT test/doc/1
{
  "name": "attempt",
  "timestamp": "2017-08-31T13:15:30Z"
}

PUT test/doc/2
{
  "name": "Failure",
  "timestamp": "2017-08-31T13:16:30Z"
}

PUT test/doc/3
{
  "name": "attempt",
  "timestamp": "2017-08-31T13:17:30Z"
}

PUT test/doc/4
{
  "name": "Failure",
  "timestamp": "2017-08-31T13:18:30Z"
}

PUT test/doc/5
{
  "name": "attempt",
  "timestamp": "2017-08-31T13:19:30Z"
}

PUT test/doc/6
{
  "name": "Success",
  "timestamp": "2017-08-31T13:20:30Z"
}
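If you index these documents and run the search right away, they may not be visible to the search yet. Refreshing the index first makes sure they are (this is a standard Elasticsearch request, not part of the original example):

POST test/_refresh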
You could use this aggregation to get the hourly success rate:
GET test/_search
{
  "size": 0,
  "aggs": {
    "all": {
      "date_histogram": {
        "field": "timestamp",
        "interval": "hour"
      },
      "aggs": {
        "attempts": {
          "filter": {
            "term": {
              "name": "attempt"
            }
          }
        },
        "successes": {
          "filter": {
            "term": {
              "name": "Success"
            }
          }
        },
        "success_rate": {
          "bucket_script": {
            "buckets_path": {
              "attempts": "attempts._count",
              "successes": "successes._count"
            },
            "script": "params.successes / params.attempts"
          }
        }
      }
    }
  }
}
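If an hourly bucket could contain no attempts at all, the division above would not produce a useful number. As a sketch, you could guard the script with a ternary and fall back to 0; the guard and the fallback value are an assumption on my part, not something this particular data set needs:

"success_rate": {
  "bucket_script": {
    "buckets_path": {
      "attempts": "attempts._count",
      "successes": "successes._count"
    },
    "script": "params.attempts > 0 ? params.successes / params.attempts : 0"
  }
}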
This tells you that the success rate in our one hour of data is 0.33 (33%):
buckets": [
{
"key_as_string": "2017-08-31T13:00:00.000Z",
"key": 1504184400000,
"doc_count": 6,
"successes": {
"doc_count": 1
},
"attempts": {
"doc_count": 3
},
"succesrate": {
"value": 0.3333333333333333
}
}
]
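If you would rather see the rate as a percentage string in the response, bucket_script also accepts an optional format parameter (a Java DecimalFormat pattern), which adds a value_as_string next to the value. The pattern below is just an illustration, not part of the original answer:

"script": "params.successes / params.attempts",
"format": "#.##%"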