OK, I made a very basic data set so you can understand better.
Here is the data that I imported with Filebeat (fields: date, EventCode, Version):
2017/10/03 07:07:02,APP_VERSION,v1
2017/10/03 07:08:02,APP_VERSION,v1
2017/10/03 07:11:02,APP_VERSION,v2
2017/10/03 07:13:02,APP_CRASH,v1
2017/10/03 07:15:02,APP_CRASH,v2
2017/10/04 07:00:02,APP_VERSION,v1
2017/10/04 07:01:02,APP_VERSION,v1
2017/10/04 07:02:02,APP_VERSION,v2
2017/10/04 07:03:02,APP_VERSION,v2
2017/10/04 07:04:02,APP_CRASH,v1
2017/10/04 07:05:02,APP_CRASH,v1
2017/10/04 07:06:02,APP_CRASH,v2
2 days of data.
On the 3rd: 2 instances of v1 with 1 crash (so 50%), and 1 instance of v2 with 1 crash (so 100%).
On the 4th: 2 instances of v1 with 2 crashes (so 100%), and 2 instances of v2 with 1 crash (so 50%).
This is the ES query that works:
POST filebeat-*/_search
{
  "size": 0,
  "aggs": {
    "per_day": {
      "date_histogram": {
        "field": "@timestamp",
        "interval": "day"
      },
      "aggs": {
        "per_Version": {
          "terms": {
            "field": "Version.keyword"
          },
          "aggs": {
            "crash": {
              "filter": {
                "term": { "EventCode.keyword": "APP_CRASH" }
              },
              "aggs": {
                "nb_crash": {
                  "value_count": { "field": "EventCode.keyword" }
                }
              }
            },
            "app_deployed": {
              "filter": {
                "term": { "EventCode.keyword": "APP_VERSION" }
              },
              "aggs": {
                "nb_app": {
                  "value_count": { "field": "EventCode.keyword" }
                }
              }
            },
            "percentage": {
              "bucket_script": {
                "buckets_path": {
                  "b_crash": "crash>nb_crash",
                  "b_app_deployed": "app_deployed>nb_app"
                },
                "script": "params.b_app_deployed > 0 ? (params.b_crash / params.b_app_deployed) * 100 : 0"
              }
            }
          }
        }
      }
    }
  }
}
and the output that I would like to see as a chart:
{
  "took": 12,
  "timed_out": false,
  "_shards": {
    "total": 10,
    "successful": 10,
    "skipped": 0,
    "failed": 0
  },
  "hits": {
    "total": 12,
    "max_score": 0,
    "hits": []
  },
  "aggregations": {
    "per_day": {
      "buckets": [
        {
          "key_as_string": "2017-10-03T00:00:00.000Z",
          "key": 1506988800000,
          "doc_count": 5,
          "per_Version": {
            "doc_count_error_upper_bound": 0,
            "sum_other_doc_count": 0,
            "buckets": [
              {
                "key": "v1",
                "doc_count": 3,
                "app_deployed": {
                  "doc_count": 2,
                  "nb_app": { "value": 2 }
                },
                "crash": {
                  "doc_count": 1,
                  "nb_crash": { "value": 1 }
                },
                "percentage": { "value": 50 }
              },
              {
                "key": "v2",
                "doc_count": 2,
                "app_deployed": {
                  "doc_count": 1,
                  "nb_app": { "value": 1 }
                },
                "crash": {
                  "doc_count": 1,
                  "nb_crash": { "value": 1 }
                },
                "percentage": { "value": 100 }
              }
            ]
          }
        },
        {
          "key_as_string": "2017-10-04T00:00:00.000Z",
          "key": 1507075200000,
          "doc_count": 7,
          "per_Version": {
            "doc_count_error_upper_bound": 0,
            "sum_other_doc_count": 0,
            "buckets": [
              {
                "key": "v1",
                "doc_count": 4,
                "app_deployed": {
                  "doc_count": 2,
                  "nb_app": { "value": 2 }
                },
                "crash": {
                  "doc_count": 2,
                  "nb_crash": { "value": 2 }
                },
                "percentage": { "value": 100 }
              },
              {
                "key": "v2",
                "doc_count": 3,
                "app_deployed": {
                  "doc_count": 2,
                  "nb_app": { "value": 2 }
                },
                "crash": {
                  "doc_count": 1,
                  "nb_crash": { "value": 1 }
                },
                "percentage": { "value": 50 }
              }
            ]
          }
        }
      ]
    }
  }
}
and the logstash config:
# Split the CSV line "date,EventCode,Version" into three fields.
grok {
  match => { "message" => "(?<Time>[^,]*),(?<EventCode>[^,]*),(?<Version>[^,]*)" }
}
# Parse the Time field into @timestamp.
# NOTE: the pattern must use "yyyy" (calendar year), not "YYYY" — in
# Joda-Time "YYYY" is the ISO week-year, which assigns dates in the
# last/first week of a year (e.g. Dec 31) to the wrong year.
date {
  locale => "fr"
  match => ["Time", "yyyy/MM/dd HH:mm:ss"]
  timezone => "Europe/Vienna"
  target => "@timestamp"
  add_field => { "debug" => "timestampMatched" }
}
thanks for your help,
Rod