Simple aggregation on a field for more than one value

Basically, instead of getting a doc_count bucket for every unique value of the field, I just want two keys: "passed" and "failed". The failed key's doc_count should equal the sum of the doc_counts for the values "high" and "critical", and the passed key's doc_count should equal the sum of the doc_counts for the values "-", "medium", and "low".

There are only 5 possible values for the data.vulnerability.severity field:

  1. high
  2. medium
  3. low
  4. critical
  5. -

I want to see how many documents contain the value high or critical, grouped under the key failed, and how many documents contain the value medium, low, or -, grouped under the key passed.
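I am wondering whether a filters aggregation could do this, with the two buckets defined explicitly in place of the terms sub-aggregation. Here is a rough, untested sketch (the query/filter section would stay the same as in my current query below; note that the stored values in the results further below are capitalized, e.g. "High" and "Medium", so the terms queries would need to match that exact casing, and a filters aggregation returns its buckets as a keyed object rather than an array):

POST /index/_search
{
   "size":0,
   "aggs":{
      "2":{
         "date_histogram":{
            "field":"timestamp",
            "fixed_interval":"30m",
            "time_zone":"Asia/Karachi",
            "min_doc_count":1
         },
         "aggs":{
            "3":{
               "filters":{
                  "filters":{
                     "failed":{
                        "terms":{
                           "data.vulnerability.severity":[ "High", "Critical" ]
                        }
                     },
                     "passed":{
                        "terms":{
                           "data.vulnerability.severity":[ "-", "Medium", "Low" ]
                        }
                     }
                  }
               }
            }
         }
      }
   }
}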

For context, this is the query I currently have:

POST /index/_search
{
   "aggs":{
      "2":{
         "date_histogram":{
            "field":"timestamp",
            "fixed_interval":"30m",
            "time_zone":"Asia/Karachi",
            "min_doc_count":1
         },
         "aggs":{
            "3":{
               "terms":{
                  "field":"data.vulnerability.severity",
                  "order":{
                     "_count":"desc"
                  },
                  "size":5
               }
            }
         }
      }
   },
   "size":0,
   "stored_fields":[
      "*"
   ],
   "script_fields":{

   },
   "docvalue_fields":[
      {
         "field":"timestamp",
         "format":"date_time"
      },
      {
         "field":"data.aws.createdAt",
         "format":"date_time"
      },
      {
         "field":"data.aws.end",
         "format":"date_time"
      },
      {
         "field":"data.aws.resource.instanceDetails.launchTime",
         "format":"date_time"
      },
      {
         "field":"data.aws.service.eventFirstSeen",
         "format":"date_time"
      },
      {
         "field":"data.aws.service.eventLastSeen",
         "format":"date_time"
      },
      {
         "field":"data.aws.start",
         "format":"date_time"
      },
      {
         "field":"data.aws.updatedAt",
         "format":"date_time"
      },
      {
         "field":"data.vulnerability.published",
         "format":"date_time"
      },
      {
         "field":"syscheck.mtime_after",
         "format":"date_time"
      },
      {
         "field":"syscheck.mtime_before",
         "format":"date_time"
      }
   ],
   "_source":{
      "excludes":[
         "@timestamp"
      ]
   },
   "query":{
      "bool":{
         "must":[
            {
               "match_all":{

               }
            }
         ],
         "filter":[
            {
               "match_all":{

               }
            },
            {
               "match_phrase":{
                  "manager.name":{
                     "query":"localhost.localdomain"
                  }
               }
            },
            {
               "match_phrase":{
                  "rule.groups":{
                     "query":"vulnerability-detector"
                  }
               }
            },
            {
               "range":{
                  "timestamp":{
                     "gte":"2020-05-02T09:51:04.424Z",
                     "lte":"2020-05-03T09:51:04.424Z",
                     "format":"strict_date_optional_time"
                  }
               }
            }
         ],
         "should":[

         ],
         "must_not":[

         ]
      }
   }
}

**The result is**

{
  "took" : 5,
  "timed_out" : false,
  "_shards" : {
    "total" : 3,
    "successful" : 3,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 338,
      "relation" : "eq"
    },
    "max_score" : null,
    "hits" : [ ]
  },
  "aggregations" : {
    "2" : {
      "buckets" : [
        {
          "key_as_string" : "2020-05-02T14:30:00.000+05:00",
          "key" : 1588411800000,
          "doc_count" : 332,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "Medium",
                "doc_count" : 157
              },
              {
                "key" : "High",
                "doc_count" : 119
              },
              {
                "key" : "Low",
                "doc_count" : 56
              }
            ]
          }
        },
        {
          "key_as_string" : "2020-05-02T15:00:00.000+05:00",
          "key" : 1588413600000,
          "doc_count" : 4,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "-",
                "doc_count" : 2
              },
              {
                "key" : "Medium",
                "doc_count" : 2
              }
            ]
          }
        },
        {
          "key_as_string" : "2020-05-02T20:00:00.000+05:00",
          "key" : 1588431600000,
          "doc_count" : 2,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "-",
                "doc_count" : 2
              }
            ]
          }
        }
      ]
    }
  }
}

But I want a result like this:

{
  "took" : 5,
  "timed_out" : false,
  "_shards" : {
    "total" : 3,
    "successful" : 3,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 338,
      "relation" : "eq"
    },
    "max_score" : null,
    "hits" : [ ]
  },
  "aggregations" : {
    "2" : {
      "buckets" : [
        {
          "key_as_string" : "2020-05-02T14:30:00.000+05:00",
          "key" : 1588411800000,
          "doc_count" : 332,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "passed",
                "doc_count" : 213
              },
              {
                "key" : "failed",
                "doc_count" : 119
              }
            ]
          }
        },
        {
          "key_as_string" : "2020-05-02T15:00:00.000+05:00",
          "key" : 1588413600000,
          "doc_count" : 4,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "passed",
                "doc_count" : 2
              }
            ]
          }
        },
        {
          "key_as_string" : "2020-05-02T20:00:00.000+05:00",
          "key" : 1588431600000,
          "doc_count" : 2,
          "3" : {
            "doc_count_error_upper_bound" : 0,
            "sum_other_doc_count" : 0,
            "buckets" : [
              {
                "key" : "passed",
                "doc_count" : 2
              }
            ]
          }
        }
      ]
    }
  }
}
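Since this desired output keeps the exact shape of a terms aggregation response (including doc_count_error_upper_bound and sum_other_doc_count), another possibility might be a terms aggregation over a script that maps each document's severity to "passed" or "failed". Again only an untested sketch: it assumes data.vulnerability.severity is a keyword field with doc values present on every matching document, and because the script runs per document it will likely be slower than the filters approach sketched above:

POST /index/_search
{
   "size":0,
   "aggs":{
      "2":{
         "date_histogram":{
            "field":"timestamp",
            "fixed_interval":"30m",
            "time_zone":"Asia/Karachi",
            "min_doc_count":1
         },
         "aggs":{
            "3":{
               "terms":{
                  "script":{
                     "lang":"painless",
                     "source":"def s = doc['data.vulnerability.severity'].value; return (s == 'High' || s == 'Critical') ? 'failed' : 'passed';"
                  },
                  "order":{
                     "_count":"desc"
                  },
                  "size":2
               }
            }
         }
      }
   }
}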
