Error while adding condition to APM Error Watcher

Kibana version: v7.1.1

Elasticsearch version: v7.1.1

APM Server version:

APM Agent language and version:

Original install method (e.g. download page, yum, deb, from source, etc.) and version: Elastic Cloud 7.1.1

Is there anything special in your setup? No

Description of the problem including expected versus actual behavior. Please include screenshots (if relevant):

I want to filter out some third-party error messages that are spamming our Slack channel, but when I add a condition to filter out those errors I get an error.

How do we filter out specific errors from the watcher? This is the condition I tried to add to the error_groups terms aggregation:

 "condition": {
     "never": {
         "term": {
               "error.exception.message": "Http failure response for (unknown url): 0 Unknown Error"
           }
      }
   }
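
From what I can tell, "never" is a watch-level condition in Watcher, not a query clause, and a terms aggregation has no "condition" parameter at all, which would explain the error. A plain never condition takes an empty body and simply stops the watch's actions from ever running:

"condition": {
  "never": {}
}

So it seems the exclusion has to happen in the search input itself rather than inside the aggregation (see the sketch after the full watch below).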

Provide logs and/or server output (if relevant):

{
  "trigger": {
    "schedule": {
      "interval": "10m"
    }
  },
  "input": {
    "search": {
      "request": {
        "search_type": "query_then_fetch",
        "indices": [
          "apm-*"
        ],
        "rest_total_hits_as_int": true,
        "body": {
          "size": 0,
          "query": {
            "bool": {
              "filter": [
                {
                  "term": {
                    "service.name": "{{ctx.metadata.serviceName}}"
                  }
                },
                {
                  "term": {
                    "processor.event": "error"
                  }
                },
                {
                  "range": {
                    "@timestamp": {
                      "gte": "now-{{ctx.metadata.timeRangeValue}}{{ctx.metadata.timeRangeUnit}}"
                    }
                  }
                }
              ]
            }
          },
          "aggs": {
            "error_groups": {
              "terms": {
                "min_doc_count": "{{ctx.metadata.threshold}}",
                "field": "error.grouping_key",
                "size": 10,
                "order": {
                  "_count": "desc"
                },
               "condition": {
                  "never": {
                    "term": {
                      "error.exception.message": "Http failure response for (unknown url): 0 Unknown Error"
                    }
                  }
                }
              },
              "aggs": {
                "sample": {
                  "top_hits": {
                    "_source": [
                      "error.log.message",
                      "error.exception.message",
                      "error.exception.handled",
                      "error.culprit",
                      "error.grouping_key",
                      "@timestamp"
                    ],
                    "sort": [
                      {
                        "@timestamp": "desc"
                      }
                    ],
                    "size": 1
                  }
                }
              }
            }
          }
        }
      }
    }
  },
  "condition": {
    "script": {
      "source": "return ctx.payload.aggregations.error_groups.buckets.length > 0",
      "lang": "painless"
    }
  },
  "actions": {
    "log_error": {
      "logging": {
        "level": "info",
        "text": "Your service \"{{ctx.metadata.serviceName}}\" has error groups which exceeds {{ctx.metadata.threshold}} occurrences within \"{{ctx.metadata.timeRangeValue}}{{ctx.metadata.timeRangeUnit}}\"<br/><br/>{{#ctx.payload.aggregations.error_groups.buckets}}<br/><strong>{{sample.hits.hits.0._source.error.log.message}}{{^sample.hits.hits.0._source.error.log.message}}{{sample.hits.hits.0._source.error.exception.0.message}}{{/sample.hits.hits.0._source.error.log.message}}</strong><br/>{{sample.hits.hits.0._source.error.culprit}}{{^sample.hits.hits.0._source.error.culprit}}N/A{{/sample.hits.hits.0._source.error.culprit}}<br/>{{doc_count}} occurrences<br/>{{/ctx.payload.aggregations.error_groups.buckets}}"
      }
    },
    "slack_webhook": {
      "webhook": {
        "scheme": "https",
        "host": "hooks.slack.com",
        "port": 443,
        "method": "post",
        "path": "{{ctx.metadata.slackUrlPath}}",
        "params": {},
        "headers": {
          "Content-Type": "application/json"
        },
        "body": "__json__::{\"text\":\"Your service \\\"{{ctx.metadata.serviceName}}\\\" has error groups which exceeds {{ctx.metadata.threshold}} occurrences within \\\"{{ctx.metadata.timeRangeValue}}{{ctx.metadata.timeRangeUnit}}\\\"\\n{{#ctx.payload.aggregations.error_groups.buckets}}\\n>*{{sample.hits.hits.0._source.error.log.message}}{{^sample.hits.hits.0._source.error.log.message}}{{sample.hits.hits.0._source.error.exception.0.message}}{{/sample.hits.hits.0._source.error.log.message}}*\\n>{{#sample.hits.hits.0._source.error.culprit}}`{{sample.hits.hits.0._source.error.culprit}}`{{/sample.hits.hits.0._source.error.culprit}}{{^sample.hits.hits.0._source.error.culprit}}N/A{{/sample.hits.hits.0._source.error.culprit}}\\n>{{doc_count}} occurrences\\n{{/ctx.payload.aggregations.error_groups.buckets}}\"}"
      }
    }
  },
  "metadata": {
    "emails": [],
    "timeRangeValue": 5,
    "slackUrlPath": "/services/T03DVALCR/B012V7JUQ3V/OLepGUbDtxV6cVoBJFSsrJPi",
    "threshold": 5,
    "trigger": "This value must be changed in trigger section",
    "serviceName": "startwizard-5",
    "timeRangeUnit": "m"
  }
}
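
If that is right, one way to drop the noisy message would be to remove the "condition" block from the terms aggregation and instead add a must_not clause next to the existing filter array in input.search.request.body.query.bool. A rough sketch of that section with the exclusion added (assuming error.exception.message is mapped as text in the apm-* indices, hence match_phrase rather than term):

"query": {
  "bool": {
    "filter": [
      { "term": { "service.name": "{{ctx.metadata.serviceName}}" } },
      { "term": { "processor.event": "error" } },
      { "range": { "@timestamp": { "gte": "now-{{ctx.metadata.timeRangeValue}}{{ctx.metadata.timeRangeUnit}}" } } }
    ],
    "must_not": [
      { "match_phrase": { "error.exception.message": "Http failure response for (unknown url): 0 Unknown Error" } }
    ]
  }
}

With those documents excluded from the search, the error_groups bucket for that message should stay below the threshold, so the Slack action should no longer fire for it.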
