OpenLdap Log parsing? (strange errors)

I am having a crazy hard time trying to parse logs coming from my LDAP servers.

  • Sending data via filebeat to logstash.

And found a nice example openldap.conf file someone was working on (it works okay but i have made some changes)

and now I am hitting the same type of parsing errors as this user (post linked below)

My issue is I am still getting large amounts of log data just shoved into the message field, and it is not getting parsed correctly by my grok patterns.

Was curious if anyone in the ELK community has built a Logstash filter for OpenLDAP logs?

Attached is an example log i am having issues with

{
  "_index": "ldap-0.1-2019.10.22",
  "_type": "_doc",
  "_id": "6odc9G0B6D8KZPEhuaYL",
  "_version": 1,
  "_score": null,
  "_source": {
    "@timestamp": "2019-10-22T16:46:19.718Z",
    "type": "log",
    "source": "/var/log/misclog",
    "beat": {
      "version": "5.6.12",
      "name": "myserver",
      "hostname": "myhost"
    },
    "tags": [
      "beats_input_codec_plain_applied",
      "_grokparsefailure"
    ],
    "@version": "1",
    "input_type": "log",
    "message": "Oct 22 12:46:19 admin02.dal.sync.lan slapd[28575]: do_syncrep2: rid=005 LDAP_RES_INTERMEDIATE - NEW_COOKIE",
    "fields": {
      "index_name": "ldap_log_ship"
    },
    "offset": 90126486
  },
  "fields": {
    "@timestamp": [
      "2019-10-22T16:46:19.718Z"
    ]
  },
  "sort": [
    1571762779718
  ]
}

{
  "_index": "ldap-0.1-2019.10.22",
  "_type": "_doc",
  "_id": "6odc9G0B6D8KZPEhuaYL",
  "_version": 1,
  "_score": null,
  "_source": {
    "@timestamp": "2019-10-22T16:46:19.718Z",
    "type": "log",
    "source": "/var/log/misclog",
    "beat": {
      "version": "5.6.12",
      "name": "hello.dal.sync.lan",
      "hostname": "hello.dal.sync.lan"
    },
    "tags": [
      "beats_input_codec_plain_applied",
      "_grokparsefailure"
    ],
    "@version": "1",
    "input_type": "log",
    "message": "Oct 22 12:46:19 hello.dal.sync.lan slapd[28575]: do_syncrep2: rid=005 LDAP_RES_INTERMEDIATE - NEW_COOKIE",
    "fields": {
      "index_name": "ldap_log_ship"
    },
    "offset": 90126486
  },
  "fields": {
    "@timestamp": [
      "2019-10-22T16:46:19.718Z"
    ]
  },
  "sort": [
    1571762779718
  ]
}

and my logstash .conf file

filter {
  grok {
    match => [ "message", "%{SYSLOGBASE} (?:(?:<= (?:b|m)db_%{DATA:index_error_filter_type}_candidates: \(%{WORD:index_error_attribute_name}\) not indexed)|(?:ppolicy_%{DATA:ppolicy_op}: %{DATA:ppolicy_data})|(?:connection_input: conn=%{INT:connection} deferring operation: %{DATA:deferring_op})|(?:connection_read\(%{INT:fd_number}\): no connection!)|(?:conn=%{INT:connection} (?:(?:fd=%{INT:fd_number} (?:(?:closed(?: \(connection lost\)|))|(?:ACCEPT from IP=%{IP:src_ip}\:%{INT:src_port} \(IP=%{IP:dst_ip}\:%{INT:dst_port}\))|(?:TLS established tls_ssf=%{INT:tls_ssf} ssf=%{INT:ssf})))|(?:op=%{INT:operation_number} (?:(?:(?:(?:SEARCH )|(?:))RESULT (?:tag=%{INT:tag}|oid=(?:%{DATA:oid}(?:))) err=%{INT:error_code}(?:(?: nentries=%{INT:nentries})|(?:)) text=(?:(?:%{DATA:error_text})|(?:)))|(?:%{WORD:operation_name}(?:(?: %{DATA:data})|(?:))))))))%{SPACE}$" ]
  }
  date {
    locale => "en"
    match => [ "timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601" ]
    target => "@timestamp"
  }
  if [operation_name] == "BIND" {
    grok {
      match => [ "data", "(?:(?:(?<bind_dn>anonymous))|(?:dn=\"%{DATA:bind_dn}\")) (?:(?:method=%{WORD:bind_method})|(?:mech=%{WORD:bind_mech} ssf=%{INT:bind_ssf}))%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "SRCH" {
    grok {
      match => [ "data", "(?:(?:base=\"%{DATA:search_base}\" scope=%{INT:search_scope} deref=%{INT:search_deref} filter=\"%{DATA:search_filter}\")|(?:attr=%{DATA:search_attr}))%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "MOD" {
    grok {
      match => [ "data", "(?:(?:dn=\"%{DATA:mod_dn}\")|(?:attr=%{DATA:mod_attr}))%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "MODRDN" {
    grok {
      match => [ "data", "dn=\"%{DATA:modrdn_dn}\"%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "ADD" {
    grok {
      match => [ "data", "dn=\"%{DATA:add_dn}\"%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "DEL" {
    grok {
      match => [ "data", "dn=\"%{DATA:del_dn}\"%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "CMP" {
    grok {
      match => [ "data", "dn=\"%{DATA:cmp_dn}\" attr=\"%{DATA:cmp_attr}\"%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [operation_name] == "EXT" {
    grok {
      match => [ "data", "oid=%{DATA:ext_oid}%{SPACE}$" ]
      remove_field => [ "data" ]
    }
  }
  if [ppolicy_op] == "bind" {
    grok {
      match => [ "ppolicy_data", "(?:(?:Entry %{DATA:ppolicy_bind_dn} has an expired password: %{INT:ppolicy_grace} grace logins)|(?:Setting warning for password expiry for %{DATA:ppolicy_bind_dn} = %{INT:ppolicy_expiration} seconds))%{SPACE}$" ]
      remove_field => [ "ppolicy_data" ]
    }
  }
}

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.