Hi all,
I recently posted about a problem with an ingest pipeline that works in testing (simulate) mode but ultimately does not work on real data. I still have the same issue and I'm stuck, even though I've tried many tricks (removing special characters from the syslog string, etc.).
So, I'm currently working on a bind9 log line, which is "<133>Jan 27 14:35:01 ns1 bind-logs 27-Jan-2023 14:35:01.390 queries: info: client @0x7f6ca42929b8 10.1.14.40#55168 (test.test): query: test.test IN AAAA +E(0) (10.1.17.20)"
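For reference, I test the pipeline with the simulate API in Dev Tools, roughly like this (the pipeline name below is only a placeholder for my actual pipeline name):

POST _ingest/pipeline/my-udp-dns-pipeline/_simulate
{
  "docs": [
    {
      "_source": {
        "message": "<133>Jan 27 14:35:01 ns1 bind-logs 27-Jan-2023 14:35:01.390 queries: info: client @0x7f6ca42929b8 10.1.14.40#55168 (test.test): query: test.test IN AAAA +E(0) (10.1.17.20)"
      }
    }
  ]
}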
My pipeline test result is:
{
  "docs": [
    {
      "doc": {
        "_index": ".ds-logs-udp.dns-default-2023.01.03-000001",
        "_id": "zOFw84UB-z0M9c7cU43w",
        "_version": "-3",
        "_source": {
          "input": {
            "type": "udp"
          },
          "agent": {
            "name": "rsyslog",
            "id": "79647a28-6e08-4fcb-9aad-78c8e67b3311",
            "type": "filebeat",
            "ephemeral_id": "aa0bdaa4-4759-4571-80ea-a12493ec14df",
            "version": "8.5.2"
          },
          "@timestamp": "2023-01-27T13:35:01.394Z",
          "ecs": {
            "version": "8.5.0"
          },
          "log": {
            "source": {
              "address": "127.0.0.1:45044"
            }
          },
          "data_stream": {
            "namespace": "default",
            "type": "logs",
            "dataset": "udp.dns"
          },
          "syslog5424_pri": "133",
          "elastic_agent": {
            "id": "79647a28-6e08-4fcb-9aad-78c8e67b3311",
            "version": "8.5.2",
            "snapshot": false
          },
          "syslog5424_sd": "27-Jan-2023 14:35:01.390 queries: info: client @0x7f6ca42929b8 10.1.14.40#55168 (test.test): query: test.test IN AAAA +E(0) (10.1.17.20)",
          "event": {
            "original": "<133>Jan 27 14:35:01 ns1 bind-logs 27-Jan-2023 14:35:01.390 queries: info: client @0x7f6ca42929b8 10.1.14.40#55168 (test.test): query: test.test IN AAAA +E(0) (10.1.17.20)",
            "destination": "10.1.17.20",
            "client_ip": "10.1.14.40",
            "dataset": "udp.dns",
            "query_value": "test.test",
            "record_type": "AAAA"
          },
          "tags": [
            "syslog",
            "forwarded",
            "dns"
          ]
        },
        "_ingest": {
          "timestamp": "2023-01-27T14:34:49.949673256Z"
        }
      }
    }
  ]
}
We can see that the event.destination, event.client_ip, etc. values are extracted correctly. But when I look at the indexed documents, those fields are not present.
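When I say "looking at the documents", I mean searching the data stream directly, roughly like this (the data stream name is built from the dataset and namespace shown in the simulate output, so it may need adjusting):

GET logs-udp.dns-default/_search
{
  "size": 1,
  "sort": [ { "@timestamp": "desc" } ],
  "_source": [ "event.original", "event.client_ip", "event.destination", "event.query_value", "event.record_type" ]
}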
My pipeline code is:
[
  {
    "set": {
      "field": "ecs.version",
      "value": "8.5.0"
    }
  },
  {
    "rename": {
      "field": "message",
      "target_field": "event.original",
      "ignore_missing": true
    }
  },
  {
    "grok": {
      "field": "event.original",
      "patterns": [
        "%{SYSLOG5424PRI}%{GREEDYDATA:syslog5424_sd}$"
      ]
    }
  },
  {
    "gsub": {
      "field": "syslog5424_sd",
      "pattern": ".*\\b-logs ",
      "replacement": "",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "grok": {
      "field": "syslog5424_sd",
      "patterns": [
        "%{MONTHDAY}[-]%{MONTH}[-]%{YEAR}\\s*%{TIME}\\s*%{WORD}[:]\\s*%{WORD}[:]\\s*%{WORD}\\s*%{DATA}\\s*%{IP:event.client_ip}[#]%{NUMBER}\\s*\\(%{HOSTNAME}\\)[:]\\s*query:\\s*%{HOSTNAME:event.query_value}\\s*%{WORD}\\s*%{WORD:event.record_type}\\s*%{NOTSPACE}\\s*\\(%{IP:event.destination}\\)"
      ],
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.client_data",
      "type": "string",
      "target_field": "event.original : *bind* and not event.original : *audit*",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.destination",
      "type": "ip",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.record_type",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.client_port",
      "type": "integer",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.queries",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.client",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.client_ip",
      "type": "ip",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.day",
      "type": "integer",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.info",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.query_value",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  },
  {
    "convert": {
      "field": "event.misc",
      "type": "string",
      "ignore_missing": true,
      "ignore_failure": true
    }
  }
]
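In case it is useful, the same simulate call can also be run with verbose output, which shows the result of each processor separately (the pipeline name is again only a placeholder):

POST _ingest/pipeline/my-udp-dns-pipeline/_simulate?verbose=true
{
  "docs": [
    {
      "_source": {
        "message": "<133>Jan 27 14:35:01 ns1 bind-logs 27-Jan-2023 14:35:01.390 queries: info: client @0x7f6ca42929b8 10.1.14.40#55168 (test.test): query: test.test IN AAAA +E(0) (10.1.17.20)"
      }
    }
  ]
}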
Do you have any clue, please, that could help me find a way out, or tell me where to look at the logs directly on the Elasticsearch host? I can't find the right log file.
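If I understand correctly, an ingest pipeline only runs at indexing time when the backing index points to it (directly through index.default_pipeline / index.final_pipeline, or indirectly from another pipeline), so I assume those settings can be checked like this, using the backing index name from the simulate output above:

GET .ds-logs-udp.dns-default-2023.01.03-000001/_settings?filter_path=*.settings.index.default_pipeline,*.settings.index.final_pipeline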
Thanks for your help
Pierre
