This should have been taken care of for you by default, so it's worth investigating why it didn't happen. Another thing to check is whether the System package pipelines were installed; those should also be added by default. Can you navigate back to "Stack Management > Ingest Node Pipelines", search on logs-system, and let me know what you see? There should be 5 pipelines with that prefix.
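If it's easier to check from Dev Tools, the Get Pipeline API accepts wildcard expressions, so a single request will list whatever is there:

GET _ingest/pipeline/logs-system*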
Further, check the ES logs for references to logs-endpoint.events.security-0.19.1. There may be errors from when it tried to install the pipelines and failed, which could give us more useful information for debugging.
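You can also ask Elasticsearch directly whether that specific pipeline exists; a 404 response here would confirm it was never installed:

GET _ingest/pipeline/logs-endpoint.events.security-0.19.1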
In the meantime, you can work around the Endpoint issues by manually adding the pipelines.
Navigate to "Dev Tools > Console". From there, you can set up the API calls to add the pipelines; they will look like the examples below.
You can read more about ingest pipelines and the pipeline APIs in the Elasticsearch documentation.
Assuming that none of the pipelines were installed, here is the full list (there are 8 for Endpoint). All of the pipeline bodies are identical except for logs-endpoint.events.network-0.19.1, which has additional processors for GeoIP enrichment and DNS handling.
Let me know how this works out!
PUT _ingest/pipeline/logs-endpoint.events.registry-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.alerts-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.diagnostic.collection-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.events.file-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.events.library-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.events.network-0.19.1
{
  "description": "Pipeline for network events",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    },
    {
      "geoip": {
        "field": "source.ip",
        "target_field": "source.geo",
        "properties": [
          "continent_name",
          "country_name",
          "country_iso_code",
          "region_iso_code",
          "region_name",
          "city_name",
          "location"
        ],
        "ignore_missing": true
      }
    },
    {
      "geoip": {
        "field": "destination.ip",
        "target_field": "destination.geo",
        "properties": [
          "continent_name",
          "country_name",
          "country_iso_code",
          "region_iso_code",
          "region_name",
          "city_name",
          "location"
        ],
        "ignore_missing": true
      }
    },
    {
      "grok": {
        "if": "ctx.network?.protocol == 'dns'",
        "ignore_missing": true,
        "ignore_failure": true,
        "field": "message",
        "patterns": [
          "^DNS query is completed for the name .* type %{WORD:dns.question.Ext_temp.type}"
        ]
      }
    },
    {
      "script": {
        "ignore_failure": true,
        "if": "ctx.network?.protocol == 'dns' && ctx.dns?.question?.Ext_temp?.type != null && ctx.dns?.question?.type == null",
"source": "Map typeMap = ['1': 'A', '2': 'NS', '3': 'MD', '4': 'MF', '5': 'CNAME', '6': 'SOA', '7': 'MB', '8': 'MG', '9': 'MR',\n '10': 'NULL', '11': 'WKS', '12': 'PTR', '13': 'HINFO', '14': 'MINFO', '15': 'MX', '16': 'TXT', '17': 'RP', '18': 'AFSDB',\n '19': 'X25', '20': 'ISDN', '21': 'RT', '22': 'NSAP', '23': 'NSAPPTR', '24': 'SIG', '25': 'KEY', '26': 'PX',\n '27': 'GPOS', '28': 'AAAA', '29': 'LOC', '30': 'NXT', '31': 'EID', '32': 'NIMLOC', '33': 'SRV', '34': 'ATMA',\n '35': 'NAPTR', '36': 'KX', '37': 'CERT', '38': 'A6', '39': 'DNAME', '40': 'SINK', '41': 'OPT', '42': 'APL',\n '43': 'DS', '44': 'SSHFP', '45': 'ISECKEY', '46': 'RRSIG', '47': 'NSEC', '48': 'DNSKEY', '49': 'DHCID',\n '50': 'NSEC3', '51': 'NSEC3PARAM', '52': 'TLSA', '53': 'SMIMEA', '55': 'HIP', '56': 'NINFO', '57': 'RKEY',\n '58': 'TALINK', '59': 'CDS', '60': 'CDNSKEY', '61': 'OPENPGPKEY', '62': 'CSYNC', '63': 'ZONEMD', '64': 'SVCB',\n '65': 'HTTPS', '99': 'SPF', '100': 'UINFO', '101': 'UID', '102': 'GID', '103': 'UNSPEC', '104': 'NID',\n '105': 'L32', '106': 'L64', '107': 'LP', '108': 'EUI48', '109': 'EUI64', '249': 'TKEY', '250': 'TSIG',\n '251': 'IXFR', '252': 'AXFR', '253': 'MAILB', '254': 'MAILA', '255': 'ANY','256': 'URI', '257': 'CAA',\n '258': 'AVC'];\ndef type = typeMap[ctx.dns.question.Ext_temp.type]; if (type != null) {\n ctx.dns.question.type = type;\n}"
      }
    },
    {
      "remove": {
        "if": "ctx.network?.protocol == 'dns'",
        "ignore_failure": true,
        "ignore_missing": true,
        "field": "dns.question.Ext_temp"
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.events.process-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
PUT _ingest/pipeline/logs-endpoint.events.security-0.19.1
{
  "description": "Pipeline for setting event.ingested",
  "processors": [
    {
      "set": {
        "field": "event.ingested",
        "value": "{{ _ingest.timestamp }}",
        "ignore_failure": true
      }
    }
  ]
}
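Once you've run all of the PUTs, a wildcard GET is a quick way to confirm that all 8 pipelines now exist:

GET _ingest/pipeline/logs-endpoint*

And if you want to exercise the network pipeline's DNS handling, you can feed it a made-up document through the Simulate Pipeline API. Assuming the pipeline was created exactly as above, the response should show dns.question.type mapped to "AAAA", the temporary dns.question.Ext_temp field removed, and event.ingested set:

POST _ingest/pipeline/logs-endpoint.events.network-0.19.1/_simulate
{
  "docs": [
    {
      "_source": {
        "message": "DNS query is completed for the name www.example.com type 28",
        "network": {
          "protocol": "dns"
        }
      }
    }
  ]
}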