Hi there folks,
Hope you can help me as I'm at a bit of a loss. I've configured Filebeat v7.17.5 with the nginx module enabled to pick up the Kong logs at
/usr/local/kong/logs/access.log and /usr/local/kong/logs/error.log.
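For context, the relevant configuration is roughly along these lines (a sketch of the shape, not a copy of my exact files):

# modules.d/nginx.yml (sketch)
- module: nginx
  access:
    enabled: true
    var.paths: ["/usr/local/kong/logs/access.log"]
  error:
    enabled: true
    var.paths: ["/usr/local/kong/logs/error.log"]

# filebeat.yml (sketch): debug logging enabled so the published events are visible
logging.level: debug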
With logging.level set to debug, the message that appears in the Filebeat log is as follows:
2022-08-25T10:01:31.910+0100 DEBUG [processors] processing/processors.go:203 Publish event: {
  "@timestamp": "2022-08-25T09:01:31.910Z",
  "@metadata": {
    "beat": "filebeat",
    "type": "_doc",
    "version": "7.17.5",
    "pipeline": "kong-nginx-logs"
  },
  "host": {
    "name": "ieavu3015"
  },
  "log": {
    "file": {
      "path": "/usr/local/kong/logs/access.log"
    },
    "offset": 3918
  },
  "message": "10.68.233.187 - - [24/Aug/2022:17:30:27 +0100] \"POST /myapp HTTP/2.0\" 200 8045 \"-\" \"insomnia/2022.5.1\"",
  "fileset": {
    "name": "access"
  },
  "ecs": {
    "version": "1.12.0"
  },
  "agent": {
    "hostname": "box01",
    "ephemeral_id": "70f843a3-4166-46f9-8fed-bfe29a0288e3",
    "id": "7a8dd5d2-3cd8-48eb-932e-63d173fd589b",
    "name": "box01",
    "type": "filebeat",
    "version": "7.17.5"
  },
  "service": {
    "type": "nginx"
  },
  "input": {
    "type": "log"
  },
  "event": {
    "module": "nginx",
    "dataset": "nginx.access",
    "timezone": "+01:00"
  },
  "fields": {
    "env": "tst"
  }
}
This doesn't look anything like the structure of the exported fields documented here.
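For comparison, going by the grok and set processors in the pipeline dump below, I was expecting the published document to carry structured fields along these lines (hypothetical, illustrative values taken from the access line above, not actual output):

# hypothetical parsed fields, sketched from the pipeline definition below
source:
  ip: "10.68.233.187"
http:
  request:
    method: "POST"
  version: "2.0"
  response:
    status_code: 200
    body:
      bytes: 8045
user_agent:
  original: "insomnia/2022.5.1"
event:
  outcome: "success"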
I'm also seeing what appears to be raw code being output in the Filebeat log:
2022-08-25T11:08:18.705+0100 DEBUG [esclientleg] eslegclient/connection.go:340 PUT http://elastic.myco.com:4119/_ingest/pipeline/filebeat-7.17.5-nginx-access-pipeline
map[description:Pipeline for parsing Nginx access logs. Requires the geoip and user_agent plugins.
on_failure:[map[set:map[field:error.message value:{{ _ingest.on_failure_message }}]]]
processors:[map[set:map[field:event.ingested value:{{_ingest.timestamp}}]]
map[rename:map[field:message target_field:event.original]]
map[grok:map[field:event.original ignore_missing:true pattern_definitions:map[NGINX_ADDRESS_LIST:(?:%{IP}|%{WORD})("?,?\s*(?:%{IP}|%{WORD}))* NGINX_HOST:(?:%{IP:destination.ip}|%{NGINX_NOTSEPARATOR:destination.domain})(:%{NUMBER:destination.port})? NGINX_NOTSEPARATOR:[^ ,:]+] patterns:[(%{NGINX_HOST} )?"?(?:%{NGINX_ADDRESS_LIST:nginx.access.remote_ip_list}|%{NOTSPACE:source.address}) - (-|%{DATA:user.name}) \[%{HTTPDATE:nginx.access.time}\] "%{DATA:nginx.access.info}" %{NUMBER:http.response.status_code:long} %{NUMBER:http.response.body.bytes:long} "(-|%{DATA:http.request.referrer})" "(-|%{DATA:user_agent.original})"]]]
map[grok:map[field:nginx.access.info ignore_missing:true patterns:[%{WORD:http.request.method} %{DATA:_tmp.url_orig} HTTP/%{NUMBER:http.version} ]]]
map[set:map[field:url.domain if:ctx.url?.domain == null && ctx.destination?.domain != null value:{{destination.domain}}]]
map[remove:map[field:[nginx.access.info _tmp.url_orig] ignore_missing:true]]
map[split:map[field:nginx.access.remote_ip_list ignore_missing:true separator:"?,?\s+]]
map[split:map[field:nginx.access.origin ignore_missing:true separator:"?,?\s+]]
map[set:map[field:source.address if:ctx.source?.address == null value:]]
map[script:map[if:ctx.nginx?.access?.remote_ip_list != null && ctx.nginx.access.remote_ip_list.length > 0 lang:painless params:map[dot:.] source:boolean isPrivate(def dot, def ip) {
  try {
    StringTokenizer tok = new StringTokenizer(ip, dot);
    int firstByte = Integer.parseInt(tok.nextToken());
    int secondByte = Integer.parseInt(tok.nextToken());
    if (firstByte == 10) {
      return true;
    }
    if (firstByte == 192 && secondByte == 168) {
      return true;
    }
    if (firstByte == 172 && secondByte >= 16 && secondByte <= 31) {
      return true;
    }
    if (firstByte == 127) {
      return true;
    }
    return false;
  }
  catch (Exception e) {
    return false;
  }
}
try {
  ctx.source.address = null;
  if (ctx.nginx.access.remote_ip_list == null) {
    return;
  }
  def found = false;
  for (def item : ctx.nginx.access.remote_ip_list) {
    if (!isPrivate(params.dot, item)) {
      ctx.source.address = item;
      found = true;
      break;
    }
  }
  if (!found) {
    ctx.source.address = ctx.nginx.access.remote_ip_list[0];
  }
} catch (Exception e) {
  ctx.source.address = null;
}]]
map[remove:map[field:source.address if:ctx.source.address == null]]
map[grok:map[field:source.address ignore_failure:true patterns:[^%{IP:source.ip}$]]]
map[rename:map[field:@timestamp target_field:event.created]]
map[date:map[field:nginx.access.time formats:[dd/MMM/yyyy:H:m:s Z] on_failure:[map[append:map[field:error.message value:{{ _ingest.on_failure_message }}]]] target_field:@timestamp]]
map[remove:map[field:nginx.access.time]]
map[user_agent:map[field:user_agent.original ignore_missing:true]]
map[geoip:map[field:source.ip ignore_missing:true target_field:source.geo]]
map[geoip:map[database_file:GeoLite2-ASN.mmdb field:source.ip ignore_missing:true properties:[asn organization_name] target_field:source.as]]
map[rename:map[field:source.as.asn ignore_missing:true target_field:source.as.number]]
map[rename:map[field:source.as.organization_name ignore_missing:true target_field:source.as.organization.name]]
map[set:map[field:event.kind value:event]]
map[append:map[field:event.category value:web]]
map[append:map[field:event.type value:access]]
map[set:map[field:event.outcome if:ctx?.http?.response?.status_code != null && ctx.http.response.status_code < 400 value:success]]
map[set:map[field:event.outcome if:ctx?.http?.response?.status_code != null && ctx.http.response.status_code >= 400 value:failure]]
map[append:map[field:related.ip if:ctx?.source?.ip != null value:{{source.ip}}]]
map[append:map[field:related.ip if:ctx?.destination?.ip != null value:{{destination.ip}}]]
map[append:map[field:related.user if:ctx?.user?.name != null value:{{user.name}}]]
map[script:map[description:This script processor iterates over the whole document to remove fields with null values. lang:painless source:void handleMap(Map map) {
  for (def x : map.values()) {
    if (x instanceof Map) {
      handleMap(x);
    } else if (x instanceof List) {
      handleList(x);
    }
  }
  map.values().removeIf(v -> v == null);
}
void handleList(List list) {
  for (def x : list) {
    if (x instanceof Map) {
      handleMap(x);
    } else if (x instanceof List) {
      handleList(x);
    }
  }
}
handleMap(ctx);
]]]]
Has anyone seen anything like this before?