Hi,
I need some help setting up a custom configuration in Filebeat.
I would like to add a new grok pattern in /usr/share/filebeat/module/system/syslog/ingest/pipeline.json for this log format:
[test0][pression][3.10.2][vpfr][ERR] 2019/07/11 07:42:19 MASTER-PRESSION-VTOM.go:122: <MSG MASTER> > [ 2019-07-11 07:42:19.824120057 +0200 CEST m=+650.565875227 ]
So I just added the following pattern to the grok processor (the rest of the file is unchanged):
"grok": {
  "field": "message",
  "patterns": [
    .....
    "\\[%{DATA:agent.hostname}\\]\\[%{DATA:process.name}\\]\\[%{DATA:process.version}\\]\\[%{DATA:process.customer}\\]\\[%{DATA:log.level}\\] %{GREEDYDATA:system.syslog.message}"
  ],
  ...
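To check the pattern on its own, outside of the module, a simulate request like the one below can be used. This is only a minimal sketch with my added pattern, not the full Filebeat pipeline:
POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "processors": [
      {
        "grok": {
          "field": "message",
          "patterns": [
            "\\[%{DATA:agent.hostname}\\]\\[%{DATA:process.name}\\]\\[%{DATA:process.version}\\]\\[%{DATA:process.customer}\\]\\[%{DATA:log.level}\\] %{GREEDYDATA:system.syslog.message}"
          ]
        }
      }
    ]
  },
  "docs": [
    {
      "_source": {
        "message": "[test0][pression][3.10.2][vpfr][ERR] 2019/07/11 07:42:19 MASTER-PRESSION-VTOM.go:122: <MSG MASTER> > [ 2019-07-11 07:42:19.824120057 +0200 CEST m=+650.565875227 ]"
      }
    }
  ]
}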
I have also created a new index template to set this up:
PUT _template/logs-software
{
  "order" : 1,
  "index_patterns" : [
    "logs-software-*"
  ],
  "settings" : {
    "index" : {
      "lifecycle" : {
        "name" : "logs-software",
        "rollover_alias" : "logs-software"
      },
      "mapping" : {
        "total_fields" : {
          "limit" : "10000"
        }
      },
      "refresh_interval" : "5s",
      "number_of_routing_shards" : "30",
      "number_of_shards": "1",
      "number_of_replicas": "0",
      "query" : {
        "default_field" : [
          "message",
          "agent.name",
          "log.level",
          "process.name",
          "process.version",
          "process.customer",
          "log.file.path",
          "input.type",
          "fields.*"
        ]
      }
    }
  },
  "mappings" : {
    "dynamic_templates" : [
      {
        "string_as_keyword": {
          "match_mapping_type": "string",
          "mapping": {
            "type": "keyword",
            "doc_values": "true"
          }
        }
      }
    ],
    "properties" : {
      "agent" : {
        "properties" : {
          "hostname" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "name" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          }
        }
      },
      "system" : {
        "properties" : {
          "syslog" : {
            "properties" : { }
          }
        }
      },
      "log" : {
        "properties" : {
          "original" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "file" : {
            "properties" : {
              "path" : {
                "ignore_above" : 1024,
                "type" : "keyword"
              }
            }
          },
          "offset" : {
            "type" : "long"
          },
          "level" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "flags" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "source" : {
            "properties" : {
              "address" : {
                "ignore_above" : 1024,
                "type" : "keyword"
              }
            }
          }
        }
      },
      "syslog" : {
        "properties" : {
          "priority" : {
            "type" : "long"
          },
          "facility" : {
            "type" : "long"
          },
          "severity_label" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "facility_label" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          }
        }
      },
      "process" : {
        "properties" : {
          "args" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "name" : {
            "ignore_above" : 1024,
            "type" : "keyword"
          },
          "version": {
            "type": "keyword",
            "ignore_above": 1024
          },
          "customer": {
            "type": "keyword",
            "ignore_above": 1024
          },
          "pid" : {
            "type" : "long"
          }
        }
      },
      "message" : {
        "norms" : false,
        "type" : "text"
      },
      "@timestamp" : {
        "type" : "date"
      }
    }
  }
}
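For completeness, the stored template and the resulting field mapping on a new index can be checked with requests like these (the index name is just the one from my test document below):
GET _template/logs-software
GET logs-software-pression-2019.07.11/_mapping/field/process.customer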
I did some tests in the Grok Debugger in Kibana to be sure that the format is correct.
But when I check in Visualize in Kibana, the parsing does not seem to be working. I have tried many different things, but nothing works, so I would like to get some help.
Please find below the resulting document in Kibana:
{
  "_index": "logs-software-pression-2019.07.11",
  "_type": "_doc",
  "_id": "OTzf4WsB",
  "_version": 1,
  "_score": null,
  "_source": {
    "@timestamp": "2019-07-11T16:30:45.309Z",
    "message": "[test0][pression][3.10.2][vpfr][ERR] 2019/07/11 07:42:19 MASTER-PRESSION.go:122: <MSG MASTER> > [ 2019-07-11 07:42:19.824120057 +0200 CEST m=+650.565875227 ]",
    "input": {
      "type": "log"
    },
    "fields": {
      "origin": "logs-software-pression"
    },
    "ecs": {
      "version": "1.0.0"
    },
    "host": {
      "name": "test0"
    },
    "agent": {
      "ephemeral_id": "de8b0eb0-5076-432a-942a-65de23632814",
      "hostname": "test0",
      "id": "da7ed90a-f9f5-47bd-89b",
      "version": "7.2.0",
      "type": "filebeat"
    },
    "log": {
      "offset": 166,
      "file": {
        "path": "/home/log_services/pression/test.log"
      }
    }
  },
  "fields": {
    "@timestamp": [
      "2019-07-11T16:30:45.309Z"
    ]
  },
  "sort": [
    1562862645309
  ]
}
As you can see, my fields do not appear (process.customer, process.version, etc.).
Have you got any idea?
Thank you