Here is my logstash conf
input {
file {
path => '/home/logs/corporate_access.log'
}
}
filter {
grok {
match => {
"message" => "User id: %{NUMBER:UserId}, Client id:%{NUMBER:ClientId}"
}
}
}
output {
elasticsearch {
hosts => ["localhost:9200"]
sniffing => true
manage_template => false
index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
document_type => "%{[@metadata][type]}"
}
Here is my log data :
{ "_index": "filebeat-2016.07.12", "_type": "corporate-access", "_id": "AVXdvwm4Om7vxCUl3Ywh", "_score": null, "_source": { "message": "12 Jul 2016 11:45:25,218 http-bio-9080-exec-10 [INFO ] corporate_access - Request details - Uri: /corporate/analytics/report/1278829, Ip: x.x.x.x, **User id:** 12315, **Client id:**10905", "@version": "1", "@timestamp": "2016-07-12T06:15:26.308Z", "fields": null, "beat": { "hostname": "abc.xyz.df", "name": "abc.xyz.df" }, "source": "/home/logs/corporate_access.log", "offset": 6962908, "type": "corporate-access", "input_type": "log", "count": 1, "host": "abc.xyz.df", "tags": [ "beats_input_codec_plain_applied" ] }, "fields": { "@timestamp": [ 1468304126308 ] }, "sort": [ 1468304126308 ] }
My problem is these filters are not creating fields and neither showing up in kibana