pope843
(john Paul)
October 12, 2017, 12:33pm
1
Hello,
Need help please: what's the grok format for this kind of timestamp? I would also like to add the current date (e.g. YYYY-MM-DD) to it.
04:38:30.793UTC
A sample log line looks like this:
05:08:33.351UTC DEBUG c.x.x.x.MapSsnActor MapSsnActor(akka://x) - create dialog request id=2767649
pope843
(john Paul)
October 13, 2017, 6:04am
2
Here's my logstash filter
filter {
  if [host] == "ip-10-x-x-x" or [host] == "ip-x-3-x-x" {
    grok {
      match => [ "message", "%{TIME:logtime}%{TZ:tz} %{LOGLEVEL:level} %{GREEDYDATA:message}" ]
    }
    mutate {
      add_field => { "timestamp" => "MMM dd YYYY %{logtime}%{tz}" }
    }
    date {
      locale => "en"
      timezone => "UTC"
      match => ["timestamp", "MMM dd YYYY HH:mm:ss.SSSZZZ"]
      target => "@timestamp"
    }
  }
}
jsvd
(João Duarte)
October 13, 2017, 9:29am
3
this is what I tried:
input {
  generator {
    count => 1
    message => "05:08:33.351UTC DEBUG c.emnify.esc.cmaprouter.MapSsnActor MapSsnActor(akka://esc) - create dialog request id=2767649"
  }
}
filter {
  grok {
    match => ["message", "%{DATA:timestamp} .*"]
  }
  date {
    match => ["timestamp", "HH:MM:ss.SSSZZZ"]
  }
}
output {
  stdout { codec => rubydebug }
}
This results in:
{
    "@version" => "1",
    "host" => "Joaos-MBP-5.lan",
    "sequence" => 0,
    "@timestamp" => 2017-08-01T05:00:33.351Z,
    "message" => "05:08:33.351UTC DEBUG c.emnify.esc.cmaprouter.MapSsnActor MapSsnActor(akka://esc) - create dialog request id=2767649",
    "timestamp" => "05:08:33.351UTC"
}
pope843
(john Paul)
October 13, 2017, 9:46am
4
Oh, thanks for the reply. I'll try this and get back to you.
pope843
(john Paul)
October 13, 2017, 10:24am
5
Hi,
I'm still seeing a different value in @timestamp:
{
  "_index": "maprouter-2017.10.13",
  "_type": "log",
  "_id": "AV8VPYbGtbGo0IPlJYxy",
  "_version": 1,
  "_score": null,
  "_source": {
    "message": "10:17:32.998UTC INFO a.r.RemoteActorRefProvider$RemotingTerminator akka.tcp://x@:50689/system/remoting-terminator - Remoting shut down.",
    "@version": "1",
    "@timestamp": "2017-10-13T10:17:34.466Z",
    "offset": 2776538,
    "type": "log",
    "input_type": "log",
    "beat": {
      "name": "ip-10-x-x-x",
      "hostname": "ip-10-x-x-x",
      "version": "5.6.2"
    },
    "source": "/opt/esc/logs/cmaprouter101.log",
    "host": "ip-10-3-101-15",
    "tags": [
      "beats_input_codec_plain_applied",
      "_dateparsefailure"
    ],
    "timestamp": "10:17:32.998UTC",
    "%{": {
      "@metadata": {
        "fingerprint": {
          "}": 1681251507
        }
      }
    }
  },
  "fields": {
    "@timestamp": [
      1507889854466
    ]
  },
  "sort": [
    1507889854466
  ]
}
pope843
(john Paul)
October 13, 2017, 10:35am
6
Here's my filter btw.
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "HH:mm:ss.SSSZZZ" ]
    }
  }
  else if [type] == "log" {
    grok {
      match => ["message", "%{DATA:timestamp} .*"]
    }
    date {
      match => ["timestamp", "HH:MM:ss.SSSZZZ"]
    }
  }
  if "DEBUG" not in [message] and "WARN" not in [message] and "INFO" not in [message] and "ERROR" not in [message] {
    drop { }
  }
  if "DEBUG o.m.protocols.sctp.AssociationImpl" in [message] {
    drop { }
  }
  if [host] == "ip-10-x-x-x" or [host] == "ip-10-x-x-x" {
    fingerprint {
      source => "message"
      target => "%{[@metadata][fingerprint]}"
      method => "MURMUR3"
    }
  }
}
YuWatanabe
(Yu Watanabe)
October 13, 2017, 10:45am
7
Because the date filter is wrong.
date {
  match => ["timestamp", "HH:MM:ss.SSSZZZ"]
}
Try
https://www.elastic.co/guide/en/logstash/current/plugins-filters-date.html
date {
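  # mm is minute of hour; MM is month of year (Joda-Time pattern letters are case-sensitive)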
match => ["timestamp", "HH:mm:ss.SSSZZZ"]
}
pope843
(john Paul)
October 13, 2017, 10:48am
8
Oh I see, thanks, I'll try this.
pope843
(john Paul)
October 13, 2017, 11:04am
9
Hi
When I change MM to mm it doesn't seem to work. There is no timestamp field any more.
{
  "_index": "x-2017.10.13",
  "_type": "log",
  "_id": "AV8VYhs8tbGo0IPlJrka",
  "_version": 1,
  "_score": null,
  "_source": {
    "message": " DEBUG c.x.x.x.x x x-router - initializing SCTP stack ...",
    "@version": "1",
    "@timestamp": "2017-10-13T10:57:47.313Z",
    "source": "/opt/esc/logs/cmaprouter101.log",
    "offset": 3012936,
    "type": "log",
    "input_type": "log",
    "beat": {
      "name": "ip-10-x-x-x",
      "hostname": "ip-10-x-x-x",
      "version": "5.6.2"
    },
    "host": "ip-10-x-x-x",
    "tags": [
      "beats_input_codec_plain_applied"
    ],
    "%{": {
      "@metadata": {
        "fingerprint": {
          "}": 3017888107
        }
      }
    }
  },
  "fields": {
    "@timestamp": [
      1507892267313
    ]
  },
  "sort": [
    1507892267313
  ]
}
filter:
filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    syslog_pri { }
    date {
      match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss", "HH:mm:ss.SSSZZZ" ]
    }
  }
  else if [type] == "log" {
    grok {
      match => ["message", "%{DATA:timestamp} .*"]
    }
    date {
      match => ["timestamp", "HH:mm:ss.SSSZZZ"]
    }
  }
  if "DEBUG" not in [message] and "WARN" not in [message] and "INFO" not in [message] and "ERROR" not in [message] {
    drop { }
  }
  if "DEBUG o.m.protocols.sctp.AssociationImpl" in [message] {
    drop { }
  }
  if [host] == "ip-10-x-x-x" or [host] == "ip-10-x-x-x" {
    fingerprint {
      source => "message"
      target => "%{[@metadata][fingerprint]}"
      method => "MURMUR3"
    }
  }
}
pope843
(john Paul)
October 13, 2017, 11:08am
10
Hi, I'm having this error:
{:timestamp=>"2017-10-13T11:08:15.589000+0000", :message=>"Failed parsing date from field", :field=>"timestamp", :value=>"00:", :exception=>"Invalid format: \"00:\" is too short", :config_parsers=>"HH:mm:ss.SSSZZZ", :config_locale=>"default=en_US", :level=>:warn}
jsvd
(João Duarte)
October 13, 2017, 2:02pm
11
pope843:
{:timestamp=>"2017-10-13T11:08:15.589000+0000", :message=>"Failed parsing date from field", :field=>"timestamp", :value=>"00:", :exception=>"Invalid format: \"00:\" is too short", :config_parsers=>"HH:mm:ss.SSSZZZ", :config_locale=>"default=en_US", :level=>:warn}
In this message the timestamp field has a different format, which means you have events with different timestamp formats too?
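One way to avoid feeding a partial value like "00:" to the date filter is to capture the field only when the line actually starts with a full time. A sketch, assuming the stock TIME, LOGLEVEL, and GREEDYDATA grok patterns; the field names are just illustrative:
grok {
  # Only lines that begin with HH:mm:ss.SSS followed by the literal "UTC"
  # get a timestamp field; other lines are simply left without one.
  match => ["message", "^(?<timestamp>%{TIME}UTC) %{LOGLEVEL:level} %{GREEDYDATA:msg}"]
}
date {
  match => ["timestamp", "HH:mm:ss.SSSZZZ"]
}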
pope843
(john Paul)
October 14, 2017, 6:29am
12
Hello jsvd
I'm now seeing logs, but I'm getting the wrong year (2017.01.01), which also ships to the wrong index. How can I correct this? Your help is appreciated, thanks.
"message" => "05:52:28.773UTC DEBUG akka.cluster.ClusterCoreDaemon akka.tcp://1/system/cluster/core/daemon - Cluster Node [akka.tcp://] - Receiving gossip from [UniqueAddress(akka.tcp://esc@x:41599,-2133981790)]",
"@version" => "1",
**"@timestamp" => "2017-01-01T05:52:28.773Z",**
"source" => "/opt/x/logs/x.log",
"offset" => 1544114,
"type" => "applog",
"input_type" => "log",
"beat" => {
"name" => "ip-10-x-x-x",
"hostname" => "ip-10-x-x-x",
"version" => "5.6.2"
},
"host" => "ip-10-x-x-x",
"tags" => [
[0] "beats_input_codec_plain_applied"
],
"timestamp" => [
[0] "05:52:28.773UTC",
[1] "05:52:28.773UTC"
],
"%{" => {
"@metadata" => {
"fingerprint" => {
"}" => 2596617487
}
}
}
}
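The 2017-01-01 part is most likely just what a time-only pattern produces: the date filter has no month or day to fill in, so they default to January 1 (and the missing year is filled with the current year), and the daily index then follows that @timestamp. As an alternative to a ruby filter, the event's own ingest date can be prepended with sprintf before parsing. This is only a sketch; the field name full_timestamp is made up:
mutate {
  # %{+FORMAT} formats the event's current @timestamp (still the ingest time
  # at this point), so this prepends today's date to the time-only field.
  add_field => { "full_timestamp" => "%{+yyyy-MM-dd} %{timestamp}" }
}
date {
  match => ["full_timestamp", "yyyy-MM-dd HH:mm:ss.SSSZZZ"]
}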
pope843
(john Paul)
October 14, 2017, 8:04am
13
Hello
I've tried to use this:
ruby {
  code => "
    event['date'] = Time.now.strftime('%Y-%m-%d')
  "
}
But I'm getting the error below on Logstash 5.6:
[2017-10-14T08:01:18,237][ERROR][logstash.filters.ruby ] Ruby exception occurred: Direct event field references (i.e. event['field'] = 'value') have been disabled in favor of using event get and set methods (e.g. event.set('field', 'value')). Please consult the Logstash 5.0 breaking changes documentation for more details.
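As the error message says, Logstash 5.x events are read and written through event.get and event.set. The same snippet rewritten for that API (field name kept from the attempt above) would look roughly like:
ruby {
  # 5.x event API: event.set(field, value) instead of event['field'] = value
  code => "event.set('date', Time.now.strftime('%Y-%m-%d'))"
}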
pope843
(john Paul)
October 15, 2017, 7:04am
14
Hi
I was able to get the correct event API call for the current date in Logstash 5.6, but now my date won't match (YYYY-MM-dd), so I'm getting an incorrect index with the date 2017.01.01. See below.
filter:
if "UTC " in [message] {
ruby {
code => "
event.set('dateko', Time.now.strftime('%Y-%m-%d'))
"
}
mutate {
add_field => {
"akkatimestamp" => "%{dateko} %{timestampko}"
}
remove_field => ["timestampko"]
}
date {
match => [ "akkatimestamp ", "YYYY-MM-dd HH:mm:ss.SSSZZZ ", "yyyy-MM-dd HH:mm:ss.SSSZZZ" ]
target => "@timestamp "
}
}
ES:
{
  "_index": "maprouter-applog-2017.01.01",
  "_type": "applog",
  "_id": "%{[@metadata][fingerprint]}",
  "_version": 96960,
  "_score": null,
  "_source": {
    "dateko": "2017-10-15",
    "syslog_severity_code": 5,
    "offset": 24718,
    "syslog_facility": "user-level",
    "input_type": "log",
    "syslog_facility_code": 1,
    "newtimestamp": "%{date} 06:42:20.488UTC",
    "source": "/maplogs/logs/maprouter001.log",
    "message": "06:42:20.488UTC DEBUG c.x.x.x.MapSsnActor MapSsnActor(akka://esc) - endDialog id=5222641",
    "type": "applog",
    "syslog_severity": "notice",
    "tags": [
      "applogs",
      "beats_input_codec_plain_applied",
      "_grokparsefailure",
      "_rubyexception",
      "_dateparsefailure"
    ],
    "akkatimestamp": "2017-10-15 06:42:20.488UTC",
    "@timestamp": "2017-01-01T06:42:20.488Z",
    "%{": {
      "@metadata": {
        "fingerprint": {
          "}": 1689888401
        }
      }
    },
    "@version": "1",
    "beat": {
      "name": "ip-10-x-x-x",
      "hostname": "ip-10-x-x-x",
      "version": "5.6.2"
    },
    "host": "ip-10-x-x-x",
    "timestamp": "06:42:20.488UTC"
  },
  "fields": {
    "@timestamp": [
      1483252940488
    ]
  },
  "sort": [
    1483252940488
  ]
}
Pls help
pope843
(john Paul)
October 15, 2017, 2:17pm
15
Hello,
I'm having a hard time understanding why I'm receiving logs with @timestamp "2017-01-01T14:03:42.082Z", which should not be the case. As you can see from the ES data below, "received_at" is "2017-10-15T14:07:04.635Z", yet the date that shows up is still 2017.01.01. How can this be? It's also affecting the index, which is wrong as well, and even my date filter doesn't work (see the Logstash config above).
{
  "_index": "x-x-x-2017.01.01",
  "_type": "applog",
  "_id": "%{[@metadata][fingerprint]}",
  "_version": 2350,
  "_score": null,
  "_source": {
    "dateko": "2017-10-15",
    "syslog_severity_code": 5,
    "offset": 324791,
    "syslog_facility": "user-level",
    "input_type": "log",
    "syslog_facility_code": 1,
    "newtimestamp": "%{date} 14:03:42.082UTC",
    "source": "/maplogs/logs/maprouter001.log",
    "message": "14:03:42.082UTC DEBUG c.x.x.cmaprouter.MapSsnActor MapSsnActor(akka://esc) - endDialog id=6086010",
    "type": "applog",
    "syslog_severity": "notice",
    "tags": [
      "applogs",
      "beats_input_codec_plain_applied",
      "_grokparsefailure",
      "_rubyexception",
      "_dateparsefailure"
    ],
    "akkatimestamp": "2017-10-15.14:03:42.082UTC",
    "received_from": "ip-10-x-x-x",
    "@timestamp": "2017-01-01T14:03:42.082Z",
    "received_at": "2017-10-15T14:07:04.635Z",
    "%{": {
      "@metadata": {
        "fingerprint": {
          "}": 2008906984
        }
      }
    },
    "@version": "1",
    "beat": {
      "name": "ip-10-x-x-x",
      "hostname": "ip-10-x-x-x",
      "version": "5.6.2"
    },
    "host": "ip-10-x-x-x",
    "timestamp": "14:03:42.082UTC"
  },
  "fields": {
    "received_at": [
      1508076424635
    ],
    "@timestamp": [
      1483279422082
    ],
    "dateko": [
      1508025600000
    ]
  },
  "sort": [
    1483279422082
  ]
}
pope843
(john Paul)
October 16, 2017, 5:28am
16
Hello
Appreciate any assistance
pope843
(john Paul)
October 29, 2017, 11:53pm
17
I've already fixed this.
It was because of this line: "_id": "%{[@metadata][fingerprint]}"
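For anyone hitting the same thing: the fingerprint filter's target takes a plain field reference, not a %{...} sprintf string; the sprintf form belongs where the value is read back, for example as the document_id of an elasticsearch output. A sketch of what the corrected pieces might look like (the output block here is assumed, not copied from the original config):
fingerprint {
  source => "message"
  # target is a field name; no %{} interpolation here
  target => "[@metadata][fingerprint]"
  method => "MURMUR3"
}
output {
  elasticsearch {
    # read the fingerprint back with sprintf when setting the document id
    document_id => "%{[@metadata][fingerprint]}"
  }
}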
system
(system)
Closed
November 27, 2017, 12:07am
18
This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.