Filebeat 7.3.2 logstash module timezone bug - Time in the future

Hey there!

For a while now I've noticed that logs from the Logstash module show up in the future, so I went digging into this bug.

I'm using Logstash to receive the logs and forward them to the ingest pipeline of Elasticsearch. To fix this I changed the ingest pipeline as follows:

DELETE /_ingest/pipeline/filebeat-7.3.2-logstash-log-pipeline-plain

PUT /_ingest/pipeline/filebeat-7.3.2-logstash-log-pipeline-plain
{
    "description": "Pipeline for parsing logstash logs in the plain format",
    "on_failure": [
        {
            "set": {
                "field": "error.message",
                "value": "{{ _ingest.on_failure_message }}"
            }
        }
    ],
    "processors": [
        {
            "grok": {
                "field": "message",
                "pattern_definitions": {
                    "LOGSTASH_CLASS_MODULE": "[\\w\\.]+",
                    "LOGSTASH_LOGLEVEL": "INFO|ERROR|DEBUG|FATAL|WARN|TRACE",
                    "GREEDYMULTILINE" : "(.|\n)*"
                },
                "patterns": [
                    "\\[%{TIMESTAMP_ISO8601:logstash.log.timestamp}\\]\\[%{LOGSTASH_LOGLEVEL:log.level}\\s?\\]\\[%{LOGSTASH_CLASS_MODULE:logstash.log.module}\\s*\\] %{GREEDYMULTILINE:message}"
                ]
            }
        },
        {
            "rename": {
                "field": "@timestamp",
                "target_field": "event.created"
            }
        },
        {
            "date": {
                "field": "logstash.log.timestamp",
                "target_field": "@timestamp",
                "formats": [
                    "yyyy-MM-dd'T'HH:mm:ss,SSS"
                ],
                "ignore_failure": true
            }
        },
        {
            "date": {
                "if": "ctx.event.timezone != null",
                "field": "logstash.log.timestamp",
                "target_field": "@timestamp",
                "formats": [
                    "yyyy-MM-dd'T'HH:mm:ss,SSS"
                ],
                "timezone": "{{ event.timezone }}",
                "on_failure": [{"append": {"field": "error.message", "value": "{{ _ingest.on_failure_message }}"}}]
            }
        },
        {
            "remove": {
                "field": "logstash.log.timestamp"
            }
        }
    ]
}
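
To sanity-check that the timezone actually gets applied, you can run a sample document through the pipeline with the simulate API. The log line, timestamps and the +02:00 offset below are just made-up test values:

POST /_ingest/pipeline/filebeat-7.3.2-logstash-log-pipeline-plain/_simulate
{
    "docs": [
        {
            "_source": {
                "@timestamp": "2019-09-20T12:00:05.000Z",
                "event": {
                    "timezone": "+02:00"
                },
                "message": "[2019-09-20T14:00:00,123][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}"
            }
        }
    ]
}

With event.timezone present, @timestamp in the response should come back as 2019-09-20T12:00:00.123Z (UTC) instead of 14:00, and event.created keeps the time Filebeat read the line.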

And for the slowlogs:

DELETE /_ingest/pipeline/filebeat-7.3.2-logstash-slowlog-pipeline-plain

PUT /_ingest/pipeline/filebeat-7.3.2-logstash-slowlog-pipeline-plain
{
    "description": "Pipeline for parsing logstash slowlogs in the plain format",
    "on_failure": [
        {
            "set": {
                "field": "error.message",
                "value": "{{ _ingest.on_failure_message }}"
            }
        }
    ],
    "processors": [
        {
            "grok": {
                "field": "message",
                "pattern_definitions": {
                    "LOGSTASH_CLASS_MODULE": "[\\w\\.]+\\s*",
                    "LOGSTASH_LOGLEVEL": "INFO|ERROR|DEBUG|FATAL|WARN|TRACE"
                },
                "patterns": [
                    "\\[%{TIMESTAMP_ISO8601:logstash.slowlog.timestamp}\\]\\[%{LOGSTASH_LOGLEVEL:log.level}\\s?\\]\\[%{LOGSTASH_CLASS_MODULE:logstash.slowlog.module}\\] %{GREEDYDATA:message}"
                ]
            }
        },
        {
            "grok": {
                "field": "logstash.slowlog.module",
                "patterns": [
                    "slowlog.logstash.%{WORD:logstash.slowlog.plugin_type}.%{WORD:logstash.slowlog.plugin_name}"
                ]
            }
        },
        {
            "grok": {
                "field": "message",
                "patterns": [
                    "{:plugin_params=>%{GREEDYDATA:logstash.slowlog.plugin_params}, :took_in_nanos=>%{NUMBER:event.duration}, :took_in_millis=>%{NUMBER:logstash.slowlog.took_in_millis}, :event=>%{GREEDYDATA:logstash.slowlog.event}}"
                ]
            }
        },
        {
            "rename": {
                "field": "@timestamp",
                "target_field": "event.created"
            }
        },
        {
            "date": {
                "field": "logstash.slowlog.timestamp",
                "target_field": "@timestamp",
                "formats": [
                    "yyyy-MM-dd'T'HH:mm:ss,SSS"
                ],
                "ignore_failure": true
            }
        },
        {
            "date": {
                "if": "ctx.event.timezone != null",
                "field": "logstash.slowlog.timestamp",
                "target_field": "@timestamp",
                "formats": [
                    "yyyy-MM-dd'T'HH:mm:ss,SSS"
                ],
                "timezone": "{{ event.timezone }}",
                "on_failure": [{"append": {"field": "error.message", "value": "{{ _ingest.on_failure_message }}"}}]
            }
        },
        {
            "remove": {
                "field": [
                    "message",
                    "logstash.slowlog.timestamp"
                ]
            }
        },
        {
            "convert": {
                "field": "event.duration",
                "type": "long"
            }
        },
        {
            "convert": {
                "field": "logstash.slowlog.took_in_millis",
                "type": "long"
            }
        }
    ]
}
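
The slowlog pipeline can be checked the same way; the slowlog line below is just a made-up example of the plain format:

POST /_ingest/pipeline/filebeat-7.3.2-logstash-slowlog-pipeline-plain/_simulate
{
    "docs": [
        {
            "_source": {
                "@timestamp": "2019-09-20T12:00:05.000Z",
                "event": {
                    "timezone": "+02:00"
                },
                "message": "[2019-09-20T14:00:00,123][WARN ][slowlog.logstash.filters.sleep] event processing time {:plugin_params=>{\"time\"=>3}, :took_in_nanos=>3027675106, :took_in_millis=>3027, :event=>original event}"
            }
        }
    ]
}

Again, @timestamp should come back shifted to UTC while event.created keeps the original read time.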

This fixes it. I'm not sure what the ingest-json.json file does, but I didn't have to edit it to make this work. All I changed was how the date is handled during ingest. I hope this is useful to someone, and maybe someday it could make it into a new release!
