Logstash date filter issue with Kibana/Elasticsearch

Hi,

I am having a very strange issue here.

For instance, when I include the date filter in this part of my configuration:

  if [fields][document_type] == "mvpep" {
    grok {
      match => { "message" => "(%{TIMESTAMP_ISO8601:timestamp_event}(%{SPACE}%{SPACE}|%{SPACE})%{NUMBER:request_ID}%{SPACE}\[%{GREEDYDATA:java_thread}\]  %{LOGLEVEL:log_level} (?<java_class>[a-zA-Z.]+):%{NUMBER:code_line} - (((%{WORD:method}|%{WORD:method}\(%{NUMBER:instances}\)): %{WORD:http_type} - %{URI:info})|%{GREEDYDATA:random_data})|%{TIMESTAMP_ISO8601:timestamp_event}(%{SPACE}%{SPACE}|%{SPACE})%{LOGLEVEL:log_level}%{SPACE}\[(([,]{3})|(?<java_thread>[A-Za-z0-9\-,.]+))\]%{SPACE}([?\-]{3,4})%{SPACE}([?\-]{3,4})%{SPACE}\[(?<runtime>[A-Za-z0-9\-\[\]\s]+)\]%{SPACE}(?<java_class>[A-Za-z0-9.\s]+):%{SPACE}((%{WORD:process}|%{WORD:process}\(%{NUMBER:executed_time}\)|%{WORD:process}%{SPACE:space_exists}\(%{NUMBER:executed_time}\))(:|%{SPACE}[>-]+)( %{WORD:http_type} - %{URI:info}|%{SPACE}(?<java_type>[A-Za-z]+)%{SPACE}(?<java_method>[A-Za-z.,\s\[\]\(\)]+)\|%{GREEDYDATA}\|(?<java_user>[A-Z\s]+)\|%{GREEDYDATA})|%{GREEDYDATA:event_message}))" }
      add_tag => [ "grok", "mvpep" ]
    }
    if [space_exists] {
      mutate {
        add_field => { "internal" => 1 }
        remove_field => [ "space_exists" ]
      }
    } else {
      mutate {
        add_field => { "internal" => 0 }
      }
    }
    mutate {
      strip => ["java_class", "runtime", "java_method", "java_type", "java_user"]
      convert => {
        "executed_time" => "integer"
        "internal" => "boolean"
      }
    }
    if [java_method] and [executed_time] {
      ruby {
        # build a Zabbix trapper key from the method name, e.g. trapper.key[someMethod]
        code => "zabbix_key = event.get('java_method').split('(')[0];
                 zabbix_key = 'trapper.key[' + zabbix_key + ']';
                 event.set('zabbix_key',zabbix_key)
                "
      }
    }
    date {
      match => [ "timestamp_event", "YYYY-MM-dd HH:mm:ss.SSS", "YYYY-MM-dd HH:mm:ss.SS", "YYYY-MM-dd HH:mm:ss.S" ]
    }
  }

I can see in the Logstash output that @timestamp is updated accordingly:

logstash       |     "timestamp_event" => "2020-06-09 12:51:07.143",
logstash       |          "@timestamp" => 2020-06-09T12:51:07.143Z,

but the events do not show up in the Kibana timeline!

Any idea why this is occurring?

I reset the index and updated/recreated the index pattern, yet the problem persists. I use the date filter in other parts of my logstash.conf and it works! I have a feeling this is an issue with the format of timestamp_event, but I am not receiving a _dateparsefailure either; simply nothing appears.
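One way to rule out a silent parse failure is to set an explicit failure tag on the date filter and dump the full event to stdout (a minimal sketch; the tag name here is arbitrary and only for this check):

  date {
    match => [ "timestamp_event", "YYYY-MM-dd HH:mm:ss.SSS", "YYYY-MM-dd HH:mm:ss.SS", "YYYY-MM-dd HH:mm:ss.S" ]
    # the default tag is "_dateparsefailure"; an explicit one makes the check unambiguous
    tag_on_failure => [ "timestamp_event_parse_failure" ]
  }

  # temporary debug output, to inspect the parsed event before it reaches Elasticsearch
  output {
    stdout { codec => rubydebug }
  }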

As an example of where it does work:

  if [fields][document_type] == "secure" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:timestamp_event} %{HOSTNAME:hostname} %{GREEDYDATA:event_output}" }
      add_tag => [ "grok", "secure" ]
    }
    date {
      locale => "en"
      match => [ "timestamp_event", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss", "MMM d HH:mm:ss" ]
    }
  }

I am also seeing this in the Elasticsearch logs; I do not know if it is related.

es01           | {"type": "deprecation", "timestamp": "2020-06-09T16:51:01,397Z", "level": "WARN", "component": "o.e.d.s.a.b.h.DateHistogramAggregationBuilder", "cluster.name": "es-docker-cluster", "node.name": "es01", "message": "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.", "cluster.uuid": "2IK2zt_wRCSfBJKOwZvW9A", "node.id": "PCefJvWCTVSrvcih66fKBg"  }

I will investigate this warning further.

RESOLVED. The timezone option needs to be set in the date filter here. Comparing the 12:51 local timestamp with the 16:51Z Elasticsearch log above, the events were apparently being stamped about four hours off from real UTC, so they fell outside Kibana's default time window (the deprecation warning was unrelated).
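For reference, a sketch of the fix; the actual zone depends on where the source logs are written ("America/New_York" is only an assumption based on the four-hour offset above):

  date {
    match => [ "timestamp_event", "YYYY-MM-dd HH:mm:ss.SSS", "YYYY-MM-dd HH:mm:ss.SS", "YYYY-MM-dd HH:mm:ss.S" ]
    # assumed zone; substitute the timezone the source logs are actually written in
    timezone => "America/New_York"
  }

With the zone set, the date filter converts timestamp_event to UTC before writing @timestamp, and the events land in the expected place on the Kibana timeline.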