Persistent queue in Logstash 6.1 causes problems with CEF parsing

After enabling the persistent queue, some of our CEF logs could no longer be parsed.
The logs seemed to be split incorrectly.

The config looks as follows:

# Receive Fortigate logs over TCP on port 5514, decoding each event with the
# CEF codec and tagging the documents with type "forti_log" for the filter
# section below.
input {
  tcp {
    port  => 5514
    type  => "forti_log"
    codec => "cef"
  }
}


filter {
# Only process events tagged by the tcp input above. The Fortigate log carries
# a "type" field of its own; it is renamed further down so it does not clash
# with this Logstash document type.
 if [type] == "forti_log" {

      grok{
          # Extract the syslog priority, timestamp, and source host from the
          # "syslog" field.
          # NOTE(review): with a tcp input + cef codec the raw line normally
          # lands in "message", not "syslog" — confirm this field actually
          # exists on these events.
          match => ["syslog", "%{SYSLOG5424PRI:syslog_index}%{SYSLOGTIMESTAMP:date} %{NOTSPACE:logsource}"]
      }




        # Split the Fortigate key=value pairs out of the message body.
        kv {
    source => "message"
    value_split => "="
# Expects CSV output to be enabled on the Fortigate. If it is not, the
# field_split value may need changing (untested).
    field_split => " "
}

    mutate {

# The Fortigate log contains a "type" field which would clobber the Logstash
# document type, so rename it (and "subtype") and then re-add type=forti_log.
# NOTE(review): mutate applies its operations in a fixed internal order
# (rename before add_field) regardless of the order written here — confirm
# against the mutate filter docs for this Logstash version; splitting into
# separate mutate blocks would make the ordering explicit.
    rename => { "type" => "ftg_type" }
    rename => { "subtype" => "ftg_subtype" }
    add_field => { "type" => "forti_log" }
    # Convert byte counters to integers so they can be aggregated numerically.
    convert => { "rcvdbyte" => "integer" }
    convert => { "sentbyte" => "integer" }
}

# Parse the Fortigate event time (UNIX epoch seconds) in place.
date {
    match => ["FTNTFGTeventtime", "UNIX" ]
    target => "FTNTFGTeventtime"
}

# Use the syslog header timestamp as the event's @timestamp.
date {
    match => [ "date", "MMM dd HH:mm:ss" ]
    target => "@timestamp"
    }

     mutate {
 # Drop intermediate and noisy fields; add/remove entries as you see fit.
     remove_field => ["syslog_index","syslog","syslog5424_pri","path","service","date","time","sentpkt","rcvdpkt","log_id","poluuid"]
 }
}
}

Is this a known problem that could be solved by simply upgrading Logstash?