Aggregation Filter 2 logs same index

Fixed the issue; the correct configuration is below. I hope it will be useful for other members of the community.

# Receive Alcatel syslog messages over UDP on port 1514.
# Each event is typed "alcatel" so the filter section can match on [type].
input {
  udp {
    type => "alcatel"
    port => 1514
  }
}

filter {
  if [type] == "alcatel" {
    grok {

      match => { "message" => [ "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\(%{POSINT:syslog_pid}\))\s+\w+:\s+\[\w+\](?\<status\>\s+Authentication\s+failed):\s+\w+ %{DATA:job_id},",
                                "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\(%{POSINT:syslog_pid}\))\s+\w+:\s+\[\w+\](?\<status\>\s+Authentication\s+succeed):\s+\w+ %{DATA:job_id},",
                                "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\(%{POSINT:syslog_pid}\))\s+\w+:\s+\[\w+\]\s+\w+%{DATA:job_id},\s+\w+\s+\w+\s+%{SYSLOGHOST:srcip}" ] }                 
      add_field => [ "received_at", "%{@timestamp}" ]
      #add_tag => "Alcatel"
     }
      mutate { gsub => [ "status", "^ ", "" ] } 
      mutate { gsub => [ "job_id", "^ ", "" ] }
     if "_grokparsefailure" in [tags] {
       drop {}
     }
    aggregate {
      task_id => "%{job_id}"
       code => "
        map['received_at'] ||= event.get('received_at') 
        map['type'] ||= event.get('type')
        map['syslog_timestamp'] ||= event.get('syslog_timestamp')
        map['syslog_hostname'] ||= event.get('syslog_hostname')
        map['syslog_program'] ||= event.get('syslog_program')
        map['syslog_pid'] ||= event.get('syslog_pid')
        map['status'] ||= event.get('status')
        map['srcip'] ||= event.get('srcip')
        map['host'] ||= event.get('host')
        map['job_id'] ||= event.get('job_id')
        "
      timeout => 2
      timeout_tags => ['Alcatel']
      map_action => 'create_or_update'
      push_map_as_event_on_timeout => true
    }
  }
}