Duplicate data in Logstash

I am getting duplicate data in Logstash: the same event is indexed into Elasticsearch more than once.
My config file is:

input {
  file {
    path => "/var/log/flask/access*"
    type => "flask_access"
    max_open_files => 409599
  }
  stdin {}
}
filter {
  # Force the type even for events coming from stdin.
  mutate { replace => { "type" => "flask_access" } }
  grok {
    match => { "message" => "%{FLASKACCESS}" }
  }
  # temp combines the two fields that identify a task; it is used
  # as the aggregate task_id (and later as the document ID).
  mutate {
    add_field => {
      "temp" => "%{uniqueid} %{method}"
    }
  }
  if "Entering" in [api_status] {
    aggregate {
      task_id => "%{temp}"
      code => "map['blockedprocess'] = 2"
      map_action => "create"
    }
  }
  if "Entering" in [api_status] or "Leaving" in [api_status] {
    aggregate {
      task_id => "%{temp}"
      code => "map['blockedprocess'] -= 1"
      map_action => "update"
    }
  }
  if "End Task" in [api_status] {
    aggregate {
      task_id => "%{temp}"
      # On Logstash >= 5 the event must be modified through its API
      # (event.set) rather than event['field'] = value:
      code => "event.set('blockedprocess', map['blockedprocess'])"
      map_action => "update"
      end_of_task => true
      timeout => 120
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
#    sniffing => true
#    manage_template => false
#    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
#    document_type => "%{[@metadata][type]}"
  }
  stdout { codec => rubydebug }
}

Please help me.
Thanks.

I solved it.

I assign a unique document ID via the document_id option in the output section. document_id points to my temp field, and temp is the unique ID in my project. Since Elasticsearch treats a write with an existing _id as an update of that document rather than a new insert, repeated events now overwrite each other instead of being indexed as duplicates.

My output changed to:

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    document_id => "%{temp}"
#    sniffing => true
#    manage_template => false
#    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
#    document_type => "%{[@metadata][type]}"
  }
  stdout { codec => rubydebug }
}
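
A side note: temp works here because it is unique per task in my project. If you do not have a naturally unique field, a common alternative (not part of my config, just a sketch) is to hash the identifying fields with the fingerprint filter and use the hash as the document ID. The field names uniqueid and method below are the ones from the grok/mutate filters above; the rest is standard fingerprint and elasticsearch plugin options:

filter {
  fingerprint {
    # Hash the same fields that make up "temp" into a stable ID.
    # MURMUR3 needs no key; HMAC methods such as SHA256 also require "key".
    source => ["uniqueid", "method"]
    concatenate_sources => true
    method => "MURMUR3"
    target => "[@metadata][fingerprint]"
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    # Same fingerprint => same _id, so a repeated event overwrites the
    # existing document instead of creating a duplicate.
    document_id => "%{[@metadata][fingerprint]}"
  }
}

Putting the hash under [@metadata] keeps it available for the output's sprintf reference without storing an extra field in the indexed document.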