Logstash script adding duplicate data

Hi,
I'm uploading php_error.log entries with a Logstash script, but every time I run the script it ingests all of the data again, so the index count includes duplicate documents. How can I avoid that?

This is the script I am using:

input {
  file {
    path => "/var/log/apache2/php_errors.log"
    start_position => "beginning"
    sincedb_path => "/dev/null"
    codec => multiline {
      pattern => '^\[\d{2}-[a-zA-Z]{3}-\d{4} \d{2}:\d{2}:\d{2} '
      negate => true
      what => "previous"
      auto_flush_interval => 1
      multiline_tag => ""
    }
  }
}

filter {
  grok {
    match => { "message" => "\[%{LOGTIME:[@metadata][timestamp]} %{DATA:[@metadata][tz]}\] PHP %{DATA:log_level}:%{SPACE}%{GREEDYDATA:log_message}" }
    pattern_definitions => { "LOGTIME" => "%{MONTHDAY}-%{MONTH}-%{YEAR} %{TIME}" }
  }

  date {
    match => [ "[@metadata][timestamp]", "dd-MMM-yyyy HH:mm:ss" ]
    target => "@timestamp"
    timezone => "%{[@metadata][tz]}"
  }

  # You can remove unnecessary fields if needed
  mutate {
    remove_field => ["log", "message", "event", "host"]
  }
}

output {
  elasticsearch {
    hosts => ["http://localhost:9200"]
    user => "elastic"
    password => "plus91"
    index => "php_error_logs_all"
    action => "index"
    doc_as_upsert => true
  }

  stdout {
    codec => rubydebug
  }
}
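
For context, the re-ingestion is consistent with sincedb_path => "/dev/null" combined with start_position => "beginning": with the sincedb disabled, the file input re-reads the whole log on every run. One common way to make re-runs idempotent is to derive the Elasticsearch document ID from the event itself, so a re-indexed event overwrites the existing document instead of adding a new one. Below is a minimal sketch of that approach using the fingerprint filter; the field names, credentials, and the php_error_logs_all index are taken from the config above, and everything else (where the block is placed, the hashing method) is an assumption, not a confirmed fix.

filter {
  # Assumption: this runs before the existing mutate/remove_field filter,
  # so the raw multiline "message" is still present to be hashed.
  # Identical log entries then always produce the same fingerprint.
  fingerprint {
    source => ["message"]
    target => "[@metadata][fingerprint]"
    method => "SHA256"
  }
}

output {
  elasticsearch {
    hosts => ["http://localhost:9200"]
    user => "elastic"
    password => "plus91"
    index => "php_error_logs_all"
    # Reuse the fingerprint as the document ID: re-running the pipeline
    # overwrites the same document rather than creating a duplicate.
    document_id => "%{[@metadata][fingerprint]}"
  }
}

Note that doc_as_upsert only has an effect together with action => "update" and a document_id; with action => "index" and no document_id, as in the config above, each run simply indexes new documents.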
