Duplicate log events with different _id generated by logstash

Hi,

I am seeing duplicate log events in Kibana, each with a different _id generated by Logstash. Below is my Logstash config. Could someone help me with this?

input {
    file {
		path => "E:/console-2020*.log"		
		sincedb_path => "E:\logstash\logstash-7.6.0\sincedb\console-sincedb.txt"
		type => "consolelogs"
		codec => multiline {
			pattern => "%{WORD}%{SPACE}\|%{SPACE}jvm%{SPACE}1%{SPACE}\|%{SPACE}srvmain%{SPACE}\|%{SPACE}%{YEAR}/%{MONTHNUM2}/%{MONTHDAY}%{SPACE}%{HOUR}:%{MINUTE}:%{SECOND}.%{SECOND}%{SPACE}\|%{SPACE}at"
			what => "previous"
		}
    }
}
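
# For reference, the multiline codec above is meant to act on log lines like these
# (the log content here is made up purely for illustration):
#
#   INFO | jvm 1 | srvmain | 2020/02/18 10:15:30.123 | java.lang.NullPointerException: oops
#   INFO | jvm 1 | srvmain | 2020/02/18 10:15:30.124 |     at com.example.Foo.bar(Foo.java:42)
#
# The second line matches the pattern (its message part starts with "at"), so "what => previous"
# appends it to the preceding event, keeping a stack trace together as a single document.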

filter {	
	if "DefaultSolrClientPool" in [message] {
		drop { }
	}

	grok {
		# match setting omitted
	}
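	# Assuming the single-line log format implied by the multiline pattern above, the omitted
	# match presumably looks something like the following; it needs to extract the "newtimestamp"
	# field used by the date filter below (the loglevel/logmessage names are just placeholders):
	#
	# grok {
	#     match => { "message" => "%{WORD:loglevel}%{SPACE}\|%{SPACE}jvm%{SPACE}1%{SPACE}\|%{SPACE}srvmain%{SPACE}\|%{SPACE}(?<newtimestamp>%{YEAR}/%{MONTHNUM2}/%{MONTHDAY}%{SPACE}%{TIME})%{SPACE}\|%{SPACE}%{GREEDYDATA:logmessage}" }
	# }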
		
	date {
		match => ["newtimestamp", "yyyy/MM/dd HH:mm:ss.SSS"]
		timezone => "Europe/Stockholm"
		target => "@timestamp"
	}
}
			
output {
	if [type] == "consolelogs" {
		elasticsearch {
			hosts => ["<ip1>:9200"]
			index => "console-preprod-%{+YYYY.MM.dd}"
		}
	}
}
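
In case it helps with the diagnosis: by default Logstash lets Elasticsearch auto-generate the _id for every event, so if the same lines get read more than once (for example after a sincedb or file-rotation hiccup) they are indexed again as new documents instead of overwriting the old ones. One commonly suggested way to make the pipeline idempotent is to derive a deterministic document_id from the event content with the fingerprint filter. This is only a rough sketch of that approach, not part of my current config (the fingerprint target name is arbitrary):

filter {
	fingerprint {
		source => "message"
		target => "[@metadata][fingerprint]"
		method => "MURMUR3"
	}
}

output {
	if [type] == "consolelogs" {
		elasticsearch {
			hosts => ["<ip1>:9200"]
			index => "console-preprod-%{+YYYY.MM.dd}"
			document_id => "%{[@metadata][fingerprint]}"
		}
	}
}

With a stable document_id, a re-read of the same line results in an update of the existing document rather than a second copy with a new _id (at least within the same daily index).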

Is it reading from a local drive or is this networked storage? How is the log file updated/appended to?
