Log in wrong index

Again, sorry — I was too quick. I read both the document and your first comment again, and I think I have a bit of an understanding now.

I have to filter the events and route them to the right output, correct?
What I'm not sure about yet is how to do that exactly, so some help there would be very much appreciated.

I think if I were to do it like this, it should work, right?

input {
	# Receive OSSEC alerts shipped by logstash-forwarder over the
	# lumberjack protocol (TLS-encrypted).
	lumberjack {
		port => 5003
		type => "lumberjack"
		ssl_certificate => "/etc/logstash/logstash-forwarder.crt"
		ssl_key => "/etc/logstash/logstash-forwarder.key"
		# Events arrive as JSON documents; decode them into fields.
		codec => json
	}
}
filter {
	# Enrich only OSSEC alerts; all other event types pass through untouched.
	# NOTE(review): the lumberjack input above tags events with type
	# "lumberjack", not "ossec" — confirm the forwarder sets type "ossec",
	# otherwise this branch never matches.
	if [type] == "ossec" {
		# Resolve the alert's source IP to geo coordinates.
		geoip {
			source => "srcip"
			target => "geoip"
			database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
			# Build [geoip][location] as a [lon, lat] array — the order
			# Elasticsearch expects for a geo_point stored as an array.
			add_field => ["[geoip][location]", "%{[geoip][longitude]}"]
			add_field => ["[geoip][location]", "%{[geoip][latitude]}"]
		}
		# Use the alert's own timestamp (e.g. "2016 Mar 21 14:00:00")
		# as the event's @timestamp instead of the ingest time.
		date {
			match => ["timestamp", "YYYY MMM dd HH:mm:ss"]
			target => "@timestamp"
		}
		mutate {
			# geo_point coordinates must be numeric, not strings.
			convert => ["[geoip][location]", "float"]
			# Rename fields to match the ossec index template's mapping.
			rename => ["hostname", "AgentName"]
			rename => ["geoip", "GeoLocation"]
			rename => ["file", "AlertsFile"]
			rename => ["agentip", "AgentIP"]
			rename => ["[rule][comment]", "[rule][description]"]
			rename => ["[rule][level]", "[rule][AlertLevel]"]
			# Original timestamp is now redundant (copied into @timestamp).
			remove_field => ["timestamp"]
		}
	}
}

output {
	 # stdout {
		codec => rubydebug
	}
	elasticsearch {
		hosts => ["bcksrv16:9200"]
		index => "ossec-%{+YYYY.MM.dd}"
		document_type => "ossec"
		template => "/etc/logstash/elastic-ossec-template.json"
		template_name => "ossec"
		template_overwrite => true
	}
}


input {
	# Receive plain syslog datagrams over UDP and tag them so the
	# syslog filter/output branches can match on [type].
	udp {
		port => 5001
		type => syslog
	}
}

filter {
	# Parse only events from the UDP syslog input; OSSEC events skip this.
	if [type] == "syslog" {
		grok {
			break_on_match => true
			# Split a raw syslog line into timestamp, host, program,
			# optional Windows event id, optional pid, and message.
			match => ["message", "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(: %{POSINT:win_eventid})?(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}"]
			# Preserve the ingest time before date{} overwrites @timestamp.
			add_field => ["received_at", "%{@timestamp}"]
			remove_field => ["host"]
		}
		# Decode the syslog priority into facility/severity fields.
		syslog_pri {}
		# Set @timestamp from the message's own timestamp; several
		# formats are tried because senders differ.
		date {
			match => ["syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "MM/dd/yy HH:mm:ss"]
		}
	}
}

output {
	# Logstash merges every config file into a single pipeline, so without
	# this conditional the OSSEC events from the other file would also be
	# written to the logstash-* index (the "log in wrong index" symptom).
	if [type] == "syslog" {
		elasticsearch {
			# hosts must be an array of quoted "host:port" strings;
			# the bareword bcksrv16 is not valid plugin option syntax.
			hosts => ["bcksrv16:9200"]
			index => "logstash-%{+YYYY.MM.dd}"
			document_type => "syslog"
		}
	}
}

I have removed one filter section from the syslog part because it's no longer needed — we don't send those items to Logstash anymore.

Thanks for any help