I'm using the following Logstash config, and I can see events coming out on stdout, but nothing is showing up in Kibana/Elasticsearch. I created an index pattern called logstash*, but there seems to be nothing in it.
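# Listen for Cisco syslog messages on both TCP and UDP port 5000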
input {
  tcp {
    port => 5000
    type => syslog
  }
  udp {
    port => 5000
    type => syslog
  }
}
filter {
  grok {
    match => ["message", "%{CISCO_TAGGED_SYSLOG} %{GREEDYDATA:cisco_message}"]
  }
  # Extract fields from each of the detailed message types
  # The patterns provided below are included in the core of Logstash 1.4.2.
  grok {
    match => [
      "cisco_message", "%{CISCOFW106001}",
      "cisco_message", "%{CISCOFW106006_106007_106010}",
      "cisco_message", "%{CISCOFW106014}",
      "cisco_message", "%{CISCOFW106015}",
      "cisco_message", "%{CISCOFW106021}",
      "cisco_message", "%{CISCOFW106023}",
      "cisco_message", "%{CISCOFW106100}",
      "cisco_message", "%{CISCOFW110002}",
      "cisco_message", "%{CISCOFW302010}",
      "cisco_message", "%{CISCOFW302013_302014_302015_302016}",
      "cisco_message", "%{CISCOFW302020_302021}",
      "cisco_message", "%{CISCOFW305011}",
      "cisco_message", "%{CISCOFW313001_313004_313008}",
      "cisco_message", "%{CISCOFW313005}",
      "cisco_message", "%{CISCOFW402117}",
      "cisco_message", "%{CISCOFW402119}",
      "cisco_message", "%{CISCOFW419001}",
      "cisco_message", "%{CISCOFW419002}",
      "cisco_message", "%{CISCOFW500004}",
      "cisco_message", "%{CISCOFW602303_602304}",
      "cisco_message", "%{CISCOFW710001_710002_710003_710005_710006}",
      "cisco_message", "%{CISCOFW713172}",
      "cisco_message", "%{CISCOFW733100}"
    ]
  }
  # Parse the syslog severity and facility
  syslog_pri { }
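  # Look up the source IP in the GeoLite City database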
  geoip {
    add_tag => [ "GeoIP" ]
    database => "/opt/logstash/databases/GeoLiteCity.dat"
    source => "src_ip"
  }
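  # Remove any geoip fields that came back empty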
  if [geoip][city_name]      == "" { mutate { remove_field => "[geoip][city_name]" } }
  if [geoip][continent_code] == "" { mutate { remove_field => "[geoip][continent_code]" } }
  if [geoip][country_code2]  == "" { mutate { remove_field => "[geoip][country_code2]" } }
  if [geoip][country_code3]  == "" { mutate { remove_field => "[geoip][country_code3]" } }
  if [geoip][country_name]   == "" { mutate { remove_field => "[geoip][country_name]" } }
  if [geoip][latitude]       == "" { mutate { remove_field => "[geoip][latitude]" } }
  if [geoip][longitude]      == "" { mutate { remove_field => "[geoip][longitude]" } }
  if [geoip][postal_code]    == "" { mutate { remove_field => "[geoip][postal_code]" } }
  if [geoip][region_name]    == "" { mutate { remove_field => "[geoip][region_name]" } }
  if [geoip][time_zone]      == "" { mutate { remove_field => "[geoip][time_zone]" } }
  # Gets the source IP whois information from the GeoIPASNum.dat flat file database
  geoip {
    add_tag => [ "Whois" ]
    database => "/opt/logstash/databases/GeoIPASNum.dat"
    source => "src_ip"
  }
  # Parse the date
  date {
    match => ["timestamp",
      "MMM dd HH:mm:ss",
      "MMM  d HH:mm:ss",
      "MMM dd yyyy HH:mm:ss",
      "MMM  d yyyy HH:mm:ss"
    ]
  }
}
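# Print every parsed event to stdout (rubydebug) for debugging and send it to Elasticsearch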
output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["10.1.1.17:9200"]
  }
}
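For reference, this is roughly the check I can run against Elasticsearch to see whether anything is being indexed at all (10.1.1.17:9200 is the host from the output section above, and logstash-* is the default daily index naming used by the elasticsearch output, so adjust if that doesn't match):

# List all indices with their document counts
curl 'http://10.1.1.17:9200/_cat/indices?v'

# Count documents across any logstash-* indices
curl 'http://10.1.1.17:9200/logstash-*/_count?pretty'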