Logstash conf problem

Hi,

I have a problem with Logstash: when I add a grok filter, Logstash seems to stop sending data to ES/Kibana.

My data comes in via the UDP protocol on port 514.

here is my filter conf file (the ELK system is working fine at this point)

filter {
#  if [type] == "syslog" {
#    syslog_pri { }
#    date {
#      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
#      match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
#    }
#  }


####FILTER ESX
# Parse raw syslog lines arriving from the two ESX hosts. Patterns are tried
# in order and the first match wins (break_on_match => true). Each pattern
# targets a progressively less specific ESX log line shape.
# FIX(review): the second match entry below was garbled in the original config
# (two pattern strings fused on one line: `...esx_msg}",{SYSLOGPROG}\[...`),
# which is invalid config syntax and aborts pipeline startup. It has been
# split back into two separate "message" entries, reconstructing the shared
# prefix from the sibling patterns.

if [host] == "IPESX1" or [host] == "IPESX2" {
	grok{
		break_on_match => true
		match => [
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}:\s%{WORD:esx_syslog_level}\s%{SYSLOGPROG}\[%{DATA:esx_thread_id}\]\s\[%{DATA} sub=%{DATA:esx_sub} opID=%{DATA:esx_opID}\]\s\[%{DATA:esx_msg_service_info}\]\s%{GREEDYDATA:esx_msg}",
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}:\s%{WORD:esx_syslog_level}\s%{SYSLOGPROG}\[%{DATA:esx_thread_id}\]\s\[%{DATA} sub=%{DATA:esx_sub}\]\s%{DATA:esx_msg_service_info}\s:%{GREEDYDATA:esx_msg}",
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}:\s%{WORD:esx_syslog_level}\s%{SYSLOGPROG}\[%{DATA:esx_thread_id}\]\s\[%{DATA} sub=%{DATA:esx_sub}\]\s%{DATA:esx_msg_service_info}:%{GREEDYDATA:esx_msg}",
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}:\s%{WORD:esx_syslog_level}\s%{GREEDYDATA:esx_msg}",
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}\[%{DATA:esx_thread_id}\]:%{GREEDYDATA:esx_msg}",
		"message", "<%{POSINT:syslog_pri}>-->\s\[%{DATA:msg}\]",
		"message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}: %{GREEDYDATA:msg}",
		"message", "<%{POSINT:syslog_pri}>%{UUID:esx_uuid}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}:\s%{GREEDYDATA:msg}"

		]
		add_tag => [ "esx" ]

	}
	mutate {
        # NOTE(review): "@message" is not a default field in Logstash 2.x
        # events (only "message" and "@timestamp" are); this likely should
        # target "message" — confirm the intended destination field.
        replace => [ "@message", "%{msg}" ]
	}


	# Re-grok with break_on_match => false so additional fields from later
	# patterns can accumulate on top of the first pass above.
	if "Rejected password" in [message] and "cpu" in [message] {
        grok {
            break_on_match => false
            match => [
                "message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}: %{GREEDYDATA:msg}"
            ]
			add_tag => [ "esx" ]
        }
	}
	# Startup banner lines ("Section for VMware ...") carry pid/version/build.
	if "Section for VMware" in [message] {
        grok {
            break_on_match => false
            match => [
                "message", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp}\s%{SYSLOGHOST:esx_syslog_hostname}\s%{SYSLOGPROG:esx_syslog_program}: %{GREEDYDATA:msg},\spid=%{NUMBER:esx_pid},\sversion=%{DATA:esx_version},\sbuild=%{NUMBER:esx_build},\soption=%{WORD:esx_option}"
            ]
			add_tag => [ "esx" ]
        }
	}

}


}



 filter {
  # Events shipped by nxlog arrive as a JSON document in [message];
  # expand it into top-level fields, then normalize field names/case.
  if [type] == "nxlog" {
    json {
      source => "message"
    }
    # NOTE(review): the mutate filter applies its operations in a fixed
    # internal order (rename runs BEFORE lowercase), not in the order
    # written here — so "Hostname"/"Message" are renamed first, and the
    # lowercase of "Hostname" is then a no-op on the already-renamed field.
    # Verify this ordering matches the intended behavior.
    mutate {
        lowercase => [ "EventType", "FileName", "Hostname", "Severity", "host" ]
        rename => [ "Hostname", "host" ]
        rename => [ "Message", "message" ]
    }
  }
}

The system stops working when I add my grok filter for my Fortigate firewall:

# Fortigate firewall traffic logs: three match variants covering different
# firmware spellings ("transip"/"transport" vs. "tranip"/"tranport") and the
# app-field variant. Multiple match lines on one grok merge into one pattern
# list, tried in order.
# NOTE(review): FORTIGATE_BASE / FORTIGATE_COUNTRY / FORTIGATE_BYTE_PKT are
# custom patterns loaded from patterns_dir; the startup error
# "pattern %{HOST:hostname} not defined" presumably comes from one of those
# files — HOST is not in grok's standard pattern set (HOSTNAME is). Verify
# the pattern file contents.
if [type] == "syslog" {
	if [host] == "10.33.2.254" {
		grok {
			patterns_dir => "/etc/logstash/patterns/"
			match => ["message","%{FORTIGATE_BASE},poluuid=%{UUID:fw_poluuid},sessionid=%{NUMBER},proto=%{WORD:fw_proto},action=%{WORD:fw_action},policyid=%{NUMBER:fw_policyid},%{FORTIGATE_COUNTRY},transip=%{IPV4:fw_transip},transport=%{NUMBER:fw_transport},service=%{QUOTEDSTRING:fw_service},%{FORTIGATE_BYTE_PKT}"]
			match => ["message","%{FORTIGATE_BASE},poluuid=%{UUID:fw_poluuid},sessionid=%{NUMBER},proto=%{WORD:fw_proto},action=%{WORD:fw_action},policyid=%{NUMBER:fw_policyid},%{FORTIGATE_COUNTRY},tranip=%{IPV4:fw_transip},tranport=%{NUMBER:fw_transport},service=%{QUOTEDSTRING:fw_service},%{FORTIGATE_BYTE_PKT}"]
			match => ["message","%{FORTIGATE_BASE},sessionid=%{NUMBER},proto=%{WORD:fw_proto},action=%{WORD:fw_action},policyid=%{NUMBER:fw_policyid},%{FORTIGATE_COUNTRY},service=%{QUOTEDSTRING:fw_service},app=%{QUOTEDSTRING:fw_app},%{FORTIGATE_BYTE_PKT}"]
			add_tag => [ "Firewall" ]
		}
	}
}

I put this block after the ESX block, inside the first filter block.

When I restart my Logstash service, there is no more incoming data in Kibana.

How can i fix this ?

Thanks

Is there anything interesting in the Logstash logs? What if you increase the logging verbosity?

Hi Magnus, thanks for your reply,

After restarting Logstash with the entire conf file, I got this in the Logstash log:

{:timestamp=>"2016-04-15T09:49:38.255000+0200", :message=>"SIGTERM received. Shutting down the agent.", :level=>:warn}
{:timestamp=>"2016-04-15T09:49:38.255000+0200", :message=>"stopping pipeline", :id=>"main"}
{:timestamp=>"2016-04-15T09:49:38.385000+0200", :message=>"UDP listener died", : exception=>#<IOError: closed stream>, :backtrace=>["org/jruby/RubyIO.java:3682:in `select'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-udp-2.0.5/lib/logstash/inputs/udp.rb:77:in `udp_listener'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-udp-2.0.5/lib/logstash/inputs/udp.rb:50:in `run'" , "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:342:in `inputworker'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:336:in `start_input'"],:level=>:warn}
{:timestamp=>"2016-04-15T09:49:39.647000+0200", :message=>"Pipeline main has been shutdown"}
{:timestamp=>"2016-04-15T09:49:44.927000+0200", :message=>"Pipeline aborted due to error", :exception=>#<Grok::PatternError: pattern %{HOST:hostname} not defined>, :backtrace=>["/opt/logstash/vendor/bundle/jruby/1.9/gems/jls-grok-0.11.2/lib/grok-pure.rb:123:in `compile'", "org/jruby/RubyKernel.java:1479:in `loop'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/jls-grok-0.11.2/lib/grok-pure.rb:93:in `compile'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-2.0.5/lib/logstash/filters/grok.rb:264:in `register'", "org/jruby/RubyArray.java:1613:in `each'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-2.0.5/lib/logstash/filters/grok.rb:259:in `register'", "org/jruby/RubyHash.java:1342:in `each'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-grok-2.0.5/lib/logstash/filters/grok.rb:255:in `register'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:182:in `start_workers'", "org/jruby/RubyArray.java:1613:in `each'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:182:in `start_workers'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:136:in `run'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/agent.rb:465:in `start_pipeline'"], :level=>:error}
{:timestamp=>"2016-04-15T09:49:46.625000+0200", :message=>"SSL Error", :exception=>#<OpenSSL::SSL::SSLError: Socket closed>, :backtrace=>["org/jruby/ext/openssl/SSLSocket.java:272:in `accept'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/jruby-openssl-0.9.13-java/lib/jopenssl19/openssl/ssl-internal.rb:106:in `accept'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-tcp-3.0.4/lib/logstash/inputs/tcp.rb:112:in `run_server'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-input-tcp-3.0.4/lib/logstash/inputs/tcp.rb:84:in `run'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:342:in `inputworker'", "/opt/logstash/vendor/bundle/jruby/1.9/gems/logstash-core-2.3.1-java/lib/logstash/pipeline.rb:336:in `start_input'"], :level=>:error}
{:timestamp=>"2016-04-15T09:49:47.932000+0200", :message=>"stopping pipeline", :id=>"main"}

I don't know why, but it seems the host field is no longer recognized when I add my firewall grok pattern.

... Grok::PatternError: pattern %{HOST:hostname} not defined ...

Well, there's no HOST pattern in grok's standard set so unless you define that pattern in one of your own pattern files this isn't going to work. Perhaps you meant HOSTNAME?

I have the same problem.