Field name cannot contain '.'

Hi Gary

I am using Logstash to parse logs from Bro IDS. The ruby filter below works in most of my conf files, except for one called "weird.conf", where I still get the "Field name cannot contain '.'" error.

Can you please help?
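
To show what that ruby filter is doing, here is the same dot-to-underscore renaming as a standalone Ruby sketch. The sample hash is made-up data for illustration only; inside Logstash the keys come from event.to_hash and fields are removed with event.remove rather than Hash#delete:

# Rename any key containing '.' so that e.g. "id.orig_h" becomes "id_orig_h"
event = { "ts" => "1457000000.000000", "id.orig_h" => "10.0.0.1", "id.orig_p" => "53244" }

event.keys.each do |k|
  # sub only replaces the first '.', which is enough for the Bro field names
  event[k.sub('.', '_')] = event.delete(k) if k.include?('.')
end

puts event.inspect
# => {"ts"=>"1457000000.000000", "id_orig_h"=>"10.0.0.1", "id_orig_p"=>"53244"}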

Below is the config file:
input {
  file {
    type => "bro-weird_log"
    start_position => "end"
    sincedb_path => "/var/tmp/.bro_weird_sincedb"

    #Edit the following path to reflect the location of your log files. You can also change the extension if you use something else.
    path => "/usr/local/bro/logs/current/weird.log"
  }
}

filter {

  #Let's get rid of those header lines; they begin with a hash
  if [message] =~ /^#/ {
    drop { }
  }

  #Replace "." in field names with "_"
  ruby {
    code => "
      event.to_hash.keys.each { |k| event[k.sub('.', '_')] = event.remove(k) if k.include?('.') }
    "
  }

  #Now, using the csv filter, we can define the Bro log fields
  if [type] == "bro-weird_log" {
    csv {

      #weird.log:#fields	ts	uid	id.orig_h	id.orig_p	id.resp_h	id.resp_p	name	addl	notice	peer
      columns => ["ts","uid","id.orig_h","id.orig_p","id.resp_h","id.resp_p","name","addl","notice","peer"]

      #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
      separator => "	"
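      #Note: the value above is a literal tab character, since Bro writes tab-delimited logs by default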
    }

    #Parse the 'ts' field as a UNIX timestamp, so we can use Kibana's time features natively
    date {
      match => [ "ts", "UNIX" ]
    }

    # add geoip attributes
    geoip {
      source => "id.orig_h"
      target => "orig_geoip"
    }
    geoip {
      source => "id.resp_h"
      target => "resp_geoip"
    }

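    #Cast the port fields to integers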
    mutate {
      convert => [ "id.orig_p", "integer" ]
      convert => [ "id.resp_p", "integer" ]
    }

  }
}

output {

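  #Print each parsed event to the console for debugging, and send it to Elasticsearch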
  stdout { codec => rubydebug }

  elasticsearch { hosts => ["localhost"] }
}