I have a setup of ES, Kibana on the cloud and LS and Filebeat (2 nodes). It was all working fine but suddenly i I see today that the pipelines stop in LS with the following error:
[2019-01-21T12:51:07,188][ERROR][logstash.pipeline ] Exception in pipelineworker, the pipeline stopped processing new events, please check your filter configuration and restart Logstash. {:pipeline_id=>"main", "exception"=>"Could not set field 'city_name' on object '49.35.23.207' to value 'Mumbai'.This is probably due to trying to set a field like [foo][bar] = someValuewhen [foo] is not either a map or a string",
Here is my logstash configuration
if [type] == "api" {
grok {
match => { "message" => "%{IP:s2sip} %{USER:user} %{USER:auth} [%{HTTPDATE:timestamp}] %{GREEDYDATA:botmanpath}" }
}
grok {
match => { "botmanpath" => "%{URIPARAM:botman_query}" }
}
kv {
source => "botmanpath"
field_split => "&?"
prefix => "bq_"
}
geoip {
source => "bq_ip"
add_field => [ "[bq_ip][coordinates]", "%{[bq_ip][longitude]}" ]
add_field => [ "[bq_ip][coordinates]", "%{[bq_ip][latitude]}" ]
}
mutate {
convert => [ "[bq_ip][coordinates]", "float"]
}
useragent {
source => "bq_s6"
}
}
}
output {
if [type] == "tag" {
elasticsearch {
hosts => [ "--" ]
user => [ "--" ]
password =>[ "--" ]
index => [ "%{[parsed][bizid]}-%{+YYYY.MM.dd}" ]
}
}
if [type] == "api" {
elasticsearch {
hosts => [ "--" ]
user => [ "--" ]
password =>[ "--" ]
index => [ "%{[parsed][bizid]}-%{+YYYY.MM.dd}" ]
}
}