"object mapping for [host] tried to parse field [host] as object, but found a concrete value"

I am getting this error in my Logstash log:
"object mapping for [host] tried to parse field [host] as object, but found a concrete value"
This started happening when I tried to ingest the syslog traffic coming from a FortiGate. Here's my config file:

input {
      udp {
        port => 5000
        type => "SYSLOG"
        }
}

filter {
        if [type] == "SYSLOG" {

                grok {
                        match => ["message", "%{SYSLOG5424PRI:SYSLOG_index}%{GREEDYDATA:message}"]
                        overwrite => [ "message" ]
                        tag_on_failure => [ "forti_grok_failure" ]
                }


                kv {
                        source => "message"
                        value_split => "="
                        field_split => ","
                }

                mutate {
                        add_field => { "temp_time" => "%{date} %{time}" }
                        rename => { "type" => "ftg_type" }
                        rename => { "subtype" => "ftg_subtype" }
                        add_field => { "type" => "SYSLOG" }
                        convert => { "rcvdbyte" => "integer" }
                        convert => { "sentbyte" => "integer" }
                }

                date {
                        match => [ "temp_time", "yyyy-MM-dd HH:mm:ss" ]
                        timezone => "UTC" # change to your timezone
                        target => "@timestamp"
                }

                mutate {
                remove_field => ["SYSLOG_index","SYSLOG5424_pri","path","temp_time","service","date","time","sentpkt","rcvdpkt","log_id","message","poluuid"]
                }
        }
}

output {
        stdout { codec => rubydebug }
        if [type] == "SYSLOG" {
                elasticsearch {
                hosts => ["IP:9200"]
                http_compression => true
                index => "forti-%{+YYYY.MM.dd}"
                user => "elastic"
                password => "elastic"
                template => "/usr/share/logstash/bin/forti.json"
                template_name => "forti-*"
                }
        }
}

The exact error is this:


[2018-07-26T13:09:54,472][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x2a462e54>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"XzV-1mQBrYy0Cu312FEm", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}

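This error is a mapping conflict, and in a 6.x stack the usual culprit is the host field: Beats 6.x events carry host as an object (with sub-fields such as host.name), so the index ends up mapping [host] as an object, while the udp input sets host to a plain string (the sender's address). Elasticsearch then refuses to put a concrete value where it expects an object. A minimal sketch of a workaround, assuming the original field name isn't needed (the target name syslog_host is my own choice):

filter {
  if [type] == "SYSLOG" {
    # the udp input sets [host] to a plain string, but the index already
    # maps [host] as an object; move the string out of the way
    mutate {
      rename => { "host" => "syslog_host" }
    }
  }
}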

Another error I get:

[2018-07-26T13:10:08,476][ERROR][logstash.pipeline        ] A plugin had an unrecoverable error. Will restart this plugin.
  Pipeline_id:mypipeline_1
  Plugin: <LogStash::Inputs::Beats port=>5044, id=>"0c4520c89d59a340d6e2f4793d4328bde33d1e2b0ebbd19656dd74d9d8423b05", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_1c78f731-a09e-40a8-94f4-8e0838d03dbd", enable_metric=>true, charset=>"UTF-8">, host=>"0.0.0.0", ssl=>false, ssl_verify_mode=>"none", include_codec_tag=>true, ssl_handshake_timeout=>10000, tls_min_version=>1, tls_max_version=>1.2, cipher_suites=>["TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256"], client_inactivity_timeout=>60, executor_threads=>4>
  Error: Address already in use
  Exception: Java::JavaNet::BindException
  Stack: sun.nio.ch.Net.bind0(Native Method)
sun.nio.ch.Net.bind(sun/nio/ch/Net.java:433)
sun.nio.ch.Net.bind(sun/nio/ch/Net.java:425)
sun.nio.ch.ServerSocketChannelImpl.bind(sun/nio/ch/ServerSocketChannelImpl.java:223)
io.netty.channel.socket.nio.NioServerSocketChannel.doBind(io/netty/channel/socket/nio/NioServerSocketChannel.java:128)
io.netty.channel.AbstractChannel$AbstractUnsafe.bind(io/netty/channel/AbstractChannel.java:558)
io.netty.channel.DefaultChannelPipeline$HeadContext.bind(io/netty/channel/DefaultChannelPipeline.java:1283)
io.netty.channel.AbstractChannelHandlerContext.invokeBind(io/netty/channel/AbstractChannelHandlerContext.java:501)
io.netty.channel.AbstractChannelHandlerContext.bind(io/netty/channel/AbstractChannelHandlerContext.java:486)
io.netty.channel.DefaultChannelPipeline.bind(io/netty/channel/DefaultChannelPipeline.java:989)
io.netty.channel.AbstractChannel.bind(io/netty/channel/AbstractChannel.java:254)
io.netty.bootstrap.AbstractBootstrap$2.run(io/netty/bootstrap/AbstractBootstrap.java:364)
io.netty.util.concurrent.AbstractEventExecutor.safeExecute(io/netty/util/concurrent/AbstractEventExecutor.java:163)
io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(io/netty/util/concurrent/SingleThreadEventExecutor.java:403)
io.netty.channel.nio.NioEventLoop.run(io/netty/channel/nio/NioEventLoop.java:463)
io.netty.util.concurrent.SingleThreadEventExecutor$5.run(io/netty/util/concurrent/SingleThreadEventExecutor.java:858)
io.netty.util.concurrent.FastThreadLocalRunnable.run(io/netty/util/concurrent/FastThreadLocalRunnable.java:30)
java.lang.Thread.run(java/lang/Thread.java:748)
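
This second error is unrelated to the mapping problem: "Address already in use" on port 5044 means some other process is already bound there, typically a second Logstash instance or the same beats input declared in more than one config file. The fix is to stop the duplicate listener; if that isn't possible, a sketch of moving one of them (5045 is an arbitrary free port I picked):

input {
  beats {
    # hypothetical alternative port; 5044 is held by the other listener
    port => 5045
  }
}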

There are many threads discussing this problem. If you do a search on that error message you should be able to find them. This is one of them.


I can't edit the original post, but here is an update:

input {
	udp {
		port => 5000 
		type => "SYSLOG"
	}
}

filter {
	if [type] == "SYSLOG" {

	grok {
		match => ["message", "%{SYSLOG5424PRI:syslog_index}%{GREEDYDATA:message}"]
		overwrite => [ "message" ]
		tag_on_failure => [ "forti_grok_failure" ]
	}


	kv {
		source => "message"
		value_split => "="
		field_split => ","
	}

	mutate {
		add_field => { "temp_time" => "%{date} %{time}" }
		rename => { "type" => "ftg_type" }
		rename => { "subtype" => "ftg_subtype" }
		add_field => { "type" => "SYSLOG" }
		convert => { "rcvdbyte" => "integer" }
		convert => { "sentbyte" => "integer" }
	}

	date {
		match => [ "temp_time", "yyyy-MM-dd HH:mm:ss" ]
		timezone => "UTC" # change to your timezone
		target => "@timestamp"
	}

	mutate {
		remove_field => ["syslog_index","syslog5424_pri","path","temp_time","service","date","time","sentpkt","rcvdpkt","log_id","message","poluuid"]
	}
	}
}

output {
	stdout { codec => rubydebug }
	if [type] == "SYSLOG" {
		elasticsearch {
		hosts => "localhost:9200"
		http_compression => true
		index => "forti-%{+YYYY.MM.dd}"
		user => "elastic"
		password => "elastic"
		template => "/usr/share/logstash/bin/forti.json"
		template_name => "forti-*"
		}
	}
}

The exact error is this:

[2018-07-26T13:56:46,288][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x30fce6df>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"WzWp1mQBrYy0Cu31v6m0", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:46,369][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x30fce6df>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"XTWp1mQBrYy0Cu31wKke", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:48,461][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x476184cc>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"XjWp1mQBrYy0Cu31yKlJ", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:48,520][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x476184cc>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"YDWp1mQBrYy0Cu31yKmF", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:48,919][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x1d7f5264>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"YTWp1mQBrYy0Cu31yqkT", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:48,961][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x1d7f5264>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"YzWp1mQBrYy0Cu31yqk9", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:56:58,360][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x670de581>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"ozWp1mQBrYy0Cu317qn1", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T13:57:35,486][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x67ea3a76>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"0TWq1mQBrYy0Cu31f6r6", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
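
One detail worth noting in those errors (my own reading): they reference the index logstash-2018.07.26, not forti-..., even though this output only writes to forti-%{+YYYY.MM.dd}. Unless pipelines.yml separates them, Logstash concatenates every file in the config directory into a single pipeline, so the syslog events are presumably also passing through a catch-all elasticsearch output in another config file and landing in logstash-*, where Beats has already mapped [host] as an object. A sketch of a guard for that other output, assuming it looks like the default one:

output {
  # hypothetical guard for the catch-all output in the other config file,
  # so udp syslog events never reach the logstash-* index
  if [type] != "SYSLOG" {
    elasticsearch {
      hosts => ["localhost:9200"]
    }
  }
}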

I'm getting logs from Winlogbeat and from Logstash itself, but none of them are from the FortiGate syslog. Any clues?

None of it helped me, but I will give it a go. I will edit this post if anything changes.

So, I gave it a go and, as expected, it didn't work. Here is the log error:

[2018-07-26T15:25:00,538][WARN ][logstash.shutdownwatcher ] {"inflight_count"=>0, "stalling_thread_info"=>{"other"=>[{"thread_id"=>40, "name"=>"[mypipeline_1]<beats", "current_call"=>"[...]/vendor/bundle/jruby/2.3.0/gems/logstash-input-beats-5.0.16-java/lib/logstash/inputs/beats.rb:198:in `run'"}, {"thread_id"=>42, "name"=>"[mypipeline_1]<udp", "current_call"=>"[...]/vendor/bundle/jruby/2.3.0/gems/logstash-input-udp-3.3.3/lib/logstash/inputs/udp.rb:115:in `select'"}], ["LogStash::Filters::Grok", {"match"=>{"message"=>"%{COMBINEDAPACHELOG}"}, "id"=>"bc98e2e54262bbc53a4d293c3d93a6eece4542e59e2ff2fc88730cfdb875763b"}]=>[{"thread_id"=>34, "name"=>nil, "current_call"=>"[...]/vendor/bundle/jruby/2.3.0/gems/manticore-0.6.3-java/lib/manticore/response.rb:50:in `call'"}, {"thread_id"=>35, "name"=>nil, "current_call"=>"[...]/logstash-core/lib/logstash/pipeline.rb:418:in `read_batch'"}, {"thread_id"=>36, "name"=>nil, "current_call"=>"[...]/logstash-core/lib/logstash/pipeline.rb:418:in `read_batch'"}, {"thread_id"=>37, "name"=>nil, "current_call"=>"[...]/logstash-core/lib/logstash/pipeline.rb:418:in `read_batch'"}]}}
[2018-07-26T15:25:00,564][ERROR][logstash.shutdownwatcher ] The shutdown process appears to be stalled due to busy or blocked plugins. Check the logs for more information.
[2018-07-26T15:25:01,218][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x78ba6661>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"mzb61mQBrYy0Cu31i0Qg", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T15:25:01,251][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2018.07.26", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x78ba6661>], :response=>{"index"=>{"_index"=>"logstash-2018.07.26", "_type"=>"doc", "_id"=>"nTb61mQBrYy0Cu31i0RB", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"object mapping for [host] tried to parse field [host] as object, but found a concrete value"}}}}
[2018-07-26T15:25:05,148][INFO ][logstash.pipeline        ] Pipeline has terminated {:pipeline_id=>"mypipeline_1", :thread=>"#<Thread:0x1c339207 run>"}
[2018-07-26T15:25:37,540][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"6.3.1"}
[2018-07-26T15:25:39,837][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:mypipeline_1, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of #, => at line 107, column 10 (byte 1947) after filter {\n\tif [type] == \"SYSLOG\" {\n\n\tgrok {\n\t\tmatch => [\"message\", \"%{SYSLOG5424PRI:syslog_index}%{GREEDYDATA:message}\"]\n\t\toverwrite => [ \"message\" ]\n\t\ttag_on_failure => [ \"forti_grok_failure\" ]\n\t}\n\n\n    kv {\n\t\tsource => \"message\"\n\t\tvalue_split => \"=\"\n\t\tfield_split => \",\"\n\t}\n\n\tmutate {\n\t\tadd_field => { \"temp_time\" => \"%{date} %{time}\" }\n\t\trename => { \"type\" => \"ftg_type\" }\n\t\trename => { \"subtype\" => \"ftg_subtype\" }\n\t\tadd_field => { \"type\" => \"SYSLOG\" }\n\t\tconvert => { \"rcvdbyte\" => \"integer\" }\n\t\tconvert => { \"sentbyte\" => \"integer\" }\n\t\trename ", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:42:in `compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:50:in `compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:12:in `block in compile_sources'", "org/jruby/RubyArray.java:2486:in `map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:11:in `compile_sources'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:49:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:167:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:40:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:305:in `block in converge_state'"]}
[2018-07-26T15:25:40,473][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}
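
Incidentally, the "Expected one of #, =>" ConfigurationError in that log is a separate and simpler problem: the quoted config dump ends with a dangling `rename ` token at the end of the mutate block, a rename with no `=> { ... }` value after it. The block compiles once that stray token is removed or completed:

mutate {
	add_field => { "temp_time" => "%{date} %{time}" }
	rename => { "type" => "ftg_type" }
	rename => { "subtype" => "ftg_subtype" }
	add_field => { "type" => "SYSLOG" }
	convert => { "rcvdbyte" => "integer" }
	convert => { "sentbyte" => "integer" }
	# the dangling `rename ` that triggered "Expected one of #, =>" sat here
}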

I changed my syslog config to:

input {
  tcp {
    port => 5000
    type => syslog
  }
  udp {
    port => 5000
    type => syslog
  }
}

filter {
  if [type] == "syslog" {
    grok {
      match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
      add_field => [ "received_at", "%{@timestamp}" ]
      add_field => [ "received_from", "%{host}" ]
    }
    date {
      match => [ "syslog_timestamp", "MMM  d HH:mm:ss", "MMM dd HH:mm:ss" ]
    }
  }
}

output {
  elasticsearch { hosts => ["localhost:9200"] }
  stdout { codec => rubydebug }
}
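
One caveat with this generic example (an observation of mine, not something from the thread): FortiGate units send comma-separated key=value messages rather than classic RFC3164 lines, so the SYSLOGTIMESTAMP/SYSLOGHOST grok pattern above will most likely fail to match them and tag every FortiGate event with _grokparsefailure. A sketch that keeps the kv parsing from the original config and also sidesteps the [host] object/string clash:

filter {
  if [type] == "syslog" {
    # FortiGate logs are comma-separated key=value pairs
    kv {
      source => "message"
      value_split => "="
      field_split => ","
    }
    # hypothetical rename to avoid colliding with the object-mapped [host]
    mutate {
      rename => { "host" => "syslog_host" }
    }
  }
}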

Then I restarted the host and got this output in Kibana:

Does anyone know how to get better output?

Anyone?

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.