Keep getting this error indexing into Elastic

Hi all,

I've got this Logstash conf that consumes a JMS channel, does some transformations, and indexes into an Elasticsearch cluster.

But I keep getting this error:

[2019-05-24T10:29:43,516][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6", :_index=>"eai_prod_log_v2-2019.05.24", :_type=>"doc", :routing=>nil}, #<LogStash::Event:0xfec8b3e>], :response=>{"index"=>{"_index"=>"index-2019.05.24", "_type"=>"doc", "_id"=>"4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [client_id] of type [long] in document with id '4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"For input string: \"%{[logEnrich][0][client_id]}\""}}}}}

input {
	jms {
		destination => "QUEUE"
		yaml_file => "D:\mq.yml"
		yaml_section => "section2"
	}	
}
filter {
	xml{
        source => "message"
        store_xml => false
		remove_namespaces => true
		force_array => false
        xpath => [
            "/LogMessage/TransactionID/text()", "transaction_id",
            "/LogMessage/ClientID/text()",      "client_id",
            "/LogMessage/ServiceName/text()",   "service_name",
            "/LogMessage/ServiceKey/text()",    "service_key"
        ]
    }

	############################ Enrich the log with DB Data ##############################
	if [service_key] {
	
		mutate {
			gsub => [
				"service_key", "&amp;", "&",
				"service_key", "&lt;", "<",
				"service_key", "&gt;", ">"
			]
		}
		
		jdbc_streaming {
			jdbc_driver_library => "..\db2jcc.jar"
			jdbc_driver_class => "com.ibm.db2.jcc.DB2Driver"
			jdbc_connection_string => "Database"
			jdbc_user => "user"
			jdbc_password => "pwd"
			cache_expiration => 10.0
			parameters => {
				"serviceKey"  => "service_key"
				"serviceName" => "service_name"
			}
			statement => "
				SELECT CLIENT_ID, APPLICATION_ID, CLIENT_NAME
				FROM TABLE
				WHERE SECRET_KEY = :serviceKey AND APPLICATION_NAME = :serviceName"
			
			target => "logEnrich"
		}
	} else {
		jdbc_streaming {
			jdbc_driver_library => "..\db2jcc.jar"
			jdbc_driver_class => "com.ibm.db2.jcc.DB2Driver"
			jdbc_connection_string => "Database"
			jdbc_user => "user"
			jdbc_password => "pwd"
			parameters => { "serviceName" => "service_name" }
			statement => "
				SELECT 0 AS CLIENT_ID, APPLICATION_ID, '' AS CLIENT_NAME
				FROM TABLE
				WHERE APPLICATION_NAME = :serviceName "
				
			target => "logEnrich"
		}
	}
	
	mutate {
		add_field =>  {
			"client_id" => "%{[logEnrich][0][client_id]}"
			"application_id" => "%{[logEnrich][0][application_id]}"
			"client_name" => "%{[logEnrich][0][client_name]}"
		}
	}
	
	if [client_id] == "%{[logEnrich][0][client_id]}}" {
		mutate { replace => [ "client_id", "0" ] }
	}
	
	if [application_id] == "%{[logEnrich][0][application_id]}" {
		mutate { replace => [ "application_id", "0" ] }
	}
	
	if [client_name] == "%{[logEnrich][0][client_name]}" {
		mutate { replace => [ "client_name", "" ] }
	}	
	########################################################################################
	
	############################ Creating the Unique ID for doc ############################
	uuid {
		target    => "uuid"
		overwrite => true
	}
	########################################################################################

	############################ CLEANING ##############################
	mutate { remove_field => "logEnrich" }
	mutate { remove_field => "service_key" }
	####################################################################
}
output {
    elasticsearch {
        hosts => ["elasticp01:9200", "elasticp02:9200"]
		index => "index-%{+YYYY.MM.dd}"
		document_id => "%{uuid}"
    }

	if [client_id] == "%{[logEnrich][0][client_id]}}" {
		file {
				path => "D:\debug-PROD-%{+YYYY-MM-dd}.log"
				codec => rubydebug
		}
	}
}

The error says the client_id field has an invalid value, but how can that be, based on the conf above?
Can anyone see where the mistake is?

Cheers,

Do you maybe have a float and an integer in the same client_id field?

I believe not.
The client_id is obtained from the database.
In some cases I can't obtain the client_id, so I set it to the value 0.
That's it.

Based on the error, it seems the value is never actually set and the field keeps the literal value %{[logEnrich][0][client_id]}.
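
One way I could guard against that (a minimal sketch, assuming the lookup leaves [logEnrich][0][client_id] unset when no rows come back) is to only reference the lookup result when it actually exists, and fall back to the defaults otherwise:

if [logEnrich][0][client_id] {
	mutate {
		add_field => {
			"client_id"      => "%{[logEnrich][0][client_id]}"
			"application_id" => "%{[logEnrich][0][application_id]}"
			"client_name"    => "%{[logEnrich][0][client_name]}"
		}
	}
} else {
	mutate {
		add_field => {
			"client_id"      => "0"
			"application_id" => "0"
			"client_name"    => ""
		}
	}
}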

Cheers,

Hi there,

Maybe you have one "}" too many after client_id:
if [client_id] == "%{[logEnrich][0][client_id]}}" {
mutate { replace => [ "client_id", "0" ] }
}


You are correct, but I think it's not the problem, because I removed the extra "}" and I'm still getting the error:

[2019-05-24T13:11:03,453][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch.
{
  :status=>400,
  :action=>[
    "index",
    {
      :_id=>"897d3edd-b307-4fb8-86a5-40ede5f2103e",
      :_index=>"eai_prod_log_v2-2019.05.24",
      :_type=>"doc",
      :routing=>nil
    },
    #<LogStash::Event:0x2684b01f>
  ],
  :response=>{
    "index"=>{
      "_index"=>"eai_prod_log_v2-2019.05.24",
      "_type"=>"doc",
      "_id"=>"897d3edd-b307-4fb8-86a5-40ede5f2103e",
      "status"=>400,
      "error"=>{
        "type"=>"mapper_parsing_exception",
        "reason"=>"failed to parse field [client_id] of type [long] in document with id '897d3edd-b307-4fb8-86a5-40ede5f2103e'",
        "caused_by"=>{
          "type"=>"illegal_argument_exception",
          "reason"=>"For input string: \"%{[logEnrich][0][client_id]}\""
        }
      }
    }
  }
}

Hmm,

Maybe try this one:

if [client_id] =~ /logEnrich\]\[0\]\[client_id/ {
	mutate { replace => [ "client_id", "0" ] }
}

This will check whether that substring is inside [client_id], and if it is, replace the value with 0.
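
For what it's worth, Logstash conditionals do not sprintf-expand the right-hand side of a comparison, so an exact match against the literal placeholder should work too (a sketch, assuming the extra "}" has already been removed):

if [client_id] == "%{[logEnrich][0][client_id]}" {
	mutate { replace => [ "client_id", "0" ] }
}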

Hi there,

I found the problem; indeed it was the type of the data in the client_id field.
Initially client_id was filled with the value "1", a default value present in the XML (which I didn't know about).
When I added the client_id field with the value obtained from the DB, the field ended up with the value [1, 100], which is not valid for the Elasticsearch mapping: the field is a long, not an array.

Removing the initial default value solved the problem.
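
For anyone hitting the same thing: mutate's add_field appends when the field already exists, which is how client_id became the array [1, 100]. If removing the default from the XML isn't an option, a sketch of an alternative is to use replace instead, which overwrites the existing value rather than appending to it:

mutate {
	replace => {
		"client_id"      => "%{[logEnrich][0][client_id]}"
		"application_id" => "%{[logEnrich][0][application_id]}"
		"client_name"    => "%{[logEnrich][0][client_name]}"
	}
}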

Cheers,


This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.