Hi all,
I've got this logstash conf of consuming a JMS channel, do some transformations, and indexing into an Elastic Cluster.
But i keep getting this error:
[2019-05-24T10:29:43,516][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>"4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6", :_index=>"eai_prod_log_v2-2019.05.24", :_type=>"doc", :routing=>nil}, #LogStash::Event:0xfec8b3e], :response=>{"index"=>{"_index"=>"index-2019.05.24", "_type"=>"doc", "_id"=>"4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6", "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"failed to parse field [client_id] of type [long] in document with id '4b6d4e72-cd20-44ba-b3e8-ad78bae28cd6'", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"For input string: "%{[logEnrich][0][client_id]}""}}}}}
input {
jms {
destination => "QUEUE"
yaml_file => "D:\mq.yml"
yaml_section => "section2"
}
}
filter {
xml{
source => "message"
store_xml => false
remove_namespaces => true
force_array => false
xpath => [
"/LogMessage/TransactionID/text()", "transaction_id",
"/LogMessage/ClientID/text()", "client_id",
"/LogMessage/ServiceName/text()", "service_name",
"/LogMessage/ServiceKey/text()", "service_key"
]
}
############################ Enrich the log with DB Data ##############################
if [service_key] {
mutate {
gsub => [
"service_key", "&", "&",
"service_key", "<", "<",
"service_key", ">", ">"
]
}
jdbc_streaming {
jdbc_driver_library => "..\db2jcc.jar"
jdbc_driver_class => "com.ibm.db2.jcc.DB2Driver"
jdbc_connection_string => "Database"
jdbc_user => "user"
jdbc_password => "pwd"
cache_expiration => 10.0
parameters => { "serviceKey" => "service_key" }
parameters => { "serviceName" => "service_name" }
statement => "
SELECT CLIENT_ID, APPLICATION_ID, CLIENT_NAME
FROM TABLE
WHERE SECRET_KEY = :serviceKey AND APPLICATION_NAME = :serviceName"
target => "logEnrich"
}
} else {
jdbc_streaming {
jdbc_driver_library => "..\db2jcc.jar"
jdbc_driver_class => "com.ibm.db2.jcc.DB2Driver"
jdbc_connection_string => "Database"
jdbc_user => "user"
jdbc_password => "pwd"
parameters => { "serviceName" => "service_name" }
statement => "
SELECT 0 AS CLIENT_ID, APPLICATION_ID, '' AS CLIENT_NAME
FROM TABLE
WHERE APPLICATION_NAME = :serviceName "
target => "logEnrich"
}
}
mutate {
add_field => {
"client_id" => "%{[logEnrich][0][client_id]}"
"application_id" => "%{[logEnrich][0][application_id]}"
"client_name" => "%{[logEnrich][0][client_name]}"
}
}
if [client_id] == "%{[logEnrich][0][client_id]}}" {
mutate { replace => [ "client_id", "0" ] }
}
if [application_id] == "%{[logEnrich][0][application_id]}" {
mutate { replace => [ "application_id", "0" ] }
}
if [client_name] == "%{[logEnrich][0][client_name]}" {
mutate { replace => [ "client_name", "" ] }
}
########################################################################################
############################ Creating the Unique ID for doc ############################
uuid {
target => "uuid"
overwrite => true
}
########################################################################################
############################ CLEANING ##############################
mutate { remove_field => "logEnrich" }
mutate { remove_field => "service_key" }
####################################################################
}
output {
elasticsearch {
hosts => ["elasticp01:9200", "elasticp02:9200"]
index => "index-%{+YYYY.MM.dd}"
document_id =>"%{uuid}"
}
if [client_id] == "%{[logEnrich][0][client_id]}}" {
file {
path => "D:\debug-PROD-%{+YYYY-MM-dd}.log"
codec => rubydebug
}
}
}
The error tell that the client_id field has invalid values, but how can it be based in the conf above?
Anyone can see where is the mistake?
Cheers,