Hi Team,
I have an issue pushing data from my database table into Elasticsearch through the Logstash JDBC input: some rows' data is missing, and an exception is thrown.
The exception is:
[2018-07-06T12:37:24,587][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"bank_crisil_rated", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x2aaa7c95>], :response=>{"index"=>{"_index"=>"bank_crisil_rated", "_type"=>"doc", "_id"=>"irdqbmQBoIEiWn9atfMq", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"mapper [capital_adequacy_ratio] cannot be changed from type [float] to [long]"}}}}
[2018-07-06T12:37:24,629][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"bank_crisil_rated", :_type=>"doc", :_routing=>nil}, #<LogStash::Event:0x10d76091>], :response=>{"index"=>{"_index"=>"bank_crisil_rated", "_type"=>"doc", "_id"=>"urdqbmQBoIEiWn9atfMr", "status"=>400, "error"=>{"type"=>"illegal_argument_exception", "reason"=>"mapper [total_income] cannot be changed from type [float] to [long]"}}}}
logstash.conf
input {
# JDBC input: periodically runs the SQL statement below against an Oracle
# database and emits one Logstash event per result row.
# NOTE(review): `lowercase_column_names` is not set, so it takes its default
# (true) — the selected columns arrive in events as lowercase field names
# (e.g. "total_income", not "TOTAL_INCOME"); verify downstream filters use
# the lowercase names.
jdbc {
# Validate the connection before using it (guards against stale pools).
jdbc_validate_connection => true
jdbc_connection_string => "jdbc:oracle:thin:@DEV:8080/CORPS"
jdbc_user => "ABC"
jdbc_password => "pass#1234"
# Oracle thin driver JAR and driver class.
jdbc_driver_library => "/opt/READONLYREST/OSS/logstash-6.3.0/ojdbc7.jar"
jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
# Joins period master/detail tables with company details; restricted to
# active companies using the 'Bank' template. Column1/Column10/Column9 are
# aliased to descriptive names.
statement => "Select Distinct Bm.Crmcompanyid,Cd.Company_Name,
Bm.Datatype,Bm.Template,
Bm.Isconsolidated,
Bm.Periodendson,
Bm.Crr_Date,
bm.period_code,
Column1 TOTAL_INCOME ,
Column10 TOTAL_OPERATING_EXPENDITURE ,
Column9 TOTAL_OTHER_INCOME
From Banknbfc_Periodmaster_Synm Bm,
Banknbfc_Perioddata_Synm Bd,
company_details_mv_synm cd
Where Bm.Period_Code = Bd.Period_Code
And Cd.Company_Code = Bm.Crmcompanyid
and bm.template = 'Bank'
and cd.company_status = 'Active'
"
}
}
filter {
  # Force the numeric columns to float BEFORE they reach Elasticsearch, so the
  # first indexed document cannot pin the mapping to [long] and reject later
  # rows with float values ("mapper [...] cannot be changed from type [float]
  # to [long]").
  #
  # Bug fix: the jdbc input lowercases column names by default
  # (lowercase_column_names => true), so the event fields are lowercase.
  # The previous names "TOTAL_INCOME" / "Capital_Adequacy_Ratio" matched no
  # field and the convert silently did nothing. mutate/convert ignores
  # fields that are absent from an event, so listing extra columns is safe.
  mutate {
    convert => {
      "total_income" => "float"
      "capital_adequacy_ratio" => "float"
      "total_operating_expenditure" => "float"
      "total_other_income" => "float"
    }
  }
}
output {
# Send events to a secured Elasticsearch node over HTTPS.
# NOTE(review): the error log above reports index "bank_crisil_rated" while
# this config writes to "bank_rated" — confirm which config produced the
# failures; the mapping conflict lives in the index that actually received
# the first (long-typed) documents.
elasticsearch {
hosts => "172.11.111.111:9200"
index => "bank_rated"
user => "c-akhilesht"
password => "abc#123"
ssl => true
# WARNING: certificate verification is disabled — acceptable for dev only.
ssl_certificate_verification => false
truststore => "/opt/READONLYREST/elasticsearch-6.3.0/config/keystore.jks"
truststore_password => "readonlyrest"
}
}
But when we try to push the data again (a second time), all rows' data is pushed successfully into Elasticsearch.
I saw one solution using the mutate filter, but it is not working.
Can you help me, please?