Real-time data not syncing from Oracle to Elasticsearch index

Hi Team,

I am using Logstash to sync data from Oracle to Elasticsearch.
When I update a row in the Oracle DB, the change is not reflected in Elasticsearch.
Could you please help me resolve this issue?

Below is the Logstash configuration that I am using:

input {
  jdbc {
    jdbc_driver_library => "C:\Users\admin\logstash-7.17.4\ojdbc8.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_connection_string => "jdbc:oracle:thin:@domain:1521/temp"
    jdbc_user => "user"
    jdbc_password => "user23"
    # run the query every second
    schedule => "*/1 * * * * *"
    statement => "SELECT req.ID,req.BU_COUNTRY,req.BUID,req.CFO_ACTION,req.CFO_SUB_TYPE,req.CFO_TYPE,req.DELIVERY_METHOD,req.DOC_NUMBER,req.CORRELATION_ID,req.ORIGINATOR,req.ORDER_NUMBER,req.STATUS_CODE_ID,req.CREATED_DATE,req.LAST_UPDATE_DATE,req.DELIVERY_DATE,req.MSG_ID,req.RESPONSE_CODE,req.IS_ACTIVE,code.STATUS_DESC FROM request req,statuscodes code WHERE req.STATUS_CODE_ID=code.ID AND req.last_update_date > :sql_last_value AND req.originator='org' AND req.cfo_type='debit'"
    # fetch only rows changed since the last run, tracked via last_update_date
    use_column_value => true
    tracking_column => "last_update_date"
    tracking_column_type => "timestamp"
    # NOTE: clean_run => true resets :sql_last_value on every pipeline start
    clean_run => true
  }
}
filter {

  # rows where delivery_date came back NULL from Oracle
  if ![delivery_date] {
    mutate {
      rename => { "delivery_date" => "actualDeliveryDate" }
      remove_field => ["delivery_date"]
    }
  }

  # NOTE: createdDate / updatedDate do not exist yet at this point (they are
  # only created by the rename in the mutate block below), so these date
  # filters never match; the resulting _dateparsefailure tags are stripped
  # again via remove_field => ["tags"] further down
  date {
    match => ["createdDate", "yyyy-MM-dd'T'HH:mm:ss.SSSz", "ISO8601"]
  }
  date {
    match => ["updatedDate", "yyyy-MM-dd'T'HH:mm:ss.SSSz", "ISO8601"]
  }

  mutate {
    remove_field => ["@version", "@timestamp", "order_number", "msg_id", "response_code", "is_active", "buid", "cfo_sub_type", "tags", "cfo_action", "correlation_id", "originator"]
    # within a single mutate, rename runs before convert and gsub, so the
    # camelCase fields already exist when the string operations below run
    convert => {
      "createdDate" => "string"
      "updatedDate" => "string"
    }
    # strip fractional seconds such as ".123"; the dot must be escaped,
    # otherwise ".\d{3}" also removes other runs of a character followed
    # by three digits (e.g. the year "2022")
    gsub => [
      "createdDate", "\.\d{3}", "",
      "updatedDate", "\.\d{3}", ""
    ]
    rename => { "bu_country" => "country" }
    rename => { "cfo_type" => "cfoSubType" }
    rename => { "delivery_method" => "deliveryMeth" }
    rename => { "status_code_id" => "txnStatusCode" }
    rename => { "created_date" => "createdDate" }
    rename => { "status_desc" => "txnStatus" }
    rename => { "last_update_date" => "updatedDate" }
    copy => { "id" => "esid" }
  }

  # split doc_number into transactionNumber and poNum; grok applies
  # remove_field only when the match succeeds
  grok {
    match => { "doc_number" => "%{DATA:transactionNumber}-%{GREEDYDATA:poNum}" }
    remove_field => ["doc_number"]
  }

  # split delivery_date at the 'T' and keep only the date part
  grok {
    match => { "delivery_date" => "%{GREEDYDATA:actualDeliveryDate}T%{GREEDYDATA:time}" }
    remove_field => ["delivery_date", "time"]
  }
}
output {
  elasticsearch {
    hosts => ["https://domain:9200", "https://domain:9200", "https://domain:9200"]
    index => "testlogstash"
    user => "admin"
    password => "admin"
    ssl => true
    ssl_certificate_verification => true
    cacert => 'C:\Users\admin\logstash-7.17.4\nonprodescertificate.cer'
    # use the Oracle id as the ES document id, so an update overwrites
    # the same document instead of creating a duplicate
    document_id => "%{id}"
    doc_as_upsert => true
  }
  stdout {
    codec => rubydebug
  }
}
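
One thing I am not sure about in my input section: as far as I understand from the docs, clean_run => true resets :sql_last_value every time the pipeline starts, so a restart re-imports all matching rows. A minimal sketch of the tracking options with persistence across restarts (the metadata path below is only an example):

input {
  jdbc {
    # ...same connection, schedule, statement and tracking settings as above...
    clean_run => false       # keep :sql_last_value across restarts
    record_last_run => true  # persist the tracked value after each run
    last_run_metadata_path => "C:/Users/admin/logstash-7.17.4/.jdbc_last_run"  # example path
  }
}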

Thanks,
Suresh.

Can anyone please suggest?

Sorry, it's my bad.
After running the UPDATE in SQL Developer, I forgot to COMMIT, which is what makes the change permanent and visible to other sessions (including Logstash's JDBC connection).
Solution:

UPDATE table-name
SET last_update_date = systimestamp
WHERE id = 1253816;

COMMIT; -- this is important
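
To double-check, you can query the row from a second session; the updated value only becomes visible outside your own session after the COMMIT. The table name request below is taken from the FROM clause of the Logstash statement above, so adjust it if yours differs:

-- run from a second session: before the COMMIT this still shows the
-- old last_update_date, after the COMMIT it shows the new one
SELECT id, last_update_date
FROM request
WHERE id = 1253816;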

Thanks,
Suresh.
