Unable to get the updated value of certain columns in Logstash 8.6.1

Why is the query below not picking up the updated values of some columns, even though a couple of columns have been updated in the source table? Here is my Logstash pipeline.

input {
  jdbc {
    jdbc_driver_library => "mysql-connector-j-8.0.33.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"   # Connector/J 8.x driver class; com.mysql.jdbc.Driver is deprecated
    jdbc_connection_string => "abc?useTimezone=true&useLegacyDatetimeCode=false&serverTimezone=UTC"
    jdbc_user => "cricket"
    jdbc_paging_enabled => true
    tracking_column => "process_update_dttm"
    use_column_value => true
    tracking_column_type => "timestamp"
    schedule => "*/5 * * * *"
    last_run_metadata_path => "/etc/logstash/java-libs/.logstash_jdbc_last_run_7"
    statement_filepath => "/etc/logstash/java-libs/test.sql"
  }
}
  
output {
  elasticsearch {
    hosts => ["xyx"]
    ssl => true
    action => "create"
    cacert => "CA.pem"
    data_stream => true
    data_stream_type => "logs"
    data_stream_dataset => "something"
    data_stream_namespace => "ds"
    document_id => "%{process_id}_%{process_update_dttm}"
    scripted_upsert => true   # This ensures that if the document exists, it is updated with the script
    script => '%{[@metadata][script]}'  # Executes the script defined in the filter section
    script_params => {
      "Received" => "%{Received}"
      "Sent" => "%{Sent}"
    }
  }
  stdout { codec => rubydebug }
}


Do you mean updating the documents in Elasticsearch?

You are using a data stream; data streams are append-only, so you cannot update documents in them through Logstash.

If you want to update your data, you need to write to normal indices, not data streams.
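
As a rough sketch of what that could look like, the output below targets a regular index and updates the existing document whenever the same row is re-ingested. The index name is just a placeholder, and using only process_id as the document_id is an assumption on my part (so that later runs hit the same document instead of creating a new one per timestamp):

output {
  elasticsearch {
    hosts => ["xyx"]
    ssl => true
    cacert => "CA.pem"
    index => "my-jdbc-index"          # regular index (placeholder name), not a data stream
    document_id => "%{process_id}"    # stable id so a re-ingested row overwrites the old document
    action => "update"                # update the existing document instead of appending a new one
    doc_as_upsert => true             # create the document if it does not exist yet
  }
}

With data_stream => true removed and a plain index as the target, each scheduled JDBC run should overwrite the document for that process_id with the latest column values.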