Logstash redis output plugin does not update a record — it always inserts a new record into the Redis list

Hello Guys,

I am migrating data from MySQL to Redis using the Logstash output plugin, but Logstash does not update a record if it already exists — it always inserts a new one into the list.

Please see the below code.

input
{
# Fetch data from the content database on a schedule.
jdbc{

             # Track incremental progress by a column value instead of row count.
             use_column_value => true
            #tracking_column => "productid,sku,mfgpartno"
            #tracking_column => "productid"
            # Only rows with modifieddate > :sql_last_value are fetched each run.
            tracking_column =>"modifieddate"
         #  clean_run=>true
            tracking_column_type => "timestamp"
            plugin_timezone => "local"

            # Cron schedule: run the statement every minute.
            schedule => "* * * * *"

            # NOTE(review): credentials are stored in plain text here; consider
            # moving them to the Logstash keystore or environment variables.
            jdbc_connection_string => "jdbc:mysql://ec2-inquiredev.etilize.com:3306/content_op"
            jdbc_user => "inquire"
            jdbc_password => "inquire"
            jdbc_driver_library => "/home/bgoswami/Downloads/com.mysql.jdbc_5.1.5.jar"
            jdbc_driver_class => "com.mysql.jdbc.Driver"

            # :sql_last_value is the last seen modifieddate (see tracking_column);
            # ordering by modifieddate keeps the tracking value monotonic.
            statement => "select productid,text,modifieddate,ordernumber from test where  modifieddate >  :sql_last_value order by modifieddate"


    } # jdbc

} #close input tag

filter {
  # Expose the order number under a stable "_id" field on every event;
  # "%{ordernumber}" is resolved per-event by Logstash's sprintf formatting.
  mutate {
    add_field => { "[_id]" => "%{ordernumber}" }
  }
}
#Output
output {
#Data to be stored on productId key

            redis
            {
                    host => localhost
                    port => 6379
                    data_type => list
                    key =>"%{ordernumber}"
            #       id=> "%{ordernumber}"
            #       batch=> true
            }

stdout {codec => rubydebug}
}

Output Result :
127.0.0.1:6379> lrange 6 0 -1

  1. "{"ordernumber":6,"modifieddate":"2020-02-22T23:17:42.000Z","@version":"1","productid":123,"text":"Bharat Goswami","@timestamp":"2020-02-23T04:18:03.990Z","_id":"6"}"

  2. "{"ordernumber":6,"modifieddate":"2020-02-22T23:18:13.000Z","@version":"1","productid":123,"text":"Bharat Goswami","@timestamp":"2020-02-23T04:19:03.055Z","_id":"6"}"

  3. "{"ordernumber":6,"modifieddate":"2020-02-22T23:22:55.000Z","@version":"1","productid":123,"text":"Bharat Goswami","@timestamp":"2020-02-23T04:23:02.842Z","_id":"6"}"
    127.0.0.1:6379>

I have resolved it myself.

Solution: I added a ruby filter that connects to Redis, deletes the existing list for the key, and lets the output add the new entry, so the list is replaced whenever the data is updated.

filter{

# Copy the lookup values into dedicated fields so the ruby filter below can
# read them uniformly ("%{...}" references are resolved per-event by mutate).
mutate {
  add_field => {
    "[productidKey]"    => "%{productid}"
    "[menuf_partnoKey]" => "%{menuf_partno}"
    "[skuKey]"          => "%{sku}"
  }
}

# Workaround for the redis output's data_type => "list" behaviour: the output
# always pushes onto the list, so it grows on every run. Delete the existing
# list for each key before the output re-inserts the fresh event, keeping only
# the latest entry per key.
#
# Consolidated from three separate ruby filters that each re-ran init and
# re-created the same global $rc connection; one filter and one connection
# per worker is sufficient. A nil-guard skips fields missing from the event.
# NOTE(review): host/port are hard-coded here — keep them in sync with the
# redis output settings.
ruby {
  init => 'require "redis"; $rc = Redis.new(host: "127.0.0.1", port: 6379)'
  code => '
    ["[productidKey]", "[menuf_partnoKey]", "[skuKey]"].each do |field|
      key = event.get(field)
      $rc.del(key) if key
    end
  '
} # end of ruby

}#end of filter

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.