Hello all,
I'm running into some problems when trying to enrich data using the jdbc input and the jdbc_streaming filter.
This is my conf file:
input {
jdbc {
jdbc_connection_string => "connectionString"
jdbc_user => "user"
jdbc_password => "password"
jdbc_driver_library => "postgresql-42.2.2.jar"
jdbc_driver_class => "org.postgresql.Driver"
connection_retry_attempts => "3"
connection_retry_attempts_wait_time => "10"
statement => "SELECT offer_item_id ||'-'|| TO_CHAR(pubofr_dhs_deb,'YYYYMMDDHHMISS') AS id, offer_item_id AS offeritemid, pubofr_dhs_deb AS validitybegindate, pubofr_dhs_end AS validityenddate FROM t_offer_item"
jdbc_default_timezone => "UTC"
}
}
filter {
jdbc_streaming {
jdbc_connection_string => "connectionString"
jdbc_user => "user"
jdbc_password => "password"
jdbc_driver_library => "postgresql-42.2.2.jar"
jdbc_driver_class => "org.postgresql.Driver"
statement => "SELECT offer_item_id AS relativeofferid, pubofr_dhs_deb as begindate FROM t_offer_item_assoc WHERE offer_item_id_assoc = :offerId AND pubofr_dhs_deb_assoc = :offerDate"
parameters => {
"offerId" => "offeritemid"
"offerDate" => "validitybegindate"
}
target => "[relativeOffers]"
}
}
output {
elasticsearch{
index => "activeoffers_idx"
action => "create"
document_type => "activeoffers"
document_id => "%{id}"
hosts => "elasticsearchHost"
}
}
My problem is the following: if I have the "jdbc_default_timezone" property set and there is a field of type timestamp in the jdbc_streaming query, I get this error:
Exception in pipelineworker, the pipeline stopped processing new events, please check your filter configuration and restart Logstash.
{:pipeline_id=>"main", "exception"=>"Missing Converter handling for full class name=org.jruby.RubyObjectVar3, simple name=RubyObjectVar3", "backtrace"=>
["org.logstash.Valuefier.fallbackConvert(Valuefier.java:97)",
"org.logstash.Valuefier.convert(Valuefier.java:75)",
"org.logstash.ConvertedMap$1.visit(ConvertedMap.java:35)",
"org.logstash.ConvertedMap$1.visit(ConvertedMap.java:29)",
"org.jruby.RubyHash.visitLimited(RubyHash.java:662)", "org.jruby.RubyHash.visitAll(RubyHash.java:647)",
"org.logstash.ConvertedMap.newFromRubyHash(ConvertedMap.java:69)",
"org.logstash.ConvertedMap.newFromRubyHash(ConvertedMap.java:64)",
"org.logstash.Valuefier.lambda$initConverters$11(Valuefier.java:142)",
"org.logstash.Valuefier.convert(Valuefier.java:73)",
"org.logstash.ConvertedList.newFromRubyArray(ConvertedList.java:44)",
"org.logstash.Valuefier.lambda$initConverters$15(Valuefier.java:154)",
"org.logstash.Valuefier.fallbackConvert(Valuefier.java:94)",
"org.logstash.Valuefier.convert(Valuefier.java:75)",
"org.logstash.ext.JrubyEventExtLibrary$RubyEvent.ruby_set_field(JrubyEventExtLibrary.java:99)",
"org.logstash.ext.JrubyEventExtLibrary$RubyEvent$INVOKER$i$2$0$ruby_set_field.call(JrubyEventExtLibrary$RubyEvent$INVOKER$i$2$0$ruby_set_field.gen)",
"org.jruby.internal.runtime.methods.JavaMethod$JavaMethodN.call(JavaMethod.java:741)",
"org.jruby.ir.targets.InvokeSite.invoke(InvokeSite.java:145)",
If I remove the jdbc_default_timezone property, events are processed with no problem, but I need the property there so the dates match.
Any help is welcome,
Thanks in advance.