Problem using the jdbc_static filter with Hive

I checked the jdbc_static filter example against PostgreSQL and it works fine.

But when I point the same configuration at Hive instead of PostgreSQL, plugin registration fails with a "failed to create database" error.

Here is my pipeline configuration:

input {
  generator {
    lines => [
      '{"from_ip": "10.2.3.20", "app": "foobar", "amount": 32.95}',
      '{"from_ip": "10.2.3.30", "app": "barfoo", "amount": 82.95}',
      '{"from_ip": "10.2.3.40", "app": "bazfoo", "amount": 22.95}'
    ]
    count => 0
  }
}

filter {
  json {
    source => "message"
  }

  jdbc_static {
    loaders => [
      {
        id => "servers"
        query => "select ip, descr from local_ips order by ip"
        local_table => "servers"
      }
    ]
    local_db_objects => [
      {
        name => "servers"
        index_columns => ["ip"]
        columns => [
          ["ip", "varchar(15)"],
          ["descr", "varchar(255)"]
        ]
      }
    ]
    local_lookups => [
      {
        query => "select descr as description from servers WHERE ip = :ip"
        parameters => {ip => "[from_ip]"}
        target => "server"
      }
    ]
    staging_directory => "/tmp/logstash/jdbc_static/import_data"
    loader_schedule => "*/30 * * * *"
    jdbc_user => "hive"
    jdbc_password => ""
    jdbc_driver_class => "org.apache.hive.jdbc.HiveDriver"
    jdbc_driver_library => "/usr/share/logstash/vendor/hive-jdbc-1.2.1000.2.6.3.0-235-standalone.jar,/usr/share/logstash/vendor/hadoop-common-2.7.3.2.6.3.0-235.jar,/usr/share/logstash/vendor/hadoop-auth-2.7.3.2.6.3.0-235.jar"
    jdbc_connection_string => "jdbc:hive2://ephdpm.-------.com:10000/default"
  }
}

output {
  file {
    path => "/var/log/logstash/output.log"
    codec => rubydebug {metadata => true}
  }
}
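
To rule out the Hive side, this is a minimal standalone JDBC check I can run outside Logstash (a sketch only; it reuses the driver class, connection string, user and loader query from the config above and assumes the same jars from jdbc_driver_library are on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Checks the Hive JDBC connection and the loader query outside Logstash.
// Run with the jars listed in jdbc_driver_library on the classpath, e.g.
//   java -cp .:/usr/share/logstash/vendor/hive-jdbc-1.2.1000.2.6.3.0-235-standalone.jar:... HiveJdbcCheck
public class HiveJdbcCheck {
  public static void main(String[] args) throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    try (Connection conn = DriverManager.getConnection(
             "jdbc:hive2://ephdpm.-------.com:10000/default", "hive", "");
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("select ip, descr from local_ips order by ip")) {
      while (rs.next()) {
        System.out.println(rs.getString("ip") + " -> " + rs.getString("descr"));
      }
    }
  }
}

If this runs cleanly, the driver jars and the HiveServer2 connection are fine on their own, and the failure is on the Logstash side.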
Error from /var/log/logstash/logstash-plain.log:

[2018-08-17T09:57:39,203][ERROR][logstash.pipeline ] Error registering plugin {:pipeline_id=>"main", :plugin=>"#<LogStash::FilterDelegator:0x4c81530c @metric_events_out=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 - name: out value:0, @metric_events_in=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 - name: in value:0, @metric_events_time=org.jruby.proxy.org.logstash.instrument.metrics.counter.LongCounter$Proxy2 - name: duration_in_millis value:0, @id=\"5128874646a21ff65c2ebdde4d01a958aac72f6e020ea8d7be354e04fb859736\", @klass=LogStash::Filters::JdbcStatic, @metric_events=#<LogStash::Instrument::NamespacedMetric:0x561e1081 @metric=#<LogStash::Instrument::Metric:0x176ea4ee @collector=#<LogStash::Instrument::Collector:0x41bca8d5 @agent=nil, @metric_store=#<LogStash::Instrument::MetricStore:0x7639add @store=#<Concurrent::Map:0x00000000000fb4 entries=3 default_proc=nil>, @structured_lookup_mutex=#<Mutex:0x67aa6312>, @fast_lookup=#<Concurrent::Map:0x00000000000fb8 entries=71 default_proc=nil>>>>, @namespace_name=[:stats, :pipelines, :main, :plugins, :filters, :\"5128874646a21ff65c2ebdde4d01a958aac72f6e020ea8d7be354e04fb859736\", :events]>, @filter=<LogStash::Filters::JdbcStatic loaders=>[{\"id\"=>\"servers\", \"query\"=>\"select ip, descr from local_ips order by ip\", \"local_table\"=>\"servers\", \"jdbc_driver_library\"=>\"/usr/share/logstash/vendor/hive-jdbc-1.2.1000.2.6.3.0-235-standalone.jar,/usr/share/logstash/vendor/hadoop-common-2.7.3.2.6.3.0-235.jar\", \"jdbc_driver_class\"=>\"org.apache.hive.jdbc.HiveDriver\", \"jdbc_connection_string\"=>\"jdbc:hive2://ephdpm.-------.com:10000/default\", \"jdbc_user\"=>\"hive\", \"jdbc_password\"=><password>, \"staging_directory\"=>\"/tmp/logstash/jdbc_static/import_data\"}], local_db_objects=>[{\"name\"=>\"servers\", \"index_columns\"=>[\"ip\"], \"columns\"=>[[\"ip\", \"varchar(15)\"], [\"descr\", \"varchar(255)\"]]}], local_lookups=>[{\"query\"=>\"select descr as description from servers WHERE ip = :ip\", \"parameters\"=>{\"ip\"=>\"[from_ip]\"}, \"target\"=>\"server\"}], staging_directory=>\"/tmp/logstash/jdbc_static/import_data\", loader_schedule=>\"*/30 * * * *\", jdbc_user=>\"hive\", jdbc_password=><password>, jdbc_driver_class=>\"org.apache.hive.jdbc.HiveDriver\", jdbc_driver_library=>\"/usr/share/logstash/vendor/hive-jdbc-1.2.1000.2.6.3.0-235-standalone.jar,/usr/share/logstash/vendor/hadoop-common-2.7.3.2.6.3.0-235.jar\", jdbc_connection_string=>\"jdbc:hive2://ephdpm.-------.com:10000/default\", id=>\"5128874646a21ff65c2ebdde4d01a958aac72f6e020ea8d7be354e04fb859736\", enable_metric=>true, periodic_flush=>false, tag_on_failure=>[\"_jdbcstaticfailure\"], tag_on_default_use=>[\"_jdbcstaticdefaultsused\"]>>", :error=>"Java::JavaSql::SQLException: Failed to create database 'memory:b9dd5f169d625ebde6c6e20a', see the next exception for details.", :thread=>"#<Thread:0x25c7e538@/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:48 run>"}
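
As far as I can tell, the 'memory:b9dd5f169d625ebde6c6e20a' name in this error is the plugin's local in-memory lookup database (jdbc_static uses Apache Derby for that), so the failure seems to happen before Hive is even contacted. My guess is that one of the extra jars clashes with the bundled Derby. A quick way to check whether the standalone Hive jar bundles its own Derby classes (a sketch; the jar path is just the first entry from jdbc_driver_library):

import java.util.jar.JarFile;

// Lists any Apache Derby classes bundled inside the given jar, which could
// clash with the Derby copy the jdbc_static plugin uses for its local database.
public class JarScan {
  public static void main(String[] args) throws Exception {
    String path = args.length > 0 ? args[0]
        : "/usr/share/logstash/vendor/hive-jdbc-1.2.1000.2.6.3.0-235-standalone.jar";
    try (JarFile jar = new JarFile(path)) {
      jar.stream()
         .filter(e -> e.getName().startsWith("org/apache/derby/"))
         .forEach(e -> System.out.println(e.getName()));
    }
  }
}

If that prints any org/apache/derby entries, the standalone jar is carrying a second Derby copy alongside the one the plugin loads.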

More from /var/log/logstash/logstash-plain.log (stack trace, truncated):

[2018-08-17T09:57:39,232][ERROR][logstash.pipeline ] ARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_3_dot_0/gems/sequel_minus_5_dot_10_dot_0/lib/sequel/adapters//usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/adapters/jdbc.rb)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.make_new(/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool.rb:127)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.RUBY$method$make_new$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_3_dot_0/gems/sequel_minus_5_dot_10_dot_0/lib/sequel//usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool.rb)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.threaded.assign_connection(/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool/threaded.rb:206)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.threaded.RUBY$method$assign_connection$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_3_dot_0/gems/sequel_minus_5_dot_10_dot_0/lib/sequel/connection_pool//usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool/threaded.rb)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.threaded.acquire(/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool/threaded.rb:138)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.threaded.RUBY$method$acquire$0$__VARARGS__(usr/share/logstash/vendor/bundle/jruby/$2_dot_3_dot_0/gems/sequel_minus_5_dot_10_dot_0/lib/sequel/connection_pool//usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool/threaded.rb)", "usr.share.logstash.vendor.bundle.jruby.$2_dot_3_dot_0.gems.sequel_minus_5_dot_10_dot_0.lib.sequel.connection_pool.threaded.hold(/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/sequel-5.10.0/lib/sequel/connection_pool/threaded.rb:90)",

...OMITTED...

"usr.share.logstash.logstash_minus_core.lib.logstash.pipeline.start_workers(/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:361)", "usr.share.logstash.logstash_minus_core.lib.logstash.pipeline.RUBY$method$start_workers$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/pipeline.rb)", "usr.share.logstash.logstash_minus_core.lib.logstash.pipeline.run(/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:288)", "usr.share.logstash.logstash_minus_core.lib.logstash.pipeline.RUBY$method$run$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/pipeline.rb)", "usr.share.logstash.logstash_minus_core.lib.logstash.pipeline.block in start(/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:248)", "org.jruby.RubyProc.call(org/jruby/RubyProc.java:289)", "org.jruby.RubyProc.call(org/jruby/RubyProc.java:246)", "java.lang.Thread.run(java/lang/Thread.java:748)"], :thread=>"#<Thread:0x25c7e538@/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:48 run>"}
