I am trying to import a MySQL table into Elasticsearch using Logstash and Kibana.
I used the following config file:
# Logstash pipeline: incrementally import the `sample` table from MySQL into Elasticsearch.
input {
  jdbc {
    jdbc_connection_string => "jdbc:mysql://172.158.2.18:3306/edst"
    jdbc_user => "root"
    jdbc_password => ""
    # NOTE(review): the reported "com.mysql.jdbc.Driver not loaded" error is a known
    # Logstash 7.x / newer-JVM jar-loading issue — if it persists after verifying the
    # path and file permissions, copy the connector jar into
    # <logstash>/logstash-core/lib/jars/ and remove jdbc_driver_library entirely.
    jdbc_driver_library => "/home/scopus/mysql-connector-java-5.1.48.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    # tracking_column is ignored unless use_column_value is enabled; created_at is a
    # timestamp, so override the default tracking_column_type ("numeric").
    use_column_value => true
    tracking_column => "created_at"
    tracking_column_type => "timestamp"
    # Without a schedule the statement runs once and the pipeline sits idle;
    # run every minute to pick up new rows incrementally.
    schedule => "* * * * *"
    # Fixed typo "seelct" -> "select"; dropped the trailing semicolon, which breaks
    # the plugin's generated paging/count wrapper queries.
    statement => "SELECT * FROM sample WHERE created_at > :sql_last_value"
  }
}
output {
  # Echo each event to the console for debugging.
  stdout { codec => json_lines }
  elasticsearch {
    # hosts expects an array of host:port strings.
    hosts => ["172.158.2.18:9200"]
    index => "name-sql"
  }
}
When I run Logstash, I get the following output:
Java HotSpot(TM) 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
[2019-10-30T16:48:29,925][INFO ][logstash.outputs.elasticsearch][main] ES Output version determined {:es_version=>7}
[2019-10-30T16:48:29,928][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2019-10-30T16:48:29,953][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//172.158.2.18:9200"]}
[2019-10-30T16:48:30,033][INFO ][logstash.outputs.elasticsearch][main] Using default mapping template
[2019-10-30T16:48:30,052][WARN ][org.logstash.instrument.metrics.gauge.LazyDelegatingGauge][main] A gauge metric of an unknown type (org.jruby.specialized.RubyArrayOneObject) has been create for key: cluster_uuids. This may result in invalid serialization. It is recommended to log an issue to the responsible developer/development team.
[2019-10-30T16:48:30,057][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, :thread=>"#<Thread:0x10ea18b6 run>"}
[2019-10-30T16:48:30,123][INFO ][logstash.outputs.elasticsearch][main] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2019-10-30T16:48:30,245][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
[2019-10-30T16:48:30,319][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[2019-10-30T16:48:30,651][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2019-10-30T16:48:30,837][ERROR][logstash.javapipeline ][main] A plugin had an unrecoverable error. Will restart this plugin.
Pipeline_id:main
Plugin: <LogStash::Inputs::Jdbc jdbc_user=>"root", tracking_column=>"created_at", jdbc_paging_enabled=>true, jdbc_password=><password>, statement=>"seelct * from sample where created_at >:sql_last_value;", jdbc_page_size=>50000, jdbc_driver_library=>"/home/scopus/mysql-connector-java-5.1.48.jar", jdbc_connection_string=>"jdbc:mysql://172.158.2.18:3306/edst", id=>"a1ca9e733b048a6215780a43d074302f4c70f51b68ebc5e4a24bd5c75bf57d23", jdbc_driver_class=>"com.mysql.jdbc.Driver", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_62775155-15f0-4cdc-b220-4d8d0d482b13", enable_metric=>true, charset=>"UTF-8">, jdbc_validate_connection=>false, jdbc_validation_timeout=>3600, jdbc_pool_timeout=>5, sql_log_level=>"info", connection_retry_attempts=>1, connection_retry_attempts_wait_time=>0.5, plugin_timezone=>"utc", last_run_metadata_path=>"/root/.logstash_jdbc_last_run", use_column_value=>false, tracking_column_type=>"numeric", clean_run=>false, record_last_run=>true, lowercase_column_names=>true, use_prepared_statements=>false>
Error: com.mysql.jdbc.Driver not loaded. Are you sure you've included the correct jdbc driver in :jdbc_driver_library?
Exception: LogStash::PluginLoadingError
Stack: /usr/share/logstash-7.4.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.16/lib/logstash/plugin_mixins/jdbc/jdbc.rb:190:in `open_jdbc_connection'
/usr/share/logstash-7.4.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.16/lib/logstash/plugin_mixins/jdbc/jdbc.rb:253:in `execute_statement'
/usr/share/logstash-7.4.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.16/lib/logstash/inputs/jdbc.rb:309:in `execute_query'
/usr/share/logstash-7.4.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.16/lib/logstash/inputs/jdbc.rb:281:in `run'
/usr/share/logstash-7.4.1/logstash-core/lib/logstash/java_pipeline.rb:314:in `inputworker'
/usr/share/logstash-7.4.1/logstash-core/lib/logstash/java_pipeline.rb:306:in `block in start_input'
^C[2019-10-30T16:48:33,341][WARN ][logstash.runner ] SIGINT received. Shutting down.
^C[2019-10-30T16:48:33,703][FATAL][logstash.runner ] SIGINT received. Terminating immediately..
[2019-10-30T16:48:33,825][ERROR][org.logstash.Logstash ] org.jruby.exceptions.ThreadKill
Please help me solve this issue.
Thank you.