Unable to push data from MySQL to Elasticsearch through Logstash

Hello all, I'm unable to push data from MySQL to Elasticsearch. I'm using Logstash to push the data.
My config file is given below:
input {
  jdbc {
    jdbc_driver_library => "D:/ELK/mysql-connector-java-8.0.16/mysql-connector-java-8.0.16.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://10.10.202.109:3306/bhi"
    jdbc_user => "root"
    jdbc_password => "blah"
    statement => "SELECT * from application"
  }
}
output {
  elasticsearch {
    hosts => ["http://11.239.456.30:9200"]
    index => "likeqns"
    document_type => "data"
  }
  stdout {
    codec => json_lines
  }
}
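
As an aside, I can see from the warnings further down in the log that document_type is deprecated against Elasticsearch 7, so I guess the output section could be trimmed to something like the sketch below. I don't think that's what is breaking the pipeline, though:

output {
  elasticsearch {
    hosts => ["http://11.239.456.30:9200"]
    index => "likeqns"
    # document_type removed: Elasticsearch 7 no longer supports custom mapping types
  }
  stdout {
    codec => json_lines
  }
}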

I'm getting the following error in the Logstash log:
[2019-05-23T12:29:54,149][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2019-05-23T12:29:54,180][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"7.0.1"}
[2019-05-23T12:30:03,024][WARN ][logstash.outputs.elasticsearch] You are using a deprecated config setting "document_type" set in elasticsearch. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. Document types are being deprecated in Elasticsearch 6.0, and removed entirely in 7.0. You should avoid this feature If you have any questions about this, please visit the #logstash channel on freenode irc. {:name=>"document_type", :plugin=><LogStash::Outputs::ElasticSearch index=>"likeqns", id=>"7db49d1780e920c00b30c1beee05e7198d87d3255962474b955ebe6247b169ea", hosts=>[http://11.239.456.30:9200], document_type=>"data", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_eb02ce57-99da-485a-bdc0-0270d4da1b4c", enable_metric=>true, charset=>"UTF-8">, workers=>1, manage_template=>true, template_name=>"logstash", template_overwrite=>false, doc_as_upsert=>false, script_type=>"inline", script_lang=>"painless", script_var_name=>"event", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, ilm_enabled=>"auto", ilm_rollover_alias=>"logstash", ilm_pattern=>"{now/d}-000001", ilm_policy=>"logstash-policy", action=>"index", ssl_certificate_verification=>true, sniffing=>false, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, resurrect_delay=>5, validate_after_inactivity=>10000, http_compression=>false>}
[2019-05-23T12:30:03,837][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://10.219.246.241:9200/]}}
[2019-05-23T12:30:04,149][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://11.239.456.30:9200"}
[2019-05-23T12:30:04,227][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>7}
[2019-05-23T12:30:04,227][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the type event field won't be used to determine the document _type {:es_version=>7}
[2019-05-23T12:30:04,259][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://10.219.246.241:9200"]}
[2019-05-23T12:30:04,274][INFO ][logstash.outputs.elasticsearch] Using default mapping template
[2019-05-23T12:30:04,321][INFO ][logstash.javapipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>4, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>500, :thread=>"#<Thread:0x33dbcb1f run>"}
[2019-05-23T12:30:04,540][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"index_patterns"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s", "number_of_shards"=>1}, "mappings"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}
[2019-05-23T12:30:06,290][ERROR][logstash.javapipeline ] Pipeline aborted due to error {:pipeline_id=>"main", :exception=>#<TypeError: no implicit conversion of Integer into String>, :backtrace=>["uri:classloader:/META-INF/jruby.home/lib/ruby/stdlib/date/format.rb:335:in `_parse'", "uri:classloader:/META-INF/jruby.home/lib/ruby/stdlib/date.rb:734:in `parse'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/plugin_mixins/jdbc/value_tracking.rb:87:in `set_value'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/plugin_mixins/jdbc/value_tracking.rb:36:in `initialize'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/plugin_mixins/jdbc/value_tracking.rb:29:in `build_last_value_tracker'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/vendor/bundle/jruby/2.5.0/gems/logstash-input-jdbc-4.3.13/lib/logstash/inputs/jdbc.rb:216:in `register'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:191:in `block in register_plugins'", "org/jruby/RubyArray.java:1792:in `each'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:190:in `register_plugins'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:280:in `start_inputs'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:244:in `start_workers'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:145:in `run'", "D:/ELK/logstash-7.0.1/logstash-7.0.1/logstash-core/lib/logstash/java_pipeline.rb:104:in `block in start'"], :thread=>"#<Thread:0x33dbcb1f run>"}
[2019-05-23T12:30:06,337][ERROR][logstash.agent ] Failed to execute action {:id=>:main, :action_type=>LogStash::ConvergeResult::FailedAction, :message=>"Could not execute action: PipelineAction::Create, action_result: false", :backtrace=>nil}
[2019-05-23T12:30:06,884][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2019-05-23T12:30:11,696][INFO ][logstash.runner ] Logstash shut down.
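
From the backtrace, the jdbc input fails during register in value_tracking.rb while parsing the persisted last-run value (a date parse gets an Integer). My guess is that a stale .logstash_jdbc_last_run metadata file in the home directory of the user running Logstash holds a value the default date-based tracker can't parse, so I'm considering deleting that file or resetting the tracking from the config. This is only a sketch of what I plan to try, assuming the documented clean_run / record_last_run options of the jdbc input behave the way I expect:

input {
  jdbc {
    jdbc_driver_library => "D:/ELK/mysql-connector-java-8.0.16/mysql-connector-java-8.0.16.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://10.10.202.109:3306/bhi"
    jdbc_user => "root"
    jdbc_password => "blah"
    statement => "SELECT * from application"
    clean_run => true        # my assumption: ignore any previously persisted sql_last_value
    record_last_run => false # my assumption: skip reading/writing .logstash_jdbc_last_run entirely
  }
}

If that's not it, any pointer to what in my config triggers the TypeError would help.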
