I tried to read the content of table from my Database. I faced the problem of java heap memory in logstash 2.x But It was working with a small resultset .. When I upgrade to 5.1 and using X-Pack.. Now I couldn't manage to read from database .. Please, Find blow my config file and the output.. Logstash is restarting itself .. I don't know what's wrong !!
Here is my config file:
input {
jdbc {
type => "jdbc-demo"
jdbc_driver_library => "/usr/share/java/mysql-connector-java-5.1.40-bin.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://localhost:3306/statistics"
jdbc_user => "username"
jdbc_password => "password"
statement => "SELECT * FROM stations limit 1"
}
}
output {
elasticsearch {
hosts => [ "localhost:9200" ]
user => elastic
password => changeme
}
}
Here is the logs:
[2017-01-30T10:35:11,044][INFO ][logstash.inputs.jdbc ] (0.011000s) SELECT * FROM stations limit 1
[2017-01-30T10:35:11,182][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>["http://~hidden~:~hidden~@localhost:9200"]}}
[2017-01-30T10:35:11,183][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:url=>#<URI::HTTP:0x2aef5bf7 URL:http://~hidden~:~hidden~@localhost:9200>, :healthcheck_path=>"/"}
[2017-01-30T10:35:11,266][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>#<URI::HTTP:0x2aef5bf7 URL:http://~hidden~:~hidden~@localhost:9200>}
[2017-01-30T10:35:11,267][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2017-01-30T10:35:11,297][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword"}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2017-01-30T10:35:11,301][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["localhost:9200"]}
[2017-01-30T10:35:11,303][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>32, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>4000}
[2017-01-30T10:35:11,317][INFO ][logstash.pipeline ] Pipeline main started
[2017-01-30T10:35:11,383][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600
[2017-01-30T10:35:14,329][WARN ][logstash.agent ] stopping pipeline {:id=>"main"}
[2017-01-30T10:35:21,871][INFO ][logstash.inputs.jdbc ] (0.009000s) SELECT * FROM stations limit 1
[2017-01-30T10:35:22,021][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>["http://~hidden~:~hidden~@localhost:9200"]}}
[2017-01-30T10:35:22,023][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:url=>#<URI::HTTP:0x4713a90f URL:http://~hidden~:~hidden~@localhost:9200>, :healthcheck_path=>"/"}
[2017-01-30T10:35:22,107][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>#<URI::HTTP:0x4713a90f URL:http://~hidden~:~hidden~@localhost:9200>}
[2017-01-30T10:35:22,108][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2017-01-30T10:35:22,136][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword"}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2017-01-30T10:35:22,140][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["localhost:9200"]}
[2017-01-30T10:35:22,141][INFO ][logstash.pipeline ] Starting pipeline {"id"=>"main", "pipeline.workers"=>32, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5, "pipeline.max_inflight"=>4000}
[2017-01-30T10:35:22,153][INFO ][logstash.pipeline ] Pipeline main started
[2017-01-30T10:35:22,224][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600}
[2017-01-30T10:35:25,163][WARN ][logstash.agent ] stopping pipeline {:id=>"main"}