Hello All,
I want to create a dashboard with visualizations based on data from an Oracle database.
Since I want it to be near real time (i.e. when a new row is inserted into the table, it should be reflected in the corresponding index in Kibana), I am using the schedule option of the JDBC input plugin.
My config file is:
input {
  jdbc {
    jdbc_connection_string => "jdbc:oracle:thin:hostname:port/XE"
    jdbc_user => "username"
    jdbc_password => "password"
    jdbc_driver_library => "/path/mybox/KibanaDbTry/ojdbc7.jar"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    #jdbc_validate_connection => true
    schedule => "* * * * *"
    statement => "SELECT * from thetable WHERE user_id > :sql_last_value ORDER by user_id"
    use_column_value => true
    tracking_column => "user_id"
    tracking_column_type => "numeric"
    clean_run => true
    record_last_run => true
    last_run_metadata_path => "/path/.logstash_jdbc_last_run"
  }
}
output {
  stdout { codec => json_lines }
  elasticsearch {
    index => "eeeeee"
    hosts => "http://localhost:9201"
    document_type => "schedules"
    document_id => "%{user_id}"
  }
}
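As I understand it, on every scheduled run the :sql_last_value placeholder is replaced with the value stored in last_run_metadata_path, so the statement that actually hits Oracle looks like this (the value 2 is just an example taken from the logs further below):

SELECT * from thetable WHERE user_id > 2 ORDER by user_id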
The problem is that on the first run the index matches the SQL table, but when I then make changes in the table, those changes are not reflected in the Kibana index / Discover tab.
In two cases I have also observed that the inserted rows were removed from the Oracle database itself.
The table has only two columns, Id and Username. That's all.
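For context, the table is roughly the following (the DDL is only a sketch from memory, and the column names are assumed to match what the JDBC statement references):

CREATE TABLE thetable (
  user_id  NUMBER PRIMARY KEY,  -- numeric id, used as the tracking column
  username VARCHAR2(100)        -- the username
);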
The logs are:
[2018-10-22T10:45:29,164][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"fb_apache", :directory=>"/apps/tomcat/elk/ELK/logstash-6.2.4/modules/fb_apache/configuration"}
[2018-10-22T10:45:29,185][INFO ][logstash.modules.scaffold] Initializing module {:module_name=>"netflow", :directory=>"/apps/tomcat/elk/ELK/logstash-6.2.4/modules/netflow/configuration"}
[2018-10-22T10:45:29,766][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2018-10-22T10:45:30,475][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"6.2.4"}
[2018-10-22T10:45:30,988][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9601}
[2018-10-22T10:45:33,724][WARN ][logstash.outputs.elasticsearch] You are using a deprecated config setting "document_type" set in elasticsearch. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. Document types are being deprecated in Elasticsearch 6.0, and removed entirely in 7.0. You should avoid this feature If you have any questions about this, please visit the logstash channel on freenode irc. {:name=>"document_type", :plugin=><LogStash::Outputs::Elasticsearch index=>"eeeeee", hosts=>[http://10.23.213.99:9201], document_type=>"schedules", document_id=>"%{user_id}", id=>"3ae27aa2f81ef6b55d6ba82673fdd009084fa821576cdc2f39f5bb80bd994f19", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_228763b2-647f-435e-9951-40326c5620b6", enable_metric=>true, charset=>"UTF-8">, workers=>1, manage_template=>true, template_name=>"logstash", template_overwrite=>false, doc_as_upsert=>false, script_type=>"inline", script_lang=>"painless", script_var_name=>"event", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, action=>"index", ssl_certificate_verification=>true, sniffing=>false, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, resurrect_delay=>5, validate_after_inactivity=>10000, http_compression=>false>}
[2018-10-22T10:45:33,829][INFO ][logstash.pipeline ] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>5}
[2018-10-22T10:45:34,396][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://10.23.213.99:9201/]}}
[2018-10-22T10:45:34,410][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.23.213.99:9201/, :path=>"/"}
[2018-10-22T10:45:34,710][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.23.213.99:9201/"}
[2018-10-22T10:45:34,798][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>6}
[2018-10-22T10:45:34,803][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>6}
[2018-10-22T10:45:34,820][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>nil}
[2018-10-22T10:45:34,840][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-*", "version"=>60001, "settings"=>{"index.refresh_interval"=>"5s"}, "mappings"=>{"_default_"=>{"dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"*", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date"}, "@version"=>{"type"=>"keyword"}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}}}}}}
[2018-10-22T10:45:34,897][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::Elasticsearch", :hosts=>["http://10.23.213.99:9201"]}
[2018-10-22T10:45:35,124][INFO ][logstash.pipeline ] Pipeline started successfully {:pipeline_id=>"main", :thread=>"#<Thread:0x634438b5 sleep>"}
[2018-10-22T10:45:35,244][INFO ][logstash.agent ] Pipelines running {:count=>1, :pipelines=>["main"]}
[2018-10-22T10:46:01,851][INFO ][logstash.inputs.jdbc ] (0.087609s) SELECT * from thetable WHERE user_id > 0 ORDER by user_id
[2018-10-22T10:47:00,262][INFO ][logstash.inputs.jdbc ] (0.005313s) SELECT * from thetable WHERE user_id > 2 ORDER by user_id
[2018-10-22T10:48:00,342][INFO ][logstash.inputs.jdbc ] (0.001482s) SELECT * from thetable WHERE user_id > 2 ORDER by user_id
[2018-10-22T10:49:01,159][INFO ][logstash.inputs.jdbc ] (0.014202s) SELECT * from thetable WHERE user_id > 2 ORDER by user_id
.
.
.
and it goes on and on like the above, with the same query running every minute.
The table has only 2 rows at the start, but when I add more rows, not only do they fail to appear in the Kibana index, sometimes they are removed from the table itself.
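To check whether the rows are really gone from Oracle (and not just missing from the index), I run a plain query directly against the database, something like the one below (just a sanity check, not part of the pipeline):

SELECT user_id, username FROM thetable ORDER BY user_id;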
I am relatively new to the ELK stack and would be really thankful if someone could help me.