We are running a pipeline that fetches data from an Oracle DB table and sends it to Elasticsearch, using the jdbc input plugin. As it is a one-time activity, we are running it in the following way:
nohup logstash-6.8.2/bin/logstash -w 1 -b 1000 -f pipelines/pipeline-oracle_db.conf &
(We are using the aggregate filter, so the worker count is set to 1.)
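For reference, -w and -b are short for --pipeline.workers and --pipeline.batch.size, so the same run with long-form flags would be:

nohup logstash-6.8.2/bin/logstash --pipeline.workers 1 --pipeline.batch.size 1000 -f pipelines/pipeline-oracle_db.conf &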
As it is a one-time job, the expectation is that Logstash will shut down once the job is complete. But after running successfully for a while (around 2-3 hours), the pipeline suddenly terminated without completing the job, while the Logstash process kept running in the background. No output is going to Elasticsearch from Logstash.
[2019-12-04T15:13:43,318][INFO ][logstash.pipeline ] Pipeline has terminated {:pipeline_id=>"main", :thread=>"#<Thread:0x14184562 run>"}
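For context, a jdbc input without a schedule option runs its statement exactly once; Logstash logs this line and stops the pipeline as soon as all inputs have finished, so the message by itself is expected at the end of a one-shot run. The problem is that here it appears before the job is complete. Purely to illustrate the difference (the cron expression is arbitrary), a scheduled variant would keep re-running the statement and the pipeline would stay alive:

input {
  jdbc {
    # ... same connection settings as in our pipeline below ...
    schedule => "* * * * *"   # cron-like syntax: re-run every minute
  }
}

Our full pipeline configuration: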
input {
  jdbc {
    jdbc_connection_string => "{{ db_connection_string }}"
    jdbc_user => "{{ db_user }}"
    jdbc_password => "{{ db_password }}"
    jdbc_driver_class => "Java::oracle.jdbc.driver.OracleDriver"
    jdbc_driver_library => "{{ data_volume }}/logstash-repo/lib/ojdbc6-11.2.0.3.jar"
    statement => "SELECT * from table"
    jdbc_fetch_size => 100000   # rows fetched from Oracle per round trip
  }
}
filter {
  # drop fields whose value is nil so empty DB columns are not indexed
  ruby {
    code => "
      event.to_hash.each do |k, v|
        event.remove(k) if v.nil?
      end
    "
  }
  # aggregate rows sharing the same id; this filter is why we run with -w 1
  aggregate {
    task_id => "%{id}"
    code => ""
    push_previous_map_as_event => true
    inactivity_timeout => 600   # seconds without events before a task is flushed
  }
  # custom logic loaded from a separate Ruby script file
  ruby {
    path => "{{ data_volume }}/logstash-repo/pipelines/filter/SubscriptionTypeFilter.rb"
  }
}
output {
  elasticsearch {
    document_id => "%{id}"
    document_type => "subscription"
    index => "index_name"
    hosts => ["https://{{ es_node }}:{{ es_http_port }}"]
    user => "{{ es_user }}"
    password => "{{ es_password }}"
    template => "{{ data_volume }}/logstash-repo/templates/template.json"
    template_name => "template"
    template_overwrite => true
  }
  stdout {
    codec => rubydebug
  }
}
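For completeness, a script used with the ruby filter's path option is expected to define register and filter methods, and filter must return an array of events. A minimal sketch of the shape of a file like SubscriptionTypeFilter.rb (the subscription_type logic shown is hypothetical, not our actual script):

# register receives the hash passed via the filter's script_params option
def register(params)
end

# filter is called once per event and must return an array of events;
# returning [] would drop the event entirely
def filter(event)
  # hypothetical example: tag a subscription type based on an existing field
  event.set("subscription_type", "paid") if event.get("price").to_i > 0
  [event]
end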