Logstash Elasticsearch JDBC import very slow


(Wu Chun Wa) #1

Hi, my dataset is limited to 400k records, and I run Logstash with the following settings. It takes around 20 seconds to import 10k records. How can I optimize the config?

Version

docker-elasticsearch: 5.4.1
docker-logstash: 5.4.1

Config

input {
  jdbc {
    jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-5.1.44-bin.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    # useCursorFetch=true is required for Connector/J to honor jdbc_fetch_size
    # (stream rows) instead of buffering the entire result set in memory.
    jdbc_connection_string => "jdbc:mysql://mysql:3306/database?useCursorFetch=true&zeroDateTimeBehavior=convertToNull"
    jdbc_user => "${MYSQL_USER}"
    jdbc_password => "${MYSQL_PASSWORD}"
    # Runs every minute.
    schedule => "* * * * *"
    # BUG FIX: the original statement was "Select * table limit 40000" — the
    # FROM keyword was missing, and :sql_last_value was never used, so every
    # scheduled run re-imported the same rows from scratch.
    # Filtering on the tracking column makes each run incremental: only rows
    # changed since the value recorded in last_run_metadata_path are fetched.
    statement => "SELECT * FROM table WHERE updateat > :sql_last_value ORDER BY updateat ASC LIMIT 40000"
    # Persist the highest seen value of `updateat` between runs.
    use_column_value => true
    tracking_column => "updateat"
    # Paging splits the query into LIMIT/OFFSET pages of 10k rows each.
    jdbc_paging_enabled => "true"
    jdbc_page_size => "10000"
    jdbc_fetch_size => "20000"
    last_run_metadata_path => "/opt/logstash/cache/.jdbc_place_db"
  }
}
filter {
mutate {
# Drop Logstash's own bookkeeping fields so they are not indexed
# alongside the row data in Elasticsearch.
remove_field => ["@version", "@timestamp"]
}
}
output {
# Index each row into Elasticsearch; connection/index settings are taken
# from environment variables.
elasticsearch {
hosts => [ "${ES_HOST}" ]
user => "${ES_USER}"
password => "${ES_PW}"
index => "${ES_INDEX_ar_SA}"
document_type => "${ES_DOC_PLACE}"
# Using the row's `id` column as the document id makes repeated imports
# overwrite existing documents instead of creating duplicates.
document_id => "%{id}"
}
}


(system) #2

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.