Hello,
I use Logstash with a SQL stored procedure statement to insert JSON into Elasticsearch, and that works with no problem.
But when I run the same pipeline on a server with less RAM, the CALL get_search_data() statement fails with:
java.lang.OutOfMemoryError: Java heap space
Which parameter can I change to make this work on a server with less capacity, jdbc_fetch_size?
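One thing I know I can do is raise the Logstash heap in config/jvm.options, or override it per run with the LS_JAVA_OPTS environment variable, for example (the pipeline file name is just a placeholder):

export LS_JAVA_OPTS="-Xms2g -Xmx2g"
/usr/share/logstash/bin/logstash -f my-pipeline.conf

But this server simply doesn't have that much RAM, so I would rather make the pipeline consume the result set in smaller pieces. Here is my current pipeline config: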
input {
  jdbc {
    jdbc_driver_library => "/usr/share/logstash/bin/mysql-connector-java-8.0.23.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://${DB_HOST}:${DB_PORT}/${DB_NAME}"
    jdbc_user => "${DB_USER}"
    jdbc_password => "${DB_PASSWORD}"
    use_column_value => false
    last_run_metadata_path => "/usr/share/logstash/.logstash_jdbc_last_run"
    jdbc_page_size => 1000
    jdbc_fetch_size => 1000
    clean_run => false
    # schedule => "*/10 * * * * *"
    statement => "CALL get_search_data('${DATE_SQL}')"
    jdbc_default_timezone => "Europe/Paris"
  }
}
filter {
  json {
    source => "availabilities"
    target => "availabilities"
  }
  json {
    source => "results"
    target => "results"
  }
  json {
    source => "boolean_facets"
    target => "booleanFacets"
    remove_field => ["boolean_facets"]
  }
  json {
    source => "string_facets"
    target => "stringFacets"
    remove_field => ["string_facets"]
  }
  json {
    source => "location_facets"
    target => "locationFacets"
    remove_field => ["location_facets"]
  }
  json {
    source => "integer_facets"
    target => "integerFacets"
    remove_field => ["integer_facets"]
  }
  json {
    source => "decimal_facets"
    target => "decimalFacets"
    remove_field => ["decimal_facets"]
  }
  json {
    source => "datetime_facets"
    target => "datetimeFacets"
    remove_field => ["datetime_facets"]
  }
}
output {
  # stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["${ES_HOST}"]
    user => "${ES_USER}"
    password => "${ES_PASSWORD}"
    index => "${ES_INDEX}"
    action => "%{action}"
    document_id => "%{id}"
    manage_template => false
    ssl => true
    ilm_enabled => false
  }
}
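From what I understand, the MySQL Connector/J driver buffers the whole result set in memory by default, and jdbc_fetch_size only takes effect when the driver actually streams rows with a server-side cursor. So I am wondering if the fix is adding useCursorFetch=true to the connection string, something like this (I am not sure this option works with a CALL to a stored procedure):

jdbc_connection_string => "jdbc:mysql://${DB_HOST}:${DB_PORT}/${DB_NAME}?useCursorFetch=true"
jdbc_fetch_size => 1000

Is that the right approach, or is there a better parameter for this case?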
Thanks