exception=>LogStash::Outputs::ElasticSearch::HttpClient::Pool::NoConnectionAvailableError

Hi Team,
The Elasticsearch connection is up and running. We have 260,000 documents in total; around 30K get copied, and after that we see the error below:

Attempted to send a bulk request but there are no living connections in the pool (perhaps Elasticsearch is unreachable or down?) {:message=>"No Available connections", :exception=>LogStash::Outputs::ElasticSearch::HttpClient::Pool::NoConnectionAvailableError, :will_retry_in_seconds=>64}
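(For context, the cluster does answer a plain health check from the Logstash host. The snippet below is only a standalone Ruby sketch of that check; host1 and the credentials are the same placeholders used in the config, and certificate verification is disabled to mirror the output settings.)

require 'net/http'
require 'openssl'
require 'json'
require 'uri'

# Health check against the same host the pipeline talks to (placeholders as in the config).
uri = URI('https://host1:9200/_cluster/health')
request = Net::HTTP::Get.new(uri)
request.basic_auth('user', 'password')

# verify_mode mirrors ssl_certificate_verification => false in the output block.
response = Net::HTTP.start(uri.host, uri.port,
                           use_ssl: true,
                           verify_mode: OpenSSL::SSL::VERIFY_NONE) do |http|
  http.request(request)
end

puts JSON.parse(response.body)['status']  # "green" or "yellow" means the cluster responds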

Below is the Logstash config we use:

input {
  elasticsearch {
    hosts => ["host1"]
    index => "test-index-"
    user => "user"
    password => "password"
    query => '{ "query": { "query_string": { "query": "" } } }'
    size => 5000
    scroll => "10m"
    slices => 8
    docinfo => true
    docinfo_target => "@metadata"
    ssl => false
    ca_file => "/usr/share/logstash/elasticsearch/ca.crt"
  }
}
filter {
  ruby {
    code => "
      # Take the date portion (the segment after the first hyphen) of the source index name,
      # defaulting to an empty string when the name has no hyphenated suffix.
      parts = event.get('[@metadata][_index]').split('-')
      index_date = parts.length > 1 ? parts[1] : ''
      event.set('[@metadata][index_date]', index_date)
    "
  }
  mutate { remove_field => ["index_date"] }
}
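(For clarity, the ruby filter above boils down to the standalone snippet below; the sample index name is made up. Note that split('-')[1] returns the second hyphen-separated segment, so if the real source index prefix itself contains hyphens, as "test-index-" does, the date would land in a later segment.)

# Standalone check of the date-extraction logic; the index name is hypothetical.
parts = 'myindex-2023.05.01'.split('-')
index_date = parts.length > 1 ? parts[1] : ''
puts index_date  # => "2023.05.01"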
output {
  # stdout { codec => rubydebug { metadata => true } }
  elasticsearch {
    hosts => ["host1"]
    user => "user"
    password => "password"
    index => "test-index1-%{[@metadata][index_date]}"
    ssl => true
    ssl_certificate_verification => false
    doc_as_upsert => true
    action => "update"
    ilm_enabled => false
    http_compression => true
    timeout => 180
    pool_max => 10000
    pool_max_per_route => 1000
    validate_after_inactivity => 5000
    resurrect_delay => 120
  }
}
