Hi,
I am using Logstash 6.5.4 with the Elasticsearch output plugin to index 15M documents, and it keeps overwhelming my Elasticsearch node (also 6.5.4). I'm getting:
[WARN ][logstash.outputs.elasticsearch] Marking url as dead. Last error: [LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError] Elasticsearch Unreachable: [http://localhost:9200/][Manticore::SocketTimeout] Read timed out {:url=>http://localhost:9200/, :error_message=>"Elasticsearch Unreachable: [http://localhost:9200/][Manticore::SocketTimeout] Read timed out", :error_class=>"LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError"}
[ERROR][logstash.outputs.elasticsearch] Attempted to send a bulk request to elasticsearch' but Elasticsearch appears to be unreachable or down! {:error_message=>"Elasticsearch Unreachable: [http://localhost:9200/][Manticore::SocketTimeout] Read timed out", :class=>"LogStash::Outputs::ElasticSearch::HttpClient::Pool::HostUnreachableError", :will_retry_in_seconds=>2}
My configuration:
input {
  jdbc {
    # Credentials are embedded directly in the JDBC URL (placeholders redacted).
    jdbc_connection_string => "jdbc:sqlserver://localhost;database=XXX;user=xxx;password=xxx"
    # jdbc_user is a required *string* setting; `nil` is not a valid value and
    # can prevent the plugin from starting. Mirror the user from the URL.
    jdbc_user => "xxx"
    # NOTE(review): this path is inside the "JDBC Driver 6.2" install tree but
    # names an sqljdbc4 3.0 jar. The 6.2 driver ships as
    # mssql-jdbc-6.2.x.jre8.jar — confirm which jar actually exists on disk.
    jdbc_driver_library => "C:\Program Files\Microsoft JDBC Driver 6.2 for SQL Server\sqljdbc_6.2\enu\jre8\sqljdbc4-3.0.jar"
    jdbc_driver_class => "com.microsoft.sqlserver.jdbc.SQLServerDriver"
    # Stream rows from SQL Server 500 at a time so the full 15M-row result set
    # is never materialized in memory.
    jdbc_fetch_size => 500
    statement => "..."
  }
}
filter {
  # Build the nested [my_suggest][contexts][location] geo-point array expected
  # by a completion-suggester mapping, from two flat JDBC columns.
  # NOTE: the dotted names below are *literal* field names (JDBC column
  # aliases containing dots), not nested-field references, so
  # event.get("my_suggest.contexts.location.lat") is intentional here.
  ruby {
    code => 'event.set("[my_suggest][contexts][location]", [{"lat" => event.get("my_suggest.contexts.location.lat"),"lon" => event.get("my_suggest.contexts.location.lon")}]) if event.get("my_suggest.contexts.location.lon");'
  }
  mutate {
    # Drop the flat source fields now that they have been folded into the
    # nested structure. A single array value avoids declaring remove_field
    # twice in one block, which is ambiguous (duplicate keys in a plugin
    # block are merge-dependent across Logstash versions).
    remove_field => ["my_suggest.contexts.location.lat", "my_suggest.contexts.location.lon"]
    # Move the remaining dotted-name fields into the real nested structure.
    rename => {
      "my_suggest.input" => "[my_suggest][input]"
      "my_suggest.weight" => "[my_suggest][weight]"
    }
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "my_index"
    # The logged Manticore::SocketTimeout / HostUnreachableError means bulk
    # requests exceeded the plugin's default 60s request timeout while ES was
    # busy indexing. Allow slower bulk responses instead of marking the node
    # dead and re-sending the same heavy bulk (which compounds the overload).
    # If timeouts persist, also lower pipeline.batch.size in logstash.yml.
    timeout => 120
  }
}
Please advise. Thank you.