Hi everyone!
I have written a docker-compose file to bring up the ELK stack, but when I run it I get
Unable to retrieve version information from Elasticsearch nodes
from my Kibana container!
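I see this in the Kibana container's logs. With NODE_NAME=ELK_NODE_1 from the .env shown below, the container name resolves to ELK_NODE_1-kibana-ui, so I tail the logs with:
docker logs -f ELK_NODE_1-kibana-ui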
Here is my setup.
First, my docker-compose.yml:
version: '3'
services:
  elasticsearch-core:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.13.1
    container_name: ${NODE_NAME}-es
    restart: unless-stopped
    hostname: ${NODE_NAME}-es
    ulimits:
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 131072
        hard: 131072
      nproc: 8192
      fsize: -1
    network_mode: bridge
    ports:
      - 9200:9200/tcp
      - 9300:9300/tcp
    volumes:
      - $PWD/Elasticsearch-TOR/var/lib/elasticsearch:/usr/share/elasticsearch/data
      - $PWD/Elasticsearch-TOR/etc/certs:/usr/share/elasticsearch/config/certificates
    environment:
      ES_JAVA_OPTS: '-Xms12g -Xmx12g'
      ELASTIC_PASSWORD: ${ELASTIC_PASSWORD}
      cluster.name: elk
      node.name: ${NODE_NAME}-es
      bootstrap.memory_lock: 'true'
      network.bind_host: 0.0.0.0
      network.publish_host: ${NETWORK_PUBLISH_HOST}
      http.port: 9200
      http.publish_port: 9200
      transport.port: 9300
      transport.publish_port: 9300
      discovery.seed_hosts: '${ELK_HOSTS_1},${ELK_HOSTS_2},${ELK_HOSTS_3}'
      cluster.initial_master_nodes: 'ELK_NODE_1-es,ELK_NODE_2-es,ELK_NODE_3-es'
      indices.query.bool.max_clause_count: 8192
      search.max_buckets: 250000
      action.destructive_requires_name: 'true'
      reindex.remote.whitelist: '*:*'
      reindex.ssl.verification_mode: 'none'
      xpack.security.http.ssl.key: /usr/share/elasticsearch/config/certificates/node.key
      xpack.security.http.ssl.certificate: /usr/share/elasticsearch/config/certificates/node.pem
      xpack.security.http.ssl.certificate_authorities: /usr/share/elasticsearch/config/certificates/root-ca-key.pem
      xpack.security.http.ssl.verification_mode: 'none'
      xpack.security.http.ssl.enabled: 'true'
      xpack.security.transport.ssl.key: /usr/share/elasticsearch/config/certificates/node.key
      xpack.security.transport.ssl.certificate: /usr/share/elasticsearch/config/certificates/node.pem
      xpack.security.transport.ssl.certificate_authorities: /usr/share/elasticsearch/config/certificates/root-ca-key.pem
      xpack.security.transport.ssl.verification_mode: 'none'
      xpack.security.transport.ssl.enabled: 'false'
      xpack.monitoring.collection.enabled: 'true'
      xpack.monitoring.collection.interval: 30s
      xpack.security.enabled: 'true'
      xpack.security.audit.enabled: 'false'

  kibana-ui:
    image: docker.elastic.co/kibana/kibana:7.13.1
    container_name: ${NODE_NAME}-kibana-ui
    restart: always
    hostname: ${NODE_NAME}-kibana-ui
    network_mode: bridge
    ports:
      - 5601:5601/tcp
    volumes:
      - ./kibana/kibana.yml:/usr/share/kibana/config/kibana.yml
    environment:
      TELEMETRY_OPTIN: 'false'
      TELEMETRY_ENABLED: 'false'
      NEWSFEED_ENABLED: 'false'
      SERVER_NAME: ${NODE_NAME}-kibana-ui
      SERVER_HOST: '0.0.0.0'
      SERVER_PORT: 5601
      SERVER_MAXPAYLOADBYTES: 8388608
      ELASTICSEARCH_ENDPOINTS: '["https://172.31.44.131:9200","https://172.31.44.132:9200","https://172.31.44.133:9200"]'
      ELASTICSEARCH_USER: ${ELASTIC_USER}
      ELASTICSEARCH_SECRET: ${ELASTIC_PASSWORD}
      ES_REQUEST_TIMEOUT: 132000
      ES_SHARD_TIMEOUT: 120000
      # ES_SSL_CERT: /etc/kibana/certs/ui/ui.crt
      # ES_SSL_KEY: /etc/kibana/certs/ui/ui.key
      # ES_SSL_CA: /etc/kibana/certs/ca/ca.crt
      ES_SSL_VERIFY: 'none'
      KIBANA_SUGGEST_TIMEOUT: 3000
      KIBANA_SUGGEST_LIMIT: 2500000
      VEGA_ALLOW_URLS: 'true'
      XPACK_MAPS_AVAILABLE_VISUALS: 'true'
      XPACK_SECURITY_ENCRYPTIONKEY: 'MyEncryptionKey_0123456789_0123456789_0123456789'
      XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: 'aCSQOG1/q6Mzo+myAyjslONJtC7rNgc0VhsZHZfyWTA='
    depends_on:
      - elasticsearch-core

  logstash-worker:
    image: docker.elastic.co/logstash/logstash:7.13.1
    container_name: ${NODE_NAME}-logstash-agent
    restart: always
    hostname: ${NODE_NAME}-logstash-agent
    network_mode: bridge
    ports:
      - 9600:9600
    volumes:
      - $PWD/logstash/configurations:/usr/share/logstash/config
      - $PWD/logstash/data-flow:/usr/share/logstash/pipeline
      # - $PWD/logstash/configurations:/usr/share/logstash/config
    environment:
      # JAVA_OPTS: '-Xms256m -Xmx2g'
      XPACK_STATS: 'false'
      XPACK_CTRL: 'false'
    depends_on:
      - elasticsearch-core
    links:
      - elasticsearch-core
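For reference, I start everything from the directory that holds docker-compose.yml and the .env shown below (docker-compose picks up the .env automatically), roughly:
docker-compose up -d
docker-compose ps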
Also, here is my .env:
cat .env
ELASTIC_USER=elastic
ELASTIC_PASSWORD=m2Jsr6V8k9q7xNpZd6Q2
NODE_NAME=ELK_NODE_1
NETWORK_PUBLISH_HOST=172.31.44.131
ELK_HOSTS_1=172.31.44.131
ELK_HOSTS_2=172.31.44.132
ELK_HOSTS_3=172.31.44.133
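To sanity-check the variable substitution, docker-compose config prints the compose file with these values filled in (container_name should come out as ELK_NODE_1-es and network.publish_host as 172.31.44.131):
docker-compose config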
Here is my directory structure:
tree .
.
├── docker-compose.yml
├── Elasticsearch-TOR
│   ├── docker-compose.yml
│   ├── etc
│   │   └── certs
│   │       ├── cert-gen.sh
│   │       ├── node.key
│   │       ├── node.pem
│   │       ├── root-ca-key.pem
│   │       ├── root-ca.pem
│   │       └── root-ca.srl
│   └── var
│       └── lib
│           └── elasticsearch
│               └── nodes
│                   └── 0
│                       ├── node.lock
│                       ├── snapshot_cache
│                       │   ├── segments_1
│                       │   └── write.lock
│                       └── _state
│                           ├── _0.cfe
│                           ├── _0.cfs
│                           ├── _0.si
│                           ├── manifest-0.st
│                           ├── node-0.st
│                           ├── segments_1
│                           └── write.lock
├── env-sample
├── kibana
│   └── kibana.yml
├── kibana-7.12.x-codex-dark.ndjson
├── logstash
│   ├── configurations
│   │   ├── jvm.options
│   │   ├── log4j2.properties
│   │   └── logstash.yml
│   └── data-flow
│       └── pipeline.conf
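The certificates under Elasticsearch-TOR/etc/certs were generated by the cert-gen.sh in that folder. To rule out a broken chain on the Elasticsearch side, the node certificate can be checked against the CA like this:
openssl verify -CAfile Elasticsearch-TOR/etc/certs/root-ca.pem Elasticsearch-TOR/etc/certs/node.pem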
Also, here is my kibana.yml:
cat kibana/kibana.yml
#server.port: 5601
#server.host: "0.0.0.0"
elasticsearch.preserveHost: true
kibana.index: ".kibana"
elasticsearch.username: kibana_system
elasticsearch.password: m2Jsr6V8k9q7xNpZd6Q2
elasticsearch.ssl.verificationMode: "none"
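This file is mounted over the container's default config; to double-check that the container really sees it, I can print it from inside:
docker exec ELK_NODE_1-kibana-ui cat /usr/share/kibana/config/kibana.yml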
And these are my log4j2.properties, logstash.yml, and jvm.options:
cat log4j2.properties
status = error
name = LogstashPropertiesConfig
appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %-.10000m%n
rootLogger.level = info
rootLogger.appenderRef.console.ref = plain_console
cat logstash.yml
http.host: "0.0.0.0"
path.config: /usr/share/logstash/pipeline
cat jvm.options
## JVM configuration
# Xms represents the initial size of total heap space
# Xmx represents the maximum size of total heap space
-Xms1g
-Xmx1g
################################################################
## Expert settings
################################################################
##
## All settings below this section are considered
## expert settings. Don't tamper with them unless
## you understand what you are doing
##
################################################################
## GC configuration
11-13:-XX:+UseConcMarkSweepGC
11-13:-XX:CMSInitiatingOccupancyFraction=75
11-13:-XX:+UseCMSInitiatingOccupancyOnly
## Locale
# Set the locale language
#-Duser.language=en
# Set the locale country
#-Duser.country=US
# Set the locale variant, if any
#-Duser.variant=
## basic
# set the I/O temp directory
#-Djava.io.tmpdir=$HOME
# set to headless, just in case
-Djava.awt.headless=true
# ensure UTF-8 encoding by default (e.g. filenames)
-Dfile.encoding=UTF-8
# use our provided JNA always versus the system one
#-Djna.nosys=true
# Turn on JRuby invokedynamic
-Djruby.compile.invokedynamic=true
## heap dumps
# generate a heap dump when an allocation from the Java heap fails
# heap dumps are created in the working directory of the JVM
-XX:+HeapDumpOnOutOfMemoryError
# specify an alternative path for heap dumps
# ensure the directory exists and has sufficient space
#-XX:HeapDumpPath=${LOGSTASH_HOME}/heapdump.hprof
## GC logging
#-Xlog:gc*,gc+age=trace,safepoint:file=@loggc@:utctime,pid,tags:filecount=32,filesize=64m
# log GC status to a file with time stamps
# ensure the directory exists
#-Xloggc:${LS_GC_LOG_FILE}
# Entropy source for randomness
-Djava.security.egd=file:/dev/urandom
# Copy the logging context from parent threads to children
-Dlog4j2.isThreadContextMapInheritable=true
My Logstash container's status is up! But when I open Kibana in the browser over http, I get
Kibana server is not ready yet
and for https I get
Secure Connection Failed
An error occurred during a connection to 172.31.44.131:5601. SSL received a record that exceeded the maximum permissible length.
Error code: SSL_ERROR_RX_RECORD_TOO_LONG
The page you are trying to view cannot be shown because the authenticity of the received data could not be verified. Please contact the website owners to inform them of this problem.
I can ping and curl from the Kibana container to the Elasticsearch container, so there is no network or firewall problem!
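For the record, the curl check from inside the Kibana container was along these lines, using the elastic password from my .env, and it does get an answer from Elasticsearch:
docker exec -it ELK_NODE_1-kibana-ui curl -k -u elastic:m2Jsr6V8k9q7xNpZd6Q2 https://172.31.44.131:9200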