I have deployed Elastic Stack using ECK operator. The issue is that fleet server or agent is not connecting to correct Elasticsearch host which seems to be strange.
Following are my configs for kibana, elastic and fleet.
Fleet Server config:
apiVersion: agent.k8s.elastic.co/v1alpha1
kind: Agent
metadata:
name: fleet-server
spec:
version: 8.8.0
mode: fleet
fleetServerEnabled: true
kibanaRef:
name: eck-kibana
elasticsearchRefs:
- name: eck-master-and-worker-nodes
daemonSet:
podTemplate:
spec:
containers:
- name: elastic-agent
image: docker.elastic.co/beats/elastic-agent:8.8.0
Kibana config:
apiVersion: kibana.k8s.elastic.co/v1
kind: Kibana
metadata:
name: eck-kibana
spec:
config:
xpack.fleet.agents.elasticsearch.hosts: ["https://eck-master-and-worker-nodes-es-http.monitoring.svc.cluster.local:9200"]
xpack.fleet.agents.fleet_server.hosts: ["https://fleet-server-agent-http.monitoring.svc.cluster.local:8220"]
xpack.fleet.packages:
- name: system
version: latest
- name: elastic_agent
version: latest
- name: fleet_server
version: latest
xpack.fleet.agentPolicies:
- name: Fleet Server on ECK policy
id: fleet-server
namespace: monitoring
monitoring_enabled:
- logs
- metrics
unenroll_timeout: 900
package_policies:
- name: fleet_server-1
id: fleet_server-1
package:
name: fleet_server
- name: Elastic Agent on ECK policy
id: eck-agent
namespace: monitoring
monitoring_enabled:
- logs
- metrics
unenroll_timeout: 900
is_default: true
package_policies:
- name: system-1
id: system-1
package:
name: system
version: 8.8.0
count: 3
elasticsearchRef:
name: eck-master-and-worker-nodes
namespace: monitoring
podTemplate:
spec:
nodeSelector:
env: mon
tier: mgmt
containers:
- name: kibana
env:
- name: monitoring.ui.ccs.enabled
value: "false"
- name: xpack.encryptedSavedObjects.encryptionKey
value: "my encryption key"
Elastic config:
apiVersion: elasticsearch.k8s.elastic.co/v1
kind: Elasticsearch
metadata:
name: eck-master-and-worker-nodes
spec:
version: 8.8.0
nodeSets:
- name: master
count: 1
config:
node.roles: ["master"]
xpack.ml.enabled: false
node.store.allow_mmap: false
xpack.security.enabled: true
xpack.security.http.ssl.enabled: true
podTemplate:
metadata:
labels:
app: elasticsearch
spec:
nodeSelector:
env: mon
tier: mgmt
affinity:
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchExpressions:
- key: app
operator: In
values:
- elasticsearch
topologyKey: "kubernetes.io/hostname"
containers:
- name: elasticsearch
resources:
requests:
memory: 6Gi
limits:
memory: 6Gi
env:
- name: ES_JAVA_OPTS
value: -Xmx3g -Xms3g # Specify your desired heap size here
volumeClaimTemplates:
- metadata:
name: elasticsearch-data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 32Gi
storageClassName: elasticdisk
- name: worker
count: 3
config:
node.roles: ["data", "ingest", "ml", "transform", "remote_cluster_client"]
node.store.allow_mmap: false
xpack.ml.enabled: false
xpack.security.enabled: true
xpack.security.http.ssl.enabled: true
podTemplate:
metadata:
labels:
app: elasticsearch
spec:
nodeSelector:
env: mon
tier: mgmt
affinity:
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchExpressions:
- key: app
operator: In
values:
- elasticsearch
topologyKey: "kubernetes.io/hostname"
containers:
- name: elasticsearch
resources:
requests:
memory: 12Gi
limits:
memory: 12Gi
env:
- name: ES_JAVA_OPTS
value: -Xmx6g -Xms6g # Specify your desired heap size here
volumeClaimTemplates:
- metadata:
name: elasticsearch-data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 512Gi
storageClassName: elasticdisk
Full error log is as follows:
{"log.level":"error","@timestamp":"2023-07-17T11:35:20.398Z","message":"Failed to connect to backoff(elasticsearch(http://elasticsearch:9200)): Get "http://elasticsearch:9200": lookup elasticsearch on 10.80.0.10:53: no such host","component":{"binary":"metricbeat","dataset":"elastic_agent.metricbeat","id":"beat/metrics-monitoring","type":"beat/metrics"},"log":{"source":"beat/metrics-monitoring"},"log.origin":{"file.line":148,"file.name":"pipeline/client_worker.go"},"service.name":"metricbeat","ecs.version":"1.6.0","log.logger":"publisher_pipeline_output","ecs.version":"1.6.0"}