I have set up Docker containers for the ELK components plus Filebeat, using the following docker-compose configuration:
networks:
  elk:
    external:
      name: elk
services:
  elasticsearch:
    environment:
      http.host: 0.0.0.0
      transport.host: 127.0.0.1
    image: docker.elastic.co/elasticsearch/elasticsearch:5.2.2
    networks:
      elk: null
    ports:
    - 9200:9200
    restart: unless-stopped
    volumes:
    - elasticsearch:/usr/share/elasticsearch/data:rw
  filebeat:
    build:
      context: /Volumes/Disk/Development/spaces/docker/docker-elk/filebeat
    depends_on:
    - elasticsearch
    hostname: filebeat
    networks:
      elk: null
    restart: unless-stopped
    volumes:
    - /var/lib/docker/containers:/hostfs/var/lib/docker/containers:rw
  kibana:
    depends_on:
    - elasticsearch
    environment:
      ELASTICSEARCH_PASSWORD: changeme
      ELASTICSEARCH_URL: http://elasticsearch:9200
      ELASTICSEARCH_USERNAME: elastic
    image: docker.elastic.co/kibana/kibana:5.2.2
    networks:
      elk: null
    ports:
    - 5601:5601
    restart: unless-stopped
  logstash:
    build:
      context: /Volumes/Disk/Development/spaces/docker/docker-elk/logstash
    depends_on:
    - elasticsearch
    environment:
      LOGSPOUT: ignore
    networks:
      elk: null
    restart: unless-stopped
version: '2.0'
volumes:
  elasticsearch:
    driver: local
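The stack is started the usual way; the only extra step is that the external elk network has to exist beforehand. A minimal sketch of how I bring it up and sanity-check elasticsearch (elastic/changeme are the default x-pack credentials already referenced in the kibana environment above):

# The compose file references an external network named "elk",
# so it has to be created before the stack is started.
docker network create elk
docker-compose up -d

# Sanity check: elasticsearch should answer on the published port,
# using the default x-pack credentials from the compose file.
curl -u elastic:changeme http://localhost:9200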
As suggested by ruflin, the filebeat container has the host's /var/lib/docker/containers directory mounted and is configured as follows:
filebeat.prospectors:
- input_type: log
  paths:
  - /hostfs/var/lib/docker/containers/*/*.log
  document_type: docker
  json.message_key: log

output.logstash:
  hosts: ["logstash:5044"]
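The logstash image built from the context above listens for beats connections on the port filebeat sends to. Its pipeline is roughly along these lines; this is a sketch rather than the exact file from my logstash build context, and the index name in particular is an assumption:

input {
  # Receive events from filebeat (output.logstash points at logstash:5044).
  beats {
    port => 5044
  }
}

output {
  # Forward events to the elasticsearch service on the elk network.
  elasticsearch {
    hosts => ["elasticsearch:9200"]
    user => "elastic"
    password => "changeme"
    index => "filebeat-%{+YYYY.MM.dd}"
  }
}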
No filebeat events arrive in Kibana; filebeat logs the following:
filebeat_1 | 2017/03/06 17:08:42.139813 beat.go:267: INFO Home path: [/usr/share/filebeat] Config path: [/usr/share/filebeat] Data path: [/usr/share/filebeat/data] Logs path: [/usr/share/filebeat/logs]
filebeat_1 | 2017/03/06 17:08:42.140590 beat.go:177: INFO Setup Beat: filebeat; Version: 5.2.2
filebeat_1 | 2017/03/06 17:08:42.140645 logstash.go:90: INFO Max Retries set to: 3
filebeat_1 | 2017/03/06 17:08:42.140777 outputs.go:106: INFO Activated logstash as output plugin.
filebeat_1 | 2017/03/06 17:08:42.141530 publish.go:291: INFO Publisher name: filebeat
filebeat_1 | 2017/03/06 17:08:42.141628 async.go:63: INFO Flush Interval set to: 1s
filebeat_1 | 2017/03/06 17:08:42.141768 async.go:64: INFO Max Bulk Size set to: 2048
filebeat_1 | 2017/03/06 17:08:42.142226 beat.go:207: INFO filebeat start running.
filebeat_1 | 2017/03/06 17:08:42.145507 registrar.go:85: INFO Registry file set to: /usr/share/filebeat/data/registry
filebeat_1 | 2017/03/06 17:08:42.145561 registrar.go:106: INFO Loading registrar data from /usr/share/filebeat/data/registry
filebeat_1 | 2017/03/06 17:08:42.145590 registrar.go:123: INFO States Loaded from registrar: 0
filebeat_1 | 2017/03/06 17:08:42.145614 crawler.go:34: INFO Loading Prospectors: 1
filebeat_1 | 2017/03/06 17:08:42.145663 prospector_log.go:57: INFO Prospector with previous states loaded: 0
filebeat_1 | 2017/03/06 17:08:42.145729 crawler.go:48: INFO Loading Prospectors completed. Number of prospectors: 1
filebeat_1 | 2017/03/06 17:08:42.145747 crawler.go:63: INFO All prospectors are initialised and running with 0 states to persist
filebeat_1 | 2017/03/06 17:08:42.145835 logp.go:219: INFO Metrics logging every 30s
filebeat_1 | 2017/03/06 17:08:42.145882 registrar.go:236: INFO Starting Registrar
filebeat_1 | 2017/03/06 17:08:42.145901 sync.go:41: INFO Start sending events to output
filebeat_1 | 2017/03/06 17:08:42.145924 spooler.go:63: INFO Starting spooler: spool_size: 2048; idle_timeout: 5s
filebeat_1 | 2017/03/06 17:08:42.145949 prospector.go:112: INFO Starting prospector of type: log
filebeat_1 | 2017/03/06 17:09:12.146731 logp.go:232: INFO No non-zero metrics in the last 30s
filebeat_1 | 2017/03/06 17:09:42.146925 logp.go:232: INFO No non-zero metrics in the last 30s
filebeat_1 | 2017/03/06 17:10:12.147896 logp.go:232: INFO No non-zero metrics in the last 30s
filebeat_1 | 2017/03/06 17:10:42.147144 logp.go:232: INFO No non-zero metrics in the last 30s
filebeat_1 | 2017/03/06 17:11:12.146622 logp.go:232: INFO No non-zero metrics in the last 30s
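I read the repeated "No non-zero metrics" lines as the prospector never picking up any files at all. One thing I still need to verify is whether the bind mount is actually populated inside the container, for example with something like:

# List the files the prospector glob should match, from inside the
# running filebeat container; an empty result would explain the
# "No non-zero metrics" output above.
docker-compose exec filebeat sh -c 'ls /hostfs/var/lib/docker/containers/*/*.log'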
So I'd like to hear how filebeat is supposed to be used to process Docker container logs. In an earlier configuration I used logspout, which collected the container logs and fed them into logstash.