Hello,
My enviroment:
Host: windows 10
Containers: linux
I am building a spring boot application and I am logging my logs to "C:/elk/spring-boot-elk.log"
I want to have a docker setup such that logstash will be able to read from that file whenever it is updated. I have no idea how should I proceed with this.
my docker file:
version: "3.3"
services:
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:7.9.3
container_name: elasticsearch
environment:
- discovery.type=single-node
volumes:
- elasticsearch-data:/usr/share/elasticsearch/data
ports:
- 9200:9200
networks:
- elknetwork
kibana:
container_name: kibana
image: docker.elastic.co/kibana/kibana:7.9.3
environment:
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200 # address of elasticsearch docker container which kibana will connect
ports:
- 5601:5601
depends_on:
- elasticsearch # kibana will start when elasticsearch has started
networks:
- elknetwork
logstash:
image: logstash:7.9.3
container_name: logstash
links:
- elasticsearch:elasticsearch
volumes:
- ./:/config-dir
command: logstash -f /config-dir/logstash.conf
depends_on:
- elasticsearch
networks:
- elknetwork
networks:
elknetwork:
driver: bridge
volumes:
elasticsearch-data:
my logstash.conf file
input {
file {
type => "java"
path => "C:/elk/spring-boot-elk.log"
start_position => "beginning"
codec => multiline {
pattern => "^%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}.*"
negate => "true"
what => "previous"
}
}
}
output {
elasticsearch {
hosts => ["elasticsearch:9200"]
}
}
however when running the docker containers, i get this error:
[2020-10-29T02:59:02,245][ERROR][logstash.javapipeline ][main] Pipeline error {:pipeline_id=>"main", :exception=>#<ArgumentError: File paths must be absolute, relative path specified: C:/elk/spring-boot-elk.log>, :backtrace=>["/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-file-4.2.2/lib/logstash/inputs/file.rb:283:in `block in register'", "org/jruby/RubyArray.java:1809:in `each'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-input-file-4.2.2/lib/logstash/inputs/file.rb:281:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:228:in `block in register_plugins'", "org/jruby/RubyArray.java:1809:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:227:in `register_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:386:in `start_inputs'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:311:in `start_workers'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:185:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:137:in `block in start'"], "pipeline.sources"=>["/config-dir/logstash.conf"], :thread=>"#<Thread:0x4fcfe8a8 run>"}
my guess is that logstash can't read the log file because logstash is running on a docker container and thus it will not be able to see my host file.