Alright @stephenb, I decided to boldly just follow the configs you provided: bypassed those certs and dared to docker-compose up my Logstash.
logstash.conf
input {
  file {
    path => "D:/ATS Event Logs/For-Logstash (ML)/*.csv"
    start_position => "beginning"
    sincedb_path => "NULL"
  }
}

filter {
  csv {
    separator => ","
    columns => [ "id","uniqueid","alarm","eventtype","system","subsystem","sourcetime","operator","alarmvalue","value","equipment","location","severity","description","state","mmsstate","zone","graphicelement" ]
  }
}

output {
  elasticsearch {
    index => "ats-logs"
    hosts => ["https://es01:9200"]
    manage_template => false
    user => "elastic"
    password => "elastic123"
    ssl_verification_mode => "none"
    # ssl_enabled => true
    # cacert => "/usr/share/logstash/certs/ca/ca.crt"
  }
  stdout {}
}
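Side note on two things in that file input I'm already suspicious of (happy to be corrected): the D:/ path is a Windows host path that won't exist inside the Linux container, and I believe sincedb_path => "NULL" should be /dev/null on Linux ("NUL" is the Windows spelling). Once the parse error is fixed, I'm guessing the input ends up looking roughly like this (/usr/share/logstash/csv is just a placeholder mount point I made up):

input {
  file {
    # container-side path; assumes the host CSV folder is bind-mounted here
    path => "/usr/share/logstash/csv/*.csv"
    start_position => "beginning"
    # /dev/null disables sincedb persistence, so files are re-read on every restart
    sincedb_path => "/dev/null"
  }
}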
docker-compose.yml
version: "3.8"

volumes:
  logstashdata01:
    driver: local

networks:
  default:
    name: elastic
    external: true # use the pre-existing network

services:
  logstash:
    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
    labels:
      co.elastic.logs/module: logstash
    user: root
    environment:
      - xpack.monitoring.enabled=false
    volumes:
      - ./:/usr/share/logstash/pipeline/
    command: logstash -r -f /usr/share/logstash/pipeline/logstash.conf
    ports:
      - "5044:5044"
    mem_limit: ${LS_MEM_LIMIT}
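For that placeholder container path to actually exist, I assume the compose file needs a second bind mount on the logstash service. A minimal sketch, using the long volume syntax since the D: drive-letter colon tends to confuse the short host:container form (the source is my real folder, the target matches the placeholder above):

    volumes:
      - ./:/usr/share/logstash/pipeline/
      - type: bind
        source: "D:/ATS Event Logs/For-Logstash (ML)"
        target: /usr/share/logstash/csv
        read_only: true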
The good news is that the Logstash Docker container is healthy; it never crashes on me. However, the pipeline is not set up and errors out:
2023-11-16 00:26:06 2023/11/15 16:26:06 Setting 'xpack.monitoring.enabled' from environment.
2023-11-16 00:26:06 Using bundled JDK: /usr/share/logstash/jdk
2023-11-16 00:26:40 Sending Logstash logs to /usr/share/logstash/logs which is now configured via log4j2.properties
2023-11-16 00:26:40 [2023-11-15T16:26:40,130][WARN ][deprecation.logstash.runner] NOTICE: Running Logstash as superuser is not recommended and won't be allowed in the future. Set 'allow_superuser' to 'false' to avoid startup errors in future releases.
2023-11-16 00:26:40 [2023-11-15T16:26:40,148][INFO ][logstash.runner ] Log4j configuration path used is: /usr/share/logstash/config/log4j2.properties
2023-11-16 00:26:40 [2023-11-15T16:26:40,150][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"8.11.0", "jruby.version"=>"jruby 9.4.2.0 (3.1.0) 2023-03-08 90d2913fda OpenJDK 64-Bit Server VM 17.0.9+9 on 17.0.9+9 +indy +jit [x86_64-linux]"}
2023-11-16 00:26:40 [2023-11-15T16:26:40,155][INFO ][logstash.runner ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, -Dls.cgroup.cpuacct.path.override=/, -Dls.cgroup.cpu.path.override=/, -Djruby.regexp.interruptible=true, -Djdk.io.File.enableADS=true, --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
2023-11-16 00:26:40 [2023-11-15T16:26:40,174][INFO ][logstash.settings ] Creating directory {:setting=>"path.queue", :path=>"/usr/share/logstash/data/queue"}
2023-11-16 00:26:40 [2023-11-15T16:26:40,178][INFO ][logstash.settings ] Creating directory {:setting=>"path.dead_letter_queue", :path=>"/usr/share/logstash/data/dead_letter_queue"}
2023-11-16 00:26:40 [2023-11-15T16:26:40,576][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
2023-11-16 00:26:40 [2023-11-15T16:26:40,596][INFO ][logstash.agent ] No persistent UUID file found. Generating new UUID {:uuid=>"93c92697-1707-49a1-bf50-640c85f4879e", :path=>"/usr/share/logstash/data/uuid"}
2023-11-16 00:26:41 [2023-11-15T16:26:41,887][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
2023-11-16 00:26:42 [2023-11-15T16:26:42,403][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [^\\r\\n], \"\\r\", \"\\n\" at line 46, column 4 (byte 1133) after # }", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:239:in `initialize'", "org/logstash/execution/AbstractPipelineExt.java:173:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:48:in `initialize'", "org/jruby/RubyClass.java:931:in `new'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:49:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:386:in `block in converge_state'"]}
2023-11-16 00:26:45 [2023-11-15T16:26:45,730][ERROR][logstash.agent ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [^\\r\\n], \"\\r\", \"\\n\" at line 46, column 4 (byte 1133) after # }", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:239:in `initialize'", "org/logstash/execution/AbstractPipelineExt.java:173:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:48:in `initialize'", "org/jruby/RubyClass.java:931:in `new'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:49:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:386:in `block in converge_state'"]}
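One thing that puzzles me: the error points at line 46, byte 1133, "after # }", but the logstash.conf I pasted above is far shorter than that, so I suspect the file the container actually reads still has leftover commented-out blocks, or CRLF line endings from editing on Windows. My next step is to syntax-check the config in a throwaway container (-t is --config.test_and_exit):

docker-compose run --rm logstash logstash -t -f /usr/share/logstash/pipeline/logstash.conf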
I know that to confirm success I should see a JSON object printed on stdout for every CSV row being piped in.
I've provided the current state of my .conf and .yml above for your inspection.