I think I've made some progress. The pipeline appears to be set up, but I don't see any JSON objects showing up in the output (i.e., each CSV row being parsed into an event and sent through the pipeline).
logstash.conf
input {
  file {
    path => "/logs/csv_files/*.csv"
    start_position => "beginning"
    # Use /dev/null rather than the literal string "NULL": on Linux,
    # "NULL" just creates a sincedb file named NULL, and any file already
    # recorded in it is skipped on restart even with start_position => "beginning".
    sincedb_path => "/dev/null"
  }
}
filter {
  csv {
    separator => ","
    columns => [ "id","uniqueid","alarm","eventtype","system","subsystem","sourcetime","operator","alarmvalue","value","equipment","location","severity","description","state","mmsstate","zone","graphicelement" ]
  }
}
output {
  elasticsearch {
    index => "ats-logs"
    hosts => ["https://es01:9200"]
    manage_template => false
    user => "elastic"
    password => "elastic123"
    ssl_verification_mode => "none"
  }
  stdout {}
}
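A quick way to confirm whether any rows actually reached Elasticsearch is to query the index from inside the Logstash container (a sketch; it assumes curl is available in the Logstash image and reuses the credentials from the output block above):

docker compose exec logstash curl -k -u elastic:elastic123 "https://es01:9200/ats-logs/_count?pretty"

A count of 0 means the file input never emitted an event, which points at the path glob or the volume mount rather than the csv filter or the output.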
docker-compose.yml
version: "3.8"
volumes:
  logstashdata01:
    driver: local
networks:
  default:
    name: elastic
    external: true
services:
  logstash:
    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
    labels:
      co.elastic.logs/module: logstash
    user: root
    environment:
      - xpack.monitoring.enabled=false
    volumes:
      - ./:/usr/share/logstash/pipeline/
      - /d/ATS_Event_Logs/Logstash:/logs/csv_files
    command: logstash -r -f /usr/share/logstash/pipeline/logstash.conf
    ports:
      - "5044:5044"
    mem_limit: ${LS_MEM_LIMIT}
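Because the source directory is a Windows path written in MSYS form (/d/ATS_Event_Logs/Logstash), it is worth verifying that the bind mount actually resolves inside the container before suspecting the pipeline itself (a sketch; the service name logstash matches the compose file above):

# List what the container sees under the watched directory
docker compose exec logstash ls -la /logs/csv_files

If this comes back empty, the problem is on the Docker Desktop side (the drive isn't shared, or the path needs to be written as D:/ATS_Event_Logs/Logstash or //d/ATS_Event_Logs/Logstash depending on your shell), not in logstash.conf.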
Logs
2023-11-16 10:34:25 Using bundled JDK: /usr/share/logstash/jdk
2023-11-16 10:34:54 Sending Logstash logs to /usr/share/logstash/logs which is now configured via log4j2.properties
2023-11-16 10:34:54 [2023-11-16T02:34:54,588][WARN ][deprecation.logstash.runner] NOTICE: Running Logstash as superuser is not recommended and won't be allowed in the future. Set 'allow_superuser' to 'false' to avoid startup errors in future releases.
2023-11-16 10:34:54 [2023-11-16T02:34:54,607][INFO ][logstash.runner ] Log4j configuration path used is: /usr/share/logstash/config/log4j2.properties
2023-11-16 10:34:54 [2023-11-16T02:34:54,611][INFO ][logstash.runner ] Starting Logstash {"logstash.version"=>"8.11.0", "jruby.version"=>"jruby 9.4.2.0 (3.1.0) 2023-03-08 90d2913fda OpenJDK 64-Bit Server VM 17.0.9+9 on 17.0.9+9 +indy +jit [x86_64-linux]"}
2023-11-16 10:34:54 [2023-11-16T02:34:54,616][INFO ][logstash.runner ] JVM bootstrap flags: [-Xms1g, -Xmx1g, -Djava.awt.headless=true, -Dfile.encoding=UTF-8, -Djruby.compile.invokedynamic=true, -XX:+HeapDumpOnOutOfMemoryError, -Djava.security.egd=file:/dev/urandom, -Dlog4j2.isThreadContextMapInheritable=true, -Dls.cgroup.cpuacct.path.override=/, -Dls.cgroup.cpu.path.override=/, -Djruby.regexp.interruptible=true, -Djdk.io.File.enableADS=true, --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED, --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED, --add-opens=java.base/java.security=ALL-UNNAMED, --add-opens=java.base/java.io=ALL-UNNAMED, --add-opens=java.base/java.nio.channels=ALL-UNNAMED, --add-opens=java.base/sun.nio.ch=ALL-UNNAMED, --add-opens=java.management/sun.management=ALL-UNNAMED]
2023-11-16 10:34:54 [2023-11-16T02:34:54,638][INFO ][logstash.settings ] Creating directory {:setting=>"path.queue", :path=>"/usr/share/logstash/data/queue"}
2023-11-16 10:34:54 [2023-11-16T02:34:54,641][INFO ][logstash.settings ] Creating directory {:setting=>"path.dead_letter_queue", :path=>"/usr/share/logstash/data/dead_letter_queue"}
2023-11-16 10:34:55 [2023-11-16T02:34:55,002][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
2023-11-16 10:34:55 [2023-11-16T02:34:55,026][INFO ][logstash.agent ] No persistent UUID file found. Generating new UUID {:uuid=>"3d0ac5c3-1817-445c-a396-17ed7034d597", :path=>"/usr/share/logstash/data/uuid"}
2023-11-16 10:34:56 [2023-11-16T02:34:56,263][INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
2023-11-16 10:34:57 [2023-11-16T02:34:57,531][INFO ][org.reflections.Reflections] Reflections took 258 ms to scan 1 urls, producing 132 keys and 464 values
2023-11-16 10:34:59 [2023-11-16T02:34:59,026][INFO ][logstash.javapipeline ] Pipeline `main` is configured with `pipeline.ecs_compatibility: v8` setting. All plugins in this pipeline will default to `ecs_compatibility => v8` unless explicitly configured otherwise.
2023-11-16 10:34:59 [2023-11-16T02:34:59,055][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["https://es01:9200"]}
2023-11-16 10:34:59 [2023-11-16T02:34:59,063][WARN ][logstash.outputs.elasticsearch][main] You have enabled encryption but DISABLED certificate verification, to make sure your data is secure set `ssl_verification_mode => full`
2023-11-16 10:34:59 [2023-11-16T02:34:59,346][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://elastic:xxxxxx@es01:9200/]}}
2023-11-16 10:34:59 [2023-11-16T02:34:59,789][WARN ][logstash.outputs.elasticsearch][main] Restored connection to ES instance {:url=>"https://elastic:xxxxxx@es01:9200/"}
2023-11-16 10:34:59 [2023-11-16T02:34:59,791][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch version determined (8.11.0) {:es_version=>8}
2023-11-16 10:34:59 [2023-11-16T02:34:59,792][WARN ][logstash.outputs.elasticsearch][main] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>8}
2023-11-16 10:34:59 [2023-11-16T02:34:59,822][INFO ][logstash.outputs.elasticsearch][main] Not eligible for data streams because config contains one or more settings that are not compatible with data streams: {"index"=>"ats-logs"}
2023-11-16 10:34:59 [2023-11-16T02:34:59,823][INFO ][logstash.outputs.elasticsearch][main] Data streams auto configuration (`data_stream => auto` or unset) resolved to `false`
2023-11-16 10:34:59 [2023-11-16T02:34:59,832][INFO ][logstash.filters.csv ][main] ECS compatibility is enabled but `target` option was not specified. This may cause fields to be set at the top-level of the event where they are likely to clash with the Elastic Common Schema. It is recommended to set the `target` option to avoid potential schema conflicts (if your data is ECS compliant or non-conflicting, feel free to ignore this message)
2023-11-16 10:34:59 [2023-11-16T02:34:59,866][INFO ][logstash.javapipeline ][main] Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>8, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>1000, "pipeline.sources"=>["/usr/share/logstash/pipeline/logstash.conf"], :thread=>"#<Thread:0x57985312 /usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:134 run>"}
2023-11-16 10:35:01 [2023-11-16T02:35:01,685][INFO ][logstash.javapipeline ][main] Pipeline Java execution initialization time {"seconds"=>1.82}
2023-11-16 10:35:01 [2023-11-16T02:35:01,704][INFO ][logstash.javapipeline ][main] Pipeline started {"pipeline.id"=>"main"}
2023-11-16 10:35:01 [2023-11-16T02:35:01,724][INFO ][filewatch.observingtail ][main][e02c9724c891efb1566ae46168785e2fbdf858e6919ac37455d441ec44a7d909] START, creating Discoverer, Watch with file and sincedb collections
2023-11-16 10:35:01 [2023-11-16T02:35:01,737][INFO ][logstash.agent ] Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
2023-11-16 10:34:25 2023/11/16 02:34:25 Setting 'xpack.monitoring.enabled' from environment.
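The log shows the pipeline and the file watcher starting cleanly (the filewatch.observingtail START line at 10:35:01) and then going quiet: no per-file activity and no errors, which is consistent with the glob /logs/csv_files/*.csv matching nothing, or with the files being skipped as already read from an old sincedb. Raising the log level makes filewatch report which paths it discovers; a sketch, using Logstash's standard --log.level flag in the compose command:

command: logstash -r -f /usr/share/logstash/pipeline/logstash.conf --log.level=debug

At debug (or trace) level, filewatch logs each file it discovers and its sincedb state, so the output will show directly whether the watcher sees zero files or sees them and skips them.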