After replacing the ${pwd} value with a specific path and setting the Logstash log level to debug, this is the output:
[DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"some-metrics/logstash.conf"}
[DEBUG][logstash.agent ] Trying to start API WebServer {:port=>9600, :ssl_enabled=>false}
[DEBUG][logstash.agent ] Converging pipelines state {:actions_count=>1}
[DEBUG][logstash.agent ] Executing action {:action=>LogStash::PipelineAction::Create/pipeline_id:main}
[DEBUG][org.logstash.secret.store.SecretStoreFactory] Attempting to exists or secret store with implementation: org.logstash.secret.store.backend.JavaKeyStore
[INFO ][logstash.agent ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[DEBUG][org.logstash.secret.store.SecretStoreFactory] Attempting to exists or secret store with implementation: org.logstash.secret.store.backend.JavaKeyStore
[DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"file", :type=>"input", :class=>LogStash::Inputs::File}
[DEBUG][org.logstash.secret.store.SecretStoreFactory] Attempting to exists or secret store with implementation: org.logstash.secret.store.backend.JavaKeyStore
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@start_position = "beginning"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@path = ["some-metrics/some-metrics.csv"]
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_completed_action = "log"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_path = "/dev/null"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_completed_log_path = "some-metrics/done.log"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@enable_metric = true
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@add_field = {}
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@stat_interval = 1.0
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@discover_interval = 15
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_write_interval = 15.0
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@delimiter = "\n"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@close_older = 3600.0
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@mode = "tail"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@sincedb_clean_after = 1209600.0
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_chunk_size = 32768
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_chunk_count = 140737488355327
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_sort_by = "last_modified"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@file_sort_direction = "asc"
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@exit_after_read = false
[DEBUG][logstash.inputs.file ] config LogStash::Inputs::File/@check_archive_validity = false
[DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"csv", :type=>"filter", :class=>LogStash::Filters::CSV}
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@skip_header = true
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@separator = ","
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@columns = ["applicationIdentification", "applicationDescription", "applicationName"]
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@enable_metric = true
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@add_tag = []
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@remove_tag = []
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@add_field = {}
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@remove_field = []
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@periodic_flush = false
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@source = "message"
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@quote_char = "\""
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@autogenerate_column_names = true
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@skip_empty_columns = false
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@skip_empty_rows = false
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@convert = {}
[DEBUG][logstash.filters.csv ] config LogStash::Filters::CSV/@autodetect_column_names = false
[DEBUG][logstash.plugins.registry] On demand adding plugin to the registry {:name=>"elasticsearch", :type=>"output", :class=>LogStash::Outputs::ElasticSearch}
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@password = <password>
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@hosts = [https://elasticsearch-example.com:443]
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_enabled = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@index = "some-metrics-%{+YYYY.MM}"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@user = "****"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_certificate_authorities = ["some-metrics/cacerts/elasticsearch-example.com.cer"]
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@enable_metric = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@workers = 1
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_certificate_verification = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_verification_mode = "full"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ssl_supported_protocols = []
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing = false
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@sniffing_delay = 5
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@timeout = 60
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@failure_type_logging_whitelist = []
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@silence_errors_in_log = []
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max = 1000
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pool_max_per_route = 100
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@resurrect_delay = 5
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@validate_after_inactivity = 10000
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@http_compression = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@compression_level = 1
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@custom_headers = {}
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_initial_interval = 2
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_max_interval = 64
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@dlq_custom_codes = []
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@dlq_on_failed_indexname_interpolation = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@data_stream_type = "logs"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@data_stream_dataset = "generic"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@data_stream_namespace = "default"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@data_stream_sync_fields = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@data_stream_auto_routing = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@manage_template = true
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_overwrite = false
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@template_api = "auto"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@parent = nil
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@join_field = nil
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@upsert = ""
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@doc_as_upsert = false
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script = ""
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_type = "inline"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_lang = "painless"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@script_var_name = "event"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@scripted_upsert = false
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@retry_on_conflict = 1
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@pipeline = nil
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ilm_enabled = "auto"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ilm_pattern = "{now/d}-000001"
[DEBUG][logstash.outputs.elasticsearch] config LogStash::Outputs::ElasticSearch/@ilm_policy = "logstash-policy"
[INFO ][logstash.javapipeline ] Pipeline `main` is configured with `pipeline.ecs_compatibility: v8` setting. All plugins in this pipeline will default to `ecs_compatibility => v8` unless explicitly configured otherwise.
[DEBUG][org.logstash.execution.AbstractPipelineExt] Flow metric registered: `input_throughput` in namespace `[:stats, :pipelines, :main, :flow]`
[DEBUG][org.logstash.execution.AbstractPipelineExt] Flow metric registered: `filter_throughput` in namespace `[:stats, :pipelines, :main, :flow]`
[DEBUG][org.logstash.execution.AbstractPipelineExt] Flow metric registered: `output_throughput` in namespace `[:stats, :pipelines, :main, :flow]`
[DEBUG][org.logstash.execution.AbstractPipelineExt] Flow metric registered: `queue_backpressure` in namespace `[:stats, :pipelines, :main, :flow]`
[DEBUG][org.logstash.execution.AbstractPipelineExt] Flow metric registered: `worker_concurrency` in namespace `[:stats, :pipelines, :main, :flow]`
[DEBUG][logstash.javapipeline ] Starting pipeline {:pipeline_id=>"main"}
[INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["https://elasticsearch-example.com:443"]}
[DEBUG][logstash.outputs.elasticsearch][main] Normalizing http path {:path=>nil, :normalized=>nil}
[INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://****:xxxxxx@elasticsearch-example.com:443/]}}
[DEBUG][logstash.outputs.elasticsearch][main] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>"https://****:xxxxxx@elasticsearch-example.com:443/", :path=>"/"}
[ERROR][logstash.outputs.elasticsearch][main] Unable to retrieve Elasticsearch version {:exception=>LogStash::Json::ParserError, :message=>"Unexpected character ('<' (code 60)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false')\n at [Source: (byte[])\"<!DOCTYPE html><html lang=\"en\"><head><meta charSet=\"utf-8\"/><meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge,chrome=1\"/><meta name=\"viewport\" content=\"width=device-width\"/><title>Elastic</title><style>\n \n @font-face {\n font-family: 'Inter';\n font-style: normal;\n font-weight: 100;\n src: url('/ui/fonts/inter/Inter-Thin.woff2') format('woff2'), url('/ui/fonts/inter/Inter-Thin.woff') format('woff');\n }\n\n @font-face {\n font-f\"[truncated 191025 bytes]; line: 1, column: 2]"}
[ERROR][logstash.javapipeline ][main] Pipeline error {:pipeline_id=>"main", :exception=>#<LogStash::ConfigurationError: Could not connect to a compatible version of Elasticsearch>, :backtrace=>["/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:277:in `block in healthcheck!'", "org/jruby/RubyHash.java:1587:in `each'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:262:in `healthcheck!'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:396:in `update_urls'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:104:in `update_initial_urls'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:98:in `start'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client.rb:369:in `build_pool'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client.rb:63:in `initialize'", "org/jruby/RubyClass.java:904:in `new'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client_builder.rb:106:in `create_http_client'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch/http_client_builder.rb:102:in `build'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/plugin_mixins/elasticsearch/common.rb:42:in `build_client'", "/usr/share/logstash/vendor/bundle/jruby/3.1.0/gems/logstash-output-elasticsearch-11.19.0-java/lib/logstash/outputs/elasticsearch.rb:301:in `register'", "org/logstash/config/ir/compiler/AbstractOutputDelegatorExt.java:69:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:237:in `block in register_plugins'", "org/jruby/RubyArray.java:1987:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:236:in `register_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:610:in `maybe_setup_out_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:249:in `start_workers'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:194:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:146:in `block in start'"], "pipeline.sources"=>["some-metrics/logstash.conf"], :thread=>"#<Thread:0x3e7095fd /usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:134 run>"}
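For reference, a logstash.conf consistent with the settings echoed in the debug output above would look roughly like this. This is my reconstruction rather than a verbatim copy; the user is redacted as in the log, and ES_PWD is a placeholder for the keystore entry the SecretStoreFactory lines suggest is being looked up:

input {
  file {
    path => ["some-metrics/some-metrics.csv"]
    mode => "tail"
    start_position => "beginning"
    sincedb_path => "/dev/null"
    file_completed_action => "log"
    file_completed_log_path => "some-metrics/done.log"
  }
}

filter {
  csv {
    skip_header => true
    columns => ["applicationIdentification", "applicationDescription", "applicationName"]
  }
}

output {
  elasticsearch {
    hosts => ["https://elasticsearch-example.com:443"]
    index => "some-metrics-%{+YYYY.MM}"
    user => "****"
    password => "${ES_PWD}"  # placeholder; the real value is resolved from the keystore
    ssl_enabled => true
    ssl_certificate_authorities => ["some-metrics/cacerts/elasticsearch-example.com.cer"]
  }
}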
Do you have any ideas? From the parser error it looks like the endpoint is returning an HTML page (the body starts with <!DOCTYPE html>) instead of the JSON root response the Elasticsearch output plugin expects, which is why the health check fails and the pipeline never starts.
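In case it helps to reproduce: the health check the plugin runs is a plain GET against the cluster root (see the healthcheck_url line above), so the equivalent manual request would be something like the following, with the credentials as placeholders:

curl -v \
  --cacert some-metrics/cacerts/elasticsearch-example.com.cer \
  -u '<user>:<password>' \
  'https://elasticsearch-example.com:443/'

Against an actual Elasticsearch node this returns a small JSON document containing the cluster name and version; here the same URL apparently serves an HTML page, which is what trips the JSON parser.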