Logstash failed to execute action

I have a docker-compose-controlled ELK stack with the following configs.
logstash/logstash.conf :

input {
  jdbc {
    jdbc_driver_library => "/home/user/extvol/elk/mysql-connector-j-8.2.0.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://192.168.10.14:3306/book"
    jdbc_user => "USER"
    jdbc_password => "APASS"
    statement => "SELECT * from story"
    #schedule => "*/5 * * * * *"
  }
}

filter {}

output {
  Elasticsearch {
    hosts => ["192.168.10.15:9200"]
    index => "book"
    user => "elastic"
    password => "changeme"
    ssl =>> true
    ssl_certificate_verfication => false
  }
  stdout { codec => rubydebug { metadata => true } }
}

docker-compose.yml :

version: '3.6'
services:
  Elasticsearch:
    image: elasticsearch:7.16.2
    container_name: elasticsearch
    restart: always
    volumes:
    - elastic_data:/usr/share/elasticsearch/data/
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      discovery.type: single-node
    ports:
    - '9200:9200'
    - '9300:9300'
    networks:
      - elk

  Logstash:
    image: logstash:7.16.2
    container_name: logstash
    restart: always
    volumes:
    - /home/user/extvol/elk/logstash/logstash.conf:/etc/logstash/conf.d/logstash.conf:ro
    command: logstash -f /etc/logstash/conf.d/logstash.conf
    depends_on:
      - Elasticsearch
    ports:
    - '9600:9600'
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk

  Kibana:
    image: kibana:7.16.2
    container_name: kibana
    restart: always
    ports:
    - '5601:5601'
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
    depends_on:
      - Elasticsearch
    networks:
      - elk
volumes:
  elastic_data: {}

networks:
  elk:

.env :

# Project namespace (defaults to the current folder name if not set)
COMPOSE_PROJECT_NAME=es

# Password for the 'elastic' user (at least 6 characters)
ELASTIC_PASSWORD=changeme

# Password for the 'kibana_system' user (at least 6 characters)
KIBANA_PASSWORD=changeme

# Version of Elastic products
#https://www.elastic.co/downloads/past-releases#elasticsearch
STACK_VERSION=8.8.2

# Set the cluster name
CLUSTER_NAME=docker-cluster

# Set to 'basic' or 'trial' to automatically start the 30-day trial
LICENSE=basic
#LICENSE=trial

# Port to expose Elasticsearch HTTP API to the host
ES_PORT=9200

# Port to expose Kibana to the host
KIBANA_PORT=5601

# Port to expose Fleet to the host
FLEET_PORT=8220

# Port to expose APM to the host
APMSERVER_PORT=8200

# APM Secret Token for POC environments only
ELASTIC_APM_SECRET_TOKEN=supersecrettoken

# Increase or decrease based on the available host memory (in bytes)
ES_MEM_LIMIT=3073741824
KB_MEM_LIMIT=1073741824
LS_MEM_LIMIT=1073741824

# SAMPLE Predefined Key only to be used in POC environments
ENCRYPTION_KEY=XXXXXXXXXXXXXXXXX

When I build the containers with docker-compose up, the following error appears somewhere in the output:

logstash         | [2024-03-06T14:19:39,866][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", [A-Za-z0-9_-], '\"', \"'\", [A-Za-z_], \"-\", [0-9], \"[\", \"{\" at line 21, column 11 (byte 508) after output {\n  Elasticsearch {\n    hosts => [\"192.168.10.15:9200\"]\n    index => \"book\"\n    user => \"elastic\"\n    password => \"changeme\"\n    ssl =>", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:187:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:47:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:383:in `block in converge_state'"]}

What do you think?

The input is a MySQL database called book on a server with IP 192.168.10.14, and it contains a table called story. Remote access to MySQL has been checked, and all the interconnections between the computers are allowed.
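
For reference, a quick way to confirm the remote access from the Logstash host, using the mysql client and the credentials from the config above (a sketch, the exact command may differ):

# connect to the remote MySQL server and count the rows in the story table
mysql -h 192.168.10.14 -P 3306 -u USER -p book -e "SELECT COUNT(*) FROM story;"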

I think this is wrong:

output {
  Elasticsearch {
    hosts => ["192.168.10.15:9200"]
    index => "book"
    user => "elastic"
    password => "changeme"
    ssl =>> true
    ssl_certificate_verfication => false
  }
  stdout { codec => rubydebug { metadata => true } }
}

There are quite a few errors, I think. It should be something like:

output {
  elasticsearch {
    hosts => ["192.168.10.15:9200"]
    index => "book"
    user => "elastic"
    password => "changeme"
    ssl => true
    ssl_certificate_verification => false
  }
  stdout { codec => rubydebug { metadata => true } }
}

I removed ">" as you pointed. But this error is still there:

logstash         | [2024-03-06T15:04:44,349][ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [ \\t\\r\\n], \"#\", [A-Za-z0-9_-], '\"', \"'\", [A-Za-z_], \"-\", [0-9], \"[\", \"{\" at line 21, column 11 (byte 508) after output {\n  Elasticsearch {\n    hosts => [\"192.168.10.15:9200\"]\n    index => \"book\"\n    user => \"elastic\"\n    password => \"changeme\"\n    ssl =>", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:32:in `compile_imperative'", "org/logstash/execution/AbstractPipelineExt.java:187:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:72:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:47:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:383:in `block in converge_state'"]}

It was not only that. I changed three things: the output plugin name must be lowercase (elasticsearch, not Elasticsearch), the extra > in ssl =>> had to go, and ssl_certificate_verfication was misspelled (it should be ssl_certificate_verification).

Thank you, that correction removed the error. However, the data still did not get fetched into Elasticsearch :frowning:
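
A quick way to confirm that nothing reached the index, run from the host (a sketch, assuming the HTTP endpoint and credentials from the output config above):

# ask Elasticsearch for the document count of the book index;
# an index_not_found_exception or a count of 0 means nothing was indexed
curl -u elastic:changeme http://192.168.10.15:9200/book/_count

Here is the full Logstash output: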

sudo docker start -a 2da902a1e4db
Using bundled JDK: /usr/share/logstash/jdk
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Sending Logstash logs to /usr/share/logstash/logs which is now configured via log4j2.properties
[2024-03-06T15:49:22,318][INFO ][logstash.runner          ] Log4j configuration path used is: /usr/share/logstash/config/log4j2.properties
[2024-03-06T15:49:22,328][INFO ][logstash.runner          ] Starting Logstash {"logstash.version"=>"7.16.2", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.13+8 on 11.0.13+8 +indy +jit [linux-x86_64]"}
[2024-03-06T15:49:22,649][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified
[2024-03-06T15:49:23,560][WARN ][logstash.monitoringextension.pipelineregisterhook] xpack.monitoring.enabled has not been defined, but found elasticsearch configuration. Please explicitly set `xpack.monitoring.enabled: true` in logstash.yml
[2024-03-06T15:49:23,563][WARN ][deprecation.logstash.monitoringextension.pipelineregisterhook] Internal collectors option for Logstash monitoring is deprecated and may be removed in a future release.
Please configure Metricbeat to monitor Logstash. Documentation can be found at:
https://www.elastic.co/guide/en/logstash/current/monitoring-with-metricbeat.html
[2024-03-06T15:49:23,904][WARN ][deprecation.logstash.codecs.plain] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:23,968][WARN ][deprecation.logstash.outputs.elasticsearch] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:24,290][INFO ][logstash.licensechecker.licensereader] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
[2024-03-06T15:49:24,479][WARN ][logstash.licensechecker.licensereader] Restored connection to ES instance {:url=>"http://elasticsearch:9200/"}
[2024-03-06T15:49:24,492][INFO ][logstash.licensechecker.licensereader] Elasticsearch version determined (7.16.2) {:es_version=>7}
[2024-03-06T15:49:24,494][WARN ][logstash.licensechecker.licensereader] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2024-03-06T15:49:24,596][INFO ][logstash.monitoring.internalpipelinesource] Monitoring License OK
[2024-03-06T15:49:24,597][INFO ][logstash.monitoring.internalpipelinesource] Validated license for monitoring. Enabling monitoring pipeline.
[2024-03-06T15:49:24,858][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[2024-03-06T15:49:25,667][INFO ][org.reflections.Reflections] Reflections took 89 ms to scan 1 urls, producing 119 keys and 417 values
[2024-03-06T15:49:26,741][WARN ][deprecation.logstash.codecs.plain] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,741][WARN ][deprecation.logstash.codecs.plain] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,757][WARN ][deprecation.logstash.inputs.jdbc] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,778][WARN ][deprecation.logstash.codecs.plain] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,779][WARN ][deprecation.logstash.codecs.plain] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,827][WARN ][deprecation.logstash.outputs.elasticsearch] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,827][WARN ][deprecation.logstash.outputs.elasticsearchmonitoring] Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[2024-03-06T15:49:26,901][INFO ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearchMonitoring", :hosts=>["http://elasticsearch:9200"]}
[2024-03-06T15:49:26,924][INFO ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
[2024-03-06T15:49:26,963][WARN ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Restored connection to ES instance {:url=>"http://elasticsearch:9200/"}
[2024-03-06T15:49:26,970][INFO ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Elasticsearch version determined (7.16.2) {:es_version=>7}
[2024-03-06T15:49:26,970][WARN ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2024-03-06T15:49:27,046][WARN ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Configuration is data stream compliant but due backwards compatibility Logstash 7.x will not assume writing to a data-stream, default behavior will change on Logstash 8.0 (set `data_stream => true/false` to disable this warning)
[2024-03-06T15:49:27,046][WARN ][logstash.outputs.elasticsearchmonitoring][.monitoring-logstash] Configuration is data stream compliant but due backwards compatibility Logstash 7.x will not assume writing to a data-stream, default behavior will change on Logstash 8.0 (set `data_stream => true/false` to disable this warning)
[2024-03-06T15:49:27,046][INFO ][logstash.outputs.elasticsearch][main] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//192.168.10.15:9200"]}
[2024-03-06T15:49:27,055][WARN ][logstash.outputs.elasticsearch][main] ** WARNING ** Detected UNSAFE options in elasticsearch output configuration!
** WARNING ** You have enabled encryption but DISABLED certificate verification.
** WARNING ** To make sure your data is secure change :ssl_certificate_verification to true
[2024-03-06T15:49:27,061][WARN ][logstash.javapipeline    ][.monitoring-logstash] 'pipeline.ordered' is enabled and is likely less efficient, consider disabling if preserving event order is not necessary
[2024-03-06T15:49:27,094][INFO ][logstash.outputs.elasticsearch][main] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[https://elastic:xxxxxx@192.168.10.15:9200/]}}
[2024-03-06T15:49:27,126][INFO ][logstash.javapipeline    ][.monitoring-logstash] Starting pipeline {:pipeline_id=>".monitoring-logstash", "pipeline.workers"=>1, "pipeline.batch.size"=>2, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>2, "pipeline.sources"=>["monitoring pipeline"], :thread=>"#<Thread:0xc8356bd run>"}
[2024-03-06T15:49:27,250][ERROR][logstash.javapipeline    ][main] Pipeline error {:pipeline_id=>"main", :exception=>#<Manticore::UnknownException: Unsupported or unrecognized SSL message>, :backtrace=>["/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.1-java/lib/manticore/response.rb:36:in `block in initialize'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/manticore-0.7.1-java/lib/manticore/response.rb:79:in `call'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb:74:in `perform_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:326:in `perform_request_to_url'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:235:in `health_check_request'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:242:in `block in healthcheck!'", "org/jruby/RubyHash.java:1415:in `each'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:240:in `healthcheck!'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:374:in `update_urls'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:89:in `update_initial_urls'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client/pool.rb:83:in `start'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client.rb:359:in `build_pool'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client.rb:63:in `initialize'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client_builder.rb:106:in `create_http_client'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch/http_client_builder.rb:102:in `build'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/plugin_mixins/elasticsearch/common.rb:34:in `build_client'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-output-elasticsearch-11.2.3-java/lib/logstash/outputs/elasticsearch.rb:275:in `register'", "org/logstash/config/ir/compiler/OutputStrategyExt.java:131:in `register'", "org/logstash/config/ir/compiler/AbstractOutputDelegatorExt.java:68:in `register'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:232:in `block in register_plugins'", "org/jruby/RubyArray.java:1821:in `each'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:231:in `register_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:589:in `maybe_setup_out_plugins'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:244:in `start_workers'", 
"/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:189:in `run'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:141:in `block in start'"], "pipeline.sources"=>["/etc/logstash/conf.d/logstash.conf"], :thread=>"#<Thread:0x4d24c149@/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:54 run>"}
[2024-03-06T15:49:27,253][INFO ][logstash.javapipeline    ][main] Pipeline terminated {"pipeline.id"=>"main"}
[2024-03-06T15:49:27,275][ERROR][logstash.agent           ] Failed to execute action {:id=>:main, :action_type=>LogStash::ConvergeResult::FailedAction, :message=>"Could not execute action: PipelineAction::Create<main>, action_result: false", :backtrace=>nil}
[2024-03-06T15:49:27,807][INFO ][logstash.javapipeline    ][.monitoring-logstash] Pipeline Java execution initialization time {"seconds"=>0.68}
[2024-03-06T15:49:27,844][INFO ][logstash.javapipeline    ][.monitoring-logstash] Pipeline started {"pipeline.id"=>".monitoring-logstash"}
[2024-03-06T15:49:30,029][INFO ][logstash.javapipeline    ][.monitoring-logstash] Pipeline terminated {"pipeline.id"=>".monitoring-logstash"}
[2024-03-06T15:49:30,100][INFO ][logstash.runner          ] Logstash shut down.

Maybe check this? Jdbc input plugin | Logstash Reference [8.12] | Elastic

But I can't really tell more, and I suggest opening a new discussion, as this one is solved.
