Difficultés de chargement de données (mysql) via logstash

Bonjour,
je viens solliciter votre aide pour debugger cette app. Je veux mettre en place un moteur de recherche avec élasticseach. J'ai créé mon docker-compose avec les services suivants: mysql, elasticsearch, kibana et logstash. Lorsque je fais le docker-compose up -d, je n'arrive malheureusement pas à charger mes données. Depuis plus d'un mois, je n'avance pas parce que ça ne marche pas. Je souhaite votre aide.
Je vous envoie les élèments:
services:
mysql:
build:
context: mysql/
args:
MYSQL_VERSION: $MYSQL_VERSION
volumes:
- type: bind
source: ./mysql/config/mysql.cnf
target: /etc/mysql/conf.d/my.cnf
read_only: true
# Initialize database using SQL scripts
- type: bind
source: ./mysql/sql
target: /docker-entrypoint-initdb.d
read_only: true
# Persist data on host machine
- type: volume
source: mysql
target: /var/lib/mysql
ports:
- "3307:3306"
# Docker
environment:
- MYSQL_ROOT_PASSWORD=root
- MYSQL_DATABASE=wordpress
- MYSQL_USER=root
- MYSQL_PASSWORD=root
networks:
- mysql-elk

elasticsearch:
build:
context: elasticsearch/
args:
ELK_VERSION: $ELK_VERSION
volumes:
- type: bind
source: ./elasticsearch/config/elasticsearch.yml
target: /usr/share/elasticsearch/config/elasticsearch.yml
read_only: true
# Persist data on host machine
- type: volume
source: elasticsearch
target: /usr/share/elasticsearch/data
ports:
- "9200:9200"
- "9300:9300"
environment:
ES_JAVA_OPTS: "-Xmx256m -Xms256m"
ELASTIC_PASSWORD: changeme
# Use single node discovery in order to disable production mode and avoid bootstrap checks.
# see: Bootstrap Checks | Elasticsearch Guide [8.8] | Elastic
discovery.type: single-node
networks:
- mysql-elk

logstash:
build:
context: logstash/
args:
ELK_VERSION: $ELK_VERSION
volumes:
- type: bind
source: ./logstash/config/logstash.yml
target: /usr/share/logstash/config/logstash.yml
read_only: true
# List id and config path of pipelines
- type: bind
source: ./logstash/pipeline/pipelines.yml
target: /usr/share/logstash/config/pipelines.yml
read_only: true
# Add configurations of pipelines
- type: bind
source: ./logstash/pipeline/conf
target: /usr/share/logstash/pipeline
read_only: true
# Add SQL scripts used by JDBC input plugin
- type: bind
source: ./logstash/pipeline/sql
target: /usr/share/logstash/sql
read_only: true
ports:
- "5044:5044"
- "5000:5000/tcp"
- "5000:5000/udp"
- "9600:9600"
environment:
LS_JAVA_OPTS: "-Xmx256m -Xms256m"
networks:
- mysql-elk
depends_on:
- elasticsearch

kibana:
build:
context: kibana/
args:
ELK_VERSION: $ELK_VERSION
volumes:
- type: bind
source: ./kibana/config/kibana.yml
target: /usr/share/kibana/config/kibana.yml
read_only: true
ports:
- "5601:5601"
networks:
- mysql-elk
depends_on:
- elasticsearch

networks:
mysql-elk:
driver: bridge

volumes:
mysql:
elasticsearch:

--Dockerfile--
ARG ELK_VERSION

https://www.docker.elastic.co/

FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}

Download MySQL JDBC driver to connect Logstash to MySQL

RUN curl -Lo "mysql-connector-java-8.0.22.tar.gz" "https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-8.0.22.tar.gz"
&& tar -xf "mysql-connector-java-8.0.22.tar.gz" "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar"
&& mv "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar" "mysql-connector-java-8.0.22.jar"
&& rm -r "mysql-connector-java-8.0.22" "mysql-connector-java-8.0.22.tar.gz"

ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]

Add your logstash plugins setup here

Example: RUN logstash-plugin install logstash-filter-json

--logstash.conf--
input {

Jdbc input plugin | Logstash Reference [8.8] | Elastic

jdbc {
jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-8.0.22.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_connection_string => "jdbc:mysql://mysql:3306/wordpress"
jdbc_user => "root"
jdbc_password => "root"
sql_log_level => "debug"
clean_run => true
record_last_run => false
statement => "SELECT * FROM mod341_10_posts"
#statement_filepath => "/usr/share/logstash/sql/wordpress_table.sql"
}
}

https://www.elastic.co/guide/en/logstash/current/filter-plugins.html+

filter {
mutate {
remove_field => ["@version", "@timestamp"]
}
}
output {

Elasticsearch output plugin | Logstash Reference [8.8] | Elastic

elasticsearch {
hosts => "elasticsearch:9200"
#user => "elastic"
#password => "changeme"
ecs_compatibility => disabled
index => "word_index"
document_id => "%{id}"
}
}

--pipelines.yml--

  • pipeline.id: wordpress-table
    path.config: "/usr/share/logstash/pipeline/wordpress_table.conf"

--.env---
MYSQL_VERSION=8.0
ELK_VERSION=7.13.0

Bonjour

Peux-tu éditer ton post STP et le formatter avec le bouton </> afin de le rendre plus lisible et agréable à lire ?

Merci.

Que se passe t'il ? Est-ce que les services de ton docker-compose sont tous up ? Est-ce qu'Elasticsearch tourne bien ?

J'ai un doute sur:

ES_JAVA_OPTS: "-Xmx256m -Xms256m"

Ca me semble très faible pour Elasticsearch. Je mettrais plutôt 1g...

Je vous remercie pour votre assistance. Ayant changé les valeurs de la variable ES_JAVA_OPTS: "-Xmx256m -Xms256m" à 1g, ça n'a rien changé et ça ne marche toujours pas.

type or paste code here
```version: "3"
services:
  mysql:
    image: mysql:8
    container_name: sem_mysql
    # restart: on-failure
    ports:
      - 3307:3306
    environment:
      MYSQL_RANDOM_ROOT_PASSWORD: "yes"
      MYSQL_DATABASE: wordpress
    #  MYSQL_USER: root
      MYSQL_PASSWORD: root
    volumes:
      # Dump files for initiating tables
      - ./data/:/docker-entrypoint-initdb.d/
    logging:
        driver: "json-file"
        options:
            max-size: "10k"
            max-file: "10"
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.9.3
    container_name: sem_elasticsearch
    # restart: on-failure
    environment:
      - discovery.type=single-node
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
    ports:
      - 9201:9200
      - 9301:9300
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./volumes/elasticsearch:/usr/share/elasticsearch/data
    logging:
        driver: "json-file"
        options:
            max-size: "10k"
            max-file: "10"
  logstash:
    build:
      context: .
      dockerfile: Dockerfile-logstash
    container_name: sem_logstash
    # restart: on-failure
    depends_on:
      - mysql
      - elasticsearch
    volumes:
      - ./volumes/logstash/pipeline/:/usr/share/logstash/pipeline/
      - ./volumes/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml
      - ./volumes/logstash/config/pipelines.yml:/usr/share/logstash/config/pipelines.yml
      - ./volumes/logstash/config/queries/:/usr/share/logstash/config/queries/
    logging:
      driver: "json-file"
      options:
        max-size: "10k"
        max-file: "10"
  kibana:
    image: docker.elastic.co/kibana/kibana:7.9.3
    container_name: sem_kibana
    environment:
      - "ELASTICSEARCH_URL=http://elasticsearch:9200"
      - "SERVER_NAME=127.0.0.1"
    ports:
      - 5600:5601
    depends_on:
      - elasticsearch

--logstash---`
input {
  jdbc {
    jdbc_driver_library => "/usr/share/logstash/mysql-connector-java-8.0.22.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://mysql:3306"
    jdbc_user => "root"
    jdbc_password => "root"
    sql_log_level => "debug"  # Set Logstash logging level as this
    clean_run => true # Set to true for indexing from scratch
    record_last_run => false
    statement_filepath => "/usr/share/logstash/config/queries/wordpress.sql"
  }
}

filter {
  mutate {
    remove_field => ["@version", "@timestamp"]
  }
}

output {
  stdout { codec => rubydebug { metadata => true } }
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]`Preformatted text`
    index => "posts"
    action => "index"
    document_id => "%{id}"
  }
}


Result
J'ai ce retour lorsque je teste

type or paste code here

"error": {
"root_cause": [
{
"type": "index_not_found_exception",
"reason": "no such index [posts]",
"resource.type": "index_or_alias",
"resource.id": "posts",
"index_uuid": "_na_",
"index": "posts"
}
],
"type": "index_not_found_exception",
"reason": "no such index [posts]",
"resource.type": "index_or_alias",
"resource.id": "posts",
"index_uuid": "_na_",
"index": "posts"
},
"status": 404
}

Quels sont les logs du container Logstash ?

Voici les logs de logstash:

type or paste sem_kibana       | {"type":"response","@timestamp":"2023-06-09T05:23:29Z","tags":[],"pid":6,"method":"post","statusCode":200,"req":{"url":"/api/index_management/indices/reload","method":"post","headers":{"host":"localhost:5600","connection":"keep-alive","content-length":"24","sec-ch-ua":"\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"","content-type":"application/json","kbn-version":"7.9.3","sec-ch-ua-mobile":"?0","user-agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36","sec-ch-ua-platform":"\"Linux\"","accept":"*/*","origin":"http://localhost:5600","sec-fetch-site":"same-origin","sec-fetch-mode":"cors","sec-fetch-dest":"empty","referer":"http://localhost:5600/app/management/data/index_management/indices","accept-encoding":"gzip, deflate, br","accept-language":"fr-FR,fr;q=0.9,en-US;q=0.8,en;q=0.7"},"remoteAddress":"172.31.0.1","userAgent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36","referer":"http://localhost:5600/app/management/data/index_management/indices"},"res":{"statusCode":200,"responseTime":24,"contentLength":9},"message":"POST /api/index_management/indices/reload 200 24ms - 9.0B"}
sem_logstash     | [2023-06-09T05:52:16,350][ERROR][logstash.inputs.jdbc     ][from-scratch-pipeline][0ef987ad3420011a276adcaea4c34a3d6023d52e881d1820627479eb3043d30a] Unable to connect to database. Tried 1 times {:error_message=>"Java::ComMysqlCjJdbcExceptions::CommunicationsException: Communications link failure\n\nThe last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server."}
sem_logstash     | [2023-06-09T05:52:16,351][ERROR][logstash.javapipeline    ][from-scratch-pipeline][0ef987ad3420011a276adcaea4c34a3d6023d52e881d1820627479eb3043d30a] A plugin had an unrecoverable error. Will restart this plugin.
sem_logstash     |   Pipeline_id:from-scratch-pipeline
sem_logstash     |   Plugin: <LogStash::Inputs::Jdbc jdbc_user=>"root", jdbc_password=><password>, statement_filepath=>"/usr/share/logstash/config/queries/from-scratch.sql", clean_run=>true, jdbc_driver_library=>"/usr/share/logstash/mysql-connector-java-8.0.22.jar", jdbc_connection_string=>"jdbc:mysql://mysql:3306", sql_log_level=>"debug", id=>"0ef987ad3420011a276adcaea4c34a3d6023d52e881d1820627479eb3043d30a", jdbc_driver_class=>"com.mysql.jdbc.Driver", record_last_run=>false, enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_ec20c909-1625-401c-a77b-c7e26b766ff6", enable_metric=>true, charset=>"UTF-8">, jdbc_paging_enabled=>false, jdbc_page_size=>100000, jdbc_validate_connection=>false, jdbc_validation_timeout=>3600, jdbc_pool_timeout=>5, connection_retry_attempts=>1, connection_retry_attempts_wait_time=>0.5, plugin_timezone=>"utc", last_run_metadata_path=>"/usr/share/logstash/.logstash_jdbc_last_run", use_column_value=>false, tracking_column_type=>"numeric", lowercase_column_names=>true, use_prepared_statements=>false>
sem_logstash     |   Error: Java::ComMysqlCjJdbcExceptions::CommunicationsException: Communications link failure
sem_logstash     | 
sem_logstash     | The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
sem_logstash     |   Exception: Sequel::DatabaseConnectionError
sem_logstash     |   Stack: com.mysql.cj.jdbc.exceptions.SQLError.createCommunicationsException(com/mysql/cj/jdbc/exceptions/SQLError.java:174)
sem_logstash     | com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(com/mysql/cj/jdbc/exceptions/SQLExceptionsMapping.java:64)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.createNewIO(com/mysql/cj/jdbc/ConnectionImpl.java:836)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.<init>(com/mysql/cj/jdbc/ConnectionImpl.java:456)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.getInstance(com/mysql/cj/jdbc/ConnectionImpl.java:246)
sem_logstash     | com.mysql.cj.jdbc.NonRegisteringDriver.connect(com/mysql/cj/jdbc/NonRegisteringDriver.java:198)
sem_logstash     | jdk.internal.reflect.GeneratedMethodAccessor59.invoke(jdk/internal/reflect/GeneratedMethodAccessor59)
sem_logstash     | jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43)
sem_logstash     | java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566)
sem_logstash     | org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:471)
sem_logstash     | org.jruby.javasupport.JavaMethod.invokeDirect(org/jruby/javasupport/JavaMethod.java:329)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.adapters.jdbc.connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/adapters/jdbc.rb:227)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.make_new(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool.rb:122)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.assign_connection(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:209)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.acquire(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:139)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.hold(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:91)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.database.connecting.synchronize(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/database/connecting.rb:270)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.database.connecting.test_connection(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/database/connecting.rb:279)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.database.connecting.connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/database/connecting.rb:58)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.core.connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/core.rb:124)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.plugin_mixins.jdbc.jdbc.jdbc_connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/plugin_mixins/jdbc/jdbc.rb:114)
sem_logstash     | org.jruby.RubyKernel.loop(org/jruby/RubyKernel.java:1442)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.plugin_mixins.jdbc.jdbc.jdbc_connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/plugin_mixins/jdbc/jdbc.rb:111)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.plugin_mixins.jdbc.jdbc.open_jdbc_connection(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/plugin_mixins/jdbc/jdbc.rb:139)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.plugin_mixins.jdbc.jdbc.execute_statement(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/plugin_mixins/jdbc/jdbc.rb:197)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.inputs.jdbc.execute_query(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/inputs/jdbc.rb:316)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_jdbc_minus_5_dot_0_dot_6.lib.logstash.inputs.jdbc.run(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-jdbc-5.0.6/lib/logstash/inputs/jdbc.rb:284)
sem_logstash     | usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.inputworker(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:405)
sem_logstash     | usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.RUBY$method$inputworker$0$__VARARGS__(usr/share/logstash/logstash_minus_core/lib/logstash//usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb)
sem_logstash     | usr.share.logstash.logstash_minus_core.lib.logstash.java_pipeline.start_input(/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:396)
sem_logstash     | org.jruby.RubyProc.call(org/jruby/RubyProc.java:318)
sem_logstash     | java.lang.Thread.run(java/lang/Thread.java:834)
sem_logstash     | [2023-06-09T05:52:16,366][ERROR][logstash.inputs.jdbc     ][wordpress-pipeline][9abc34fc956254f2a75641214680cf7a64031af54b6c253a990164495fbc49e7] Unable to connect to database. Tried 1 times {:error_message=>"Java::ComMysqlCjJdbcExceptions::CommunicationsException: Communications link failure\n\nThe last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server."}
sem_logstash     | [2023-06-09T05:52:16,368][ERROR][logstash.javapipeline    ][wordpress-pipeline][9abc34fc956254f2a75641214680cf7a64031af54b6c253a990164495fbc49e7] A plugin had an unrecoverable error. Will restart this plugin.code here

Je pense qu'on voit clairement le problème de connexion à la base de données. Vérifie ce point.

Est-ce que MySQL fonctionne bien ? Est-il accessible depuis le conteneur Logstash ?

mysql est résolu. il es démarré correctement. Voici les logs:

sem_mysql        | 2023-06-09 06:25:47+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 8.0.33-1.el8 started.
sem_mysql        | 2023-06-09 06:25:48+00:00 [Note] [Entrypoint]: Switching to dedicated user 'mysql'
sem_mysql        | 2023-06-09 06:25:48+00:00 [Note] [Entrypoint]: Entrypoint script for MySQL Server 8.0.33-1.el8 started.
sem_mysql        | '/var/lib/mysql/mysql.sock' -> '/var/run/mysqld/mysqld.sock'
sem_mysql        | 2023-06-09T06:25:51.474780Z 0 [Warning] [MY-011068] [Server] The syntax '--skip-host-cache' is deprecated and will be removed in a future release. Please use SET GLOBAL host_cache_size=0 instead.
sem_mysql        | 2023-06-09T06:25:51.476730Z 0 [System] [MY-010116] [Server] /usr/sbin/mysqld (mysqld 8.0.33) starting as process 1
sem_mysql        | 2023-06-09T06:25:51.563143Z 1 [System] [MY-013576] [InnoDB] InnoDB initialization has started.
sem_mysql        | 2023-06-09T06:25:55.184649Z 1 [System] [MY-013577] [InnoDB] InnoDB initialization has ended.
sem_mysql        | 2023-06-09T06:25:56.611022Z 0 [System] [MY-010229] [Server] Starting XA crash recovery...
sem_mysql        | 2023-06-09T06:25:56.686770Z 0 [System] [MY-010232] [Server] XA crash recovery finished.
sem_mysql        | 2023-06-09T06:25:57.513615Z 0 [Warning] [MY-010068] [Server] CA certificate ca.pem is self signed.
sem_mysql        | 2023-06-09T06:25:57.514244Z 0 [System] [MY-013602] [Server] Channel mysql_main configured to support TLS. Encrypted connections are now supported for this channel.
sem_mysql        | 2023-06-09T06:25:57.554945Z 0 [Warning] [MY-011810] [Server] Insecure configuration for --pid-file: Location '/var/run/mysqld' in the path is accessible to all OS users. Consider choosing a different directory.
sem_mysql        | 2023-06-09T06:25:57.653809Z 0 [System] [MY-011323] [Server] X Plugin ready for connections. Bind-address: '::' port: 33060, socket: /var/run/mysqld/mysqlx.sock
sem_mysql        | 2023-06-09T06:25:57.654892Z 0 [System] [MY-010931] [Server] /usr/sbin/mysqld: ready for connections. Version: '8.0.33'  socket: '/var/run/mysqld/mysqld.sock'  port: 3306  MySQL Community Server - GPL.

La suite est logstash qui cause un nouveau problème, voici son log
```sem_logstash     |   Error: Java::ComMysqlCjJdbcExceptions::CommunicationsException: Communications link failure
sem_logstash     | 
sem_logstash     | The last packet sent successfully to the server was 0 milliseconds ago. The driver has not received any packets from the server.
sem_logstash     |   Exception: Sequel::DatabaseConnectionError
sem_logstash     |   Stack: com.mysql.cj.jdbc.exceptions.SQLError.createCommunicationsException(com/mysql/cj/jdbc/exceptions/SQLError.java:174)
sem_logstash     | com.mysql.cj.jdbc.exceptions.SQLExceptionsMapping.translateException(com/mysql/cj/jdbc/exceptions/SQLExceptionsMapping.java:64)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.createNewIO(com/mysql/cj/jdbc/ConnectionImpl.java:836)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.<init>(com/mysql/cj/jdbc/ConnectionImpl.java:456)
sem_logstash     | com.mysql.cj.jdbc.ConnectionImpl.getInstance(com/mysql/cj/jdbc/ConnectionImpl.java:246)
sem_logstash     | com.mysql.cj.jdbc.NonRegisteringDriver.connect(com/mysql/cj/jdbc/NonRegisteringDriver.java:198)
sem_logstash     | jdk.internal.reflect.GeneratedMethodAccessor59.invoke(jdk/internal/reflect/GeneratedMethodAccessor59)
sem_logstash     | jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(jdk/internal/reflect/DelegatingMethodAccessorImpl.java:43)
sem_logstash     | java.lang.reflect.Method.invoke(java/lang/reflect/Method.java:566)
sem_logstash     | org.jruby.javasupport.JavaMethod.invokeDirectWithExceptionHandling(org/jruby/javasupport/JavaMethod.java:471)
sem_logstash     | org.jruby.javasupport.JavaMethod.invokeDirect(org/jruby/javasupport/JavaMethod.java:329)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.adapters.jdbc.connect(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/adapters/jdbc.rb:227)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.make_new(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool.rb:122)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.assign_connection(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:209)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.acquire(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:139)
sem_logstash     | usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.sequel_minus_5_dot_37_dot_0.lib.sequel.connection_pool.threaded.hold(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/sequel-5.37.0/lib/sequel/connection_pool/threaded.rb:91)
type or paste code here
Je pense que le problème vient de la communication entre logstash et mysql.
En vérifiant ma config, je ne vois pas l'erreur.

Je pense que tu as une typo ici:

`3307:3306`

Ça devrait être:

`3306:3306`

le port de sortie 3306 est occupé, c'est pour cela que j'ai choisi le 3307

Est-ce que c'est correct du coup?

jdbc_connection_string => "jdbc:mysql://mysql:3306"

Bonjour Mr David,

j'ai bossé intensivement sur le projet sans succès.
Je vous envoie un copie de celle-ci, si vous avez un peu de temps pour y jeter un coup d'oeil.
Peut-être, vous trouverez le bug.
En attente de votre retour.
Merci d'avance
elk.tar

Bonjour

Je ne peux pas accéder à ce fichier et de toute façon, je préfère ne pas ouvrir ce type de contenu.
De quoi s'agit-il ?

Peux-tu le déposer en clair sur gist.github.com par exemple ?

Je vous remercie pour votre aide, sinon voici le repo du projet: git clone https://adimicool@bitbucket.org/pibafricateam/elk.git

Cordialement

Repository not found

Bonjour,
je vous remercie pour votre assistance.
Ayant essayé plusieurs fois, plus d'un mois sans succès.
Je n'arrive pas à charger les données dans elasticsearch depuis mysql.
Voici le dépôt: https://adimicool@bitbucket.org/pibafricateam/elk.git

Je vous remercie!

Même message d'erreur et j'avoue que je ne sais pas ce que tu veux partager.

Peux-tu décrire ce que tu veux partager ?

Je suis en train de mettre en place un moteur de recherche avec elasticsearch.
J'ai des erreurs, je n'arrive pas à index les données dans elasticsearch avec logstash.
Je vous envoie ma config afin que vous regardiez avec moi d'où provient l'erreur.

Merci d'avance

Donc merci de juste partager la config ici. Pas besoin de passer par un repo git je pense...

#docker-compose.yml```

version: "3"
services:
  mysql:
    image: mysql:8.0
    ports:
      - 3313:3306
    environment:
      - MYSQL_ROOT_USER=root
      - MYSQL_ROOT_PASSWORD=root
      - MYSQL_DATABASE=wordpress
    volumes:
      - ./mysql-data:/var/lib/mysql

  phpmyadmin:
    image: phpmyadmin/phpmyadmin
    ports:
      - 8080:80
    environment:
      - PMA_ARBITRARY=1
      - PMA_HOST=mysql
      - MYSQL_ROOT_USER=root
      - MYSQL_ROOT_PASSWORD=root

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.7.0
    container_name: sem_elasticsearch
    environment:
      - discovery.type=single-node
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms256m -Xmx256m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - ./volumes/elasticsearch:/usr/share/elasticsearch/data
    logging:
      driver: "json-file"
      options:
        max-size: "10k"
        max-file: "10"

  logstash:
    build:
      context: .
      dockerfile: Dockerfile-logstash
    container_name: sem_logstash
    depends_on:
      - mysql
      - elasticsearch
    environment:
      - LS_JAVA_HOME=/usr/share/logstash/jdk
    ports:
      - 9200:9200
      - 9300:9300
    volumes:
      - ./volumes/logstash/pipeline/:/usr/share/logstash/pipeline/
      - ./volumes/logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml
      - ./volumes/logstash/config/pipelines.yml:/usr/share/logstash/config/pipelines.yml
      - ./volumes/logstash/config/queries/:/usr/share/logstash/config/queries/
    logging:
      driver: "json-file"
      options:
        max-size: "10k"
        max-file: "10"

  kibana:
    image: docker.elastic.co/kibana/kibana:8.7.0
    container_name: sem_kibana
    environment:
      - "ELASTICSEARCH_URL=http://elasticsearch:9200"
      - "SERVER_NAME=127.0.0.1"
    ports:
      - 5602:5601
    depends_on:
      - elasticsearch

volumes:
  esdata:
  mysql-data:


#wordpress.conf

input {
  jdbc {
    jdbc_driver_library => "/usr/share/logstash/mysql-connector-j-8.0.22.jar"
    jdbc_driver_class => "com.mysql.jdbc.Driver"
    jdbc_connection_string => "jdbc:mysql://mysql:3306"
    jdbc_user => "root"
    jdbc_password => "root"
    sql_log_level => "debug"  # Set Logstash logging level as this
    clean_run => true # Set to true for indexing from scratch
    record_last_run => false
    statement_filepath => "/usr/share/logstash/config/queries/wordpress.sql"
  }
}

filter {
  mutate {
    remove_field => ["@version", "@timestamp"]
  }
}FROM docker.elastic.co/logstash/logstash:8.7.0

# Download JDBC connector for Logstash
RUN curl -L --output "mysql-connector-java-8.0.22.tar.gz" "https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-8.0.22.tar.gz" \
    && tar -xf "mysql-connector-java-8.0.22.tar.gz" "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar" \
    && mv "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar" "mysql-connector-java-8.0.22.jar" \
    && rm -r "mysql-connector-java-8.0.22" "mysql-connector-java-8.0.22.tar.gz"

ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]


output {
  # stdout { codec => rubydebug { metadata => true } }
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
    index => "posts"
    action => "index"
    document_id => "%{id}"
  }
}

#Dockerfile
FROM docker.elastic.co/logstash/logstash:8.7.0

# Download JDBC connector for Logstash
RUN curl -L --output "mysql-connector-java-8.0.22.tar.gz" "https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-8.0.22.tar.gz" \
    && tar -xf "mysql-connector-java-8.0.22.tar.gz" "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar" \
    && mv "mysql-connector-java-8.0.22/mysql-connector-java-8.0.22.jar" "mysql-connector-java-8.0.22.jar" \
    && rm -r "mysql-connector-java-8.0.22" "mysql-connector-java-8.0.22.tar.gz"

ENTRYPOINT ["/usr/local/bin/docker-entrypoint"]