Docker with SLM automatically snapshot

Hello,

I'm not an expert; I have followed Elasticsearch's Docker documentation, and it worked!

Here's my docker-compose.yml file

    version: '2.2'
services:
  dropbot-es01:
    restart: always
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: dropbot-es01
    environment:
      - node.name=dropbot-es01
      - cluster.name=es-docker-cluster
      - discovery.seed_hosts=dropbot-es02,dropbot-es03
      - cluster.initial_master_nodes=dropbot-es01,dropbot-es02,dropbot-es03
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - data01:/usr/share/elasticsearch/data
    ports:
      - 127.0.0.1:9200:9200
    networks:
      - elastic
  dropbot-es02:
    restart: always
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: dropbot-es02
    environment:
      - node.name=dropbot-es02
      - cluster.name=es-docker-cluster
      - discovery.seed_hosts=dropbot-es01,dropbot-es03
      - cluster.initial_master_nodes=dropbot-es01,dropbot-es02,dropbot-es03
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - data02:/usr/share/elasticsearch/data
    networks:
      - elastic
  dropbot-es03:
    restart: always
    image: docker.elastic.co/elasticsearch/elasticsearch:7.6.1
    container_name: dropbot-es03
    environment:
      - node.name=dropbot-es03
      - cluster.name=es-docker-cluster
      - discovery.seed_hosts=dropbot-es01,dropbot-es02
      - cluster.initial_master_nodes=dropbot-es01,dropbot-es02,dropbot-es03
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - data03:/usr/share/elasticsearch/data
    networks:
      - elastic

volumes:
  data01:
    driver: local
  data02:
    driver: local
  data03:
    driver: local

networks:
  elastic:
    driver: bridge

Then, I want to use the SLM feature, where I can back up my entire indexes automatically.

I have used the PUT request to _snapshot endpoint, but it didn't work.

My CURL response

    {
  "error" : {
    "root_cause" : [
      {
        "type" : "repository_exception",
        "reason" : "[backup_elasticsearch] location [/home/backup_elasticsearch] doesn't match any of the locations specified by path.repo because this setting is empty"
      }
    ],
    "type" : "repository_exception",
    "reason" : "[backup_elasticsearch] failed to create repository",
    "caused_by" : {
      "type" : "repository_exception",
      "reason" : "[backup_elasticsearch] location [/home/backup_elasticsearch] doesn't match any of the locations specified by path.repo because this setting is empty"
    }
  },
  "status" : 500
}

Can you please help me set `path.repo` in the docker-compose.yml file?

And do I need to set `path.repo` for every node in the cluster, or just the first one?

Thank you

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.