Reaching Elasticsearch from a Jupyter notebook

I am running Elasticsearch via docker-compose and I want to connect to it from Python and test it. The docker-compose file has the following form:

version: '3.2'

services:
 
  elasticsearch:
    container_name: elasticsearch
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./elasticsearch/config/elasticsearch.yml
        target: /usr/share/elasticsearch/config/elasticsearch.yml
        read_only: true
      - type: volume
        source: elasticsearch
        target: /usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: changeme
    links: 
      - kibana
    networks:
      - elk

  logstash:
    container_name: logstash
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./logstash/config/logstash.yml
        target: /usr/share/logstash/config/logstash.yml
        read_only: true
      - type: bind
        source: ./logstash/pipeline
        target: /usr/share/logstash/pipeline
        read_only: true
    ports:
      - "5000:5000"
      - "9600:9600"
    expose: 
      - "5044"
    networks:
      - elk
    depends_on:
      - elasticsearch

  kibana:
    container_name: kibana
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./kibana/config/kibana.yml
        target: /usr/share/kibana/config/kibana.yml
        read_only: true
    ports:
      - "5601:5601"
    networks:
      - elk

  app:
    container_name: app
    build : ./app
    volumes:
      - ./app/:/usr/src/app
      - /usr/src/app/node_modules/ # anonymous volume so the bind mount above does not hide the container's node_modules
    command: npm start
    ports:
      - "3000:3000"
    networks:
      - elk

  nginx:
    container_name: nginx
    build: ./nginx
    volumes:
      - ./nginx/config:/etc/nginx/conf.d
      - ./nginx/log:/var/log/nginx
    ports:
      - "80:80"
      - "443:443"
    links:
      - app:app
    depends_on: 
      - app
    networks:
      - elk

  filebeat:
    container_name: filebeat
    build: ./filebeat
    entrypoint: "filebeat -e -strict.perms=false"
    volumes:
      - ./filebeat/config/filebeat.yml:/usr/share/filebeat/filebeat.yml
      - ./nginx/log:/var/log/nginx
    networks:
      - elk
    depends_on: 
      - app
      - nginx
      - logstash
      - elasticsearch
      - kibana
    links: 
      - logstash

networks:
  elk:
    driver: bridge

volumes:
  elasticsearch:

In the Jupyter notebook I am using this very simple code to connect to Elasticsearch and test it:

from elasticsearch import Elasticsearch

es = Elasticsearch(['http://elasticsearch:9200/'])

if not es.ping():
    raise ValueError("Connection failed")

However, the result is ValueError: Connection failed. Is the problem that the container cannot be reached from localhost, outside of Docker?
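
For completeness, this is the variant I would try when the notebook runs directly on the Docker host rather than inside the elk network. It is only a sketch under two assumptions: that the service hostname elasticsearch resolves only for containers attached to the elk network, so from the host I have to go through the published port on localhost:9200, and that the stack enables basic auth with the ELASTIC_PASSWORD from the compose file (elastic / changeme).

from elasticsearch import Elasticsearch

# Assumption: the notebook runs on the host, so the compose service name
# "elasticsearch" does not resolve here; use the published port instead.
# Assumption: security is enabled, so the built-in "elastic" user with the
# ELASTIC_PASSWORD value from docker-compose is required.
es = Elasticsearch(
    ['http://localhost:9200'],
    http_auth=('elastic', 'changeme'),  # elasticsearch-py 7.x; 8.x uses basic_auth=
)

if not es.ping():
    raise ValueError("Connection failed")

print(es.info())  # prints cluster name and version if the connection works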
