Hi, I'm trying to set up the ELK stack, and while setting up the pipeline in Logstash it gave me this error:
[ERROR][logstash.agent           ] Failed to execute action {:action=>LogStash::PipelineAction::Create/pipeline_id:main, :exception=>"LogStash::ConfigurationError", :message=>"Expected one of [A-Za-z0-9_-], [ \\t\\r\\n], \"#\", \"{\", [0-9], \".\", \"}\" at line 7, column 23 (byte 219) after input {\n\tmongodb {\n    uri => 'mongodb://localhost:27017/ecommerce'\n    placeholder_db_dir => '/opt/logstash-mongodb/'\n    placeholder_db_name => 'logstash_sqlite.db'\n    collection => 'products'\n    batch_size => 5000", :backtrace=>["/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:58:in `compile_imperative'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:66:in `compile_graph'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:28:in `block in compile_sources'", "org/jruby/RubyArray.java:2577:in `map'", "/usr/share/logstash/logstash-core/lib/logstash/compiler.rb:27:in `compile_sources'", "org/logstash/execution/AbstractPipelineExt.java:181:in `initialize'", "org/logstash/execution/JavaBasePipelineExt.java:67:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/java_pipeline.rb:43:in `initialize'", "/usr/share/logstash/logstash-core/lib/logstash/pipeline_action/create.rb:52:in `execute'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:342:in `block in converge_state'"]}
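If I'm reading the error right, line 7, column 23 of the pipeline config lands right after batch_size => 5000, i.e. on the trailing comma in my logstash.conf (shown below). Logstash settings are separated by whitespace rather than commas, so if that comma is the culprit, the input block would need to look like this (same options, which I'm assuming are valid for logstash-input-mongodb):

input {
  mongodb {
    uri => 'mongodb://localhost:27017/ecommerce'
    placeholder_db_dir => '/opt/logstash-mongodb/'
    placeholder_db_name => 'logstash_sqlite.db'
    collection => 'products'
    batch_size => 5000
    generateId => 'true'
    parse_method => 'simple'
  }
}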
Here are my configurations:
docker-compose.yml:
version: '3.8'
services:
  elasticsearch:
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./elasticsearch/config/elasticsearch.yml
        target: /usr/share/elasticsearch/config/elasticsearch.yml
        read_only: true
      - type: volume
        source: elasticsearch
        target: /usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      ELASTIC_PASSWORD: $ELASTIC_PASSWORD
      node.max_local_storage_nodes: '3'
      # Use single node discovery in order to disable production mode and avoid bootstrap checks
      # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html
      discovery.type: single-node
    networks:
      - elk
  logstash:
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./logstash/config/logstash.yml
        target: /usr/share/logstash/config/logstash.yml
        read_only: true
      - type: bind
        source: ./logstash/pipeline
        target: /usr/share/logstash/pipeline
        read_only: true
    ports:
      - "5000:5000/tcp"
      - "5000:5000/udp"
      - "9600:9600"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    depends_on:
      - elasticsearch
  kibana:
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./kibana/config/kibana.yml
        target: /usr/share/kibana/config/kibana.yml
        read_only: true
    ports:
      - "5601:5601"
    networks:
      - elk
    depends_on:
      - elasticsearch
networks:
  elk:
    driver: bridge
volumes:
  elasticsearch:
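The compose file expects ELK_VERSION and ELASTIC_PASSWORD to be defined; docker-compose reads them from a .env file sitting next to docker-compose.yml. Mine looks roughly like this (both values are placeholders):

ELK_VERSION=7.10.1
ELASTIC_PASSWORD=changeme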
Dockerfile for Logstash:
ARG ELK_VERSION
# https://www.docker.elastic.co/
FROM docker.elastic.co/logstash/logstash:${ELK_VERSION}
# Add your logstash plugins setup here
# Example: RUN logstash-plugin install logstash-filter-json
RUN rm -f /usr/share/logstash/pipeline/logstash.conf && \
    bin/logstash-plugin install logstash-input-mongodb
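To confirm the plugin actually made it into the image, I rebuild and list the installed plugins (a quick check, assuming the service name logstash from the compose file above):

docker-compose build logstash
docker-compose run --rm logstash bin/logstash-plugin list | grep mongodb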
and my logstash.conf:
input {
	mongodb {
    uri => 'mongodb://localhost:27017/ecommerce'
    placeholder_db_dir => '/opt/logstash-mongodb/'
    placeholder_db_name => 'logstash_sqlite.db'
    collection => 'products'
    batch_size => 5000,
    generateId => "true"
    parse_method => "simple"
  }
}
filter {
	mutate {
		remove_field => ["_id"]
	}
}
output {
	elasticsearch {
		hosts => ["localhost:9200"]
		user => "elastic"
		password => "changeme"
		index => "test"
	}
	stdout { codec => rubydebug }
}
I'm trying to set up a pipeline from MongoDB to Elasticsearch. Any guidance on best practices would be great!
Thanks,
Abhijeet
