Setting Up Logstash in Docker Compose for Bulk Ingest of CSV Files on a Local Machine

Changes implemented. However, the csv_files folder mounted into the container is still empty; a likely cause is discussed after the compose file below.

docker-compose.yml

version: "3.8"

volumes:
  logstashdata01:
    driver: local

networks:
  default:
    name: elastic
    external: true
    
services:
  logstash:
    image: docker.elastic.co/logstash/logstash:${STACK_VERSION}
    labels:
      co.elastic.logs/module: logstash
    user: root
    environment:
      - xpack.monitoring.enabled=false
    volumes:
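      # Pipeline definitions (logstash.conf) are mounted from the current directory.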
      - ./:/usr/share/logstash/pipeline/
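      # Host folder of CSV files, bind-mounted into the container; if this host
      # path does not resolve, Docker creates an empty directory at the mount point.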
      - /d/ATS_Event_Logs/For-Logstash_(ML)/Logstash:/usr/share/logstash/csv_files
    command: logstash -r -f /usr/share/logstash/pipeline/logstash.conf
    ports:
      - "5044:5044"
    mem_limit: ${LS_MEM_LIMIT}
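
A likely reason the mounted folder shows up empty: the host path /d/ATS_Event_Logs/For-Logstash_(ML)/Logstash uses MSYS/Git Bash notation, which Docker Desktop on Windows does not resolve to drive D:, so the bind mount points at a path that does not exist and the container sees an empty directory. Below is a sketch of the volumes section using a Windows-style drive path instead, assuming the files live on drive D:; you can check the result from inside the container with docker compose exec logstash ls /usr/share/logstash/csv_files.

    volumes:
      - ./:/usr/share/logstash/pipeline/
      # Assumption: Windows-style drive path; under a WSL 2 backend the
      # equivalent would be /mnt/d/ATS_Event_Logs/For-Logstash_(ML)/Logstash.
      - "D:/ATS_Event_Logs/For-Logstash_(ML)/Logstash:/usr/share/logstash/csv_files"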

logstash.conf

input { 
    file { 
        path => "/usr/share/logstash/csv_files/events2022-01-01.csv"
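        # Read from the beginning of the file and disable sincedb tracking,
        # so the file is re-ingested from scratch on every pipeline restart.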
        start_position => "beginning" 
        sincedb_path => "/dev/null"
    } 
}

filter { 
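    # Parse each line into named fields based on the CSV column order.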
    csv { 
        separator => ","
        columns => ["id", "uniqueid", "alarm", "eventtype", "system", "subsystem", "sourcetime", "operator", "alarmvalue", "value", "equipment", "location", "severity", "description", "state", "mmsstate", "zone", "graphicelement"]
    } 
}

output { 
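    # Index parsed events into the local Elasticsearch node over HTTPS;
    # TLS verification is disabled, which suits a self-signed dev cluster.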
    elasticsearch { 
        index => "ats-logs" 
        hosts => ["https://es01:9200"]
        manage_template => false
        user => "elastic"
        password => "elastic123"
        ssl_verification_mode => "none"
    }
    stdout {}
}
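
Since the goal is bulk ingest rather than a single file, note that the file input's path setting accepts glob patterns. A minimal variant of the input block under that assumption, picking up every CSV in the mounted folder:

input {
    file {
        # Glob instead of a single filename: ingest every CSV in the folder.
        path => "/usr/share/logstash/csv_files/*.csv"
        start_position => "beginning"
        sincedb_path => "/dev/null"
    }
}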