security.manager.enabled: false
repositories.hdfs:
uri: "hdfs://hostname:port/" # optional - Hadoop file-system URI
path: "user/ESindex" # required - path with the file-system where data is stored/loaded
Note: the path does not already exist in HDFS; it is not created beforehand.
When I try to create the repository now, it is getting created somewhere else (on the local file system rather than in HDFS). Elasticsearch does not report an error at registration time, but repository verification then fails:
[2018-01-22 11:11:10,746][INFO ][repositories ] [Zero-G] update repository [hdfsbackup]
[2018-01-22 11:11:10,748][WARN ][rest.suppressed ] path: /_snapshot/hdfsbackup, params: {repository=hdfsbackup}
RepositoryVerificationException[[hdfsbackup] path is not accessible on master node]; nested: IOException[Mkdirs failed to create file:/user/ESindex/tests-I893LvM6SQiy77Sk22pZLg (exists=false, cwd=file:/appl/essapp/encrypt_try/elasticsearch-2.3.5/bin)];
at org.elasticsearch.repositories.blobstore.BlobStoreRepository.startVerification(BlobStoreRepository.java:648)
at org.elasticsearch.repositories.RepositoriesService.verifyRepository(RepositoriesService.java:211)
at org.elasticsearch.repositories.RepositoriesService$VerifyingRegisterRepositoryListener.onResponse(RepositoriesService.java:436)
at org.elasticsearch.repositories.RepositoriesService$VerifyingRegisterRepositoryListener.onResponse(RepositoriesService.java:421)
at org.elasticsearch.cluster.AckedClusterStateUpdateTask.onAllNodesAcked(AckedClusterStateUpdateTask.java:63)
at org.elasticsearch.cluster.service.InternalClusterService$SafeAckedClusterStateTaskListener.onAllNodesAcked(InternalClusterService.java:733)
at org.elasticsearch.cluster.service.InternalClusterService$AckCountDownListener.onNodeAck(InternalClusterService.java:1013)
at org.elasticsearch.cluster.service.InternalClusterService$DelegetingAckListener.onNodeAck(InternalClusterService.java:952)
at org.elasticsearch.cluster.service.InternalClusterService.runTasksForExecutor(InternalClusterService.java:637)
at org.elasticsearch.cluster.service.InternalClusterService$UpdateTask.run(InternalClusterService.java:772)
at org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor$TieBreakingPrioritizedRunnable.runAndClean(PrioritizedEsThreadPoolExecutor.java:231)
at org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor$TieBreakingPrioritizedRunnable.run(PrioritizedEsThreadPoolExecutor.java:194)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Mkdirs failed to create file:/user/ESindex/tests-I893LvM6SQiy77Sk22pZLg (exists=false, cwd=file:/appl/essapp/encrypt_try/elasticsearch-2.3.5/bin)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:450)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:435)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:909)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:890)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:852)
at org.elasticsearch.hadoop.hdfs.blobstore.HdfsBlobContainer.createOutput(HdfsBlobContainer.java:132)
at org.elasticsearch.hadoop.hdfs.blobstore.HdfsBlobContainer.access$000(HdfsBlobContainer.java:43)
at org.elasticsearch.hadoop.hdfs.blobstore.HdfsBlobContainer$6.doInHdfs(HdfsBlobContainer.java:121)
at org.elasticsearch.hadoop.hdfs.blobstore.HdfsBlobContainer$6.doInHdfs(HdfsBlobContainer.java:118)
at org.elasticsearch.repositories.hdfs.SecurityUtils$1.run(SecurityUtils.java:42)
at java.security.AccessController.doPrivileged(Native Method)
at org.elasticsearch.repositories.hdfs.SecurityUtils.execute(SecurityUtils.java:39)
at org.elasticsearch.repositories.hdfs.SecurityUtils.execute(SecurityUtils.java:34)
at org.elasticsearch.hadoop.hdfs.blobstore.HdfsBlobContainer.writeBlob(HdfsBlobContainer.java:118)
at org.elasticsearch.repositories.blobstore.BlobStoreRepository.startVerification(BlobStoreRepository.java:642)
... 14 more
Apache, Apache Lucene, Apache Hadoop, Hadoop, HDFS and the yellow elephant
logo are trademarks of the
Apache Software Foundation
in the United States and/or other countries.