Versions: ES-Hadoop 7.9.0, Elasticsearch 7.9.0, Hive 3.1.2

hi~

I'm setting up a Hadoop cluster and an Elasticsearch cluster,
and I need to transfer data from HDFS to Elasticsearch:

HDFS(hive) --> ES(static index)

  1. Hive table definition

-- External Hive table backed by the Elasticsearch index 'hive_test' via the
-- ES-Hadoop storage handler (no HDFS data; reads/writes go straight to ES).
CREATE EXTERNAL TABLE es_test (
    breed STRING,
    sex   STRING
)
STORED BY 'org.elasticsearch.hadoop.hive.EsStorageHandler'
TBLPROPERTIES (
    'es.resource'      = 'hive_test',   -- target index (typeless on ES 7.x)
    'es.nodes'         = 'localhost',
    'es.port'          = '9200',
    -- es.mapping.names entries must use the REAL Hive column names on the
    -- left-hand side. The original value 'hivecol1:escol1, hivecol2:escol2'
    -- referenced columns that do not exist in this table, so the mapping was
    -- silently ignored. Adjust the right-hand side if the ES field names
    -- differ from the Hive column names (here they are assumed identical —
    -- verify against the 'hive_test' index mapping).
    'es.mapping.names' = 'breed:breed, sex:sex'
);

  2. Elasticsearch (the 'hive_test' index already exists)

Then I run the following query in the Hive Beeline client:

select * from es_test;

ERROR MESSAGE
Exception in thread "HiveServer2-Handler-Pool: Thread-43" java.lang.ExceptionInInitializerError
org.apache.thrift.transport.TTransportException
at org.elasticsearch.hadoop.rest.commonshttp.CommonsHttpTransportFactory.create(CommonsHttpTransportFactory.java:40)
at org.elasticsearch.hadoop.rest.NetworkClient.selectNextNode(NetworkClient.java:99)
at org.elasticsearch.hadoop.rest.NetworkClient.(NetworkClient.java:82)
at org.elasticsearch.hadoop.rest.NetworkClient.(NetworkClient.java:58)
at org.elasticsearch.hadoop.rest.RestClient.(RestClient.java:101)
at org.elasticsearch.hadoop.rest.InitializationUtils.discoverClusterInfo(InitializationUtils.java:335)
at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:132)
at org.elasticsearch.hadoop.hive.HiveUtils.init(HiveUtils.java:197)
at org.elasticsearch.hadoop.hive.EsHiveInputFormat.getSplits(EsHiveInputFormat.java:112)
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
at org.elasticsearch.hadoop.hive.EsHiveInputFormat.getSplits(EsHiveInputFormat.java:51)
at org.apache.thrift.transport.TSaslTransport.readLength(TSaslTransport.java:376)
at org.apache.hadoop.hive.ql.exec.FetchOperator.generateWrappedSplits(FetchOperator.java:425)
at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextSplits(FetchOperator.java:395)
at org.apache.thrift.transport.TSaslTransport.readFrame(TSaslTransport.java:453)
at org.apache.hadoop.hive.ql.exec.FetchOperator.getRecordReader(FetchOperator.java:314)
at org.apache.thrift.transport.TSaslTransport.read(TSaslTransport.java:435)
at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:540)
at org.apache.thrift.transport.TSaslClientTransport.read(TSaslClientTransport.java:37)
at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:509)
at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
at org.apache.hadoop.hive.ql.exec.FetchTask.fetch(FetchTask.java:146)
at org.apache.hadoop.hive.ql.Driver.getResults(Driver.java:2691)
at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:429)
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.getResults(ReExecDriver.java:229)
at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:318)
at org.apache.hive.service.cli.operation.SQLOperation.getNextRowSet(SQLOperation.java:460)
at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:219)
at org.apache.hive.service.cli.operation.OperationManager.getOperationNextRowSet(OperationManager.java:309)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
at org.apache.hive.service.cli.session.HiveSessionImpl.fetchResults(HiveSessionImpl.java:905)
at org.apache.hive.service.cli.CLIService.fetchResults(CLIService.java:561)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.recv_FetchResults(TCLIService.java:567)
at org.apache.hive.service.cli.thrift.ThriftCLIService.FetchResults(ThriftCLIService.java:786)
at org.apache.hive.service.rpc.thrift.TCLIService$Client.FetchResults(TCLIService.java:554)
at org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1837)
at org.apache.hive.service.rpc.thrift.TCLIService$Processor$FetchResults.getResult(TCLIService.java:1822)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hive.jdbc.HiveConnection$SynchronizedHandler.invoke(HiveConnection.java:1585)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at com.sun.proxy.$Proxy20.FetchResults(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
at org.apache.hive.jdbc.HiveQueryResultSet.next(HiveQueryResultSet.java:373)
at org.apache.hive.beeline.BufferedRows.(BufferedRows.java:56)
at org.apache.hive.beeline.IncrementalRowsWithNormalization.(IncrementalRowsWithNormalization.java:50)
at org.apache.hive.beeline.BeeLine.print(BeeLine.java:2250)
at org.apache.hive.beeline.Commands.executeInternal(Commands.java:1026)
at org.apache.hive.beeline.Commands.execute(Commands.java:1201)
at org.apache.hive.beeline.Commands.sql(Commands.java:1130)
Caused by: java.lang.NullPointerException
    at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1425)

    at org.elasticsearch.hadoop.util.ReflectionUtils.makeAccessible(ReflectionUtils.java:69)        
	at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1287)
    at org.elasticsearch.hadoop.rest.commonshttp.CommonsHttpTransport.<clinit>(CommonsHttpTransport.java:82)
    at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:1071)
    ... 31 more
    at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:538)
    at org.apache.hive.beeline.BeeLine.main(BeeLine.java:520)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:323)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:236)

21/02/05 15:35:57 [main]: DEBUG transport.TSaslTransport: writing data length: 117
21/02/05 15:35:57 [main]: DEBUG transport.TSaslTransport: writing data length: 96
21/02/05 15:35:57 [main]: DEBUG transport.TSaslTransport: writing data length: 96
Unknown HS2 problem when communicating with Thrift server.
Error: org.apache.thrift.transport.TTransportException: java.net.SocketException: broken pipe (Write failed) (state=08S01,code=0)

I don't know what is causing this error.
Any help would be appreciated!