Insert data into ES 2.1.1 from Hive 1.2 (HDP 2.3)

Hi, I am facing an issue while loading data from Hive into Elasticsearch using the recommended approach; please find the code below. It works perfectly when the target is ES 1.6, but fails against ES 2.1 with a "String index out of range" error. The stack trace follows the code:
add jar elasticsearch-hadoop-2.1.1.jar;
add jar commons-httpclient-3.1.jar;

CREATE EXTERNAL TABLE marketplace_olap_short_view (
order_id bigint,
order_item_id bigint,
fulfillment_id bigint,
promocode_id bigint,
payment_status int,
channel_id string,
so_promocode string,
customer_id bigint,
customer_email string,
customer_firstname string,
customer_lastname string,
customer_type int,
so_remote_ip string,
so_subtotal bigint,
so_order_discount bigint,
so_total bigint,
so_shipping_charges bigint,
so_shipping_amount bigint,
so_grandtotal bigint,
product_id bigint,
vertical_id bigint,
merchant_id bigint,
order_item_creation_date timestamp
)
STORED BY 'org.elasticsearch.hadoop.hive.EsStorageHandler'
TBLPROPERTIES(
'es.resource' = 'testmktplace/testmktplace',
'es.index.auto.create' = 'true',
'es.nodes' = '192.168.8.150',
'es.mapping.id' = 'order_item_id',
'es.write.operation' = 'upsert',
'es.batch.size.entries' = '10000',
'es.field.read.empty.as.null' = 'true'
);

insert into table marketplace_olap_short_view
select
COALESCE(order_id, 0), COALESCE(order_item_id, 0), COALESCE(fulfillment_id, 0), COALESCE(promocode_id, 0),
COALESCE(payment_status, 0), COALESCE(channel_id, "0"), COALESCE(so_promocode, "0"), COALESCE(customer_id, 0),
COALESCE(customer_email, "NA"), COALESCE(customer_firstname, "NA"), COALESCE(customer_lastname, "NA"),
COALESCE(customer_type, 0), COALESCE(so_remote_ip, "NA"), so_subtotal, so_order_discount, so_total,
COALESCE(so_shipping_charges, 0), COALESCE(so_shipping_amount, 0), COALESCE(so_grandtotal, 0),
COALESCE(product_id, 0), vertical_id, merchant_id, order_item_creation_date
from naved_workspace.mktplace_olap limit 10001;

Caused by: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":1314306859,"_col1":13630276323,"_col2":106138893,"_col3":0,"_col4":2,"_col5":"ANDROIDAPP 4.7.0","_col6":"0","_col7":15212122710,"_col8":"asdasdaasd","_col9":"asda","_col10":"","_col11":1,"_col12":"117.192.17.81","_col13":360000,"_col14":0,"_col15":0,"_col16":18000.0,"_col17":18000.0,"_col18":378000.0,"_col19":26963933,"_col20":31,"_col21":77439,"_col22":"2015-11-05 21:18:39"}}
at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource.pushRecord(ReduceRecordSource.java:302)
at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.run(ReduceRecordProcessor.java:249)
at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:148)
... 14 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":1314206859,"_col1":136316323,"_col2":1067138893,"_col3":0,"_col4":2,"_col5":"ANDROIDAPP 4.7.0","_col6":"0","_col7":152122710,"_col8":"asdasd","_col9":"asdasd","_col10":"","_col11":1,"_col12":"117.192.17.81","_col13":360000,"_col14":0,"_col15":0,"_col16":18000.0,"_col17":18000.0,"_col18":378000.0,"_col19":26963933,"_col20":31,"_col21":77439,"_col22":"2015-11-05 21:18:39"}}
at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource$GroupIterator.next(ReduceRecordSource.java:370)
at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource.pushRecord(ReduceRecordSource.java:292)
... 16 more
Caused by: java.lang.StringIndexOutOfBoundsException: String index out of range: -1
at java.lang.String.substring(String.java:1911)
at org.elasticsearch.hadoop.rest.RestClient.discoverNodes(RestClient.java:110)
at org.elasticsearch.hadoop.rest.InitializationUtils.discoverNodesIfNeeded(InitializationUtils.java:58)
at org.elasticsearch.hadoop.rest.RestService.createWriter(RestService.java:374)
at org.elasticsearch.hadoop.mr.EsOutputFormat$EsRecordWriter.init(EsOutputFormat.java:173)
at org.elasticsearch.hadoop.hive.EsHiveOutputFormat$EsHiveRecordWriter.write(EsHiveOutputFormat.java:58)
at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:753)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:838)
at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:88)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:838)
at org.apache.hadoop.hive.ql.exec.LimitOperator.process(LimitOperator.java:54)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:838)
at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:88)
at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource$GroupIterator.next(ReduceRecordSource.java:361)

Resolved now: I switched to the ES-Hadoop 2.2.0 beta connector. The ES-Hadoop 2.1.1 connector only supports ES 1.x.
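For anyone hitting the same error, the fix is just a jar swap; a minimal sketch, assuming the beta artifact is named elasticsearch-hadoop-2.2.0-beta1.jar (adjust to the file you actually download), with the table DDL and insert left unchanged:

add jar elasticsearch-hadoop-2.2.0-beta1.jar;
add jar commons-httpclient-3.1.jar;

-- optional sanity check: read the row count back through the same external table
select count(*) from marketplace_olap_short_view;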

You might want to try the just released ES-Hadoop 2.2-rc1.
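If you go with rc1, it should be the same single jar swap, assuming the released file is named elasticsearch-hadoop-2.2.0-rc1.jar:

add jar elasticsearch-hadoop-2.2.0-rc1.jar;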