Elasticsearch with nodejs

I tried to import my json file into elastic search using nodejs
using bulk method but I got this error
[2017-01-27 12:24:39,812][DEBUG][action.bulk ] [Smuggler II] [logs][3] failed to execute bulk item (index) index {[logs][record][AVnetF6wQ_w-mHvOpRo5], source[na]}
MapperParsingException[failed to parse]; nested: NotXContentException[Compressor detection can only be called on some xcontent bytes or compressed xcontent bytes];

Here is my code ..help me friends ..thanks in advance
(function () {
'use strict';

var fs = require('fs');
var elasticsearch = require('elasticsearch');
var esClient = new elasticsearch.Client({
host: '127.0.0.1:9200',
log: 'error'
});

var bulkIndex = function bulkIndex(index, type, data) {
var bulkBody = [];

//data.forEach(item => {
for (var item in data) {
  bulkBody.push({
    index: {
      _index: index,
      _type: type,
      _id: item.id
    }
  });

  bulkBody.push(item);
};

esClient.bulk({body: bulkBody})

.then(response => {
  var errorCount = 0;
  response.items.forEach(item => {
    if (item.index && item.index.error) {
      console.log(++errorCount, item.index.error);
    }
  });
 //console.log(`Successfully indexed ${data.length - errorCount} out of ${data.length} items`);
})
.catch(console.err);

};

// only for testing purposes
// all calls should be initiated through the module
var test = function test() {
var articles = JSON.parse(fs.readFileSync('./logstash.json', 'utf8'));
//console.log(${articles.length} items parsed from data file);
bulkIndex('logs', 'record', articles);
};

test();

/module.exports = {
bulkIndex
};
/
} ());

This topic was automatically closed 28 days after the last reply. New replies are no longer allowed.