Hi Team,
I am using Elasticsearch 1.3.7. Sometimes it causes very high load on a particular node; what can we do about this kind of issue?
I also checked the logs, and there are no abnormal entries at all.
Please help with this. Thanks in advance.
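In case it helps, here is a minimal sketch of how I capture the hot threads output shown below, using the _nodes/hot_threads API (the host, port, and NODE_NAME are placeholders for my cluster; adjust them for yours):

import urllib.request

# Sample the hottest threads on a single node via the hot_threads API.
# "localhost:9200" and "NODE_NAME" are placeholders for my setup.
url = ("http://localhost:9200/_nodes/NODE_NAME/hot_threads"
       "?threads=3&interval=500ms")
with urllib.request.urlopen(url) as resp:
    print(resp.read().decode("utf-8"))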
::: [NODE_NAME][NODE_ID][elasticseasrch][inet[XXX__IP__XXX]]{master=false}
102.2% (510.8ms out of 500ms) cpu usage by thread 'elasticsearch[NODE_NAME][management][T#3]'
4/10 snapshots sharing following 30 elements
sun.reflect.Reflection.getCallerClass(Native Method)
java.lang.Class.getDeclaredFields(Class.java:1805)
org.apache.lucene.util.RamUsageEstimator.shallowSizeOfInstance(RamUsageEstimator.java:382)
org.apache.lucene.util.RamUsageEstimator.shallowSizeOf(RamUsageEstimator.java:360)
org.apache.lucene.util.fst.Outputs.ramBytesUsed(Outputs.java:104)
org.apache.lucene.util.fst.ByteSequenceOutputs.ramBytesUsed(ByteSequenceOutputs.java:152)
org.apache.lucene.util.fst.ByteSequenceOutputs.ramBytesUsed(ByteSequenceOutputs.java:34)
org.apache.lucene.util.fst.FST.ramBytesUsed(FST.java:409)
org.apache.lucene.util.fst.FST.ramBytesUsed(FST.java:427)
org.apache.lucene.codecs.blocktree.FieldReader.ramBytesUsed(FieldReader.java:187)
org.apache.lucene.codecs.blocktree.BlockTreeTermsReader.ramBytesUsed(BlockTreeTermsReader.java:308)
org.elasticsearch.index.codec.postingsformat.BloomFilterPostingsFormat$BloomFilteredFieldsProducer.ramBytesUsed(BloomFilterPostingsFormat.java:201)
org.apache.lucene.codecs.perfield.PerFieldPostingsFormat$FieldsReader.ramBytesUsed(PerFieldPostingsFormat.java:238)
org.apache.lucene.index.SegmentCoreReaders.ramBytesUsed(SegmentCoreReaders.java:226)
org.apache.lucene.index.SegmentReader.ramBytesUsed(SegmentReader.java:620)
org.elasticsearch.index.engine.internal.InternalEngine.getReaderRamBytesUsed(InternalEngine.java:1152)
org.elasticsearch.index.engine.internal.InternalEngine.segmentsStats(InternalEngine.java:1164)
org.elasticsearch.index.shard.service.InternalIndexShard.segmentStats(InternalIndexShard.java:540)
org.elasticsearch.action.admin.indices.stats.CommonStats.&lt;init&gt;(CommonStats.java:166)
org.elasticsearch.action.admin.indices.stats.ShardStats.&lt;init&gt;(ShardStats.java:49)
org.elasticsearch.indices.InternalIndicesService.stats(InternalIndicesService.java:209)
org.elasticsearch.node.service.NodeService.stats(NodeService.java:156)
org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction.nodeOperation(TransportNodesStatsAction.java:95)
org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction.nodeOperation(TransportNodesStatsAction.java:43)
org.elasticsearch.action.support.nodes.TransportNodesOperationAction$NodeTransportHandler.messageReceived(TransportNodesOperationAction.java:277)
org.elasticsearch.action.support.nodes.TransportNodesOperationAction$NodeTransportHandler.messageReceived(TransportNodesOperationAction.java:268)
org.elasticsearch.transport.netty.MessageChannelHandler$RequestHandler.run(MessageChannelHandler.java:275)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
java.lang.Thread.run(Thread.java:745)
2/10 snapshots sharing following 11 elements
org.elasticsearch.action.admin.indices.stats.ShardStats.&lt;init&gt;(ShardStats.java:49)
org.elasticsearch.indices.InternalIndicesService.stats(InternalIndicesService.java:209)
org.elasticsearch.node.service.NodeService.stats(NodeService.java:156)
org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction.nodeOperation(TransportNodesStatsAction.java:95)
org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction.nodeOperation(TransportNodesStatsAction.java:43)
org.elasticsearch.action.support.nodes.TransportNodesOperationAction$NodeTransportHandler.messageReceived(TransportNodesOperationAction.java:277)
org.elasticsearch.action.support.nodes.TransportNodesOperationAction$NodeTransportHandler.messageReceived(TransportNodesOperationAction.java:268)
org.elasticsearch.transport.netty.MessageChannelHandler$RequestHandler.run(MessageChannelHandler.java:275)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
java.lang.Thread.run(Thread.java:745)
4/10 snapshots sharing following 2 elements
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
java.lang.Thread.run(Thread.java:745)