Elasticsearch version: 2.4.1
The query request is:
{"aggregations":{"0-0":{"aggregations":{"0-1":{"aggregations":{"0-2":{"aggregations":{"1-0":{"aggregations":{"1-1":{"reverse_nested":{}}},"filters":{"filters":[{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1672848000,"t":1672934399}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1672934400,"t":1673020799}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673020800,"t":1673107199}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673107200,"t":1673193599}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673193600,"t":1673279999}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673280000,"t":1673366399}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673366400,"t":1673452799}}}},{"script":{"script":{"file":"field_trend_report_filter","params":{"fuuid":"field005","start":1673452800,"t":1673539199}}}}]}}},"terms":{"field":"field_value_history.value","min_doc_count":1,"order":{"_term":"asc"},"size":0}}},"filter":{"bool":{"must":{"term":{"field_value_history.field_uuid":"field005"}}}}}},"nested":{"path":"field_value_history"}}},"query":{"bool":{"filter":[{"term":{"project_uuid":"7JAiRb5k2xerTPSt"}},{"bool":{"must":[{"bool":{"must":{"terms":{"project_uuid":["7JAiRb5k2xerTPSt"]}}}},{"bool":{"minimum_should_match":"1","should":{"bool":{"must":[{"terms":{"issue_type_uuid":["9fUM1sSW"]}},{"bool":{"must_not":{"terms":{"sub_issue_type_uuid":["G3PEYXYy","2SiXUETa"]}}}}]}}}}]}},{"term":{"team_uuid":"HWdP9bRK"}},{"terms":{"status":[1]}}]}},"size":0}
The content of the `field_trend_report_filter` script is:
fuuid == doc["field_value_history.field_uuid"].value &&
t >= doc["field_value_history.valid_from"].value &&
t < doc["field_value_history.valid_to"].value
We receive about 100 of these requests per second,
which causes a high CPU usage problem.
Elasticsearch hot threads log:
::: {elasticsearch-ha-statefulset-1}{i5_vmtlOQAWhNO6ZY9NTYQ}{172.27.186.146}{172.27.186.146:9300}{master=false}
Hot threads at 2022-08-23T13:28:30.391Z, interval=500ms, busiestThreads=3, ignoreIdleThreads=true:
100.3% (501.5ms out of 500ms) cpu usage by thread 'elasticsearch[elasticsearch-ha-statefulset-1][search][T#1]'
10/10 snapshots sharing following 32 elements
java.security.AccessController.doPrivileged(Native Method)
org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:278)
org.elasticsearch.index.query.ScriptQueryParser$ScriptQuery$1$1.get(ScriptQueryParser.java:171)
org.apache.lucene.search.RandomAccessWeight$1.matches(RandomAccessWeight.java:63)
org.apache.lucene.search.Weight$DefaultBulkScorer.scoreAll(Weight.java:228)
org.apache.lucene.search.Weight$DefaultBulkScorer.score(Weight.java:172)
org.apache.lucene.search.BulkScorer.score(BulkScorer.java:39)
org.apache.lucene.search.LRUQueryCache.cacheImpl(LRUQueryCache.java:422)
org.apache.lucene.search.LRUQueryCache$CachingWrapperWeight.cache(LRUQueryCache.java:608)
org.apache.lucene.search.LRUQueryCache$CachingWrapperWeight.scorer(LRUQueryCache.java:625)
org.elasticsearch.indices.cache.query.IndicesQueryCache$CachingWeightWrapper.scorer(IndicesQueryCache.java:263)
org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.getLeafCollector(FiltersAggregator.java:90)
org.elasticsearch.search.aggregations.AggregatorBase.getLeafCollector(AggregatorBase.java:132)
org.elasticsearch.search.aggregations.AggregatorBase.getLeafCollector(AggregatorBase.java:131)
org.elasticsearch.search.aggregations.AggregatorBase.getLeafCollector(AggregatorBase.java:38)
org.apache.lucene.search.MultiCollector.getLeafCollector(MultiCollector.java:121)
org.apache.lucene.search.TimeLimitingCollector.getLeafCollector(TimeLimitingCollector.java:144)
org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:812)
org.apache.lucene.search.IndexSearcher.search(IndexSearcher.java:535)
org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:384)
org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:113)
org.elasticsearch.search.SearchService.loadOrExecuteQueryPhase(SearchService.java:372)
org.elasticsearch.search.SearchService.executeQueryPhase(SearchService.java:385)
org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryTransportHandler.messageReceived(SearchServiceTransportAction.java:368)
org.elasticsearch.search.action.SearchServiceTransportAction$SearchQueryTransportHandler.messageReceived(SearchServiceTransportAction.java:365)
org.elasticsearch.transport.TransportRequestHandler.messageReceived(TransportRequestHandler.java:33)
org.elasticsearch.transport.RequestHandlerRegistry.processMessageReceived(RequestHandlerRegistry.java:77)
org.elasticsearch.transport.netty.MessageChannelHandler$RequestHandler.doRun(MessageChannelHandler.java:293)
org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:37)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)
Could the Groovy script be causing this? (The thread shows 100% CPU usage rather than waiting on a lock, so it looks like busy script execution, not a deadlock.)
If you have any advice, thank you very much indeed!