Visualize data with Kibana

I want to create a pie chart which contains the sum of text for each query, but Kibana shows me this message:


Are you sure you have data in the selected time range? (time picker, top right)

Could you open the spy panel (up arrow at the bottom of the chart) and copy-paste the ES request and response?
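
For example, a quick sanity check from the Kibana Dev Tools console (a minimal sketch; yos is the index name taken from the response below, and the range should be adjusted to match whatever the time picker shows):

GET yos/_count
{
  "query": {
    "range": {
      "@timestamp": {
        "gte": "now-15m",
        "lte": "now"
      }
    }
  }
}

If the count comes back as 0, the time picker is the problem rather than the visualization itself.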

Request:
{
  "version": true,
  "size": 500,
  "sort": [
    {
      "@timestamp": {
        "order": "desc",
        "unmapped_type": "boolean"
      }
    }
  ],
  "_source": {
    "excludes": []
  },
  "aggs": {
    "2": {
      "date_histogram": {
        "field": "@timestamp",
        "interval": "30s",
        "time_zone": "Africa/Lagos",
        "min_doc_count": 1
      }
    }
  },
  "stored_fields": [
    "*"
  ],
  "script_fields": {},
  "docvalue_fields": [
    "@timestamp",
    "ts"
  ],
  "query": {
    "bool": {
      "must": [
        {
          "match_all": {}
        },
        {
          "range": {
            "@timestamp": {
              "gte": 1525872285062,
              "lte": 1525873185062,
              "format": "epoch_millis"
            }
          }
        }
      ],
      "filter": [],
      "should": [],
      "must_not": []
    }
  },
  "highlight": {
    "pre_tags": [
      "@kibana-highlighted-field@"
    ],
    "post_tags": [
      "@/kibana-highlighted-field@"
    ],
    "fields": {
      "*": {}
    },
    "fragment_size": 2147483647
  }
}
Response:
{
  "took": 22,
  "hits": {
    "hits": [
      {
        "_index": "yos",
        "_type": "test",
        "_id": "OaoVRWMBJkgbPPM80hXe",
        "_version": 1,
        "_score": null,
        "_source": {
          "path": "/home/yosra/ELK2/logstash-6.2.2/test4.log",
          "thread": "CRA_GENERATION_THREADS-263",
          "@version": "1",
          "message": "Dec 18, 2017 18:14:44:251 |Pal : TRACE |Persistence-[CRA_GENERATION_THREADS-263]: COR:vm2: ElapsedTime for Query Execution 4 milliseconds.",
          "text": 4,
          "host": "yosra",
          "@timestamp": "2018-05-09T13:27:28.021Z",
          "ts": "2017-12-18T17:14:44.251Z",
          "query": "Find query : : ( select T.type , T.id , T.groupeType , T.updatorUserId , T.fessAmount , T.currency$identifier , T.maxAmount , T.feeTypeIndex , T.fixedFeesAmount , T.feesRate , T.updateDate , T.version , T.creationDate , T.creatorUserId , T.localTaxeOnFeesAmount , T.isForcedFees , T.isCdscFee , T.localTaxeOnFeesRate from models_Fees T left join T.orderOPC as T0 where T0.caceisReference = ?1 ) "
        },
        "fields": {
          "@timestamp": [
            "2018-05-09T13:27:28.021Z"
          ],
          "ts": [
            "2017-12-18T17:14:44.251Z"
          ]
        },
        "sort": [
          1525872448021
        ]
      },
      {
        "_index": "yos",
        "_type": "test",
        "_id": "OqoVRWMBJkgbPPM80hXh",
        "_version": 1,
        "_score": null,
        "_source": {
          "path": "/home/yosra/ELK2/logstash-6.2.2/test4.log",
          "thread": "CRA_GENERATION_THREADS-262",
          "@version": "1",
          "message": "Dec 18, 2017 18:14:44:249 |Pal : TRACE |Persistence-[CRA_GENERATION_THREADS-262]: COR:vm2: ElapsedTime for Query Execution 1 milliseconds.",
          "text": 1,
          "host": "yosra",
          "@timestamp": "2018-05-09T13:27:28.020Z",
          "ts": "2017-12-18T17:14:44.249Z",
          "query": "Find query : : ( select T.type , T.id , T.groupeType , T.updatorUserId , T.fessAmount , T.currency$identifier , T.maxAmount , T.feeTypeIndex , T.fixedFeesAmount , T.feesRate , T.updateDate , T.version , T.creationDate , T.creatorUserId , T.localTaxeOnFeesAmount , T.isForcedFees , T.isCdscFee , T.localTaxeOnFeesRate from models_Fees T left join T.orderOPC as T0 where T0.caceisReference = ?1 ) "
        },
        "fields": {
          "@timestamp": [
            "2018-05-09T13:27:28.020Z"
          ],
          "ts": [
            "2017-12-18T17:14:44.249Z"
          ]
        },
        "sort": [
          1525872448020
        ]
      },
      {
        "_index": "yos",
        "_type": "test",
        "_id": "OKoVRWMBJkgbPPM80hXX",
        "_version": 1,
        "_score": null,
        "_source": {
          "path": "/home/yosra/ELK2/logstash-6.2.2/test4.log",
          "thread": "CRA_GENERATION_THREADS-262",
          "@version": "1",
          "message": "Dec 18, 2017 18:14:44:248 |Pal : TRACE |Persistence-[CRA_GENERATION_THREADS-262]: COR:vm2: ElapsedTime for Query Execution 2 milliseconds.",
          "text": 2,
          "host": "yosra",
          "@timestamp": "2018-05-09T13:27:28.019Z",
          "ts": "2017-12-18T17:14:44.248Z",
          "query": "Find query : : ( select T.flot , T.id , T.groupeType , T.updatorUserId , T.fessAmount , T.currency$identifier , T.maxAmount , T.feeTypeIndex , T.fixedFeesAmount , T.feesRate , T.updateDate , T.version , T.creationDate , T.creatorUserId , T.localTaxeOnFeesAmount , T.isForcedFees , T.isCdscFee , T.localTaxeOnFeesRate from models_Fees T left join T.orderOPC as T0 where T0.caceisReference = ?1 ) "
        },
        "fields": {
          "@timestamp": [
            "2018-05-09T13:27:28.019Z"
          ],
          "ts": [
            "2017-12-18T17:14:44.248Z"
          ]
        },
        "sort": [
          1525872448019
        ]
      }
    ],
    "total": 3,
    "max_score": 0
  },
  "aggregations": {
    "2": {
      "buckets": [
        {
          "key_as_string": "2018-05-09T14:27:00.000+01:00",
          "key": 1525872420000,
          "doc_count": 3
        }
      ]
    }
  }
}
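
Note that the pasted request looks like Discover's search (size 500, sort on @timestamp, highlight) rather than the pie chart's own request. For comparison, a pie chart configured as a Sum of text metric split by Terms of query.keyword would send something along these lines (a sketch, assuming the default .keyword sub-field that dynamic mapping adds to string fields; the field names text and query come from the documents above):

GET yos/_search
{
  "size": 0,
  "aggs": {
    "per_query": {
      "terms": {
        "field": "query.keyword",
        "size": 10
      },
      "aggs": {
        "sum_of_text": {
          "sum": {
            "field": "text"
          }
        }
      }
    }
  }
}

Empty buckets from this aggregation, while hits exist in the same time range, would point at the query.keyword field rather than the time picker.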

When I reduce the size of the query in the log, it works.
What's the problem?
Thank you.
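
That symptom is consistent with Elasticsearch's default dynamic mapping (an assumption, not confirmed in this thread): strings are mapped as text with a keyword sub-field that carries ignore_above: 256, so query values longer than 256 characters are never indexed into query.keyword, and a terms aggregation on that field produces no buckets for them. A sketch of an explicit mapping that raises the cap, where yos2 is a hypothetical new index (an existing field's mapping cannot be changed in place, so in 6.x you would create the new index and reindex into it):

PUT yos2
{
  "mappings": {
    "test": {
      "properties": {
        "query": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 8191
            }
          }
        }
      }
    }
  }
}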
