Limits in floating-point precision as well as rounding errors are unfortunately a thing. I would generally go for a scaled_float
with a scaling_factor of 100 here. To make this a little clearer, let's look at a minimal example based on your data.
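As a quick aside on where the drift comes from: the following is a minimal Python sketch of the arithmetic involved, not anything Elasticsearch runs as-is, and it assumes the field was previously mapped as a plain 32-bit float (an assumption on my part).

```python
import struct

value = 149314.2

# A 32-bit IEEE float cannot represent 149314.2 exactly; round-tripping
# through single precision shows the kind of drift that ends up in results:
as_float32 = struct.unpack("<f", struct.pack("<f", value))[0]
print(as_float32)      # 149314.203125

# A scaled_float with scaling_factor 100 conceptually stores the value
# multiplied by 100 and rounded to the closest integer, which is exact
# for amounts with two decimal places:
stored = round(value * 100)
print(stored)          # 14931420
print(stored / 100)    # 149314.2
```

With that in mind, here is the Elasticsearch side of it.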
Mapping, sample document, query:
DELETE sap-lo-po
PUT sap-lo-po
{
  "settings": {
    "number_of_shards": 1
  },
  "mappings": {
    "_doc": {
      "properties": {
        "EKPO": {
          "properties": {
            "NETWR": {
              "type": "scaled_float",
              "scaling_factor": 100
            }
          }
        }
      }
    }
  }
}
PUT sap-lo-po/_doc/1
{
  "EBELN": "4500017596",
  "EKPO": [
    {
      "NETWR": 149314.2
    }
  ]
}
POST sap-lo-po/_search
{
  "query": {
    "match": {
      "EBELN": "4500017596"
    }
  },
  "aggs": {
    "Bestellwert": {
      "extended_stats": {
        "field": "EKPO.NETWR"
      }
    }
  },
  "size": 0,
  "_source": [
    "EBELN.keyword",
    "EKPO.NETWR"
  ]
}
Result:
{
  "took" : 40,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : 1,
    "max_score" : 0.0,
    "hits" : [ ]
  },
  "aggregations" : {
    "Bestellwert" : {
      "count" : 1,
      "min" : 149314.2,
      "max" : 149314.2,
      "avg" : 149314.2,
      "sum" : 149314.2,
      "sum_of_squares" : 2.2294730321640003E10,
      "variance" : 0.0,
      "std_deviation" : 0.0,
      "std_deviation_bounds" : {
        "upper" : 149314.2,
        "lower" : 149314.2
      }
    }
  }
}
This looks better.
It won't solve all of your problems, though. I hope it's good enough for monetary values that always have exactly two digits after the decimal point, but see elasticsearch/issues/32570 for problems in more edge-case situations.
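To make the kind of edge case more concrete, here is another small Python sketch (the real rounding happens in Java inside Elasticsearch, so treat this purely as an illustration of the double arithmetic): if a value carries more precision than the scaling factor can hold, the multiplication by the scaling factor can land just below a rounding boundary.

```python
value = 1.005  # three decimals, but the scaling_factor is only 100

# scaled_float conceptually indexes round(value * scaling_factor) as a long;
# the multiplication itself happens in double precision:
scaled = value * 100
print(scaled)         # 100.49999999999999, not the 100.5 you might expect
print(round(scaled))  # 100 -> indexed as 1.00 rather than 1.01
```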