
Fix sum test

The test was relying on the compensated sum working, but the test framework
was dodging it by reducing each segment as though it were a separate shard.
This forces the accuracy tests to run against a single shard, where we get
the proper compensated sum.

Closes #56757
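
For reference, here is a minimal sketch of compensated (Kahan) summation in
plain Java. It is my own illustration of the technique the sum aggregation
relies on, not code from this change; the class name and printed labels are
made up for the demo.

    public class KahanSketch {
        public static void main(String[] args) {
            // Same values as the test: they should add up to exactly 15.3.
            double[] values = {0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
                               1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};

            // Naive summation: rounding error from each addition accumulates.
            double naive = 0;
            for (double v : values) {
                naive += v;
            }

            // Kahan (compensated) summation: carry the lost low-order bits in
            // a separate compensation term and fold them back into later adds.
            double sum = 0;
            double compensation = 0;
            for (double v : values) {
                double corrected = v - compensation;
                double newSum = sum + corrected;
                compensation = (newSum - sum) - corrected;
                sum = newSum;
            }

            System.out.println("naive:       " + naive);
            System.out.println("compensated: " + sum);
        }
    }

The compensated result is what lets the test tighten its delta to
Double.MIN_NORMAL; once many per-shard sums are reduced without their
compensations, that accuracy is gone, which is what the old test kept
tripping over.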
Nik Everett, 5 years ago
parent commit 1a3b110888

server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java (+16 -5)

@@ -161,11 +161,10 @@ public class SumAggregatorTests extends AggregatorTestCase {
             "Re-index with correct docvalues type.", e.getMessage());
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/56757")
     public void testSummationAccuracy() throws IOException {
         // Summing up a normal array and expect an accurate value
         double[] values = new double[]{0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7};
-        verifySummationOfDoubles(values, 15.3, 0d);
+        verifySummationOfDoubles(values, 15.3, Double.MIN_NORMAL);
 
         // Summing up an array which contains NaN and infinities and expect a result same as naive summation
         int n = randomIntBetween(5, 10);
@@ -198,9 +197,21 @@ public class SumAggregatorTests extends AggregatorTestCase {
             sum("_name").field(FIELD_NAME),
             new MatchAllDocsQuery(),
             iw -> {
-                for (double value : values) {
-                    iw.addDocument(singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value))));
-                }
+                /*
+                 * The sum agg uses a Kahan summation on the shard to limit
+                 * floating point errors. But it doesn't ship the compensations
+                 * to the coordinating node, so floating point error can creep
+                 * in when reducing many sums. The test framework aggregates
+                 * each segment as though it were a separate shard, then reduces
+                 * those together. Fun. But it means we don't get the full
+                 * accuracy of the Kahan summation. And *that* accuracy is
+                 * what this method is trying to test. So we have to stick
+                 * all the documents on the same leaf. `addDocuments` does
+                 * that.
+                 */
+                iw.addDocuments(Arrays.stream(values).mapToObj(value ->
+                    singleton(new NumericDocValuesField(FIELD_NAME, NumericUtils.doubleToSortableLong(value)))
+                ).collect(toList()));
             },
             result -> assertEquals(expected, result.getValue(), delta),
             defaultFieldType(NumberType.DOUBLE)
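
For context, here is a standalone sketch of why `addDocuments` puts every
document on one leaf: Lucene indexes a block passed to `addDocuments` into a
single segment, while separate `addDocument` calls can be split across
segments by flushes. This is my own illustration, not the test framework's
plumbing; it assumes Lucene 8.x on the classpath, uses a plain IndexWriter
and ByteBuffersDirectory instead of the framework's random writer, and the
field name "value" is made up.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.NumericDocValuesField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.NumericUtils;

    public class SingleLeafSketch {
        public static void main(String[] args) throws IOException {
            double[] values = {0.1, 0.2, 0.3, 0.4, 0.5};
            try (Directory dir = new ByteBuffersDirectory();
                 IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig())) {
                // Build one block of documents and index it with addDocuments,
                // so the whole block lands in a single segment (one leaf).
                List<Document> block = new ArrayList<>();
                for (double v : values) {
                    Document doc = new Document();
                    doc.add(new NumericDocValuesField("value", NumericUtils.doubleToSortableLong(v)));
                    block.add(doc);
                }
                iw.addDocuments(block);
                iw.commit();
                try (DirectoryReader reader = DirectoryReader.open(dir)) {
                    // Expect a single leaf, so a per-leaf compensated sum
                    // sees every value.
                    System.out.println("leaves: " + reader.leaves().size());
                }
            }
        }
    }

With a single leaf the test framework has only one per-segment sum to reduce,
so the Kahan summation's accuracy survives all the way to the assertion.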