
Replace AggregatorTestCase#search with AggregatorTestCase#searchAndReduce (#60683)

* Replace AggregatorTestCase#search with AggregatorTestCase#searchAndReduce

This commit removes the ability to test the top-level result of an aggregator
before it runs the final reduce. All aggregator tests that use AggregatorTestCase#search
are rewritten to use AggregatorTestCase#searchAndReduce, so that we test
the final output (the one sent to the end user) rather than an intermediate result
that could differ from it.
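
A minimal sketch of the pattern the migrated tests follow (hypothetical test class, field names and assertions; not code from this commit):

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

// Hypothetical test, not part of this commit: it only illustrates the pattern
// that the migrated tests follow after this change.
public class ExampleTermsAggregatorTests extends AggregatorTestCase {

    public void testReducedResult() throws IOException {
        try (Directory directory = newDirectory();
             RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            for (long value : new long[] {1, 1, 2}) {
                Document document = new Document();
                document.add(new SortedNumericDocValuesField("number", value));
                indexWriter.addDocument(document);
            }
            try (IndexReader reader = indexWriter.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                TermsAggregationBuilder builder = new TermsAggregationBuilder("terms").field("number");
                MappedFieldType fieldType =
                    new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
                // search(...) used to return an unreduced, shard-level result; searchAndReduce(...)
                // runs the final reduce and returns the aggregation a user would actually see.
                LongTerms terms = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType);
                assertEquals(2, terms.getBuckets().size());
                assertEquals(2, terms.getBucketByKey("1").getDocCount());
                assertEquals(1, terms.getBucketByKey("2").getDocCount());
            }
        }
    }
}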
This change also removes the spurious commits that some tests triggered on top of the
random index writer. Those commits slow the tests down and are redundant with the
commits that the random index writer already performs.
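
A rough sketch of an indexing loop after that cleanup, assuming a date-valued field like the histogram tests use (the helper name and field are placeholders); the explicit commit that used to sit at the top of the loop is gone because RandomIndexWriter already commits at random points:

    // Fragment of a hypothetical indexing helper in an AggregatorTestCase subclass;
    // assumes the usual Lucene and java.time imports from the surrounding test.
    private void indexSampleData(List<ZonedDateTime> dataset, RandomIndexWriter indexWriter) throws IOException {
        Document document = new Document();
        for (ZonedDateTime date : dataset) {
            // Before this change: if (frequently()) { indexWriter.commit(); }
            // RandomIndexWriter already commits randomly, so the extra commits only
            // created more segments and slowed the tests down.
            long instant = date.toInstant().toEpochMilli();
            document.add(new SortedNumericDocValuesField("date", instant));
            indexWriter.addDocument(document);
            document.clear();
        }
    }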
Jim Ferenczi 5 years ago
commit 5de0ed9432
39 changed files with 480 additions and 868 deletions
  1. +4 -43 modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java
  2. +3 -3 modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java
  3. +3 -2 modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java
  4. +2 -2 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java
  5. +8 -15 server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java
  6. +8 -16 server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java
  7. +22 -36 server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java
  8. +28 -88 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
  9. +36 -97 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java
  10. +70 -52 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java
  11. +56 -34 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java
  12. +32 -73 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java
  13. +1 -17 server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java
  14. +12 -11 server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java
  15. +4 -4 server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java
  16. +3 -3 server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java
  17. +2 -2 server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java
  18. +6 -35 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
  19. +5 -34 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java
  20. +7 -36 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
  21. +38 -82 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java
  22. +16 -13 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
  23. +4 -4 server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
  24. +1 -1 server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
  25. +5 -5 server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
  26. +5 -5 server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
  27. +2 -2 server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
  28. +23 -14 server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
  29. +2 -2 server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
  30. +1 -1 server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java
  31. +2 -2 server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java
  32. +0 -20 server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java
  33. +0 -4 server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java
  34. +54 -97 test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
  35. +1 -1 x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java
  36. +1 -1 x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java
  37. +1 -1 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
  38. +7 -5 x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java
  39. +5 -5 x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java

+ 4 - 43
modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java

@@ -53,9 +53,9 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
                     .fields(Collections.singletonList("field"));
-                InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+                InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
                 assertNull(stats.getStats());
-                assertFalse(MatrixAggregationInspectionHelper.hasValue(stats));
+                assertEquals(0L, stats.getDocCount());
             }
         }
     }
@@ -72,9 +72,9 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
                     .fields(Collections.singletonList("bogus"));
-                InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+                InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
                 assertNull(stats.getStats());
-                assertFalse(MatrixAggregationInspectionHelper.hasValue(stats));
+                assertEquals(0L, stats.getDocCount());
             }
         }
     }
@@ -85,43 +85,6 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase {
         String fieldB = "b";
         MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE);
 
-        try (Directory directory = newDirectory();
-            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
-
-            int numDocs = scaledRandomIntBetween(8192, 16384);
-            Double[] fieldAValues = new Double[numDocs];
-            Double[] fieldBValues = new Double[numDocs];
-            for (int docId = 0; docId < numDocs; docId++) {
-                Document document = new Document();
-                fieldAValues[docId] = randomDouble();
-                document.add(new SortedNumericDocValuesField(fieldA, NumericUtils.doubleToSortableLong(fieldAValues[docId])));
-
-                fieldBValues[docId] = randomDouble();
-                document.add(new SortedNumericDocValuesField(fieldB, NumericUtils.doubleToSortableLong(fieldBValues[docId])));
-                indexWriter.addDocument(document);
-            }
-
-            MultiPassStats multiPassStats = new MultiPassStats(fieldA, fieldB);
-            multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues));
-            try (IndexReader reader = indexWriter.getReader()) {
-                IndexSearcher searcher = new IndexSearcher(reader);
-                MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
-                    .fields(Arrays.asList(fieldA, fieldB));
-                InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB);
-                // Since `search` doesn't do any reduction, and the InternalMatrixStats object will have a null `MatrixStatsResults`
-                // object.  That is created during the final reduction, which also does a final round of computations
-                // So we have to create a MatrixStatsResults object here manually so that the final `compute()` is called
-                multiPassStats.assertNearlyEqual(new MatrixStatsResults(stats.getStats()));
-            }
-        }
-    }
-
-    public void testTwoFieldsReduce() throws Exception {
-        String fieldA = "a";
-        MappedFieldType ftA = new NumberFieldMapper.NumberFieldType(fieldA, NumberFieldMapper.NumberType.DOUBLE);
-        String fieldB = "b";
-        MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE);
-
         try (Directory directory = newDirectory();
              RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
 
@@ -145,8 +108,6 @@ public class MatrixStatsAggregatorTests extends AggregatorTestCase {
                 MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
                     .fields(Arrays.asList(fieldA, fieldB));
                 InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB);
-                // Unlike testTwoFields, `searchAndReduce` will execute reductions so the `MatrixStatsResults` object
-                // will be populated and fully computed.  We should use that value directly to test against
                 multiPassStats.assertNearlyEqual(stats);
                 assertTrue(MatrixAggregationInspectionHelper.hasValue(stats));
             }

+ 3 - 3
modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java

@@ -303,7 +303,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
         aggregationBuilder.subAggregation(new MinAggregationBuilder("in_parent").field("number"));
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
-        InternalParent result = search(indexSearcher, query, aggregationBuilder, fieldType);
+        InternalParent result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
         verify.accept(result);
     }
 
@@ -314,7 +314,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
         aggregationBuilder.subAggregation(new TermsAggregationBuilder("value_terms").field("number"));
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
-        InternalParent result = search(indexSearcher, query, aggregationBuilder, fieldType);
+        InternalParent result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
         verify.accept(result);
     }
 
@@ -328,7 +328,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
         MappedFieldType subFieldType = new NumberFieldMapper.NumberFieldType("subNumber", NumberFieldMapper.NumberType.LONG);
-        LongTerms result = search(indexSearcher, query, aggregationBuilder, fieldType, subFieldType);
+        LongTerms result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, subFieldType);
         verify.accept(result);
     }
 

+ 3 - 2
modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java

@@ -164,7 +164,8 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
                         expectedOddMin = Math.min(expectedOddMin, e.getValue().v2());
                     }
                 }
-                StringTerms result = search(indexSearcher, new MatchAllDocsQuery(), request, longField("number"), keywordField("kwd"));
+                StringTerms result =
+                    searchAndReduce(indexSearcher, new MatchAllDocsQuery(), request, longField("number"), keywordField("kwd"));
 
                 StringTerms.Bucket evenBucket = result.getBucketByKey("even");
                 InternalChildren evenChildren = evenBucket.getAggregations().get("children");
@@ -254,7 +255,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
         aggregationBuilder.subAggregation(new MinAggregationBuilder("in_child").field("number"));
 
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
-        InternalChildren result = search(indexSearcher, query, aggregationBuilder, fieldType);
+        InternalChildren result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
         verify.accept(result);
     }
 

+ 2 - 2
server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java

@@ -400,7 +400,6 @@ public class InternalVariableWidthHistogram
         }
 
         mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext);
-
         return reducedBuckets;
     }
 
@@ -451,7 +450,8 @@ public class InternalVariableWidthHistogram
             }
             toMerge.add(buckets.get(startIdx)); // Don't remove the startIdx bucket because it will be replaced by the merged bucket
 
-            reduceContext.consumeBucketsAndMaybeBreak(- (toMerge.size() - 1));
+            int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b)+1).sum();
+            reduceContext.consumeBucketsAndMaybeBreak(-toRemove + 1);
             Bucket merged_bucket = reduceBucket(toMerge, reduceContext);
 
             buckets.set(startIdx, merged_bucket);

+ 8 - 15
server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java

@@ -1879,7 +1879,7 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
             )
         );
 
-        executeTestCase(true, false, new TermQuery(new Term("foo", "bar")),
+        executeTestCase(true, new TermQuery(new Term("foo", "bar")),
             dataset,
             () ->
                 new CompositeAggregationBuilder("name",
@@ -1899,7 +1899,7 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
         );
 
         // source field and index sorting config have different order
-        executeTestCase(true, false, new TermQuery(new Term("foo", "bar")),
+        executeTestCase(true, new TermQuery(new Term("foo", "bar")),
             dataset,
             () ->
                 new CompositeAggregationBuilder("name",
@@ -1936,7 +1936,7 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
         );
 
         for (SortOrder order : SortOrder.values()) {
-            executeTestCase(true, false, new MatchAllDocsQuery(),
+            executeTestCase(true, new MatchAllDocsQuery(),
                 dataset,
                 () ->
                     new CompositeAggregationBuilder("name",
@@ -1959,7 +1959,7 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
                 }
             );
 
-            executeTestCase(true, false, new MatchAllDocsQuery(),
+            executeTestCase(true, new MatchAllDocsQuery(),
                 dataset,
                 () ->
                     new CompositeAggregationBuilder("name",
@@ -1989,14 +1989,12 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
                                 Supplier<CompositeAggregationBuilder> create,
                                 Consumer<InternalComposite> verify) throws IOException {
         for (Query query : queries) {
-            executeTestCase(false, false, query, dataset, create, verify);
-            executeTestCase(false, true, query, dataset, create, verify);
-            executeTestCase(true, true, query, dataset, create, verify);
+            executeTestCase(false, query, dataset, create, verify);
+            executeTestCase(true, query, dataset, create, verify);
         }
     }
 
     private void executeTestCase(boolean useIndexSort,
-                                 boolean reduced,
                                  Query query,
                                  List<Map<String, List<Object>>> dataset,
                                  Supplier<CompositeAggregationBuilder> create,
@@ -2019,18 +2017,13 @@ public class CompositeAggregatorTests  extends AggregatorTestCase {
                     indexWriter.addDocument(document);
                     document.clear();
                 }
-                if (reduced == false && randomBoolean()) {
+                if (rarely()) {
                     indexWriter.forceMerge(1);
                 }
             }
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 IndexSearcher indexSearcher = new IndexSearcher(indexReader);
-                final InternalComposite composite;
-                if (reduced) {
-                    composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
-                } else {
-                    composite = search(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
-                }
+                InternalComposite composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
                 verify.accept(composite);
             }
         }

+ 8 - 16
server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java

@@ -62,7 +62,7 @@ public class FilterAggregatorTests extends AggregatorTestCase {
         IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
         QueryBuilder filter = QueryBuilders.termQuery("field", randomAlphaOfLength(5));
         FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);
-        InternalFilter response = search(indexSearcher, new MatchAllDocsQuery(), builder,
+        InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder,
                 fieldType);
         assertEquals(response.getDocCount(), 0);
         assertFalse(AggregationInspectionHelper.hasValue(response));
@@ -80,7 +80,7 @@ public class FilterAggregatorTests extends AggregatorTestCase {
         for (int i = 0; i < numDocs; i++) {
             if (frequently()) {
                 // make sure we have more than one segment to test the merge
-                indexWriter.getReader().close();
+                indexWriter.commit();
             }
             int value = randomInt(maxTerm-1);
             expectedBucketCount[value] += 1;
@@ -98,20 +98,12 @@ public class FilterAggregatorTests extends AggregatorTestCase {
             QueryBuilder filter = QueryBuilders.termQuery("field", Integer.toString(value));
             FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);
 
-            for (boolean doReduce : new boolean[]{true, false}) {
-                final InternalFilter response;
-                if (doReduce) {
-                    response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder,
-                        fieldType);
-                } else {
-                    response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-                }
-                assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
-                if (expectedBucketCount[value] > 0) {
-                    assertTrue(AggregationInspectionHelper.hasValue(response));
-                } else {
-                    assertFalse(AggregationInspectionHelper.hasValue(response));
-                }
+            final InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
+            if (expectedBucketCount[value] > 0) {
+                assertTrue(AggregationInspectionHelper.hasValue(response));
+            } else {
+                assertFalse(AggregationInspectionHelper.hasValue(response));
             }
         } finally {
             indexReader.close();

+ 22 - 36
server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java

@@ -60,7 +60,7 @@ public class FiltersAggregatorTests extends AggregatorTestCase {
         }
         FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
         builder.otherBucketKey("other");
-        InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+        InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
         assertEquals(response.getBuckets().size(), numFilters);
         for (InternalFilters.InternalBucket filter : response.getBuckets()) {
             assertEquals(filter.getDocCount(), 0);
@@ -113,22 +113,15 @@ public class FiltersAggregatorTests extends AggregatorTestCase {
         FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys);
         builder.otherBucket(true);
         builder.otherBucketKey("other");
-        for (boolean doReduce : new boolean[] {true, false}) {
-            final InternalFilters filters;
-            if (doReduce) {
-                filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-            } else {
-                filters = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-            }
-            assertEquals(filters.getBuckets().size(), 7);
-            assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
-            assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
-            assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
-            assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
-            assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
-            assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
-            assertTrue(AggregationInspectionHelper.hasValue(filters));
-        }
+        final InternalFilters filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+        assertEquals(filters.getBuckets().size(), 7);
+        assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
+        assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
+        assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
+        assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
+        assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
+        assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
+        assertTrue(AggregationInspectionHelper.hasValue(filters));
 
         indexReader.close();
         directory.close();
@@ -175,28 +168,21 @@ public class FiltersAggregatorTests extends AggregatorTestCase {
             builder.otherBucket(true);
             builder.otherBucketKey("other");
 
-            for (boolean doReduce : new boolean[]{true, false}) {
-                final InternalFilters response;
-                if (doReduce) {
-                    response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-                } else {
-                    response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-                }
-                List<InternalFilters.InternalBucket> buckets = response.getBuckets();
-                assertEquals(buckets.size(), filters.length + 1);
+            final InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+            List<InternalFilters.InternalBucket> buckets = response.getBuckets();
+            assertEquals(buckets.size(), filters.length + 1);
 
-                for (InternalFilters.InternalBucket bucket : buckets) {
-                    if ("other".equals(bucket.getKey())) {
-                        assertEquals(bucket.getDocCount(), expectedOtherCount);
-                    } else {
-                        int index = Integer.parseInt(bucket.getKey());
-                        assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
-                    }
+            for (InternalFilters.InternalBucket bucket : buckets) {
+                if ("other".equals(bucket.getKey())) {
+                    assertEquals(bucket.getDocCount(), expectedOtherCount);
+                } else {
+                    int index = Integer.parseInt(bucket.getKey());
+                    assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
                 }
-
-                // Always true because we include 'other' in the agg
-                assertTrue(AggregationInspectionHelper.hasValue(response));
             }
+
+            // Always true because we include 'other' in the agg
+            assertTrue(AggregationInspectionHelper.hasValue(response));
         } finally {
             indexReader.close();
             directory.close();

+ 28 - 88
server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java

@@ -98,7 +98,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
     private static final Query DEFAULT_QUERY = new MatchAllDocsQuery();
 
     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), DATES_WITH_TIME,
+        testSearchCase(new MatchNoDocsQuery(), DATES_WITH_TIME,
             aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
             histogram -> {
                 assertEquals(0, histogram.getBuckets().size());
@@ -115,20 +115,16 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2015-01-01T00:00:00.000Z", 3);
         expectedDocCount.put("2016-01-01T00:00:00.000Z", 1);
         expectedDocCount.put("2017-01-01T00:00:00.000Z", 1);
-        testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME,
-            aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2011-01-01T00:00:00.000Z", 0);
         expectedDocCount.put("2014-01-01T00:00:00.000Z", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME,
+        testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME,
             aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
     }
 
     public void testSubAggregations() throws IOException {
-        testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME,
+        testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME,
             aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD)
                 .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)),
             histogram -> {
@@ -249,7 +245,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
             expectedMax.put("2020-01-01T00:00:00.000Z", 2.0);
             expectedMax.put("2021-01-01T00:00:00.000Z", 3.0);
             assertThat(maxAsMap(ak1adh), equalTo(expectedMax));
-    
+
             StringTerms.Bucket b = terms.getBucketByKey("b");
             StringTerms bk1 = b.getAggregations().get("k1");
             StringTerms.Bucket bk1a = bk1.getBucketByKey("a");
@@ -391,7 +387,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
                 assertFalse(AggregationInspectionHelper.hasValue(histogram));
             }
         );
-        testSearchAndReduceCase(DEFAULT_QUERY, dates, aggregation,
+        testSearchCase(DEFAULT_QUERY, dates, aggregation,
             histogram -> {
                 assertEquals(0, histogram.getBuckets().size());
                 assertFalse(AggregationInspectionHelper.hasValue(histogram));
@@ -431,7 +427,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
         final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
         final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end);
-        testBothCases(rangeQuery, DATES_WITH_TIME,
+        testSearchCase(rangeQuery, DATES_WITH_TIME,
             aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
             histogram -> {
                 final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
@@ -460,7 +456,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-01-01T00:00:00.000Z", 1);
         expectedDocCount.put("2017-02-01T00:00:00.000Z", 2);
         expectedDocCount.put("2017-03-01T00:00:00.000Z", 3);
-        testBothCases(DEFAULT_QUERY, datesForMonthInterval,
+        testSearchCase(DEFAULT_QUERY, datesForMonthInterval,
             aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
@@ -490,12 +486,8 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-02T00:00:00.000Z", 2);
         expectedDocCount.put("2017-02-03T00:00:00.000Z", 3);
         expectedDocCount.put("2017-02-05T00:00:00.000Z", 1);
-        testSearchCase(DEFAULT_QUERY, datesForDayInterval,
-            aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2017-02-04T00:00:00.000Z", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval,
+        testSearchCase(DEFAULT_QUERY, datesForDayInterval,
             aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
@@ -515,12 +507,8 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2);
         expectedDocCount.put("2017-02-02T00:00:00.000-01:00", 3);
         expectedDocCount.put("2017-02-04T00:00:00.000-01:00", 1);
-        testSearchCase(DEFAULT_QUERY, datesForDayInterval,
-            aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2017-02-03T00:00:00.000-01:00", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval,
+        testSearchCase(DEFAULT_QUERY, datesForDayInterval,
             aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
          );
@@ -546,13 +534,9 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-01T15:00:00.000Z", 1);
         expectedDocCount.put("2017-02-01T15:00:00.000Z", 1);
         expectedDocCount.put("2017-02-01T16:00:00.000Z", 3);
-        testSearchCase(DEFAULT_QUERY, datesForHourInterval,
-            aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2017-02-01T11:00:00.000Z", 0);
         expectedDocCount.put("2017-02-01T12:00:00.000Z", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
+        testSearchCase(DEFAULT_QUERY, datesForHourInterval,
             aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
@@ -560,7 +544,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-01T09:00:00.000Z", 3);
         expectedDocCount.put("2017-02-01T12:00:00.000Z", 3);
         expectedDocCount.put("2017-02-01T15:00:00.000Z", 4);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
+        testSearchCase(DEFAULT_QUERY, datesForHourInterval,
             aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
@@ -585,13 +569,9 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-01T13:00:00.000-01:00", 2);
         expectedDocCount.put("2017-02-01T14:00:00.000-01:00", 1);
         expectedDocCount.put("2017-02-01T15:00:00.000-01:00", 3);
-        testSearchCase(DEFAULT_QUERY, datesForHourInterval,
-            aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2017-02-01T10:00:00.000-01:00", 0);
         expectedDocCount.put("2017-02-01T11:00:00.000-01:00", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval,
+        testSearchCase(DEFAULT_QUERY, datesForHourInterval,
             aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
@@ -612,7 +592,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         bucketsToExpectedDocCountMap.put(10, 30);
         bucketsToExpectedDocCountMap.put(3, 60);
         final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+        testSearchCase(DEFAULT_QUERY, dataset,
             aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
             histogram -> {
                 final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -641,7 +621,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         bucketsToExpectedDocCountMap.put(10, 30);
         bucketsToExpectedDocCountMap.put(3, 60);
         final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+        testSearchCase(DEFAULT_QUERY, dataset,
             aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
             histogram -> {
                 final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -669,7 +649,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         bucketsToExpectedDocCountMap.put(12, 12);
         bucketsToExpectedDocCountMap.put(3, 24);
         final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+        testSearchCase(DEFAULT_QUERY, dataset,
             aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
             histogram -> {
                 final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -693,7 +673,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         }
         final int randomChoice = randomIntBetween(1, 3);
         if (randomChoice == 1) {
-            testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+            testSearchCase(DEFAULT_QUERY, dataset,
                 aggregation -> aggregation.setNumBuckets(length).field(DATE_FIELD),
                 histogram -> {
                     final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -704,7 +684,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
                     assertEquals(1, bucket.getDocCount());
                 });
         } else if (randomChoice == 2) {
-            testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+            testSearchCase(DEFAULT_QUERY, dataset,
                 aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD),
                 histogram -> {
                     final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -716,7 +696,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
                     assertEquals(expectedDocCount, bucket.getDocCount());
                 });
         } else if (randomChoice == 3) {
-            testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+            testSearchCase(DEFAULT_QUERY, dataset,
                 aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD),
                 histogram -> {
                     final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -742,7 +722,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         bucketsToExpectedDocCountMap.put(30, 3);
         bucketsToExpectedDocCountMap.put(6, 12);
         final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+        testSearchCase(DEFAULT_QUERY, dataset,
             aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
             histogram -> {
                 final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -772,7 +752,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         bucketsToExpectedDocCountMap.put(10, 50);
         bucketsToExpectedDocCountMap.put(5, 100);
         final Map.Entry<Integer, Integer> randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet());
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset,
+        testSearchCase(DEFAULT_QUERY, dataset,
             aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD),
             histogram -> {
                 final List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -797,29 +777,20 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         skeletonDocCount.put("2017-02-01T09:02:00.000Z", 2);
         skeletonDocCount.put("2017-02-01T09:15:00.000Z", 1);
         skeletonDocCount.put("2017-02-01T09:16:00.000Z", 2);
-        testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
-            aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(skeletonDocCount))
-        );
         Map<String, Integer> fullDocCount = new TreeMap<>();
         fullDocCount.put("2017-02-01T09:02:00.000Z", 2);
         fullDocCount.put("2017-02-01T09:07:00.000Z", 0);
         fullDocCount.put("2017-02-01T09:12:00.000Z", 3);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval,
+        testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
             aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount))
         );
-
-        testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
-            aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(skeletonDocCount))
-        );
         fullDocCount.clear();
         fullDocCount.putAll(skeletonDocCount);
         for (int minute = 3; minute < 15; minute++) {
-            fullDocCount.put(String.format(Locale.ROOT, "2017-02-01T09:%02d:00.000Z", minute), 0);    
+            fullDocCount.put(String.format(Locale.ROOT, "2017-02-01T09:%02d:00.000Z", minute), 0);
         }
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval,
+        testSearchCase(DEFAULT_QUERY, datesForMinuteInterval,
             aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount))
         );
@@ -837,22 +808,18 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         expectedDocCount.put("2017-02-01T00:00:05.000Z", 1);
         expectedDocCount.put("2017-02-01T00:00:07.000Z", 2);
         expectedDocCount.put("2017-02-01T00:00:11.000Z", 3);
-        testSearchCase(DEFAULT_QUERY, datesForSecondInterval,
-            aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD),
-            result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
-        );
         expectedDocCount.put("2017-02-01T00:00:06.000Z", 0);
         expectedDocCount.put("2017-02-01T00:00:08.000Z", 0);
         expectedDocCount.put("2017-02-01T00:00:09.000Z", 0);
         expectedDocCount.put("2017-02-01T00:00:10.000Z", 0);
-        testSearchAndReduceCase(DEFAULT_QUERY, datesForSecondInterval,
+        testSearchCase(DEFAULT_QUERY, datesForSecondInterval,
             aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD),
             result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount))
         );
     }
 
     public void testWithPipelineReductions() throws IOException {
-        testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME,
+        testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME,
             aggregation -> aggregation.setNumBuckets(1).field(DATE_FIELD)
                 .subAggregation(AggregationBuilders.histogram("histo").field(NUMERIC_FIELD).interval(1)
                     .subAggregation(AggregationBuilders.max("max").field(NUMERIC_FIELD))
@@ -880,25 +847,6 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
             });
     }
 
-    private void testSearchCase(final Query query, final List<ZonedDateTime> dataset,
-                                final Consumer<AutoDateHistogramAggregationBuilder> configure,
-                                final Consumer<InternalAutoDateHistogram> verify) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify);
-    }
-
-    private void testSearchAndReduceCase(final Query query, final List<ZonedDateTime> dataset,
-                                         final Consumer<AutoDateHistogramAggregationBuilder> configure,
-                                         final Consumer<InternalAutoDateHistogram> verify) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify);
-    }
-
-    private void testBothCases(final Query query, final List<ZonedDateTime> dataset,
-                               final Consumer<AutoDateHistogramAggregationBuilder> configure,
-                               final Consumer<InternalAutoDateHistogram> verify) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify);
-        executeTestCase(true, query, dataset, configure, verify);
-    }
-
     @Override
     protected IndexSettings createIndexSettings() {
         final Settings nodeSettings = Settings.builder()
@@ -913,7 +861,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         );
     }
 
-    private void executeTestCase(final boolean reduced, final Query query, final List<ZonedDateTime> dataset,
+    private void testSearchCase(final Query query, final List<ZonedDateTime> dataset,
                                  final Consumer<AutoDateHistogramAggregationBuilder> configure,
                                  final Consumer<InternalAutoDateHistogram> verify) throws IOException {
         try (Directory directory = newDirectory()) {
@@ -936,12 +884,8 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
                 MappedFieldType numericFieldType
                     = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG);
 
-                final InternalAutoDateHistogram histogram;
-                if (reduced) {
-                    histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType);
-                } else {
-                    histogram = search(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType);
-                }
+                final InternalAutoDateHistogram histogram =
+                    searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType);
                 verify.accept(histogram);
             }
         }
@@ -951,10 +895,6 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
         final Document document = new Document();
         int i = 0;
         for (final ZonedDateTime date : dataset) {
-            if (frequently()) {
-                indexWriter.commit();
-            }
-
             final long instant = date.toInstant().toEpochMilli();
             document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
             document.add(new LongPoint(INSTANT_FIELD, instant));

+ 36 - 97
server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java

@@ -67,7 +67,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
             "2017-12-12T22:55:46");
 
     public void testMatchNoDocsDeprecatedInterval() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), DATASET,
+        testSearchCase(new MatchNoDocsQuery(), DATASET,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
                 histogram -> {
                     assertEquals(0, histogram.getBuckets().size());
@@ -78,11 +78,11 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), DATASET,
+        testSearchCase(new MatchNoDocsQuery(), DATASET,
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
-        testBothCases(new MatchNoDocsQuery(), DATASET,
+        testSearchCase(new MatchNoDocsQuery(), DATASET,
             aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
@@ -92,20 +92,13 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         Query query = new MatchAllDocsQuery();
 
         testSearchCase(query, DATASET,
-                aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
-                histogram -> {
-                    assertEquals(6, histogram.getBuckets().size());
-                    assertTrue(AggregationInspectionHelper.hasValue(histogram));
-                }, false
-        );
-        testSearchAndReduceCase(query, DATASET,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
                 histogram -> {
                     assertEquals(8, histogram.getBuckets().size());
                     assertTrue(AggregationInspectionHelper.hasValue(histogram));
                 }, false
         );
-        testBothCases(query, DATASET,
+        testSearchCase(query, DATASET,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L),
                 histogram -> {
                     assertEquals(6, histogram.getBuckets().size());
@@ -122,34 +115,26 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         for (int i = 0; i < 1000; i++) {
             foo.add(DATASET.get(randomIntBetween(0, DATASET.size()-1)));
         }
-        testSearchAndReduceCase(query, foo,
+        testSearchCase(query, foo,
             aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d"))
                     .field(AGGREGABLE_DATE).order(BucketOrder.count(false)),
             histogram -> assertEquals(8, histogram.getBuckets().size()), false
         );
 
         testSearchCase(query, DATASET,
-            aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
-            histogram -> assertEquals(6, histogram.getBuckets().size()), false
-        );
-        testSearchAndReduceCase(query, DATASET,
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
             histogram -> assertEquals(8, histogram.getBuckets().size()), false
         );
-        testBothCases(query, DATASET,
+        testSearchCase(query, DATASET,
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L),
             histogram -> assertEquals(6, histogram.getBuckets().size()), false
         );
 
         testSearchCase(query, DATASET,
-            aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
-            histogram -> assertEquals(6, histogram.getBuckets().size()), false
-        );
-        testSearchAndReduceCase(query, DATASET,
             aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE),
             histogram -> assertEquals(8, histogram.getBuckets().size()), false
         );
-        testBothCases(query, DATASET,
+        testSearchCase(query, DATASET,
             aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE).minDocCount(1L),
             histogram -> assertEquals(6, histogram.getBuckets().size()), false
         );
@@ -206,7 +191,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
             assertEquals(0, histogram.getBuckets().size());
             assertFalse(AggregationInspectionHelper.hasValue(histogram));
         }, false);
-        testSearchAndReduceCase(query, dates, aggregation, histogram -> {
+        testSearchCase(query, dates, aggregation, histogram -> {
             assertEquals(0, histogram.getBuckets().size());
             assertFalse(AggregationInspectionHelper.hasValue(histogram));
         }, false);
@@ -221,7 +206,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         testSearchCase(query, dates, aggregation,
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
-        testSearchAndReduceCase(query, dates, aggregation,
+        testSearchCase(query, dates, aggregation,
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
 
@@ -230,13 +215,13 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         testSearchCase(query, dates, aggregation,
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
-        testSearchAndReduceCase(query, dates, aggregation,
+        testSearchCase(query, dates, aggregation,
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
     }
 
     public void testAggregateWrongFieldDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(), DATASET,
+        testSearchCase(new MatchAllDocsQuery(), DATASET,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"),
                 histogram -> {
                     assertEquals(0, histogram.getBuckets().size());
@@ -247,18 +232,18 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testAggregateWrongField() throws IOException {
-        testBothCases(new MatchAllDocsQuery(), DATASET,
+        testSearchCase(new MatchAllDocsQuery(), DATASET,
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field("wrong_field"),
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
-        testBothCases(new MatchAllDocsQuery(), DATASET,
+        testSearchCase(new MatchAllDocsQuery(), DATASET,
             aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field("wrong_field"),
             histogram -> assertEquals(0, histogram.getBuckets().size()), false
         );
     }
 
     public void testIntervalYearDeprecated() throws IOException {
-        testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET,
+        testSearchCase(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
                 histogram -> {
                     List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -281,7 +266,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalYear() throws IOException {
-        testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET,
+        testSearchCase(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET,
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE),
             histogram -> {
                 List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -303,7 +288,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalMonthDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
                 Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"),
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE),
                 histogram -> {
@@ -327,7 +312,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalMonth() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"),
             aggregation -> aggregation.calendarInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE),
             histogram -> {
@@ -350,7 +335,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalDayDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
                 Arrays.asList(
                         "2017-02-01",
                         "2017-02-02",
@@ -386,7 +371,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalDay() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01",
                 "2017-02-02",
@@ -418,7 +403,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                 assertEquals(1, bucket.getDocCount());
             }, false
         );
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01",
                 "2017-02-02",
@@ -453,7 +438,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalHourDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
                 Arrays.asList(
                         "2017-02-01T09:02:00.000Z",
                         "2017-02-01T09:35:00.000Z",
@@ -500,7 +485,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalHour() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T09:02:00.000Z",
                 "2017-02-01T09:35:00.000Z",
@@ -543,7 +528,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                 assertEquals(3, bucket.getDocCount());
             }, false
         );
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T09:02:00.000Z",
                 "2017-02-01T09:35:00.000Z",
@@ -589,7 +574,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalMinuteDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
                 Arrays.asList(
                         "2017-02-01T09:02:35.000Z",
                         "2017-02-01T09:02:59.000Z",
@@ -619,7 +604,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalMinute() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T09:02:35.000Z",
                 "2017-02-01T09:02:59.000Z",
@@ -645,7 +630,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                 assertEquals(2, bucket.getDocCount());
             }, false
         );
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T09:02:35.000Z",
                 "2017-02-01T09:02:59.000Z",
@@ -674,7 +659,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalSecondDeprecated() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
                 Arrays.asList(
                         "2017-02-01T00:00:05.015Z",
                         "2017-02-01T00:00:11.299Z",
@@ -705,7 +690,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testIntervalSecond() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T00:00:05.015Z",
                 "2017-02-01T00:00:11.299Z",
@@ -732,7 +717,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                 assertEquals(3, bucket.getDocCount());
             }, false
         );
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T00:00:05.015Z",
                 "2017-02-01T00:00:11.299Z",
@@ -762,7 +747,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
     }
 
     public void testNanosIntervalSecond() throws IOException {
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T00:00:05.015298384Z",
                 "2017-02-01T00:00:11.299954583Z",
@@ -789,7 +774,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                 assertEquals(3, bucket.getDocCount());
             }, true
         );
-        testBothCases(new MatchAllDocsQuery(),
+        testSearchCase(new MatchAllDocsQuery(),
             Arrays.asList(
                 "2017-02-01T00:00:05.015298384Z",
                 "2017-02-01T00:00:11.299954583Z",
@@ -829,7 +814,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         );
 
         // 5 sec interval with minDocCount = 0
-        testSearchAndReduceCase(query, timestamps,
+        testSearchCase(query, timestamps,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
                 histogram -> {
                     List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -854,7 +839,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         );
 
         // 5 sec interval with minDocCount = 3
-        testSearchAndReduceCase(query, timestamps,
+        testSearchCase(query, timestamps,
                 aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L),
                 histogram -> {
                     List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -879,7 +864,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         );
 
         // 5 sec interval with minDocCount = 0
-        testSearchAndReduceCase(query, timestamps,
+        testSearchCase(query, timestamps,
             aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L),
             histogram -> {
                 List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -904,7 +889,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         );
 
         // 5 sec interval with minDocCount = 3
-        testSearchAndReduceCase(query, timestamps,
+        testSearchCase(query, timestamps,
             aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L),
             histogram -> {
                 List<? extends Histogram.Bucket> buckets = histogram.getBuckets();
@@ -1175,43 +1160,6 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                                 Consumer<DateHistogramAggregationBuilder> configure,
                                 Consumer<InternalDateHistogram> verify,
                                 int maxBucket, boolean useNanosecondResolution) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, maxBucket, useNanosecondResolution);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<String> dataset,
-                                         Consumer<DateHistogramAggregationBuilder> configure,
-                                         Consumer<InternalDateHistogram> verify, boolean useNanosecondResolution) throws IOException {
-        testSearchAndReduceCase(query, dataset, configure, verify, 1000, useNanosecondResolution);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<String> dataset,
-                                         Consumer<DateHistogramAggregationBuilder> configure,
-                                         Consumer<InternalDateHistogram> verify,
-                                         int maxBucket, boolean useNanosecondResolution) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, maxBucket, useNanosecondResolution);
-    }
-
-    private void testBothCases(Query query, List<String> dataset,
-                               Consumer<DateHistogramAggregationBuilder> configure,
-                               Consumer<InternalDateHistogram> verify, boolean useNanosecondResolution) throws IOException {
-        testBothCases(query, dataset, configure, verify, 10000, useNanosecondResolution);
-    }
-
-    private void testBothCases(Query query, List<String> dataset,
-                               Consumer<DateHistogramAggregationBuilder> configure,
-                               Consumer<InternalDateHistogram> verify,
-                               int maxBucket, boolean useNanosecondResolution) throws IOException {
-        testSearchCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution);
-        testSearchAndReduceCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution);
-    }
-
-    private void executeTestCase(boolean reduced,
-                                 Query query,
-                                 List<String> dataset,
-                                 Consumer<DateHistogramAggregationBuilder> configure,
-                                 Consumer<InternalDateHistogram> verify,
-                                 int maxBucket, boolean useNanosecondResolution) throws IOException {
-
         boolean aggregableDateIsSearchable = randomBoolean();
         DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(useNanosecondResolution, aggregableDateIsSearchable);
 
@@ -1220,10 +1168,6 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (String date : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     long instant = asLong(date, fieldType);
                     document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant));
                     if (aggregableDateIsSearchable) {
@@ -1243,12 +1187,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
                     configure.accept(aggregationBuilder);
                 }
 
-                InternalDateHistogram histogram;
-                if (reduced) {
-                    histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, null, fieldType);
-                } else {
-                    histogram = search(indexSearcher, query, aggregationBuilder, maxBucket, fieldType);
-                }
+                InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, fieldType);
                 verify.accept(histogram);
             }
         }

+ 70 - 52
server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java

@@ -55,7 +55,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
     public void testLongs() throws Exception {
         try (Directory dir = newDirectory();
                 RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
-            for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
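+            // 15 (rather than 50) keeps the value range tight, so the final reduce materializes only a couple of empty buckets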
+            for (long value : new long[] {7, 3, -10, -6, 5, 15}) {
                 Document doc = new Document();
                 doc.add(new SortedNumericDocValuesField("field", value));
                 w.addDocument(doc);
@@ -66,16 +66,20 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                     .interval(5);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
-                assertEquals(4, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
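+                // 6 buckets spanning -10..15 in steps of 5; the reduce materializes the empty -5 and 10 buckets because min_doc_count defaults to 0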
+                assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
-                assertEquals(0d, histogram.getBuckets().get(1).getKey());
-                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
-                assertEquals(5d, histogram.getBuckets().get(2).getKey());
-                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
-                assertEquals(50d, histogram.getBuckets().get(3).getKey());
-                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(-5d, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(2).getKey());
+                assertEquals(1, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(3).getKey());
+                assertEquals(2, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(10d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(5).getKey());
+                assertEquals(1, histogram.getBuckets().get(5).getDocCount());
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -84,7 +88,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
     public void testDoubles() throws Exception {
         try (Directory dir = newDirectory();
                 RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
-            for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 50.1}) {
+            for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 15.1}) {
                 Document doc = new Document();
                 doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
                 w.addDocument(doc);
@@ -95,16 +99,21 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                     .interval(5);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
-                assertEquals(4, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
+                assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
-                assertEquals(0d, histogram.getBuckets().get(1).getKey());
-                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
-                assertEquals(5d, histogram.getBuckets().get(2).getKey());
-                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
-                assertEquals(50d, histogram.getBuckets().get(3).getKey());
-                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(-5d, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(2).getKey());
+                assertEquals(1, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(3).getKey());
+                assertEquals(2, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(10d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(5).getKey());
+                assertEquals(1, histogram.getBuckets().get(5).getDocCount());
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -130,10 +139,6 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
              RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) {
             Document document = new Document();
             for (String date : dataset) {
-                if (frequently()) {
-                    indexWriter.commit();
-                }
-
                 long instant = fieldType.parse(date);
                 document.add(new SortedNumericDocValuesField(fieldName, instant));
                 indexWriter.addDocument(document);
@@ -145,7 +150,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .interval(1000 * 60 * 60 * 24);
             try (IndexReader reader = indexWriter.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -165,16 +170,20 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                     .interval(Math.PI);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
-                assertEquals(4, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
+                assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
                 assertEquals(-3 * Math.PI, histogram.getBuckets().get(1).getKey());
                 assertEquals(1, histogram.getBuckets().get(1).getDocCount());
-                assertEquals(0d, histogram.getBuckets().get(2).getKey());
-                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
-                assertEquals(Math.PI, histogram.getBuckets().get(3).getKey());
-                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(-2 * Math.PI, histogram.getBuckets().get(2).getKey());
+                assertEquals(0, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(-Math.PI, histogram.getBuckets().get(3).getKey());
+                assertEquals(0, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(4).getKey());
+                assertEquals(2, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(Math.PI, histogram.getBuckets().get(5).getKey());
+                assertEquals(1, histogram.getBuckets().get(5).getDocCount());
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -209,7 +218,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
     public void testMissing() throws Exception {
         try (Directory dir = newDirectory();
                 RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
-            for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
+            for (long value : new long[] {7, 3, -10, -6, 5, 15}) {
                 Document doc = new Document();
                 doc.add(new SortedNumericDocValuesField("field", value));
                 w.addDocument(doc);
@@ -222,16 +231,20 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                     .missing(2d);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
-                assertEquals(4, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
+                assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
-                assertEquals(0d, histogram.getBuckets().get(1).getKey());
-                assertEquals(7, histogram.getBuckets().get(1).getDocCount());
-                assertEquals(5d, histogram.getBuckets().get(2).getKey());
-                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
-                assertEquals(50d, histogram.getBuckets().get(3).getKey());
-                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(-5d, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(2).getKey());
+                assertEquals(7, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(3).getKey());
+                assertEquals(2, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(10d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(5).getKey());
+                assertEquals(1, histogram.getBuckets().get(5).getDocCount());
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -252,7 +265,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             MappedFieldType type = null;
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, type);
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, type);
 
                 assertEquals(1, histogram.getBuckets().size());
 
@@ -280,7 +293,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 Throwable t = expectThrows(IllegalArgumentException.class, () -> {
-                    search(searcher, new MatchAllDocsQuery(), aggBuilder, type);
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, type);
                 });
                 // This throws a number format exception (which is a subclass of IllegalArgumentException) and might be ok?
                 assertThat(t.getMessage(), containsString(missingValue));
@@ -304,7 +317,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 IndexSearcher searcher = new IndexSearcher(reader);
 
                 expectThrows(IllegalArgumentException.class, () -> {
-                    search(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field"));
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field"));
                 });
             }
         }
@@ -326,14 +339,16 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                     .offset(Math.PI);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
-                assertEquals(3, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
+                assertEquals(4, histogram.getBuckets().size());
                 assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
-                assertEquals(Math.PI, histogram.getBuckets().get(1).getKey());
-                assertEquals(2, histogram.getBuckets().get(1).getDocCount());
-                assertEquals(5 + Math.PI, histogram.getBuckets().get(2).getKey());
-                assertEquals(1, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(-5 + Math.PI, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(Math.PI, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(5 + Math.PI, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }
@@ -358,18 +373,21 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .offset(offset);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
-                assertEquals(3, histogram.getBuckets().size());
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
+                assertEquals(4, histogram.getBuckets().size());
 
                 assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
 
-                assertEquals(expectedOffset, histogram.getBuckets().get(1).getKey());
-                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(-5 + expectedOffset, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
 
-                assertEquals(5 + expectedOffset, histogram.getBuckets().get(2).getKey());
+                assertEquals(expectedOffset, histogram.getBuckets().get(2).getKey());
                 assertEquals(1, histogram.getBuckets().get(2).getDocCount());
 
+                assertEquals(5 + expectedOffset, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+
                 assertTrue(AggregationInspectionHelper.hasValue(histogram));
             }
         }

+ 56 - 34
server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java

@@ -57,7 +57,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5
                 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0
                 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 0, 5, 10
-                new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 40, 45
+                new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 20, 25
             }) {
                 Document doc = new Document();
                 BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range));
@@ -71,8 +71,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
-                assertEquals(6, histogram.getBuckets().size());
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                assertEquals(7, histogram.getBuckets().size());
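+                // 7 buckets spanning -5..25; the empty 15 bucket is materialized by the reduce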
 
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -86,11 +87,14 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 assertEquals(10d, histogram.getBuckets().get(3).getKey());
                 assertEquals(1, histogram.getBuckets().get(3).getDocCount());
 
-                assertEquals(40d, histogram.getBuckets().get(4).getKey());
-                assertEquals(1, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
 
-                assertEquals(45d, histogram.getBuckets().get(5).getKey());
+                assertEquals(20d, histogram.getBuckets().get(5).getKey());
                 assertEquals(1, histogram.getBuckets().get(5).getDocCount());
+
+                assertEquals(25d, histogram.getBuckets().get(6).getKey());
+                assertEquals(1, histogram.getBuckets().get(6).getDocCount());
             }
         }
     }
@@ -103,7 +107,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5
                 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0
                 new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10
-                new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true), // bucket 40, 45
+                new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true), // bucket 20, 25
             }) {
                 Document doc = new Document();
                 BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range));
@@ -117,8 +121,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
-                assertEquals(6, histogram.getBuckets().size());
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                assertEquals(7, histogram.getBuckets().size());
 
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -132,11 +137,14 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 assertEquals(10d, histogram.getBuckets().get(3).getKey());
                 assertEquals(1, histogram.getBuckets().get(3).getDocCount());
 
-                assertEquals(40d, histogram.getBuckets().get(4).getKey());
-                assertEquals(1, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
 
-                assertEquals(45d, histogram.getBuckets().get(5).getKey());
+                assertEquals(20d, histogram.getBuckets().get(5).getKey());
                 assertEquals(1, histogram.getBuckets().get(5).getDocCount());
+
+                assertEquals(25d, histogram.getBuckets().get(6).getKey());
+                assertEquals(1, histogram.getBuckets().get(6).getDocCount());
             }
         }
     }
@@ -150,7 +158,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5
                 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0
                 new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10
-                new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true) // bucket 40, 45
+                new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true) // bucket 20, 25
             ));
             doc.add(new BinaryDocValuesField("field", encodedRange));
             w.addDocument(doc);
@@ -161,8 +169,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
-                assertEquals(6, histogram.getBuckets().size());
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                assertEquals(7, histogram.getBuckets().size());
 
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -176,11 +185,14 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 assertEquals(10d, histogram.getBuckets().get(3).getKey());
                 assertEquals(1, histogram.getBuckets().get(3).getDocCount());
 
-                assertEquals(40d, histogram.getBuckets().get(4).getKey());
-                assertEquals(1, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
 
-                assertEquals(45d, histogram.getBuckets().get(5).getKey());
+                assertEquals(20d, histogram.getBuckets().get(5).getKey());
                 assertEquals(1, histogram.getBuckets().get(5).getDocCount());
+
+                assertEquals(25d, histogram.getBuckets().get(6).getKey());
+                assertEquals(1, histogram.getBuckets().get(6).getDocCount());
             }
         }
 
@@ -206,7 +218,8 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
                 assertEquals(3, histogram.getBuckets().size());
 
                 assertEquals(0d, histogram.getBuckets().get(0).getKey());
@@ -243,7 +256,8 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
                 assertEquals(6, histogram.getBuckets().size());
 
                 assertEquals(-1 * Math.PI, histogram.getBuckets().get(0).getKey());
@@ -315,7 +329,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket -1, 4
                 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -6 -1 4
                 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 4, 9
-                new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 39, 44, 49
+                new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 19, 24, 29
             }) {
                 Document doc = new Document();
                 BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range));
@@ -330,8 +344,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
-                //assertEquals(7, histogram.getBuckets().size());
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                assertEquals(8, histogram.getBuckets().size());
 
                 assertEquals(-6d, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -345,14 +360,17 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 assertEquals(9d, histogram.getBuckets().get(3).getKey());
                 assertEquals(1, histogram.getBuckets().get(3).getDocCount());
 
-                assertEquals(39d, histogram.getBuckets().get(4).getKey());
-                assertEquals(1, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(14d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
 
-                assertEquals(44d, histogram.getBuckets().get(5).getKey());
+                assertEquals(19d, histogram.getBuckets().get(5).getKey());
                 assertEquals(1, histogram.getBuckets().get(5).getDocCount());
 
-                assertEquals(49d, histogram.getBuckets().get(6).getKey());
+                assertEquals(24d, histogram.getBuckets().get(6).getKey());
                 assertEquals(1, histogram.getBuckets().get(6).getDocCount());
+
+                assertEquals(29d, histogram.getBuckets().get(7).getKey());
+                assertEquals(1, histogram.getBuckets().get(7).getDocCount());
             }
         }
     }
@@ -365,7 +383,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5
                 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0
                 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 0, 5, 10
-                new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 40, 45
+                new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 20, 25
             }) {
                 Document doc = new Document();
                 BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range));
@@ -386,8 +404,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
-                assertEquals(6, histogram.getBuckets().size());
+                InternalHistogram histogram =
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
+                assertEquals(7, histogram.getBuckets().size());
 
                 assertEquals(-5d + expectedOffset, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -401,11 +420,14 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 assertEquals(10d + expectedOffset, histogram.getBuckets().get(3).getKey());
                 assertEquals(1, histogram.getBuckets().get(3).getDocCount());
 
-                assertEquals(40d + expectedOffset, histogram.getBuckets().get(4).getKey());
-                assertEquals(1, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(15d + expectedOffset, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
 
-                assertEquals(45d + expectedOffset, histogram.getBuckets().get(5).getKey());
+                assertEquals(20d + expectedOffset, histogram.getBuckets().get(5).getKey());
                 assertEquals(1, histogram.getBuckets().get(5).getDocCount());
+
+                assertEquals(25d + expectedOffset, histogram.getBuckets().get(6).getKey());
+                assertEquals(1, histogram.getBuckets().get(6).getDocCount());
             }
         }
     }
@@ -429,7 +451,7 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 Exception e = expectThrows(IllegalArgumentException.class, () ->
-                    search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)));
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)));
                 assertThat(e.getMessage(), equalTo("Expected numeric range type but found non-numeric range [ip_range]"));
             }
         }

+ 32 - 73
server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java

@@ -65,7 +65,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
 
     public void testNoDocs() throws Exception{
         final List<Number> dataset = Arrays.asList();
-        testBothCases(DEFAULT_QUERY, dataset, true,
+        testSearchCase(DEFAULT_QUERY, dataset, true,
             aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(6).setInitialBuffer(4),
             histogram -> {
                 final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
@@ -87,7 +87,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         expectedMins.put(-3d, -3d);
         expectedMins.put(10d, 10d);
 
-        testBothCases(DEFAULT_QUERY, dataset, true,
+        testSearchCase(DEFAULT_QUERY, dataset, true,
             aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4),
             histogram -> {
                 final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
@@ -164,7 +164,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         expectedMaxesOnlySearch.put(8.8, 8.8);
 
         testSearchCase(DEFAULT_QUERY, dataset, false,
-            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(6).setInitialBuffer(4),
+            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4),
             histogram -> {
                 final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
                 assertEquals(expectedCentroidsOnlySearch.size(), buckets.size());
@@ -198,7 +198,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         expectedMaxesSearchReduce.put(5.3, 5.9);
         expectedMaxesSearchReduce.put(8.8, 8.8);
 
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset, false,
+        testSearchCase(DEFAULT_QUERY, dataset, false,
             aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4),
             histogram -> {
             final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
@@ -220,16 +220,12 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         final List<Number> dataset = Arrays.asList(-1, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 40, 30, 25, 32, 36, 80, 50, 75, 60);
         double doubleError = 1d / 10000d;
 
-        // Search (no reduce)
-
-        // Expected clusters: [ (-1), (1), (3), (5), (7), (9), (11), (13), (15), (17),
-        //                      (19), (25, 30, 32), (36, 40, 50), (60), (75, 80) ]
-        // Corresponding keys (centroids): [ -1, 1, 3, ..., 17, 19, 29, 42, 77.5]
-        // Note: New buckets are created for 30, 50, and 80 because they are distant from the other buckets
-        final List<Double> keys = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 29d, 42d, 60d, 77.5d);
-        final List<Double> mins = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 25d, 36d, 60d, 75d);
-        final List<Double> maxes = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 32d, 50d, 60d, 80d);
-        final List<Integer> docCounts = Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 1, 2);
+        // Expected clusters: [ (-1, 1), (3, 5), (7, 9), (11, 13), (15, 17),
+        //                      (19), (25), (30), (32), (36), (40), (50), (60), (75), (80) ]
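+        // Corresponding keys are the cluster centroids, e.g. (-1 + 1) / 2 = 0 and (3 + 5) / 2 = 4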
+        final List<Double> keys = Arrays.asList(0d, 4d, 8d, 12d, 16d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d);
+        final List<Double> mins = Arrays.asList(-1d, 3d, 7d, 11d, 15d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d);
+        final List<Double> maxes = Arrays.asList(1d, 5d, 9d, 13d, 17d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d);
+        final List<Integer> docCounts = Arrays.asList(2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1);
         assert keys.size() == docCounts.size() && keys.size() == mins.size() && keys.size() == maxes.size();
 
         final Map<Double, Integer> expectedDocCountOnlySearch = new HashMap<>();
@@ -242,7 +238,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         }
 
         testSearchCase(DEFAULT_QUERY, dataset, false,
-            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(16).setInitialBuffer(12),
+            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15),
             histogram -> {
                 final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
                 assertEquals(expectedDocCountOnlySearch.size(), buckets.size());
@@ -267,7 +263,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         }
 
         testSearchCase(DEFAULT_QUERY, dataset.stream().map(n -> Double.valueOf(n.doubleValue() * Long.MAX_VALUE)).collect(toList()), false,
-            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(16).setInitialBuffer(12),
+            aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15),
             histogram -> {
                 final List<InternalVariableWidthHistogram.Bucket> buckets = histogram.getBuckets();
                 assertEquals(expectedDocCountOnlySearch.size(), buckets.size());
@@ -284,28 +280,22 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
 
     // There should not be more than `shard_size` documents on a node, even when very distant documents appear
     public void testNewBucketLimit() throws Exception{
-        final List<Number> dataset =  Arrays.asList(1,2,3,4,5, 10, 20, 50, 100, 5400, -900);
+        final List<Number> dataset =  Arrays.asList(1, 2, 3, 4, 5, 10, 20, 50, 100, 5400, -900);
         double doubleError = 1d / 10000d;
 
-        // Expected clusters: [ (-900, 1, 2), (3, 4), (5), (10, 20, 50, 100, 5400)]
-        // Corresponding keys (centroids): [ -299, 3.5, 5, 1116]
+        // Expected clusters: [ (-900, 1, 2, 3, 4, 5), (10, 20, 50, 100, 5400)]
+        // Corresponding keys (centroids): [ -147.5, 1116]
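+        // e.g. (-900 + 1 + 2 + 3 + 4 + 5) / 6 = -147.5 and (10 + 20 + 50 + 100 + 5400) / 5 = 1116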
         final Map<Double, Integer> expectedDocCount = new HashMap<>();
-        expectedDocCount.put(-299d, 3);
-        expectedDocCount.put(3.5d, 2);
-        expectedDocCount.put(5d, 1);
-        expectedDocCount.put(1116d, 5);
+        expectedDocCount.put(-147.5d, 6);
+        expectedDocCount.put(1116.0d, 5);
 
         final Map<Double, Double> expectedMins = new HashMap<>();
-        expectedMins.put(-299d, -900d);
-        expectedMins.put(3.5d, 3d);
-        expectedMins.put(5d, 5d);
-        expectedMins.put(1116d, 10d);
+        expectedMins.put(-147.5d, -900d);
+        expectedMins.put(1116.0d, 10d);
 
         final Map<Double, Double> expectedMaxes = new HashMap<>();
-        expectedMaxes.put(-299d, 2d);
-        expectedMaxes.put(3.5d, 4d);
-        expectedMaxes.put(5d, 5d);
-        expectedMaxes.put(1116d, 5400d);
+        expectedMaxes.put(-147.5d, 5d);
+        expectedMaxes.put(1116.0d, 5400d);
 
         testSearchCase(DEFAULT_QUERY, dataset, false,
             aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(4).setInitialBuffer(5),
@@ -325,7 +315,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
     public void testSimpleSubAggregations() throws IOException{
         final List<Number> dataset =  Arrays.asList(5, 1, 9, 2, 8);
 
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset, false,
+        testSearchCase(DEFAULT_QUERY, dataset, false,
             aggregation -> aggregation.field(NUMERIC_FIELD)
                 .setNumBuckets(3)
                 .setInitialBuffer(3)
@@ -426,7 +416,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         // To account for this case of a document switching clusters, we check that each cluster centroid is within
         // a certain range, rather than asserting exact values.
 
-        testSearchAndReduceCase(DEFAULT_QUERY, dataset, true,
+        testSearchCase(DEFAULT_QUERY, dataset, true,
             aggregation -> aggregation.field(NUMERIC_FIELD)
                 .setNumBuckets(2)
                 .setInitialBuffer(4)
@@ -508,12 +498,11 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
 
     public void testHugeShardSize() throws Exception {
         final List<Number> dataset = Arrays.asList(1, 2, 3);
-        testBothCases(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000), histogram -> {
-            assertThat(
+        testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000),
+            histogram -> assertThat(
                 histogram.getBuckets().stream().map(InternalVariableWidthHistogram.Bucket::getKey).collect(toList()),
-                equalTo(List.of(1.0, 2.0, 3.0))
-            );
-        });
+                equalTo(List.of(1.0, 2.0, 3.0)))
+        );
     }
 
     public void testSmallInitialBuffer() throws Exception {
@@ -529,7 +518,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
 
     public void testOutOfOrderInitialBuffer() throws Exception {
         final List<Number> dataset = Arrays.asList(1, 2, 3);
-        testBothCases(
+        testSearchCase(
             DEFAULT_QUERY,
             dataset,
             true,
@@ -553,26 +542,6 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         assertThat(new VariableWidthHistogramAggregationBuilder("test").setNumBuckets(3).getInitialBuffer(), equalTo(1500));
     }
 
-    private void testSearchCase(final Query query, final List<Number> dataset, boolean multipleSegments,
-                                final Consumer<VariableWidthHistogramAggregationBuilder> configure,
-                                final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
-        executeTestCase(false, query, dataset, multipleSegments, configure, verify);
-    }
-
-
-    private void testSearchAndReduceCase(final Query query, final List<Number> dataset, boolean multipleSegments,
-                                         final Consumer<VariableWidthHistogramAggregationBuilder> configure,
-                                         final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
-        executeTestCase(true, query, dataset, multipleSegments, configure, verify);
-    }
-
-    private void testBothCases(final Query query, final List<Number> dataset, boolean multipleSegments,
-                               final Consumer<VariableWidthHistogramAggregationBuilder> configure,
-                               final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
-        executeTestCase(true, query, dataset, multipleSegments, configure, verify);
-        executeTestCase(false, query, dataset, multipleSegments, configure, verify);
-    }
-
     @Override
     protected IndexSettings createIndexSettings() {
         final Settings nodeSettings = Settings.builder()
@@ -588,10 +557,9 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
         );
     }
 
-    private void executeTestCase(final boolean reduced, final Query query,
-                                 final List<Number> dataset, boolean multipleSegments,
-                                 final Consumer<VariableWidthHistogramAggregationBuilder> configure,
-                                 final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
+    private void testSearchCase(final Query query, final List<Number> dataset, boolean multipleSegments,
+                                final Consumer<VariableWidthHistogramAggregationBuilder> configure,
+                                final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 indexSampleData(dataset, indexWriter, multipleSegments);
@@ -619,12 +587,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
 
 
 
-                final InternalVariableWidthHistogram histogram;
-                if (reduced) {
-                    histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
-                } else {
-                    histogram = search(indexSearcher, query, aggregationBuilder, fieldType);
-                }
+                final InternalVariableWidthHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
                 verify.accept(histogram);
             }
         }
@@ -645,10 +608,6 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase {
             // Create multiple segments in the index
             final Document document = new Document();
             for (final Number doc : dataset) {
-                if (frequently()) {
-                    indexWriter.commit();
-                }
-
                 long fieldVal = convertDocumentToSortableValue(doc);
                 document.add(new SortedNumericDocValuesField(NUMERIC_FIELD, fieldVal));
                 indexWriter.addDocument(document);
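
Read together, the consolidated helper in VariableWidthHistogramAggregatorTests ends up looking roughly like the sketch below. Only the signature, the indexing block and the searchAndReduce call are visible in the hunks above; the reader/searcher setup, the builder name and the NUMERIC_FIELD field type are assumptions modeled on the other test files in this change, not the exact file contents.

    private void testSearchCase(final Query query, final List<Number> dataset, boolean multipleSegments,
                                final Consumer<VariableWidthHistogramAggregationBuilder> configure,
                                final Consumer<InternalVariableWidthHistogram> verify) throws IOException {
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                // Index the sample data; no manual commits are issued here.
                indexSampleData(dataset, indexWriter, multipleSegments);
            }
            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                final IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

                final VariableWidthHistogramAggregationBuilder aggregationBuilder =
                    new VariableWidthHistogramAggregationBuilder("_name");
                if (configure != null) {
                    configure.accept(aggregationBuilder);
                }
                // Assumed: a numeric field type matching what indexSampleData writes.
                final MappedFieldType fieldType =
                    new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG);

                // Assert on the reduced aggregation.
                final InternalVariableWidthHistogram histogram =
                    searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
                verify.accept(histogram);
            }
        }
    }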

+ 1 - 17
server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java

@@ -393,17 +393,6 @@ public class MissingAggregatorTests extends AggregatorTestCase {
                           CheckedConsumer<RandomIndexWriter, IOException> writeIndex,
                           Consumer<InternalMissing> verify,
                           Collection<MappedFieldType> fieldTypes) throws IOException {
-        testCaseWithReduce(query, builder, writeIndex, verify, fieldTypes, false);
-        testCaseWithReduce(query, builder, writeIndex, verify, fieldTypes, true);
-    }
-
-    private void testCaseWithReduce(Query query,
-                                    MissingAggregationBuilder builder,
-                                    CheckedConsumer<RandomIndexWriter, IOException> writeIndex,
-                                    Consumer<InternalMissing> verify,
-                                    Collection<MappedFieldType> fieldTypes,
-                                    boolean reduced) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 writeIndex.accept(indexWriter);
@@ -412,12 +401,7 @@ public class MissingAggregatorTests extends AggregatorTestCase {
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 final IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
                 final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]);
-                final InternalMissing missing;
-                if (reduced) {
-                    missing = searchAndReduce(indexSearcher, query, builder, fieldTypesArray);
-                } else {
-                    missing = search(indexSearcher, query, builder, fieldTypesArray);
-                }
+                final InternalMissing missing = searchAndReduce(indexSearcher, query, builder, fieldTypesArray);
                 verify.accept(missing);
             }
         }

+ 12 - 11
server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java

@@ -151,7 +151,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(maxAgg);
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
 
                 assertEquals(NESTED_AGG, nested.getName());
@@ -196,7 +196,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(maxAgg);
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -248,7 +248,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
 
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -303,7 +303,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(sumAgg);
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -386,7 +386,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
                 bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT);
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType);
 
                 assertEquals(NESTED_AGG, nested.getName());
@@ -424,7 +424,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(maxAgg);
                 termsBuilder.subAggregation(nestedBuilder);
 
-                Terms terms = search(newSearcher(indexReader, false, true),
+                Terms terms = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2);
 
                 assertEquals(7, terms.getBuckets().size());
@@ -473,7 +473,8 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(maxAgg);
                 termsBuilder.subAggregation(nestedBuilder);
 
-                terms = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2);
+                terms = searchAndReduce(newSearcher(indexReader, false, true),
+                    new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2);
 
                 assertEquals(7, terms.getBuckets().size());
                 assertEquals("authors", terms.getName());
@@ -561,7 +562,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 nestedBuilder.subAggregation(minAgg);
                 termsBuilder.subAggregation(nestedBuilder);
 
-                Terms terms = search(newSearcher(indexReader, false, true),
+                Terms terms = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2);
 
                 assertEquals(books.size(), terms.getBuckets().size());
@@ -658,7 +659,7 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("key");
                 MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("value");
 
-                Filter filter = search(newSearcher(indexReader, false, true),
+                Filter filter = searchAndReduce(newSearcher(indexReader, false, true),
                     Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2);
 
                 assertEquals("filterAgg", filter.getName());
@@ -719,9 +720,9 @@ public class NestedAggregatorTests extends AggregatorTestCase {
                 NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
                     max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias"));
 
-                InternalNested nested = search(newSearcher(indexReader, false, true),
+                InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), agg, fieldType);
-                Nested aliasNested = search(newSearcher(indexReader, false, true),
+                Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), aliasAgg, fieldType);
 
                 assertEquals(nested, aliasNested);

+ 4 - 4
server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java

@@ -98,7 +98,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                 reverseNestedBuilder.subAggregation(maxAgg);
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                Nested nested = search(newSearcher(indexReader, false, true),
+                Nested nested = searchAndReduce(newSearcher(indexReader, false, true),
                         new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 ReverseNested reverseNested = (ReverseNested)
                         ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME);
@@ -160,7 +160,7 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                 reverseNestedBuilder.subAggregation(maxAgg);
                 MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG);
 
-                Nested nested = search(newSearcher(indexReader, false, true),
+                Nested nested = searchAndReduce(newSearcher(indexReader, false, true),
                         new MatchAllDocsQuery(), nestedBuilder, fieldType);
                 assertEquals(expectedNestedDocs, nested.getDocCount());
 
@@ -224,9 +224,9 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
                 NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation(
                     reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg));
 
-                Nested nested = search(newSearcher(indexReader, false, true),
+                Nested nested = searchAndReduce(newSearcher(indexReader, false, true),
                         new MatchAllDocsQuery(), agg, fieldType);
-                Nested aliasNested = search(newSearcher(indexReader, false, true),
+                Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true),
                     new MatchAllDocsQuery(), aliasAgg, fieldType);
 
                 ReverseNested reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME);

+ 3 - 3
server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java

@@ -113,7 +113,7 @@ public class IpRangeAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new IpFieldMapper.IpFieldType("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalBinaryRange range = search(searcher, new MatchAllDocsQuery(), builder, fieldType);
+                InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType);
                 assertEquals(numRanges, range.getBuckets().size());
                 for (int i = 0; i < range.getBuckets().size(); i++) {
                     Tuple<BytesRef, BytesRef> expected = requestedRanges[i];
@@ -148,7 +148,7 @@ public class IpRangeAggregatorTests extends AggregatorTestCase {
                 .missing("192.168.100.42"); // Apparently we expect a string here
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalBinaryRange range = search(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
+                InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
                 assertEquals(1, range.getBuckets().size());
             }
         }
@@ -169,7 +169,7 @@ public class IpRangeAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 expectThrows(IllegalArgumentException.class, () -> {
-                    search(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
+                    searchAndReduce(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
                 });
             }
         }

+ 2 - 2
server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java

@@ -192,7 +192,7 @@ public class DiversifiedSamplerTests extends AggregatorTestCase {
                 .shardSize(shardSize)
                 .subAggregation(new TermsAggregationBuilder("terms").field("id"));
 
-        InternalSampler result = search(indexSearcher, query, builder, genreFieldType, idFieldType);
+        InternalSampler result = searchAndReduce(indexSearcher, query, builder, genreFieldType, idFieldType);
         verify.accept(result);
     }
 
@@ -211,7 +211,7 @@ public class DiversifiedSamplerTests extends AggregatorTestCase {
                 .field(genreFieldType.name())
                 .subAggregation(new TermsAggregationBuilder("terms").field("id"));
 
-        InternalSampler result = search(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
+        InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
         Terms terms = result.getAggregations().get("terms");
         assertEquals(0, terms.getBuckets().size());
         indexReader.close();

+ 6 - 35
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java

@@ -59,7 +59,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING
         );
@@ -68,7 +68,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -87,7 +87,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
 
         // Make sure the include/exclude fails regardless of how the user tries to type hint the agg
         AggregationExecutionException e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), null // default, no hint
             ));
@@ -95,7 +95,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
             "they can only be applied to string fields. Use an array of values for include/exclude clauses"));
 
         e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), ValueType.STRING // string type hint
             ));
@@ -104,7 +104,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testBadUserValueTypeHint() throws IOException {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testBothCases(new MatchNoDocsQuery(), dataset,
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> fail("test should have failed with exception"), ValueType.NUMERIC // numeric type hint
         ));
@@ -114,34 +114,10 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
     private void testSearchCase(Query query, List<Long> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<Long> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<Long> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (Long value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new BinaryFieldMapper.CustomBinaryDocValuesField(BINARY_FIELD, Numbers.longToBytes(value)));
                     indexWriter.addDocument(document);
                     document.clear();
@@ -161,12 +137,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
 
                 MappedFieldType binaryFieldType = new BinaryFieldMapper.BinaryFieldType(BINARY_FIELD);
 
-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, binaryFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, binaryFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, binaryFieldType);
                 verify.accept(rareTerms);
             }
         }

+ 5 - 34
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java

@@ -56,12 +56,12 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint
         );
 
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING // with type hint
         );
@@ -70,7 +70,7 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -82,7 +82,7 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
             }, null // without type hint
         );
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -98,34 +98,10 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
     private void testSearchCase(Query query, List<String> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<String> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<String> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<String> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (String value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef(value)));
                     indexWriter.addDocument(document);
                     document.clear();
@@ -145,12 +121,7 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
 
                 MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD);
 
-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, keywordFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType);
                 verify.accept(rareTerms);
             }
         }

+ 7 - 36
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java

@@ -59,12 +59,12 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
 
     public void testMatchNoDocs() throws IOException {
 
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint
         );
 
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.NUMERIC // with type hint
         );
@@ -73,7 +73,7 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -85,7 +85,7 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
             }, null //without type hint
         );
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -104,7 +104,7 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
         // Numerics don't support any regex include/exclude, so should fail no matter what we do
 
         AggregationExecutionException e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), null
             ));
@@ -113,7 +113,7 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
             "values for include/exclude clauses used to filter numeric fields"));
 
         e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), ValueType.NUMERIC // with type hint
             ));
@@ -126,34 +126,10 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
     private void testSearchCase(Query query, List<Long> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<Long> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<Long> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (Long value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedNumericDocValuesField(LONG_FIELD, value));
                     document.add(new LongPoint(LONG_FIELD, value));
                     indexWriter.addDocument(document);
@@ -175,12 +151,7 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {
                 MappedFieldType longFieldType
                     = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG);
 
-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, longFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType);
                 verify.accept(rareTerms);
             }
         }

+ 38 - 82
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java

@@ -99,11 +99,11 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
@@ -112,7 +112,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -121,7 +121,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
                 assertThat(bucket.getDocCount(), equalTo(1L));
             }
         );
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -144,7 +144,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
         // The one rare term
         d.add(0L);
 
-        testSearchAndReduceCase(query, d,
+        testSearchCase(query, d,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -153,7 +153,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
                 assertThat(bucket.getDocCount(), equalTo(1L));
             }
         );
-        testSearchAndReduceCase(query, d,
+        testSearchCase(query, d,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -167,7 +167,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     public void testIncludeExclude() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD)
                 .maxDocCount(2) // bump to 2 since we're only including "2"
                 .includeExclude(new IncludeExclude(new long[]{2}, new long[]{})),
@@ -178,7 +178,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
                 assertThat(bucket.getDocCount(), equalTo(2L));
             }
         );
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD)
                 .maxDocCount(2) // bump to 2 since we're only including "2"
                 .includeExclude(new IncludeExclude(new String[]{"2"}, new String[]{})),
@@ -194,7 +194,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     public void testEmbeddedMaxAgg() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD);
                 aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(max);
             },
@@ -210,7 +210,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
                 assertThat(((Max)(children.asList().get(0))).getValue(), equalTo(1.0));
             }
         );
-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD);
                 aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(max);
             },
@@ -240,11 +240,11 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
             agg -> assertEquals(0, agg.getBuckets().size())
         );
 
-        testSearchAndReduceCase(query, Collections.emptyList(),
+        testSearchCase(query, Collections.emptyList(),
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
-        testSearchAndReduceCase(query, Collections.emptyList(),
+        testSearchCase(query, Collections.emptyList(),
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
@@ -312,7 +312,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     public void testNestedTerms() throws IOException {
         Query query = new MatchAllDocsQuery();
 
-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms")
                     .field(KEYWORD_FIELD);
                 aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(terms);
@@ -331,7 +331,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
             }
         );
 
-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms")
                     .field(KEYWORD_FIELD);
                 aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(terms);
@@ -352,22 +352,20 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     }
 
     public void testInsideTerms() throws IOException {
-        for (boolean reduce : new boolean[] {false, true}) {
-            for (String field : new String[] {KEYWORD_FIELD, LONG_FIELD}) {
-                AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd").subAggregation(
-                        new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2));
-                StringTerms terms = (StringTerms) executeTestCase(reduce, new MatchAllDocsQuery(), dataset, builder);
-
-                StringTerms.Bucket even = terms.getBucketByKey("even");
-                InternalRareTerms<?, ?> evenRare = even.getAggregations().get("rare");
-                assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("2"));
-                assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(2L));
-
-                StringTerms.Bucket odd = terms.getBucketByKey("odd");
-                InternalRareTerms<?, ?> oddRare = odd.getAggregations().get("rare");
-                assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("1"));
-                assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(1L));
-            }
+        for (String field : new String[] {KEYWORD_FIELD, LONG_FIELD}) {
+            AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd").subAggregation(
+                new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2));
+            StringTerms terms = executeTestCase(new MatchAllDocsQuery(), dataset, builder);
+
+            StringTerms.Bucket even = terms.getBucketByKey("even");
+            InternalRareTerms<?, ?> evenRare = even.getAggregations().get("rare");
+            assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("2"));
+            assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(2L));
+
+            StringTerms.Bucket odd = terms.getBucketByKey("odd");
+            InternalRareTerms<?, ?> oddRare = odd.getAggregations().get("rare");
+            assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("1"));
+            assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(1L));
         }
     }
 
@@ -530,34 +528,6 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
         return documents;
     }
 
-    private void testSearchCase(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify);
-    }
-
-    private void testSearchAndReduceCase(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify);
-    }
-
-    private void testBothCases(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        testSearchCase(query, dataset, configure, verify);
-        testSearchAndReduceCase(query, dataset, configure, verify);
-    }
-
     @Override
     protected IndexSettings createIndexSettings() {
         Settings nodeSettings = Settings.builder()
@@ -572,38 +542,27 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
         );
     }
 
-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<RareTermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedRareTerms<?, ?>> verify) throws IOException {
+    private void testSearchCase(Query query,
+                                List<Long> dataset,
+                                Consumer<RareTermsAggregationBuilder> configure,
+                                Consumer<InternalMappedRareTerms<?, ?>> verify) throws IOException {
         RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name");
         if (configure != null) {
             configure.accept(aggregationBuilder);
         }
-        InternalMappedRareTerms<?, ?> result = (InternalMappedRareTerms<?, ?>) executeTestCase(
-            reduced,
-            query,
-            dataset,
-            aggregationBuilder
-        );
-        verify.accept(result);
+        verify.accept(executeTestCase(query, dataset, aggregationBuilder));
+
     }
 
-    private InternalAggregation executeTestCase(
-        boolean reduced,
-        Query query,
-        List<Long> dataset,
-        AggregationBuilder aggregationBuilder
-    ) throws IOException {
+    private <A extends InternalAggregation> A executeTestCase(Query query,
+                                                              List<Long> dataset,
+                                                              AggregationBuilder aggregationBuilder) throws IOException {
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 List<Long> shuffledDataset = new ArrayList<>(dataset);
                 Collections.shuffle(shuffledDataset, random());
                 for (Long value : shuffledDataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedNumericDocValuesField(LONG_FIELD, value));
                     document.add(new LongPoint(LONG_FIELD, value));
                     document.add(new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef(Long.toString(value))));
@@ -620,10 +579,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
                     keywordField(KEYWORD_FIELD),
                     longField(LONG_FIELD),
                     keywordField("even_odd")};
-                if (reduced) {
-                    return searchAndReduce(indexSearcher, query, aggregationBuilder, types);
-                }
-                return search(indexSearcher, query, aggregationBuilder, types);
+                return searchAndReduce(indexSearcher, query, aggregationBuilder, types);
             }
         }
     }
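
executeTestCase in RareTermsAggregatorTests now declares a generic return type bounded by InternalAggregation, so each call site drives type inference through its own declared type instead of casting the result. A hypothetical illustration (the builder is composed from constructors already used in this file; the assertion is made up for the example):

    // <A> is inferred as StringTerms from the declaration on the left, so no cast is needed.
    AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd")
        .subAggregation(new RareTermsAggregationBuilder("rare").field(KEYWORD_FIELD).maxDocCount(2));
    StringTerms terms = executeTestCase(new MatchAllDocsQuery(), dataset, builder);
    assertNotNull(terms.getBucketByKey("even"));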

+ 16 - 13
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java

@@ -115,7 +115,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
 
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);
 
             SignificantTermsAggregationBuilder sigAgg = new SignificantTermsAggregationBuilder("sig_text").field("text");
             sigAgg.executionHint(randomExecutionHint());
@@ -259,7 +259,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
         indexWriterConfig.setMaxBufferedDocs(100);
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);
 
             // Attempt aggregation on unmapped field
             SignificantTermsAggregationBuilder sigAgg = new SignificantTermsAggregationBuilder("sig_text").field("unmapped_field");
@@ -328,7 +328,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
 
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);
 
             SignificantTermsAggregationBuilder agg = significantTerms("sig_text").field("text");
             SignificantTermsAggregationBuilder aliasAgg = significantTerms("sig_text").field("text-alias");
@@ -389,7 +389,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                     IndexSearcher searcher = newIndexSearcher(reader);
                     SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f")
                         .executionHint(executionHint);
-                    SignificantStringTerms result = search(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
+                    SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
                     assertThat(result.getSubsetSize(), equalTo(1L));
                 }
             }
@@ -409,7 +409,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                 try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
                     IndexSearcher searcher = newIndexSearcher(reader);
                     SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f");
-                    SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request, longField("f"));
+                    SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, longField("f"));
                     assertThat(result.getSubsetSize(), equalTo(1L));
                 }
             }
@@ -441,7 +441,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                     IndexSearcher searcher = newIndexSearcher(reader);
                     SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f")
                         .executionHint(executionHint);
-                    SignificantStringTerms result = search(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
+                    SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
                     assertThat(result.getSubsetSize(), equalTo(2L));
                 }
             }
@@ -463,7 +463,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                 try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
                     IndexSearcher searcher = newIndexSearcher(reader);
                     SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f");
-                    SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request, longField("f"));
+                    SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, longField("f"));
                     assertThat(result.getSubsetSize(), equalTo(2L));
                 }
             }
@@ -495,14 +495,17 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                 try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
                     IndexSearcher searcher = newIndexSearcher(reader);
                     SignificantTermsAggregationBuilder kRequest = new SignificantTermsAggregationBuilder("k").field("k")
+                        .minDocCount(0)
                         .executionHint(executionHint);
                     SignificantTermsAggregationBuilder jRequest = new SignificantTermsAggregationBuilder("j").field("j")
+                        .minDocCount(0)
                         .executionHint(executionHint)
                         .subAggregation(kRequest);
                     SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i")
+                        .minDocCount(0)
                         .executionHint(executionHint)
                         .subAggregation(jRequest);
-                    SignificantStringTerms result = search(
+                    SignificantStringTerms result = searchAndReduce(
                         searcher,
                         new MatchAllDocsQuery(),
                         request,
@@ -549,10 +552,10 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
                 }
                 try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
                     IndexSearcher searcher = newIndexSearcher(reader);
-                    SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i")
-                        .subAggregation(new SignificantTermsAggregationBuilder("j").field("j")
-                            .subAggregation(new SignificantTermsAggregationBuilder("k").field("k")));
-                    SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request,
+                    SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i").minDocCount(0)
+                        .subAggregation(new SignificantTermsAggregationBuilder("j").field("j").minDocCount(0)
+                            .subAggregation(new SignificantTermsAggregationBuilder("k").field("k").minDocCount(0)));
+                    SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                         longField("i"), longField("j"), longField("k"));
                     assertThat(result.getSubsetSize(), equalTo(1000L));
                     for (int i = 0; i < 10; i++) {
@@ -576,7 +579,7 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
         }
     }
 
-    private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws IOException {
+    private void addMixedTextDocs(IndexWriter w) throws IOException {
         for (int i = 0; i < 10; i++) {
             Document doc = new Document();
             StringBuilder text = new StringBuilder("common ");
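
A note on the minDocCount(0) additions in SignificantTermsAggregatorTests: significant_terms defaults to min_doc_count: 3, and that threshold is applied when the final reduce runs. With searchAndReduce exercising that reduce, the low-frequency terms produced by these small test datasets would be pruned, which is presumably why the requests opt out of the threshold explicitly. A minimal sketch of the adjusted request shape (searcher, executionHint and keywordField come from the enclosing test and AggregatorTestCase):

    SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i")
        .field("i")
        .minDocCount(0)                // keep buckets that the default threshold of 3 would drop
        .executionHint(executionHint); // executionHint: local variable from the enclosing test
    SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, keywordField("i"));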

+ 4 - 4
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java

@@ -1206,7 +1206,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
                             = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG);
                         try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) {
                             {
-                                InternalNested result = search(newSearcher(indexReader, false, true),
+                                InternalNested result = searchAndReduce(newSearcher(indexReader, false, true),
                                     // match root document only
                                     new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType);
                                 InternalMultiBucketAggregation<?, ?> terms = result.getAggregations().get("terms");
@@ -1216,7 +1216,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
                             {
                                 FilterAggregationBuilder filter = new FilterAggregationBuilder("filter", new MatchAllQueryBuilder())
                                     .subAggregation(nested);
-                                InternalFilter result = search(newSearcher(indexReader, false, true),
+                                InternalFilter result = searchAndReduce(newSearcher(indexReader, false, true),
                                     // match root document only
                                     new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), filter, fieldType);
                                 InternalNested nestedResult = result.getAggregations().get("nested");
@@ -1276,7 +1276,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
                     TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i").executionHint(executionHint)
                         .subAggregation(new TermsAggregationBuilder("j").field("j").executionHint(executionHint)
                             .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint)));
-                    StringTerms result = search(searcher, new MatchAllDocsQuery(), request,
+                    StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                         keywordField("i"), keywordField("j"), keywordField("k"));
                     for (int i = 0; i < 10; i++) {
                         StringTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i));
@@ -1316,7 +1316,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
                     TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i")
                         .subAggregation(new TermsAggregationBuilder("j").field("j")
                             .subAggregation(new TermsAggregationBuilder("k").field("k")));
-                    LongTerms result = search(searcher, new MatchAllDocsQuery(), request,
+                    LongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                         longField("i"), longField("j"), longField("k"));
                     for (int i = 0; i < 10; i++) {
                         LongTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i));

+ 1 - 1
server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java

@@ -266,7 +266,7 @@ public class ExtendedStatsAggregatorTests extends AggregatorTestCase {
                 ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg")
                     .field("field")
                     .sigma(randomDoubleBetween(0, 10, true));
-                InternalExtendedStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+                InternalExtendedStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
                 verify.accept(stats);
             }
         }

+ 5 - 5
server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java

@@ -57,7 +57,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertTrue(Double.isInfinite(bounds.top));
                 assertTrue(Double.isInfinite(bounds.bottom));
                 assertTrue(Double.isInfinite(bounds.posLeft));
@@ -84,7 +84,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertTrue(Double.isInfinite(bounds.top));
                 assertTrue(Double.isInfinite(bounds.bottom));
                 assertTrue(Double.isInfinite(bounds.posLeft));
@@ -117,7 +117,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase {
 
                 try (IndexReader reader = w.getReader()) {
                     IndexSearcher searcher = new IndexSearcher(reader);
-                    InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                    InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                     assertThat(bounds.top, equalTo(lat));
                     assertThat(bounds.bottom, equalTo(lat));
                     assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY));
@@ -144,7 +144,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class,
-                    () -> search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
+                    () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
                 assertThat(exception.getMessage(), startsWith("unsupported symbol"));
             }
         }
@@ -194,7 +194,7 @@ public class GeoBoundsAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
                 assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
                 assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));

+ 5 - 5
server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java

@@ -51,7 +51,7 @@ public class GeoCentroidAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
                 assertFalse(AggregationInspectionHelper.hasValue(result));
             }
@@ -71,11 +71,11 @@ public class GeoCentroidAggregatorTests extends AggregatorTestCase {
                 IndexSearcher searcher = new IndexSearcher(reader);
 
                 MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
 
                 fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-                result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
                 assertFalse(AggregationInspectionHelper.hasValue(result));
             }
@@ -97,7 +97,7 @@ public class GeoCentroidAggregatorTests extends AggregatorTestCase {
                 IndexSearcher searcher = new IndexSearcher(reader);
 
                 MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertEquals(result.centroid(), expectedCentroid);
                 assertTrue(AggregationInspectionHelper.hasValue(result));
             }
@@ -161,7 +161,7 @@ public class GeoCentroidAggregatorTests extends AggregatorTestCase {
                 .field("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
 
             assertEquals("my_agg", result.getName());
             GeoPoint centroid = result.centroid();

+ 2 - 2
server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java

@@ -66,7 +66,7 @@ public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase {
             = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = new MultiReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Percentile rank = ranks.iterator().next();
             assertEquals(Double.NaN, rank.getPercent(), 0d);
             assertEquals(0.5, rank.getValue(), 0d);
@@ -90,7 +90,7 @@ public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase {
                 = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 Iterator<Percentile> rankIterator = ranks.iterator();
                 Percentile rank = rankIterator.next();
                 assertEquals(0.1, rank.getValue(), 0d);

+ 23 - 14
server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java

@@ -138,8 +138,11 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
             return state;
         });
         SCRIPTS.put("reduceScript", params -> {
-            List<Integer> states = (List<Integer>) params.get("states");
-            return states.stream().mapToInt(Integer::intValue).sum();
+            List<?> states = (List<?>) params.get("states");
+            return states.stream()
+                .filter(a -> a instanceof Number)
+                .map(a -> (Number) a)
+                .mapToInt(Number::intValue).sum();
         });
 
         SCRIPTS.put("initScriptScore", params -> {
@@ -262,10 +265,11 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
-                assertEquals(0, ((HashMap<Object, String>) scriptedMetric.aggregation()).size());
+                assertEquals(0, scriptedMetric.aggregation());
             }
         }
     }
@@ -282,7 +286,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT);
                 IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                    () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
                 assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]");
             }
         }
@@ -300,7 +304,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
                 IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                    () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
                 assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]");
             }
         }
@@ -321,7 +325,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
                     .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
                 assertEquals(numDocs, scriptedMetric.aggregation());
@@ -344,11 +349,12 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE)
                     .combineScript(COMBINE_SCRIPT_SCORE).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
                 // all documents have score of 1.0
-                assertEquals((double) numDocs, scriptedMetric.aggregation());
+                assertEquals(numDocs, scriptedMetric.aggregation());
             }
         }
     }
@@ -359,13 +365,16 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                 for (int i = 0; i < 100; i++) {
                     indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
                 }
+                // force a single aggregator
+                indexWriter.forceMerge(1);
             }
 
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
 
                 // The result value depends on the script params.
                 assertEquals(4896, scriptedMetric.aggregation());
@@ -414,7 +423,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
 
                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters",
                     ex.getMessage());
@@ -433,7 +442,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
 
                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs init script)", ex.getMessage());
             }
@@ -454,7 +463,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
 
                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs map script)", ex.getMessage());
             }
@@ -472,7 +481,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
                     .combineScript(COMBINE_SCRIPT_SELF_REF).reduceScript(REDUCE_SCRIPT);
 
                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs combine script)", ex.getMessage());
             }

+ 2 - 2
server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java

@@ -234,7 +234,7 @@ public class StatsAggregatorTests extends AggregatorTestCase {
                  MultiReader multiReader = new MultiReader(mappedReader, unmappedReader)) {
 
                 final IndexSearcher searcher = new IndexSearcher(multiReader);
-                final InternalStats stats = search(searcher, new MatchAllDocsQuery(), builder, ft);
+                final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, ft);
 
                 assertEquals(expected.count, stats.getCount(), 0);
                 assertEquals(expected.sum, stats.getSum(), TOLERANCE);
@@ -433,7 +433,7 @@ public class StatsAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = indexWriter.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]);
-                final InternalStats stats = search(searcher, new MatchAllDocsQuery(), builder, fieldTypesArray);
+                final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldTypesArray);
                 verify.accept(stats);
             }
         }

+ 1 - 1
server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java

@@ -256,7 +256,7 @@ public class SumAggregatorTests extends AggregatorTestCase {
 
                 final IndexSearcher searcher = newSearcher(multiReader, true, true);
 
-                final InternalSum internalSum = search(searcher, new MatchAllDocsQuery(), builder, fieldType);
+                final InternalSum internalSum = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType);
                 assertEquals(sum, internalSum.getValue(), 0d);
                 assertTrue(AggregationInspectionHelper.hasValue(internalSum));
             }

+ 2 - 2
server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java

@@ -65,7 +65,7 @@ public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase {
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = new MultiReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Percentile rank = ranks.iterator().next();
             assertEquals(Double.NaN, rank.getPercent(), 0d);
             assertEquals(0.5, rank.getValue(), 0d);
@@ -88,7 +88,7 @@ public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase {
             MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 Iterator<Percentile> rankIterator = ranks.iterator();
                 Percentile rank = rankIterator.next();
                 assertEquals(0.1, rank.getValue(), 0d);

+ 0 - 20
server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java

@@ -354,9 +354,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -408,10 +405,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
                     if (randomBoolean())
                         valueCounts_empty_rnd[i] = 0L;
                     for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) {
-
-                        if (frequently()) {
-                            indexWriter.commit();
-                        }
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
                         document.clear();
@@ -458,9 +451,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -514,9 +504,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -634,10 +621,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
                     if (randomBoolean())
                         valueCounts_empty_rnd[i] = 0L;
                     for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) {
-
-                        if (frequently()) {
-                            indexWriter.commit();
-                        }
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
                         document.clear();
@@ -734,9 +717,6 @@ public class DerivativeAggregatorTests extends AggregatorTestCase {
         executeTestCase(query, aggBuilder, verify, indexWriter -> {
             Document document = new Document();
             for (int i = 0; i < numValueBuckets; i++) {
-                if (frequently()) {
-                    indexWriter.commit();
-                }
                 for (int docs = 0; docs < valueCounts[i]; docs++) {
                     document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i * interval));
                     indexWriter.addDocument(document);

+ 0 - 4
server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java

@@ -131,10 +131,6 @@ public class MovFnAggrgatorTests extends AggregatorTestCase {
                 Document document = new Document();
                 int counter = 0;
                 for (String date : datasetTimes) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     long instant = asLong(date);
                     document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
                     document.add(new LongPoint(INSTANT_FIELD, instant));

+ 54 - 97
test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java

@@ -140,6 +140,7 @@ import java.util.stream.Collectors;
 import static java.util.Collections.singletonList;
 import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
 import static org.mockito.Matchers.anyObject;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
@@ -394,46 +395,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
         return null;
     }
 
-    protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSearcher searcher,
-                                                                             Query query,
-                                                                             AggregationBuilder builder,
-                                                                             MappedFieldType... fieldTypes) throws IOException {
-        return search(createIndexSettings(), searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes);
-    }
-
-    protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSettings indexSettings,
-                                                                             IndexSearcher searcher,
-                                                                             Query query,
-                                                                             AggregationBuilder builder,
-                                                                             MappedFieldType... fieldTypes) throws IOException {
-        return search(indexSettings, searcher, query, builder, DEFAULT_MAX_BUCKETS, fieldTypes);
-    }
-
-    protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSearcher searcher,
-                                                                             Query query,
-                                                                             AggregationBuilder builder,
-                                                                             int maxBucket,
-                                                                             MappedFieldType... fieldTypes) throws IOException {
-        return search(createIndexSettings(), searcher, query, builder, maxBucket, fieldTypes);
-    }
-
-    protected <A extends InternalAggregation, C extends Aggregator> A search(IndexSettings indexSettings,
-                                                                             IndexSearcher searcher,
-                                                                             Query query,
-                                                                             AggregationBuilder builder,
-                                                                             int maxBucket,
-                                                                             MappedFieldType... fieldTypes) throws IOException {
-        MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket,
-            new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
-        C a = createAggregator(query, builder, searcher, indexSettings, bucketConsumer, fieldTypes);
-        a.preCollection();
-        searcher.search(query, a);
-        a.postCollection();
-        @SuppressWarnings("unchecked")
-        A result = (A) a.buildTopLevel();
-        return result;
-    }
-
     protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher,
                                                                                       Query query,
                                                                                       AggregationBuilder builder,
@@ -469,74 +430,70 @@ public abstract class AggregatorTestCase extends ESTestCase {
                                                                                       int maxBucket,
                                                                                       MappedFieldType... fieldTypes) throws IOException {
         final IndexReaderContext ctx = searcher.getTopReaderContext();
-
-        final ShardSearcher[] subSearchers;
-        if (ctx instanceof LeafReaderContext) {
-            subSearchers = new ShardSearcher[1];
-            subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx);
-        } else {
-            final CompositeReaderContext compCTX = (CompositeReaderContext) ctx;
-            final int size = compCTX.leaves().size();
-            subSearchers = new ShardSearcher[size];
-            for(int searcherIDX=0;searcherIDX<subSearchers.length;searcherIDX++) {
-                final LeafReaderContext leave = compCTX.leaves().get(searcherIDX);
-                subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
-            }
-        }
-
-        PipelineTree pipelines = builder.buildPipelineTree();
+        final PipelineTree pipelines = builder.buildPipelineTree();
         List<InternalAggregation> aggs = new ArrayList<>();
         Query rewritten = searcher.rewrite(query);
-        Weight weight = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f);
         MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket,
             new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
         C root = createAggregator(query, builder, searcher, bucketConsumer, fieldTypes);
 
-        for (ShardSearcher subSearcher : subSearchers) {
-            MultiBucketConsumer shardBucketConsumer = new MultiBucketConsumer(maxBucket,
-                new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
-            C a = createAggregator(query, builder, subSearcher, indexSettings, shardBucketConsumer, fieldTypes);
-            a.preCollection();
-            subSearcher.search(weight, a);
-            a.postCollection();
-            InternalAggregation agg = a.buildTopLevel();
-            aggs.add(agg);
-        }
-        if (aggs.isEmpty()) {
-            return (A) root.buildEmptyAggregation();
-        } else {
-            if (randomBoolean() && aggs.size() > 1) {
-                // sometimes do an incremental reduce
-                int toReduceSize = aggs.size();
-                Collections.shuffle(aggs, random());
-                int r = randomIntBetween(1, toReduceSize);
-                List<InternalAggregation> toReduce = aggs.subList(0, r);
-                InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction(
-                        root.context().bigArrays(), getMockScriptService(), () -> PipelineAggregator.PipelineTree.EMPTY);
-                A reduced = (A) aggs.get(0).reduce(toReduce, context);
-                aggs = new ArrayList<>(aggs.subList(r, toReduceSize));
-                aggs.add(reduced);
+        if (randomBoolean() && searcher.getIndexReader().leaves().size() > 0) {
+            assertThat(ctx, instanceOf(CompositeReaderContext.class));
+            final CompositeReaderContext compCTX = (CompositeReaderContext) ctx;
+            final int size = compCTX.leaves().size();
+            final ShardSearcher[] subSearchers = new ShardSearcher[size];
+            for (int searcherIDX = 0; searcherIDX < subSearchers.length; searcherIDX++) {
+                final LeafReaderContext leave = compCTX.leaves().get(searcherIDX);
+                subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
             }
-            // now do the final reduce
-            MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket,
-                new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
-            InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction(
-                    root.context().bigArrays(), getMockScriptService(), reduceBucketConsumer, pipelines);
+            for (ShardSearcher subSearcher : subSearchers) {
+                MultiBucketConsumer shardBucketConsumer = new MultiBucketConsumer(maxBucket,
+                    new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
+                C a = createAggregator(query, builder, subSearcher, indexSettings, shardBucketConsumer, fieldTypes);
+                a.preCollection();
+                Weight weight = subSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f);
+                subSearcher.search(weight, a);
+                a.postCollection();
+                aggs.add(a.buildTopLevel());
+            }
+        } else {
+            root.preCollection();
+            searcher.search(rewritten, root);
+            root.postCollection();
+            aggs.add(root.buildTopLevel());
+        }
+
+        if (randomBoolean() && aggs.size() > 1) {
+            // sometimes do an incremental reduce
+            int toReduceSize = aggs.size();
+            Collections.shuffle(aggs, random());
+            int r = randomIntBetween(1, toReduceSize);
+            List<InternalAggregation> toReduce = aggs.subList(0, r);
+            InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction(
+                root.context().bigArrays(), getMockScriptService(), () -> PipelineAggregator.PipelineTree.EMPTY);
+            A reduced = (A) aggs.get(0).reduce(toReduce, context);
+            aggs = new ArrayList<>(aggs.subList(r, toReduceSize));
+            aggs.add(reduced);
+        }
+
+        // now do the final reduce
+        MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket,
+            new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST));
+        InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction(
+            root.context().bigArrays(), getMockScriptService(), reduceBucketConsumer, pipelines);
 
-            @SuppressWarnings("unchecked")
-            A internalAgg = (A) aggs.get(0).reduce(aggs, context);
+        @SuppressWarnings("unchecked")
+        A internalAgg = (A) aggs.get(0).reduce(aggs, context);
 
-            // materialize any parent pipelines
-            internalAgg = (A) internalAgg.reducePipelines(internalAgg, context, pipelines);
+        // materialize any parent pipelines
+        internalAgg = (A) internalAgg.reducePipelines(internalAgg, context, pipelines);
 
-            // materialize any sibling pipelines at top level
-            for (PipelineAggregator pipelineAggregator : pipelines.aggregators()) {
-                internalAgg = (A) pipelineAggregator.reduce(internalAgg, context);
-            }
-            doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer);
-            return internalAgg;
+        // materialize any sibling pipelines at top level
+        for (PipelineAggregator pipelineAggregator : pipelines.aggregators()) {
+            internalAgg = (A) pipelineAggregator.reduce(internalAgg, context);
         }
-
+        doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer);
+        return internalAgg;
     }
 
     protected void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) {
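
For orientation, a minimal sketch of how a test exercises the single remaining entry point after this change; the test class, field values and SumAggregationBuilder below are illustrative, not lines from this commit. As the refactored method above shows, searchAndReduce collects per-segment results (or a single top-level result), sometimes performs a partial reduce, and always finishes with the final reduce and pipeline materialization, so assertions run against the same aggregation an end user would receive:

    import static java.util.Collections.singleton;

    import org.apache.lucene.document.SortedNumericDocValuesField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.store.Directory;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.NumberFieldMapper;
    import org.elasticsearch.search.aggregations.AggregatorTestCase;
    import org.elasticsearch.search.aggregations.metrics.InternalSum;
    import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;

    public class ExampleSumAggregatorTests extends AggregatorTestCase {
        public void testSimpleSum() throws Exception {
            try (Directory directory = newDirectory()) {
                try (RandomIndexWriter w = new RandomIndexWriter(random(), directory)) {
                    // no explicit commit() calls: RandomIndexWriter flushes and commits randomly on its own
                    w.addDocument(singleton(new SortedNumericDocValuesField("field", 7)));
                    w.addDocument(singleton(new SortedNumericDocValuesField("field", 3)));
                }
                try (IndexReader reader = DirectoryReader.open(directory)) {
                    IndexSearcher searcher = newSearcher(reader, true, true);
                    MappedFieldType ft =
                        new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.LONG);
                    SumAggregationBuilder builder = new SumAggregationBuilder("my_sum").field("field");
                    // collect, possibly partial-reduce, then final-reduce before asserting
                    InternalSum sum = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, ft);
                    assertEquals(10, sum.getValue(), 0d);
                }
            }
        }
    }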

+ 1 - 1
x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java

@@ -94,7 +94,7 @@ public class TDigestPreAggregatedPercentileRanksAggregatorTests extends Aggregat
             MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", true, Collections.emptyMap());
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 Iterator<Percentile> rankIterator = ranks.iterator();
                 Percentile rank = rankIterator.next();
                 assertEquals(0.1, rank.getValue(), 0d);

+ 1 - 1
x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java

@@ -531,7 +531,7 @@ public class TopMetricsAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
-                return search(indexSearcher, query, builder, fields);
+                return searchAndReduce(indexSearcher, query, builder, fields);
             }
         }
     }

+ 1 - 1
x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java

@@ -654,7 +654,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
 
             CompositeAggregation result = null;
             try {
-                result = search(searcher, query, aggBuilder, fieldTypes);
+                result = searchAndReduce(searcher, query, aggBuilder, fieldTypes);
             } catch (IOException e) {
                 listener.onFailure(e);
             }

+ 7 - 5
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java

@@ -65,7 +65,7 @@ public class GeoShapeCentroidAggregatorTests extends AggregatorTestCase {
                 = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap());
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
                 assertFalse(AggregationInspectionHelper.hasValue(result));
             }
@@ -86,12 +86,12 @@ public class GeoShapeCentroidAggregatorTests extends AggregatorTestCase {
 
                 MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("another_field",
                     true, true, Collections.emptyMap());
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
 
                 fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field",
                     true, true, Collections.emptyMap());
-                result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertNull(result.centroid());
                 assertFalse(AggregationInspectionHelper.hasValue(result));
             }
@@ -116,7 +116,7 @@ public class GeoShapeCentroidAggregatorTests extends AggregatorTestCase {
 
                 MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("another_field",
                     true, true, Collections.emptyMap());
-                InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertThat(result.centroid(), equalTo(expectedCentroid));
                 assertTrue(AggregationInspectionHelper.hasValue(result));
             }
@@ -166,6 +166,8 @@ public class GeoShapeCentroidAggregatorTests extends AggregatorTestCase {
                     compensatedSumWeight.add(weight);
                 }
             }
+            // force using a single aggregator to compute the centroid
+            w.forceMerge(1);
             GeoPoint expectedCentroid = new GeoPoint(compensatedSumLat.value() / compensatedSumWeight.value(),
                 compensatedSumLon.value() / compensatedSumWeight.value());
             assertCentroid(w, expectedCentroid);
@@ -179,7 +181,7 @@ public class GeoShapeCentroidAggregatorTests extends AggregatorTestCase {
             .field("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
 
             assertEquals("my_agg", result.getName());
             GeoPoint centroid = result.centroid();

+ 5 - 5
x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java

@@ -61,7 +61,7 @@ public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
                 = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap());
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertTrue(Double.isInfinite(bounds.top));
                 assertTrue(Double.isInfinite(bounds.bottom));
                 assertTrue(Double.isInfinite(bounds.posLeft));
@@ -89,7 +89,7 @@ public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
                 = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap());
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertTrue(Double.isInfinite(bounds.top));
                 assertTrue(Double.isInfinite(bounds.bottom));
                 assertTrue(Double.isInfinite(bounds.posLeft));
@@ -122,7 +122,7 @@ public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
 
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertThat(bounds.top, equalTo(lat));
                 assertThat(bounds.bottom, equalTo(lat));
                 assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY));
@@ -149,7 +149,7 @@ public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-                    () -> search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
+                    () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
                 assertThat(exception.getMessage(), startsWith("Unknown geometry type"));
             }
         }
@@ -204,7 +204,7 @@ public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
                 = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap());
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                 assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
                 assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
                 assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));