
Add supported type tests for Percentiles/Ranks (#52597)

Also makes some tweaks to the test itself:

- Makes the type test final so subclasses can't accidentally override it
- Adds specific example docs for the floating-point types, since doc values are
sortable-encoded and some aggs (HDR histo) cannot accept negative numbers, so
blindly using a random long can decode to a negative value (see the sketch
after this list)
- Some reformatting to make the test more readable
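
To illustrate the floating-point point above, here is a minimal, hypothetical sketch (not part of the commit) of the encoding issue: an arbitrary long doc value, read back through Lucene's sortable encoding, can decode to a negative double, which the HDR-based aggs cannot accept, while encoding a known non-negative double avoids the problem.

    import org.apache.lucene.util.NumericUtils;

    public class SortableEncodingSketch {
        public static void main(String[] args) {
            // A doc value written as an arbitrary long, e.g. what randomLong() might return...
            long rawDocValue = -42L;

            // ...decodes to a negative double when read back through the sortable encoding.
            double decoded = NumericUtils.sortableLongToDouble(rawDocValue);
            System.out.println(decoded); // negative, so an HDR histogram cannot record it

            // Encoding a known non-negative double keeps the decoded value non-negative.
            long safeDocValue = NumericUtils.doubleToSortableLong(Math.abs(0.25d));
            System.out.println(NumericUtils.sortableLongToDouble(safeDocValue)); // 0.25
        }
    }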
Zachary Tong 5 years ago
parent
commit
437273f03c

+ 16 - 4
server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java

@@ -30,20 +30,32 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.Percentile;
-import org.elasticsearch.search.aggregations.metrics.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.List;
 
 
 public class HDRPercentileRanksAggregatorTests extends AggregatorTestCase {
 
+    @Override
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        return new PercentileRanksAggregationBuilder("hdr_ranks", new double[]{0.1, 0.5, 12})
+            .field(fieldName)
+            .percentilesConfig(new PercentilesConfig.Hdr());
+    }
+
+    @Override
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        return List.of(CoreValuesSourceType.NUMERIC);
+    }
+
     public void testEmpty() throws IOException {
         PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5})
                 .field("field")

+ 16 - 0
server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java

@@ -33,10 +33,14 @@ import org.apache.lucene.store.Directory;
 import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.function.Consumer;
 
 import static java.util.Arrays.asList;
@@ -46,6 +50,18 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class HDRPercentilesAggregatorTests extends AggregatorTestCase {
 
+    @Override
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        return new PercentilesAggregationBuilder("hdr_percentiles")
+            .field(fieldName)
+            .percentilesConfig(new PercentilesConfig.Hdr());
+    }
+
+    @Override
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        return List.of(CoreValuesSourceType.NUMERIC);
+    }
+
     public void testNoDocs() throws IOException {
         testCase(new MatchAllDocsQuery(), iw -> {
             // Intentionally not writing any docs

+ 16 - 4
server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java

@@ -30,20 +30,32 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.Percentile;
-import org.elasticsearch.search.aggregations.metrics.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.List;
 
 
 public class TDigestPercentileRanksAggregatorTests extends AggregatorTestCase {
 
+    @Override
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        return new PercentileRanksAggregationBuilder("tdigest_ranks", new double[]{0.1, 0.5, 12})
+            .field(fieldName)
+            .percentilesConfig(new PercentilesConfig.TDigest());
+    }
+
+    @Override
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        return List.of(CoreValuesSourceType.NUMERIC);
+    }
+
     public void testEmpty() throws IOException {
         PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.5})
                 .field("field")

+ 16 - 0
server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java

@@ -33,10 +33,14 @@ import org.apache.lucene.store.Directory;
 import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.function.Consumer;
 
 import static java.util.Arrays.asList;
@@ -46,6 +50,18 @@ import static org.hamcrest.Matchers.equalTo;
 
 public class TDigestPercentilesAggregatorTests extends AggregatorTestCase {
 
+    @Override
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        return new PercentilesAggregationBuilder("tdist_percentiles")
+            .field(fieldName)
+            .percentilesConfig(new PercentilesConfig.TDigest());
+    }
+
+    @Override
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        return List.of(CoreValuesSourceType.NUMERIC);
+    }
+
     public void testNoDocs() throws IOException {
         testCase(new MatchAllDocsQuery(), iw -> {
             // Intentionally not writing any docs

+ 33 - 19
test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java

@@ -19,6 +19,7 @@
 package org.elasticsearch.search.aggregations;
 
 import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.HalfFloatPoint;
 import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.document.LatLonDocValuesField;
 import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -41,6 +42,7 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.breaker.CircuitBreaker;
@@ -74,6 +76,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.ObjectMapper.Nested;
 import org.elasticsearch.index.mapper.RangeFieldMapper;
@@ -605,7 +608,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
      *
      * Exception types/messages are not currently checked, just presence/absence of an exception.
      */
-    public void testSupportedFieldTypes() throws IOException {
+    public final void testSupportedFieldTypes() throws IOException {
         MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
         Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
         String fieldName = "typeTestFieldName";
@@ -675,67 +678,78 @@ public abstract class AggregatorTestCase extends ESTestCase {
      */
     private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException {
 
-        if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) {
+        String typeName = fieldType.typeName();
+        ValuesSourceType vst = fieldType.getValuesSourceType();
+        
+        if (vst.equals(CoreValuesSourceType.NUMERIC)) {
             // TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away
-            if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
-                || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
+            if (typeName.equals(DateFieldMapper.CONTENT_TYPE) || typeName.equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
                 iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
-            } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
+            } else if (typeName.equals(BooleanFieldMapper.CONTENT_TYPE)) {
                 iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1)));
+            } else if (typeName.equals(NumberFieldMapper.NumberType.DOUBLE.typeName())) {
+                long encoded = NumericUtils.doubleToSortableLong(Math.abs(randomDouble()));
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
+            } else if (typeName.equals(NumberFieldMapper.NumberType.FLOAT.typeName())) {
+                long encoded = NumericUtils.floatToSortableInt(Math.abs(randomFloat()));
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
+            } else if (typeName.equals(NumberFieldMapper.NumberType.HALF_FLOAT.typeName())) {
+                long encoded = HalfFloatPoint.halfFloatToSortableShort(Math.abs(randomFloat()));
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, encoded)));
             } else {
-                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong())));
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
             }
-        } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) {
-            if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) {
+        } else if (vst.equals(CoreValuesSourceType.BYTES)) {
+            if (typeName.equals(BinaryFieldMapper.CONTENT_TYPE)) {
                 iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes)));
-            } else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) {
+            } else if (typeName.equals(IpFieldMapper.CONTENT_TYPE)) {
                 // TODO note: once VS refactor adds IP, this conditional will go away
                 boolean v4 = randomBoolean();
                 iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4))))));
             } else {
                 iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a"))));
             }
-        } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) {
+        } else if (vst.equals(CoreValuesSourceType.RANGE)) {
             Object start;
             Object end;
             RangeType rangeType;
 
-            if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) {
+            if (typeName.equals(RangeType.DOUBLE.typeName())) {
                 start = randomDouble();
                 end = RangeType.DOUBLE.nextUp(start);
                 rangeType = RangeType.DOUBLE;
-            } else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) {
+            } else if (typeName.equals(RangeType.FLOAT.typeName())) {
                 start = randomFloat();
                 end = RangeType.FLOAT.nextUp(start);
                 rangeType = RangeType.FLOAT;
-            } else if (fieldType.typeName().equals(RangeType.IP.typeName())) {
+            } else if (typeName.equals(RangeType.IP.typeName())) {
                 boolean v4 = randomBoolean();
                 start = randomIp(v4);
                 end = RangeType.IP.nextUp(start);
                 rangeType = RangeType.IP;
-            } else if (fieldType.typeName().equals(RangeType.LONG.typeName())) {
+            } else if (typeName.equals(RangeType.LONG.typeName())) {
                 start = randomLong();
                 end = RangeType.LONG.nextUp(start);
                 rangeType = RangeType.LONG;
-            } else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) {
+            } else if (typeName.equals(RangeType.INTEGER.typeName())) {
                 start = randomInt();
                 end = RangeType.INTEGER.nextUp(start);
                 rangeType = RangeType.INTEGER;
-            } else if (fieldType.typeName().equals(RangeType.DATE.typeName())) {
+            } else if (typeName.equals(RangeType.DATE.typeName())) {
                 start = randomNonNegativeLong();
                 end = RangeType.DATE.nextUp(start);
                 rangeType = RangeType.DATE;
             } else {
-                throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]");
+                throw new IllegalStateException("Unknown type of range [" + typeName + "]");
             }
 
             final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true);
             iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range)))));
 
-        }  else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) {
+        }  else if (vst.equals(CoreValuesSourceType.GEOPOINT)) {
             iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble())));
         } else {
-            throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]");
+            throw new IllegalStateException("Unknown field type [" + typeName + "]");
         }
     }
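
For context, a subclass outside this commit would plug into the now-final type test through the same two hooks. A minimal, hypothetical sketch (the class name is invented; SumAggregationBuilder is the existing sum agg builder), assuming the framework drives it exactly as shown in the diff above:

    import java.util.List;

    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.search.aggregations.AggregationBuilder;
    import org.elasticsearch.search.aggregations.AggregatorTestCase;
    import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
    import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
    import org.elasticsearch.search.aggregations.support.ValuesSourceType;

    public class SumAggregatorTypeTests extends AggregatorTestCase {

        // The builder that testSupportedFieldTypes() runs against every registered field type.
        @Override
        protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
            return new SumAggregationBuilder("sum").field(fieldName);
        }

        // Field types whose ValuesSourceType is not listed here are expected to throw.
        @Override
        protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
            return List.of(CoreValuesSourceType.NUMERIC);
        }
    }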