
Add back support for `ip` range aggregations. #17859

This commit adds support for range aggregations on `ip` fields. However, it will only work on 5.x indices.

Closes #17700
Adrien Grand 9 years ago
parent commit 638da06c1d
22 changed files with 1602 additions and 1372 deletions
  1. + 1 - 2    core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java
  2. + 5 - 5    core/src/main/java/org/elasticsearch/search/SearchModule.java
  3. + 4 - 4    core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java
  4. + 234 - 0  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java
  5. + 70 - 0   core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java
  6. + 311 - 0  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java
  7. + 330 - 0  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java
  8. + 126 - 0  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java
  9. + 0 - 277  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorBuilder.java
  10. + 0 - 164 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java
  11. + 0 - 64  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java
  12. + 0 - 44  core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/Ipv4RangeAggregatorFactory.java
  13. + 1 - 6   core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java
  14. + 2 - 1   core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java
  15. + 0 - 71  core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java
  16. + 268 - 0 core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
  17. + 90 - 0  core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java
  18. + 142 - 0 core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java
  19. + 2 - 2   docs/java-api/aggregations/bucket/iprange-aggregation.asciidoc
  20. + 9 - 15  docs/reference/aggregations/bucket/iprange-aggregation.asciidoc
  21. + 7 - 0   docs/reference/migration/migrate_5_0/aggregations.asciidoc
  22. + 0 - 717 modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java
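
Before the per-file diffs, here is a minimal usage sketch of the builder this change introduces. It assumes the chainable `field(...)` setter inherited from the values-source builder base class; the aggregation name and the `ip_addr` field are made up for illustration.

```java
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;

public class IpRangeUsageSketch {
    public static IpRangeAggregatorBuilder example() {
        // "ip_addr" is a hypothetical `ip` field used purely for illustration.
        return AggregationBuilders.ipRange("ip_ranges")
                .field("ip_addr")
                .addUnboundedTo("10.0.0.5")               // *-10.0.0.5, upper bound exclusive
                .addRange("mid", "10.0.0.5", "10.0.0.10") // keyed range: from inclusive, to exclusive
                .addMaskRange("10.0.0.0/25")              // CIDR notation, the mask doubles as the key
                .keyed(true);
    }
}
```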

+ 1 - 2
core/src/main/java/org/elasticsearch/index/mapper/ip/LegacyIpFieldMapper.java

@@ -50,7 +50,6 @@ import org.elasticsearch.index.mapper.core.LegacyLongFieldMapper.CustomLongNumer
 import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.InternalIPv4Range;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
@@ -218,7 +217,7 @@ public class LegacyIpFieldMapper extends LegacyNumberFieldMapper {
                 }
                 if (fromTo != null) {
                     return rangeQuery(fromTo[0] == 0 ? null : fromTo[0],
-                            fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1], true, false);
+                            fromTo[1] == MAX_IP ? null : fromTo[1], true, false);
                 }
             }
             return super.termQuery(value, context);

+ 5 - 5
core/src/main/java/org/elasticsearch/search/SearchModule.java

@@ -131,9 +131,9 @@ import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.InternalIPv4Range;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IpRangeParser;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser;
 import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
@@ -507,7 +507,7 @@ public class SearchModule extends AbstractModule {
                 SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
         registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
         registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
-        registerAggregation(IPv4RangeAggregatorBuilder::new, new IpRangeParser(), IPv4RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
+        registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
         registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
         registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(),
                 DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
@@ -736,7 +736,7 @@ public class SearchModule extends AbstractModule {
         UnmappedTerms.registerStreams();
         InternalRange.registerStream();
         InternalDateRange.registerStream();
-        InternalIPv4Range.registerStream();
+        InternalBinaryRange.registerStream();
         InternalHistogram.registerStream();
         InternalGeoDistance.registerStream();
         InternalNested.registerStream();

+ 4 - 4
core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java

@@ -45,7 +45,7 @@ import org.elasticsearch.search.aggregations.bucket.range.Range;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
 import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
@@ -261,11 +261,11 @@ public class AggregationBuilders {
     }
 
     /**
-     * Create a new {@link IPv4RangeAggregatorBuilder} aggregation with the
+     * Create a new {@link IpRangeAggregatorBuilder} aggregation with the
      * given name.
      */
-    public static IPv4RangeAggregatorBuilder ipRange(String name) {
-        return new IPv4RangeAggregatorBuilder(name);
+    public static IpRangeAggregatorBuilder ipRange(String name) {
+        return new IpRangeAggregatorBuilder(name);
     }
 
     /**

+ 234 - 0
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java

@@ -0,0 +1,234 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
+import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.support.AggregationContext;
+import org.elasticsearch.search.aggregations.support.ValuesSource;
+
+/** A range aggregator for values that are stored in SORTED_SET doc values. */
+public final class BinaryRangeAggregator extends BucketsAggregator {
+
+    public static class Range {
+
+        final String key;
+        final BytesRef from, to;
+
+        public Range(String key, BytesRef from, BytesRef to) {
+            this.key = key;
+            this.from = from;
+            this.to = to;
+        }
+    }
+
+    static final Comparator<Range> RANGE_COMPARATOR = (a, b) -> {
+        int cmp = compare(a.from, b.from, 1);
+        if (cmp == 0) {
+            cmp = compare(a.to, b.to, -1);
+        }
+        return cmp;
+    };
+
+    private static int compare(BytesRef a, BytesRef b, int m) {
+        return a == null
+                ? b == null ? 0 : -m
+                : b == null ? m : a.compareTo(b);
+    }
+
+    final ValuesSource.Bytes valuesSource;
+    final DocValueFormat format;
+    final boolean keyed;
+    final Range[] ranges;
+
+    public BinaryRangeAggregator(String name, AggregatorFactories factories,
+            ValuesSource.Bytes valuesSource, DocValueFormat format,
+            List<Range> ranges, boolean keyed, AggregationContext aggregationContext,
+            Aggregator parent, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) throws IOException {
+        super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
+        this.valuesSource = valuesSource;
+        this.format = format;
+        this.keyed = keyed;
+        this.ranges = ranges.toArray(new Range[0]);
+        Arrays.sort(this.ranges, RANGE_COMPARATOR);
+
+    }
+
+    @Override
+    public boolean needsScores() {
+        return (valuesSource != null && valuesSource.needsScores()) || super.needsScores();
+    }
+
+    @Override
+    protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
+        if (valuesSource == null) {
+            return LeafBucketCollector.NO_OP_COLLECTOR;
+        }
+        if (valuesSource instanceof ValuesSource.Bytes.WithOrdinals) {
+            SortedSetDocValues values = ((ValuesSource.Bytes.WithOrdinals) valuesSource).ordinalsValues(ctx);
+            return new SortedSetRangeLeafCollector(values, ranges, sub) {
+                @Override
+                protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws IOException {
+                    collectBucket(sub, doc, bucket);
+                }
+            };
+        }
+        throw new IllegalArgumentException("binary range aggregation expects a values source that supports ordinals");
+    }
+
+    static abstract class SortedSetRangeLeafCollector extends LeafBucketCollectorBase {
+
+        final long[] froms, tos, maxTos;
+        final SortedSetDocValues values;
+        final LeafBucketCollector sub;
+
+        SortedSetRangeLeafCollector(SortedSetDocValues values,
+                Range[] ranges, LeafBucketCollector sub) {
+            super(sub, values);
+            for (int i = 1; i < ranges.length; ++i) {
+                if (RANGE_COMPARATOR.compare(ranges[i-1], ranges[i]) > 0) {
+                    throw new IllegalArgumentException("Ranges must be sorted");
+                }
+            }
+            this.values = values;
+            this.sub = sub;
+            froms = new long[ranges.length];
+            tos = new long[ranges.length]; // inclusive
+            maxTos = new long[ranges.length];
+            for (int i = 0; i < ranges.length; ++i) {
+                if (ranges[i].from == null) {
+                    froms[i] = 0;
+                } else {
+                    froms[i] = values.lookupTerm(ranges[i].from);
+                    if (froms[i] < 0) {
+                        froms[i] = -1 - froms[i];
+                    }
+                }
+                if (ranges[i].to == null) {
+                    tos[i] = values.getValueCount() - 1;
+                } else {
+                    long ord = values.lookupTerm(ranges[i].to);
+                    if (ord < 0) {
+                        tos[i] = -2 - ord;
+                    } else {
+                        tos[i] = ord - 1;
+                    }
+                }
+            }
+            maxTos[0] = tos[0];
+            for (int i = 1; i < tos.length; ++i) {
+                maxTos[i] = Math.max(maxTos[i-1], tos[i]);
+            }
+        }
+
+        @Override
+        public void collect(int doc, long bucket) throws IOException {
+            values.setDocument(doc);
+            int lo = 0;
+            for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
+                lo = collect(doc, ord, bucket, lo);
+            }
+        }
+
+        private int collect(int doc, long ord, long bucket, int lowBound) throws IOException {
+            int lo = lowBound, hi = froms.length - 1; // all candidates are between these indexes
+            int mid = (lo + hi) >>> 1;
+            while (lo <= hi) {
+                if (ord < froms[mid]) {
+                    hi = mid - 1;
+                } else if (ord > maxTos[mid]) {
+                    lo = mid + 1;
+                } else {
+                    break;
+                }
+                mid = (lo + hi) >>> 1;
+            }
+            if (lo > hi) return lo; // no potential candidate
+
+            // binary search the lower bound
+            int startLo = lo, startHi = mid;
+            while (startLo <= startHi) {
+                final int startMid = (startLo + startHi) >>> 1;
+                if (ord > maxTos[startMid]) {
+                    startLo = startMid + 1;
+                } else {
+                    startHi = startMid - 1;
+                }
+            }
+
+            // binary search the upper bound
+            int endLo = mid, endHi = hi;
+            while (endLo <= endHi) {
+                final int endMid = (endLo + endHi) >>> 1;
+                if (ord < froms[endMid]) {
+                    endHi = endMid - 1;
+                } else {
+                    endLo = endMid + 1;
+                }
+            }
+
+            assert startLo == lowBound || ord > maxTos[startLo - 1];
+            assert endHi == froms.length - 1 || ord < froms[endHi + 1];
+
+            for (int i = startLo; i <= endHi; ++i) {
+                if (ord <= tos[i]) {
+                    doCollect(sub, doc, bucket * froms.length + i);
+                }
+            }
+
+            return endHi + 1;
+        }
+
+        protected abstract void doCollect(LeafBucketCollector sub, int doc, long bucket) throws IOException;
+    }
+
+    @Override
+    public InternalAggregation buildAggregation(long bucket) throws IOException {
+        InternalBinaryRange.Bucket[] buckets = new InternalBinaryRange.Bucket[ranges.length];
+        for (int i = 0; i < buckets.length; ++i) {
+            long bucketOrd = bucket * ranges.length + i;
+            buckets[i] = new InternalBinaryRange.Bucket(format, keyed,
+                    ranges[i].key, ranges[i].from, ranges[i].to,
+                    bucketDocCount(bucketOrd), bucketAggregations(bucketOrd));
+        }
+        return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators(), metaData());
+    }
+
+    @Override
+    public InternalAggregation buildEmptyAggregation() {
+        return new InternalBinaryRange(name, format, keyed, new InternalBinaryRange.Bucket[0], pipelineAggregators(), metaData());
+    }
+
+}
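
The subtle part of the collector above is how `SortedSetDocValues.lookupTerm` results are turned into inclusive ordinal bounds: Lucene returns the ordinal when the term exists and `(-insertionPoint - 1)` when it does not, and the `to` bound of a range is exclusive. A standalone sketch of that conversion (class and method names are illustrative, not part of the change):

```java
// Mirrors the froms[]/tos[] computation in SortedSetRangeLeafCollector.
final class OrdinalBoundsSketch {

    /** Smallest ordinal whose term is >= the inclusive lower bound. */
    static long fromOrd(long lookupResult) {
        return lookupResult >= 0 ? lookupResult : -1 - lookupResult;
    }

    /** Largest ordinal whose term is < the exclusive upper bound. */
    static long toOrd(long lookupResult) {
        return lookupResult >= 0 ? lookupResult - 1 : -2 - lookupResult;
    }
}
```

With these per-range bounds computed once per segment and the ranges sorted by `RANGE_COMPARATOR`, each document ordinal only needs the binary search over `froms`/`maxTos` shown in `collect` to find the matching ranges.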

+ 70 - 0
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorFactory.java

@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.InternalAggregation.Type;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.support.AggregationContext;
+import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
+
+public class BinaryRangeAggregatorFactory
+        extends ValuesSourceAggregatorFactory<ValuesSource.Bytes, BinaryRangeAggregatorFactory> {
+
+    private final List<BinaryRangeAggregator.Range> ranges;
+    private final boolean keyed;
+
+    public BinaryRangeAggregatorFactory(String name, Type type,
+            ValuesSourceConfig<ValuesSource.Bytes> config,
+            List<BinaryRangeAggregator.Range> ranges, boolean keyed,
+            AggregationContext context,
+            AggregatorFactory<?> parent, Builder subFactoriesBuilder,
+            Map<String, Object> metaData) throws IOException {
+        super(name, type, config, context, parent, subFactoriesBuilder, metaData);
+        this.ranges = ranges;
+        this.keyed = keyed;
+    }
+
+    @Override
+    protected Aggregator createUnmapped(Aggregator parent,
+            List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) throws IOException {
+        return new BinaryRangeAggregator(name, factories, null, config.format(),
+                ranges, keyed, context, parent, pipelineAggregators, metaData);
+    }
+
+    @Override
+    protected Aggregator doCreateInternal(ValuesSource.Bytes valuesSource,
+            Aggregator parent,
+            boolean collectsFromSingleBucket,
+            List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) throws IOException {
+        return new BinaryRangeAggregator(name, factories, valuesSource, config.format(),
+                ranges, keyed, context, parent, pipelineAggregators, metaData);
+    }
+
+}

+ 311 - 0
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java

@@ -0,0 +1,311 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.AggregationStreams;
+import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.InternalAggregations;
+import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
+import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
+import org.elasticsearch.search.aggregations.bucket.BucketStreams;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+
+/** A range aggregation for data that is encoded in doc values using a binary representation. */
+public final class InternalBinaryRange
+        extends InternalMultiBucketAggregation<InternalBinaryRange, InternalBinaryRange.Bucket>
+        implements Range {
+
+    public static final Type TYPE = new Type("binary_range");
+
+    private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
+        @Override
+        public InternalBinaryRange readResult(StreamInput in) throws IOException {
+            InternalBinaryRange range = new InternalBinaryRange();
+            range.readFrom(in);
+            return range;
+        }
+    };
+
+    private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
+        @Override
+        public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
+            Bucket bucket = new Bucket(context.format(), context.keyed());
+            bucket.readFrom(in);
+            return bucket;
+        }
+
+        @Override
+        public BucketStreamContext getBucketStreamContext(Bucket bucket) {
+            BucketStreamContext context = new BucketStreamContext();
+            context.format(bucket.format);
+            context.keyed(bucket.keyed);
+            return context;
+        }
+    };
+
+    public static void registerStream() {
+        AggregationStreams.registerStream(STREAM, TYPE.stream());
+        BucketStreams.registerStream(BUCKET_STREAM, TYPE.stream());
+    }
+
+    public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Range.Bucket {
+
+        private final transient DocValueFormat format;
+        private final transient boolean keyed;
+        private String key;
+        private BytesRef from, to;
+        private long docCount;
+        private InternalAggregations aggregations;
+
+        public Bucket(DocValueFormat format, boolean keyed, String key, BytesRef from, BytesRef to,
+                long docCount, InternalAggregations aggregations) {
+            this(format, keyed);
+            this.key = key;
+            this.from = from;
+            this.to = to;
+            this.docCount = docCount;
+            this.aggregations = aggregations;
+        }
+
+        // for serialization
+        private Bucket(DocValueFormat format, boolean keyed) {
+            this.format = format;
+            this.keyed = keyed;
+        }
+
+        @Override
+        public Object getKey() {
+            return key;
+        }
+
+        @Override
+        public String getKeyAsString() {
+            return key;
+        }
+
+        @Override
+        public long getDocCount() {
+            return docCount;
+        }
+
+        @Override
+        public Aggregations getAggregations() {
+            return aggregations;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            String key = this.key;
+            if (keyed) {
+                if (key == null) {
+                    StringBuilder keyBuilder = new StringBuilder();
+                    keyBuilder.append(from == null ? "*" : format.format(from));
+                    keyBuilder.append("-");
+                    keyBuilder.append(to == null ? "*" : format.format(to));
+                    key = keyBuilder.toString();
+                }
+                builder.startObject(key);
+            } else {
+                builder.startObject();
+                if (key != null) {
+                    builder.field(CommonFields.KEY, key);
+                }
+            }
+            if (from != null) {
+                builder.field(CommonFields.FROM, getFrom());
+            }
+            if (to != null) {
+                builder.field(CommonFields.TO, getTo());
+            }
+            builder.field(CommonFields.DOC_COUNT, docCount);
+            aggregations.toXContentInternal(builder, params);
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            key = in.readOptionalString();
+            if (in.readBoolean()) {
+                from = in.readBytesRef();
+            } else {
+                from = null;
+            }
+            if (in.readBoolean()) {
+                to = in.readBytesRef();
+            } else {
+                to = null;
+            }
+            docCount = in.readLong();
+            aggregations = InternalAggregations.readAggregations(in);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeOptionalString(key);
+            out.writeBoolean(from != null);
+            if (from != null) {
+                out.writeBytesRef(from);
+            }
+            out.writeBoolean(to != null);
+            if (to != null) {
+                out.writeBytesRef(to);
+            }
+            out.writeLong(docCount);
+            aggregations.writeTo(out);
+        }
+
+        @Override
+        public Object getFrom() {
+            return getFromAsString();
+        }
+
+        @Override
+        public String getFromAsString() {
+            return from == null ? null : format.format(from);
+        }
+
+        @Override
+        public Object getTo() {
+            return getToAsString();
+        }
+
+        @Override
+        public String getToAsString() {
+            return to == null ? null : format.format(to);
+        }
+
+    }
+
+    private DocValueFormat format;
+    private boolean keyed;
+    private Bucket[] buckets;
+
+    public InternalBinaryRange(String name, DocValueFormat format, boolean keyed, Bucket[] buckets,
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+        super(name, pipelineAggregators, metaData);
+        this.format = format;
+        this.keyed = keyed;
+        this.buckets = buckets;
+    }
+
+    private InternalBinaryRange() {} // for serialization
+
+    @Override
+    public List<Range.Bucket> getBuckets() {
+        return Arrays.asList(buckets);
+    }
+
+    @Override
+    public InternalBinaryRange create(List<Bucket> buckets) {
+        return new InternalBinaryRange(name, format, keyed, buckets.toArray(new Bucket[0]),
+                pipelineAggregators(), metaData);
+    }
+
+    @Override
+    public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
+        return new Bucket(format, keyed, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations);
+    }
+
+    @Override
+    public Type type() {
+        return TYPE;
+    }
+
+    @Override
+    public InternalAggregation doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
+        long[] docCounts = new long[buckets.length];
+        InternalAggregations[][] aggs = new InternalAggregations[buckets.length][];
+        for (int i = 0; i < aggs.length; ++i) {
+            aggs[i] = new InternalAggregations[aggregations.size()];
+        }
+        for (int i = 0; i < aggregations.size(); ++i) {
+            InternalBinaryRange range = (InternalBinaryRange) aggregations.get(i);
+            if (range.buckets.length != buckets.length) {
+                throw new IllegalStateException("Expected " + buckets.length + " buckets, but got " + range.buckets.length);
+            }
+            for (int j = 0; j < buckets.length; ++j) {
+                Bucket bucket = range.buckets[j];
+                docCounts[j] += bucket.docCount;
+                aggs[j][i] = bucket.aggregations;
+            }
+        }
+        Bucket[] buckets = new Bucket[this.buckets.length];
+        for (int i = 0; i < buckets.length; ++i) {
+            Bucket b = this.buckets[i];
+            buckets[i] = new Bucket(format, keyed, b.key, b.from, b.to, docCounts[i],
+                    InternalAggregations.reduce(Arrays.asList(aggs[i]), reduceContext));
+        }
+        return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators(), metaData);
+    }
+
+    @Override
+    public XContentBuilder doXContentBody(XContentBuilder builder,
+            Params params) throws IOException {
+        if (keyed) {
+            builder.startObject(CommonFields.BUCKETS);
+        } else {
+            builder.startArray(CommonFields.BUCKETS);
+        }
+        for (Bucket range : buckets) {
+            range.toXContent(builder, params);
+        }
+        if (keyed) {
+            builder.endObject();
+        } else {
+            builder.endArray();
+        }
+        return builder;
+    }
+
+    @Override
+    protected void doWriteTo(StreamOutput out) throws IOException {
+        out.writeNamedWriteable(format);
+        out.writeBoolean(keyed);
+        out.writeVInt(buckets.length);
+        for (Bucket bucket : buckets) {
+            bucket.writeTo(out);
+        }
+    }
+
+    @Override
+    protected void doReadFrom(StreamInput in) throws IOException {
+        format = in.readNamedWriteable(DocValueFormat.class);
+        keyed = in.readBoolean();
+        Bucket[] buckets = new Bucket[in.readVInt()];
+        for (int i = 0; i < buckets.length; ++i) {
+            Bucket bucket = new Bucket(format, keyed);
+            bucket.readFrom(in);
+            buckets[i] = bucket;
+        }
+        this.buckets = buckets;
+    }
+
+}

+ 330 - 0
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeAggregatorBuilder.java

@@ -0,0 +1,330 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range.ip;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+import org.apache.lucene.document.InetAddressPoint;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator;
+import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregatorFactory;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
+import org.elasticsearch.search.aggregations.support.AggregationContext;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
+
+
+public final class IpRangeAggregatorBuilder
+        extends ValuesSourceAggregatorBuilder<ValuesSource.Bytes, IpRangeAggregatorBuilder> {
+    private static final String NAME = "ip_range";
+    public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
+    private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME);
+
+    public static class Range implements ToXContent {
+        private final String key;
+        private final String from;
+        private final String to;
+
+        Range(String key, String from, String to) {
+            if (from != null) {
+                InetAddresses.forString(from);
+            }
+            if (to != null) {
+                InetAddresses.forString(to);
+            }
+            this.key = key;
+            this.from = from;
+            this.to = to;
+        }
+
+        Range(String key, String mask) {
+            String[] splits = mask.split("/");
+            if (splits.length != 2) {
+                throw new IllegalArgumentException("Expected [ip/prefix_length] but got [" + mask
+                        + "], which contains zero or more than one [/]");
+            }
+            InetAddress value = InetAddresses.forString(splits[0]);
+            int prefixLength = Integer.parseInt(splits[1]);
+            // copied from InetAddressPoint.newPrefixQuery
+            if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) {
+                throw new IllegalArgumentException("illegal prefixLength [" + prefixLength
+                        + "] in [" + mask + "]. Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges");
+            }
+            // create the lower value by zeroing out the host portion, upper value by filling it with all ones.
+            byte lower[] = value.getAddress();
+            byte upper[] = value.getAddress();
+            for (int i = prefixLength; i < 8 * lower.length; i++) {
+                int m = 1 << (7 - (i & 7));
+                lower[i >> 3] &= ~m;
+                upper[i >> 3] |= m;
+            }
+            this.key = key;
+            try {
+                this.from = InetAddresses.toAddrString(InetAddress.getByAddress(lower));
+                this.to = InetAddresses.toAddrString(InetAddress.getByAddress(upper));
+            } catch (UnknownHostException bogus) {
+                throw new AssertionError(bogus);
+            }
+        }
+
+        private Range(StreamInput in) throws IOException {
+            this.key = in.readOptionalString();
+            this.from = in.readOptionalString();
+            this.to = in.readOptionalString();
+        }
+
+        void writeTo(StreamOutput out) throws IOException {
+            out.writeOptionalString(key);
+            out.writeOptionalString(from);
+            out.writeOptionalString(to);
+        }
+
+        public String getKey() {
+            return key;
+        }
+
+        public String getFrom() {
+            return from;
+        }
+
+        public String getTo() {
+            return to;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null || getClass() != obj.getClass()) {
+                return false;
+            }
+            Range that = (Range) obj;
+            return Objects.equals(key, that.key)
+                    && Objects.equals(from, that.from)
+                    && Objects.equals(to, that.to);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(getClass(), key, from, to);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            if (key != null) {
+                builder.field(RangeAggregator.Range.KEY_FIELD.getPreferredName(), key);
+            }
+            if (from != null) {
+                builder.field(RangeAggregator.Range.FROM_FIELD.getPreferredName(), from);
+            }
+            if (to != null) {
+                builder.field(RangeAggregator.Range.TO_FIELD.getPreferredName(), to);
+            }
+            builder.endObject();
+            return builder;
+        }
+    }
+
+    private boolean keyed = false;
+    private List<Range> ranges = new ArrayList<>();
+
+    public IpRangeAggregatorBuilder(String name) {
+        super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    public IpRangeAggregatorBuilder keyed(boolean keyed) {
+        this.keyed = keyed;
+        return this;
+    }
+
+    public boolean keyed() {
+        return keyed;
+    }
+
+    /** Get the current list of ranges that are configured on this aggregation. */
+    public List<Range> getRanges() {
+        return Collections.unmodifiableList(ranges);
+    }
+
+    /** Add a new {@link Range} to this aggregation. */
+    public IpRangeAggregatorBuilder addRange(Range range) {
+        ranges.add(range);
+        return this;
+    }
+
+    /**
+     * Add a new range to this aggregation.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the addresses, inclusive
+     * @param to
+     *            the upper bound on the addresses, exclusive
+     */
+    public IpRangeAggregatorBuilder addRange(String key, String from, String to) {
+        addRange(new Range(key, from, to));
+        return this;
+    }
+
+    /**
+     * Add a new range to this aggregation using the CIDR notation.
+     */
+    public IpRangeAggregatorBuilder addMaskRange(String key, String mask) {
+        return addRange(new Range(key, mask));
+    }
+
+    /**
+     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
+     * a key.
+     */
+    public IpRangeAggregatorBuilder addMaskRange(String mask) {
+        return addRange(new Range(mask, mask));
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but the key will be
+     * automatically generated.
+     */
+    public IpRangeAggregatorBuilder addRange(String from, String to) {
+        return addRange(null, from, to);
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but there will be no
+     * lower bound.
+     */
+    public IpRangeAggregatorBuilder addUnboundedTo(String key, String to) {
+        addRange(new Range(key, null, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedTo(String, String)} but the key will be
+     * generated automatically.
+     */
+    public IpRangeAggregatorBuilder addUnboundedTo(String to) {
+        return addUnboundedTo(null, to);
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but there will be no
+     * upper bound.
+     */
+    public IpRangeAggregatorBuilder addUnboundedFrom(String key, String from) {
+        addRange(new Range(key, from, null));
+        return this;
+    }
+
+    @Override
+    public IpRangeAggregatorBuilder script(Script script) {
+        throw new IllegalArgumentException("[ip_range] does not support scripts");
+    }
+
+    /**
+     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
+     * generated automatically.
+     */
+    public IpRangeAggregatorBuilder addUnboundedFrom(String from) {
+        return addUnboundedFrom(null, from);
+    }
+
+    public IpRangeAggregatorBuilder(StreamInput in) throws IOException {
+        super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP);
+        final int numRanges = in.readVInt();
+        for (int i = 0; i < numRanges; ++i) {
+            addRange(new Range(in));
+        }
+        keyed = in.readBoolean();
+    }
+
+    @Override
+    protected void innerWriteTo(StreamOutput out) throws IOException {
+        out.writeVInt(ranges.size());
+        for (Range range : ranges) {
+            range.writeTo(out);
+        }
+        out.writeBoolean(keyed);
+    }
+
+    private static BytesRef toBytesRef(String ip) {
+        if (ip == null) {
+            return null;
+        }
+        InetAddress address = InetAddresses.forString(ip);
+        byte[] bytes = InetAddressPoint.encode(address);
+        return new BytesRef(bytes);
+    }
+
+    @Override
+    protected ValuesSourceAggregatorFactory<ValuesSource.Bytes, ?> innerBuild(
+            AggregationContext context, ValuesSourceConfig<ValuesSource.Bytes> config,
+            AggregatorFactory<?> parent, Builder subFactoriesBuilder)
+                    throws IOException {
+        List<BinaryRangeAggregator.Range> ranges = new ArrayList<>();
+        for (Range range : this.ranges) {
+            ranges.add(new BinaryRangeAggregator.Range(range.key, toBytesRef(range.from), toBytesRef(range.to)));
+        }
+        return new BinaryRangeAggregatorFactory(name, TYPE, config, ranges,
+                keyed, context, parent, subFactoriesBuilder, metaData);
+    }
+
+    @Override
+    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+        builder.field(RangeAggregator.RANGES_FIELD.getPreferredName(), ranges);
+        builder.field(RangeAggregator.KEYED_FIELD.getPreferredName(), keyed);
+        return builder;
+    }
+
+    @Override
+    protected int innerHashCode() {
+        return Objects.hash(keyed, ranges);
+    }
+
+    @Override
+    protected boolean innerEquals(Object obj) {
+        IpRangeAggregatorBuilder that = (IpRangeAggregatorBuilder) obj;
+        return keyed == that.keyed
+                && ranges.equals(that.ranges);
+    }
+}
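
The `Range(String key, String mask)` constructor above expands a CIDR block into its lowest and highest addresses by clearing, respectively setting, the host bits. A self-contained sketch of that expansion using only the JDK (the real code uses `InetAddresses.forString`, which rejects host names; `getByName` with an IP literal is a stand-in here, and the prefix-length validation is omitted):

```java
import java.net.InetAddress;
import java.net.UnknownHostException;

final class CidrBoundsSketch {
    /** Returns the lower and upper address of a CIDR block such as "10.0.0.0/25". */
    static InetAddress[] bounds(String cidr) throws UnknownHostException {
        String[] parts = cidr.split("/");
        InetAddress value = InetAddress.getByName(parts[0]); // assumes an IP literal, no DNS lookup
        int prefixLength = Integer.parseInt(parts[1]);
        byte[] lower = value.getAddress();
        byte[] upper = value.getAddress();
        // Zero out the host bits for the lower bound, fill them with ones for the upper bound.
        for (int i = prefixLength; i < 8 * lower.length; i++) {
            int m = 1 << (7 - (i & 7));
            lower[i >> 3] &= ~m;
            upper[i >> 3] |= m;
        }
        return new InetAddress[] { InetAddress.getByAddress(lower), InetAddress.getByAddress(upper) };
    }
}
```

For example, `bounds("10.0.0.0/25")` yields `10.0.0.0` and `10.0.0.127`, which the builder then stores as the `from`/`to` strings of the range.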

+ 126 - 0
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ip/IpRangeParser.java

@@ -0,0 +1,126 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range.ip;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParseFieldMatcher;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder.Range;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
+
+/**
+ * A parser for ip range aggregations.
+ */
+public class IpRangeParser extends BytesValuesSourceParser {
+
+    private static final ParseField MASK_FIELD = new ParseField("mask");
+
+    public IpRangeParser() {
+        super(false, false);
+    }
+
+    @Override
+    protected ValuesSourceAggregatorBuilder<ValuesSource.Bytes, ?> createFactory(
+            String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        IpRangeAggregatorBuilder range = new IpRangeAggregatorBuilder(aggregationName);
+        @SuppressWarnings("unchecked")
+        Iterable<Range> ranges = (Iterable<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
+        if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) {
+            for (Range r : ranges) {
+                range.addRange(r);
+            }
+        }
+        if (otherOptions.containsKey(RangeAggregator.KEYED_FIELD)) {
+            range.keyed((Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD));
+        }
+        return range;
+    }
+
+    private Range parseRange(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
+        String key = null;
+        String from = null;
+        String to = null;
+        String mask = null;
+
+        if (parser.currentToken() != Token.START_OBJECT) {
+            throw new ParsingException(parser.getTokenLocation(), "[ranges] must contain objects, but hit a " + parser.currentToken());
+        }
+        while (parser.nextToken() != Token.END_OBJECT) {
+            if (parser.currentToken() == Token.FIELD_NAME) {
+                continue;
+            }
+            if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.KEY_FIELD)) {
+                key = parser.text();
+            } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.FROM_FIELD)) {
+                from = parser.text();
+            } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.Range.TO_FIELD)) {
+                to = parser.text();
+            } else if (parseFieldMatcher.match(parser.currentName(), MASK_FIELD)) {
+                mask = parser.text();
+            } else {
+                throw new ParsingException(parser.getTokenLocation(), "Unexpected ip range parameter: [" + parser.currentName() + "]");
+            }
+        }
+        if (mask != null) {
+            if (key == null) {
+                key = mask;
+            }
+            return new Range(key, mask);
+        } else {
+            return new Range(key, from, to);
+        }
+    }
+
+    @Override
+    protected boolean token(String aggregationName, String currentFieldName,
+            Token token, XContentParser parser,
+            ParseFieldMatcher parseFieldMatcher,
+            Map<ParseField, Object> otherOptions) throws IOException {
+        if (parseFieldMatcher.match(currentFieldName, RangeAggregator.RANGES_FIELD)) {
+            if (parser.currentToken() != Token.START_ARRAY) {
+                throw new ParsingException(parser.getTokenLocation(), "[ranges] must be passed as an array, but got a " + token);
+            }
+            List<Range> ranges = new ArrayList<>();
+            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                Range range = parseRange(parser, parseFieldMatcher);
+                ranges.add(range);
+            }
+            otherOptions.put(RangeAggregator.RANGES_FIELD, ranges);
+            return true;
+        } else if (parseFieldMatcher.match(parser.currentName(), RangeAggregator.KEYED_FIELD)) {
+            otherOptions.put(RangeAggregator.KEYED_FIELD, parser.booleanValue());
+            return true;
+        }
+        return false;
+    }
+
+}
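
For reference, the shape of the aggregation body that `IpRangeParser` accepts, built with `XContentBuilder` (the `ip_addr` field name is illustrative; each object under `ranges` carries either `from`/`to` with an optional `key`, or a CIDR `mask`):

```java
import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

final class IpRangeRequestSketch {
    /** Builds the content of a single named aggregation of type ip_range. */
    static XContentBuilder body() throws IOException {
        return XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("ip_range")
                        .field("field", "ip_addr")
                        .field("keyed", true)
                        .startArray("ranges")
                            .startObject().field("to", "10.0.0.5").endObject()
                            .startObject().field("key", "mid").field("from", "10.0.0.5").field("to", "10.0.0.10").endObject()
                            .startObject().field("mask", "10.0.0.0/25").endObject()
                        .endArray()
                    .endObject()
                .endObject();
    }
}
```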

+ 0 - 277
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorBuilder.java

@@ -1,277 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.range.ipv4;
-
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.network.Cidrs;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
-import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
-import org.elasticsearch.search.aggregations.support.AggregationContext;
-import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
-import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.internal.SearchContext;
-
-import java.io.IOException;
-import java.util.Objects;
-
-public class IPv4RangeAggregatorBuilder extends AbstractRangeBuilder<IPv4RangeAggregatorBuilder, IPv4RangeAggregatorBuilder.Range> {
-    public static final String NAME = InternalIPv4Range.TYPE.name();
-    public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
-
-    public IPv4RangeAggregatorBuilder(String name) {
-        super(name, InternalIPv4Range.FACTORY);
-    }
-
-    /**
-     * Read from a stream.
-     */
-    public IPv4RangeAggregatorBuilder(StreamInput in) throws IOException {
-        super(in, InternalIPv4Range.FACTORY, Range::new);
-    }
-
-    @Override
-    public String getWriteableName() {
-        return NAME;
-    }
-
-    /**
-     * Add a new range to this aggregation.
-     *
-     * @param key
-     *            the key to use for this range in the response
-     * @param from
-     *            the lower bound on the distances, inclusive
-     * @param to
-     *            the upper bound on the distances, exclusive
-     */
-    public IPv4RangeAggregatorBuilder addRange(String key, String from, String to) {
-        addRange(new Range(key, from, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
-     * a key.
-     */
-    public IPv4RangeAggregatorBuilder addMaskRange(String key, String mask) {
-        return addRange(new Range(key, mask));
-    }
-
-    /**
-     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
-     * a key.
-     */
-    public IPv4RangeAggregatorBuilder addMaskRange(String mask) {
-        return addRange(new Range(mask, mask));
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but the key will be
-     * automatically generated.
-     */
-    public IPv4RangeAggregatorBuilder addRange(String from, String to) {
-        return addRange(null, from, to);
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but there will be no
-     * lower bound.
-     */
-    public IPv4RangeAggregatorBuilder addUnboundedTo(String key, String to) {
-        addRange(new Range(key, null, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedTo(String, String)} but the key will be
-     * generated automatically.
-     */
-    public IPv4RangeAggregatorBuilder addUnboundedTo(String to) {
-        return addUnboundedTo(null, to);
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but there will be no
-     * upper bound.
-     */
-    public IPv4RangeAggregatorBuilder addUnboundedFrom(String key, String from) {
-        addRange(new Range(key, from, null));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
-     * generated automatically.
-     */
-    public IPv4RangeAggregatorBuilder addUnboundedFrom(String from) {
-        return addUnboundedFrom(null, from);
-    }
-
-    @Override
-    protected Ipv4RangeAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig<Numeric> config,
-            AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        return new Ipv4RangeAggregatorFactory(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder,
-                metaData);
-    }
-
-    public static class Range extends RangeAggregator.Range {
-        static final ParseField MASK_FIELD = new ParseField("mask");
-
-        private final String cidr;
-
-        public Range(String key, Double from, Double to) {
-            this(key, from, null, to, null, null);
-        }
-
-        public Range(String key, String from, String to) {
-            this(key, null, from, null, to, null);
-        }
-
-        public Range(String key, String cidr) {
-            this(key, null, null, null, null, cidr);
-        }
-
-        private Range(String key, Double from, String fromAsStr, Double to, String toAsStr, String cidr) {
-            super(key, from, fromAsStr, to, toAsStr);
-            this.cidr = cidr;
-        }
-
-        /**
-         * Read from a stream.
-         */
-        public Range(StreamInput in) throws IOException {
-            super(in);
-            cidr = in.readOptionalString();
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            super.writeTo(out);
-            out.writeOptionalString(cidr);
-        }
-
-        public String mask() {
-            return cidr;
-        }
-
-        @Override
-        public Range process(DocValueFormat parser, SearchContext context) {
-            assert parser != null;
-            Double from = this.from;
-            Double to = this.to;
-            String key = this.key;
-            if (fromAsStr != null) {
-                from = parser.parseDouble(fromAsStr, false, context.nowCallable());
-            }
-            if (toAsStr != null) {
-                to = parser.parseDouble(toAsStr, false, context.nowCallable());
-            }
-            if (cidr != null) {
-                long[] fromTo = Cidrs.cidrMaskToMinMax(cidr);
-                from = fromTo[0] == 0 ? Double.NEGATIVE_INFINITY : fromTo[0];
-                to = fromTo[1] == InternalIPv4Range.MAX_IP ? Double.POSITIVE_INFINITY : fromTo[1];
-                if (this.key == null) {
-                    key = cidr;
-                }
-            }
-            return new Range(key, from, to);
-        }
-
-        public static Range fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
-            XContentParser.Token token;
-            String currentFieldName = null;
-            double from = Double.NEGATIVE_INFINITY;
-            String fromAsStr = null;
-            double to = Double.POSITIVE_INFINITY;
-            String toAsStr = null;
-            String key = null;
-            String cidr = null;
-            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if (token == XContentParser.Token.FIELD_NAME) {
-                    currentFieldName = parser.currentName();
-                } else if (token == XContentParser.Token.VALUE_NUMBER) {
-                    if (parseFieldMatcher.match(currentFieldName, FROM_FIELD)) {
-                        from = parser.doubleValue();
-                    } else if (parseFieldMatcher.match(currentFieldName, TO_FIELD)) {
-                        to = parser.doubleValue();
-                    }
-                } else if (token == XContentParser.Token.VALUE_STRING) {
-                    if (parseFieldMatcher.match(currentFieldName, FROM_FIELD)) {
-                        fromAsStr = parser.text();
-                    } else if (parseFieldMatcher.match(currentFieldName, TO_FIELD)) {
-                        toAsStr = parser.text();
-                    } else if (parseFieldMatcher.match(currentFieldName, KEY_FIELD)) {
-                        key = parser.text();
-                    } else if (parseFieldMatcher.match(currentFieldName, MASK_FIELD)) {
-                        cidr = parser.text();
-                    }
-                }
-            }
-            return new Range(key, from, fromAsStr, to, toAsStr, cidr);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            if (key != null) {
-                builder.field(KEY_FIELD.getPreferredName(), key);
-            }
-            if (cidr != null) {
-                builder.field(MASK_FIELD.getPreferredName(), cidr);
-            } else {
-                if (Double.isFinite(from)) {
-                    builder.field(FROM_FIELD.getPreferredName(), from);
-                }
-                if (Double.isFinite(to)) {
-                    builder.field(TO_FIELD.getPreferredName(), to);
-                }
-                if (fromAsStr != null) {
-                    builder.field(FROM_FIELD.getPreferredName(), fromAsStr);
-                }
-                if (toAsStr != null) {
-                    builder.field(TO_FIELD.getPreferredName(), toAsStr);
-                }
-            }
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(super.hashCode(), cidr);
-        }
-
-        @Override
-        public boolean equals(Object obj) {
-            return super.equals(obj)
-                    && Objects.equals(cidr, ((Range) obj).cidr);
-        }
-
-    }
-
-}

+ 0 - 164
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/InternalIPv4Range.java

@@ -1,164 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.aggregations.bucket.range.ipv4;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.AggregationStreams;
-import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
-import org.elasticsearch.search.aggregations.bucket.BucketStreams;
-import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.support.ValueType;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, InternalIPv4Range> {
-    public static final long MAX_IP = 1L << 32;
-
-    public final static Type TYPE = new Type("ip_range", "iprange");
-
-    private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
-        @Override
-        public InternalIPv4Range readResult(StreamInput in) throws IOException {
-            InternalIPv4Range range = new InternalIPv4Range();
-            range.readFrom(in);
-            return range;
-        }
-    };
-
-    private final static BucketStreams.Stream<Bucket> BUCKET_STREAM = new BucketStreams.Stream<Bucket>() {
-        @Override
-        public Bucket readResult(StreamInput in, BucketStreamContext context) throws IOException {
-            Bucket buckets = new Bucket(context.keyed());
-            buckets.readFrom(in);
-            return buckets;
-        }
-
-        @Override
-        public BucketStreamContext getBucketStreamContext(Bucket bucket) {
-            BucketStreamContext context = new BucketStreamContext();
-            context.keyed(bucket.keyed());
-            return context;
-        }
-    };
-
-    public static void registerStream() {
-        AggregationStreams.registerStream(STREAM, TYPE.stream());
-        BucketStreams.registerStream(BUCKET_STREAM, TYPE.stream());
-    }
-
-    public static final Factory FACTORY = new Factory();
-
-    public static class Bucket extends InternalRange.Bucket {
-
-        public Bucket(boolean keyed) {
-            super(keyed, DocValueFormat.IP);
-        }
-
-        public Bucket(String key, double from, double to, long docCount, List<InternalAggregation> aggregations, boolean keyed) {
-            super(key, from, to, docCount, new InternalAggregations(aggregations), keyed, DocValueFormat.IP);
-        }
-
-        public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed) {
-            super(key, from, to, docCount, aggregations, keyed, DocValueFormat.IP);
-        }
-
-        @Override
-        public String getFromAsString() {
-            double from = ((Number) this.from).doubleValue();
-            return Double.isInfinite(from) ? null : from == 0 ? null : DocValueFormat.IP.format(from);
-        }
-
-        @Override
-        public String getToAsString() {
-            double to = ((Number) this.to).doubleValue();
-            return Double.isInfinite(to) ? null : MAX_IP == to ? null : DocValueFormat.IP.format(to);
-        }
-
-        @Override
-        protected InternalRange.Factory<Bucket, ?> getFactory() {
-            return FACTORY;
-        }
-
-        boolean keyed() {
-            return keyed;
-        }
-    }
-
-    public static class Factory extends InternalRange.Factory<InternalIPv4Range.Bucket, InternalIPv4Range> {
-
-        @Override
-        public Type type() {
-            return TYPE;
-        }
-
-        @Override
-        public ValueType getValueType() {
-            return ValueType.IP;
-        }
-
-        @Override
-        public InternalIPv4Range create(String name, List<Bucket> ranges, DocValueFormat formatter, boolean keyed,
-                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-            return new InternalIPv4Range(name, ranges, keyed, pipelineAggregators, metaData);
-        }
-
-        @Override
-        public InternalIPv4Range create(List<Bucket> ranges, InternalIPv4Range prototype) {
-            return new InternalIPv4Range(prototype.name, ranges, prototype.keyed, prototype.pipelineAggregators(), prototype.metaData);
-        }
-
-        @Override
-        public Bucket createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, boolean keyed,
-                DocValueFormat formatter) {
-            return new Bucket(key, from, to, docCount, aggregations, keyed);
-        }
-
-        @Override
-        public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
-            return new Bucket(prototype.getKey(), ((Number) prototype.getFrom()).doubleValue(), ((Number) prototype.getTo()).doubleValue(),
-                    prototype.getDocCount(), aggregations, prototype.getKeyed());
-        }
-    }
-
-    public InternalIPv4Range() {} // for serialization
-
-    public InternalIPv4Range(String name, List<InternalIPv4Range.Bucket> ranges, boolean keyed, List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) {
-        super(name, ranges, DocValueFormat.IP, keyed, pipelineAggregators, metaData);
-    }
-
-    @Override
-    public Type type() {
-        return TYPE;
-    }
-
-    @Override
-    public InternalRange.Factory<Bucket, InternalIPv4Range> getFactory() {
-        return FACTORY;
-    }
-}

+ 0 - 64
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java

@@ -1,64 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.aggregations.bucket.range.ipv4;
-
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
-import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
-import org.elasticsearch.search.aggregations.support.ValueType;
-import org.elasticsearch.search.aggregations.support.ValuesSourceType;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-/**
- *
- */
-public class IpRangeParser extends RangeParser {
-
-    public IpRangeParser() {
-        super(true, false, false);
-    }
-
-    @Override
-    protected Range parseRange(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
-        return IPv4RangeAggregatorBuilder.Range.fromXContent(parser, parseFieldMatcher);
-    }
-
-    @Override
-    protected IPv4RangeAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        IPv4RangeAggregatorBuilder factory = new IPv4RangeAggregatorBuilder(aggregationName);
-        @SuppressWarnings("unchecked")
-        List<IPv4RangeAggregatorBuilder.Range> ranges = (List<IPv4RangeAggregatorBuilder.Range>) otherOptions
-                .get(RangeAggregator.RANGES_FIELD);
-        for (IPv4RangeAggregatorBuilder.Range range : ranges) {
-            factory.addRange(range);
-        }
-        Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
-        if (keyed != null) {
-            factory.keyed(keyed);
-        }
-        return factory;
-    }
-}

+ 0 - 44
core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/Ipv4RangeAggregatorFactory.java

@@ -1,44 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.range.ipv4;
-
-import org.elasticsearch.search.aggregations.AggregatorFactories;
-import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.InternalAggregation.Type;
-import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeAggregatorFactory;
-import org.elasticsearch.search.aggregations.bucket.range.InternalRange.Factory;
-import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.AggregationContext;
-import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-public class Ipv4RangeAggregatorFactory
-        extends AbstractRangeAggregatorFactory<Ipv4RangeAggregatorFactory, IPv4RangeAggregatorBuilder.Range> {
-
-    public Ipv4RangeAggregatorFactory(String name, Type type, ValuesSourceConfig<Numeric> config,
-            List<IPv4RangeAggregatorBuilder.Range> ranges, boolean keyed, Factory<?, ?> rangeFactory, AggregationContext context,
-            AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
-        super(name, type, config, ranges, keyed, rangeFactory, context, parent, subFactoriesBuilder, metaData);
-    }
-
-}

+ 1 - 6
core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java

@@ -70,12 +70,7 @@ public enum ValueType implements Writeable {
             return true;
         }
     },
-    IP((byte) 6, "ip", "ip", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.IP) {
-        @Override
-        public boolean isNumeric() {
-            return true;
-        }
-    },
+    IP((byte) 6, "ip", "ip", ValuesSourceType.BYTES, IndexFieldData.class, DocValueFormat.IP),
     NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, DocValueFormat.RAW) {
         @Override
         public boolean isNumeric() {

+ 2 - 1
core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java

@@ -86,9 +86,10 @@ public abstract class BaseAggregationTestCase<AB extends AggregatorBuilder<AB>>
     protected static final String DOUBLE_FIELD_NAME = "mapped_double";
     protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
     protected static final String DATE_FIELD_NAME = "mapped_date";
+    protected static final String IP_FIELD_NAME = "mapped_ip";
     protected static final String OBJECT_FIELD_NAME = "mapped_object";
     protected static final String[] mappedFieldNames = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME,
-            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME};
+            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, IP_FIELD_NAME, OBJECT_FIELD_NAME};
 
     private static Injector injector;
     private static Index index;

+ 0 - 71
core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeTests.java

@@ -1,71 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket;
-
-import org.elasticsearch.common.network.Cidrs;
-import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper;
-import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorBuilder.Range;
-
-public class IPv4RangeTests extends BaseAggregationTestCase<IPv4RangeAggregatorBuilder> {
-
-    @Override
-    protected IPv4RangeAggregatorBuilder createTestAggregatorBuilder() {
-        int numRanges = randomIntBetween(1, 10);
-        IPv4RangeAggregatorBuilder factory = new IPv4RangeAggregatorBuilder("foo");
-        for (int i = 0; i < numRanges; i++) {
-            String key = null;
-            if (randomBoolean()) {
-                key = randomAsciiOfLengthBetween(1, 20);
-            }
-            if (randomBoolean()) {
-                double from = randomBoolean() ? Double.NEGATIVE_INFINITY : randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE - 1000);
-                double to = randomBoolean() ? Double.POSITIVE_INFINITY
-                        : (Double.isInfinite(from) ? randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE)
-                                : randomIntBetween((int) from, Integer.MAX_VALUE));
-                if (randomBoolean()) {
-                    factory.addRange(new Range(key, from, to));
-                } else {
-                    String fromAsStr = Double.isInfinite(from) ? null : LegacyIpFieldMapper.longToIp((long) from);
-                    String toAsStr = Double.isInfinite(to) ? null : LegacyIpFieldMapper.longToIp((long) to);
-                    factory.addRange(new Range(key, fromAsStr, toAsStr));
-                }
-            } else {
-                int mask = randomInt(32);
-                long ipAsLong = randomIntBetween(0, Integer.MAX_VALUE);
-
-                long blockSize = 1L << (32 - mask);
-                ipAsLong = ipAsLong - (ipAsLong & (blockSize - 1));
-                String cidr = Cidrs.createCIDR(ipAsLong, mask);
-                factory.addRange(new Range(key, cidr));
-            }
-        }
-        factory.field(INT_FIELD_NAME);
-        if (randomBoolean()) {
-            factory.keyed(randomBoolean());
-        }
-        if (randomBoolean()) {
-            factory.missing(randomIntBetween(0, 10));
-        }
-        return factory;
-    }
-
-}

+ 268 - 0
core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java

@@ -0,0 +1,268 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.hamcrest.Matchers.containsString;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.inject.internal.Nullable;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.script.AbstractSearchScript;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.NativeScriptFactory;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptModule;
+import org.elasticsearch.script.ScriptService.ScriptType;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.Range;
+import org.elasticsearch.test.ESIntegTestCase;
+
+@ESIntegTestCase.SuiteScopeTestCase
+public class IpRangeIT extends ESIntegTestCase {
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return Arrays.asList(DummyScriptPlugin.class);
+    }
+
+    @Override
+    public void setupSuiteScopeCluster() throws Exception {
+        assertAcked(prepareCreate("idx")
+                .addMapping("type", "ip", "type=ip", "ips", "type=ip"));
+
+        indexRandom(true,
+                client().prepareIndex("idx", "type", "1").setSource(
+                        "ip", "192.168.1.7",
+                        "ips", Arrays.asList("192.168.0.13", "192.168.1.2")),
+                client().prepareIndex("idx", "type", "2").setSource(
+                        "ip", "192.168.1.10",
+                        "ips", Arrays.asList("192.168.1.25", "192.168.1.28")),
+                client().prepareIndex("idx", "type", "3").setSource(
+                        "ip", "2001:db8::ff00:42:8329",
+                        "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380")));
+
+        assertAcked(prepareCreate("idx_unmapped"));
+    }
+
+    public void testSingleValuedField() {
+        SearchResponse rsp = client().prepareSearch("idx").addAggregation(
+                AggregationBuilders.ipRange("my_range")
+                    .field("ip")
+                    .addUnboundedTo("192.168.1.0")
+                    .addRange("192.168.1.0", "192.168.1.10")
+                    .addUnboundedFrom("192.168.1.10")).get();
+        assertSearchResponse(rsp);
+        Range range = rsp.getAggregations().get("my_range");
+        assertEquals(3, range.getBuckets().size());
+
+        Range.Bucket bucket1 = range.getBuckets().get(0);
+        assertNull(bucket1.getFrom());
+        assertEquals("192.168.1.0", bucket1.getTo());
+        assertEquals(0, bucket1.getDocCount());
+
+        Range.Bucket bucket2 = range.getBuckets().get(1);
+        assertEquals("192.168.1.0", bucket2.getFrom());
+        assertEquals("192.168.1.10", bucket2.getTo());
+        assertEquals(1, bucket2.getDocCount());
+
+        Range.Bucket bucket3 = range.getBuckets().get(2);
+        assertEquals("192.168.1.10", bucket3.getFrom());
+        assertNull(bucket3.getTo());
+        assertEquals(2, bucket3.getDocCount());
+    }
+
+    public void testMultiValuedField() {
+        SearchResponse rsp = client().prepareSearch("idx").addAggregation(
+                AggregationBuilders.ipRange("my_range")
+                    .field("ips")
+                    .addUnboundedTo("192.168.1.0")
+                    .addRange("192.168.1.0", "192.168.1.10")
+                    .addUnboundedFrom("192.168.1.10")).get();
+        assertSearchResponse(rsp);
+        Range range = rsp.getAggregations().get("my_range");
+        assertEquals(3, range.getBuckets().size());
+
+        Range.Bucket bucket1 = range.getBuckets().get(0);
+        assertNull(bucket1.getFrom());
+        assertEquals("192.168.1.0", bucket1.getTo());
+        assertEquals(1, bucket1.getDocCount());
+
+        Range.Bucket bucket2 = range.getBuckets().get(1);
+        assertEquals("192.168.1.0", bucket2.getFrom());
+        assertEquals("192.168.1.10", bucket2.getTo());
+        assertEquals(1, bucket2.getDocCount());
+
+        Range.Bucket bucket3 = range.getBuckets().get(2);
+        assertEquals("192.168.1.10", bucket3.getFrom());
+        assertNull(bucket3.getTo());
+        assertEquals(2, bucket3.getDocCount());
+    }
+
+    public void testIpMask() {
+        SearchResponse rsp = client().prepareSearch("idx").addAggregation(
+                AggregationBuilders.ipRange("my_range")
+                    .field("ips")
+                    .addMaskRange("::/0")
+                    .addMaskRange("0.0.0.0/0")
+                    .addMaskRange("2001:db8::/64")).get();
+        assertSearchResponse(rsp);
+        Range range = rsp.getAggregations().get("my_range");
+        assertEquals(3, range.getBuckets().size());
+
+        Range.Bucket bucket1 = range.getBuckets().get(0);
+        assertEquals("::/0", bucket1.getKey());
+        assertEquals(3, bucket1.getDocCount());
+
+        Range.Bucket bucket2 = range.getBuckets().get(1);
+        assertEquals("0.0.0.0/0", bucket2.getKey());
+        assertEquals(2, bucket2.getDocCount());
+
+        Range.Bucket bucket3 = range.getBuckets().get(2);
+        assertEquals("2001:db8::/64", bucket3.getKey());
+        assertEquals(1, bucket3.getDocCount());
+    }
+
+    public void testPartiallyUnmapped() {
+        SearchResponse rsp = client().prepareSearch("idx", "idx_unmapped").addAggregation(
+                AggregationBuilders.ipRange("my_range")
+                    .field("ip")
+                    .addUnboundedTo("192.168.1.0")
+                    .addRange("192.168.1.0", "192.168.1.10")
+                    .addUnboundedFrom("192.168.1.10")).get();
+        assertSearchResponse(rsp);
+        Range range = rsp.getAggregations().get("my_range");
+        assertEquals(3, range.getBuckets().size());
+
+        Range.Bucket bucket1 = range.getBuckets().get(0);
+        assertNull(bucket1.getFrom());
+        assertEquals("192.168.1.0", bucket1.getTo());
+        assertEquals(0, bucket1.getDocCount());
+
+        Range.Bucket bucket2 = range.getBuckets().get(1);
+        assertEquals("192.168.1.0", bucket2.getFrom());
+        assertEquals("192.168.1.10", bucket2.getTo());
+        assertEquals(1, bucket2.getDocCount());
+
+        Range.Bucket bucket3 = range.getBuckets().get(2);
+        assertEquals("192.168.1.10", bucket3.getFrom());
+        assertNull(bucket3.getTo());
+        assertEquals(2, bucket3.getDocCount());
+    }
+
+    public void testUnmapped() {
+        SearchResponse rsp = client().prepareSearch("idx_unmapped").addAggregation(
+                AggregationBuilders.ipRange("my_range")
+                    .field("ip")
+                    .addUnboundedTo("192.168.1.0")
+                    .addRange("192.168.1.0", "192.168.1.10")
+                    .addUnboundedFrom("192.168.1.10")).get();
+        assertSearchResponse(rsp);
+        Range range = rsp.getAggregations().get("my_range");
+        assertEquals(3, range.getBuckets().size());
+
+        Range.Bucket bucket1 = range.getBuckets().get(0);
+        assertNull(bucket1.getFrom());
+        assertEquals("192.168.1.0", bucket1.getTo());
+        assertEquals(0, bucket1.getDocCount());
+
+        Range.Bucket bucket2 = range.getBuckets().get(1);
+        assertEquals("192.168.1.0", bucket2.getFrom());
+        assertEquals("192.168.1.10", bucket2.getTo());
+        assertEquals(0, bucket2.getDocCount());
+
+        Range.Bucket bucket3 = range.getBuckets().get(2);
+        assertEquals("192.168.1.10", bucket3.getFrom());
+        assertNull(bucket3.getTo());
+        assertEquals(0, bucket3.getDocCount());
+    }
+
+    public void testRejectsScript() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> client().prepareSearch("idx").addAggregation(
+                        AggregationBuilders.ipRange("my_range")
+                        .script(new Script(DummyScript.NAME, ScriptType.INLINE, "native", Collections.emptyMap())) ).get());
+        assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
+    }
+
+    public void testRejectsValueScript() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> client().prepareSearch("idx").addAggregation(
+                        AggregationBuilders.ipRange("my_range")
+                        .field("ip")
+                        .script(new Script(DummyScript.NAME, ScriptType.INLINE, "native", Collections.emptyMap())) ).get());
+        assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
+    }
+
+    public static class DummyScriptPlugin extends Plugin {
+
+        @Override
+        public String name() {
+            return "DummyScriptPlugin";
+        }
+
+        @Override
+        public String description() {
+            return "A mock script plugin.";
+        }
+
+        public void onModule(ScriptModule module) {
+            module.registerScript(DummyScript.NAME, DummyScriptFactory.class);
+        }
+    }
+
+    public static class DummyScriptFactory implements NativeScriptFactory {
+        public DummyScriptFactory() {}
+        @Override
+        public ExecutableScript newScript(@Nullable Map<String, Object> params) {
+            return new DummyScript();
+        }
+        @Override
+        public boolean needsScores() {
+            return false;
+        }
+    }
+
+    private static class DummyScript extends AbstractSearchScript {
+
+        public static final String NAME = "dummy";
+
+        @Override
+        public Object run() {
+            return null;
+        }
+    }
+
+    public static void main(String[] args) {
+        AggregatorBuilder<?> aggregation =
+                AggregationBuilders
+                        .ipRange("agg")
+                        .field("ip")
+                        .addUnboundedTo("192.168.1.0")             // from -infinity to 192.168.1.0 (excluded)
+                        .addRange("192.168.1.0", "192.168.2.0")    // from 192.168.1.0 to 192.168.2.0 (excluded)
+                        .addUnboundedFrom("192.168.2.0");          // from 192.168.2.0 to +infinity
+    }
+}

+ 90 - 0
core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpRangeTests.java

@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.elasticsearch.common.network.NetworkAddress;
+import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
+
+public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregatorBuilder> {
+
+    private static String randomIp(boolean v4) {
+        try {
+            if (v4) {
+                byte[] ipv4 = new byte[4];
+                random().nextBytes(ipv4);
+                return NetworkAddress.format(InetAddress.getByAddress(ipv4));
+            } else {
+                byte[] ipv6 = new byte[16];
+                random().nextBytes(ipv6);
+                return NetworkAddress.format(InetAddress.getByAddress(ipv6));
+            }
+        } catch (UnknownHostException e) {
+            throw new AssertionError();
+        }
+    }
+
+    @Override
+    protected IpRangeAggregatorBuilder createTestAggregatorBuilder() {
+        int numRanges = randomIntBetween(1, 10);
+        IpRangeAggregatorBuilder factory = new IpRangeAggregatorBuilder("foo");
+        for (int i = 0; i < numRanges; i++) {
+            String key = null;
+            if (randomBoolean()) {
+                key = randomAsciiOfLengthBetween(1, 20);
+            }
+            switch (randomInt(3)) {
+            case 0:
+                boolean v4 = randomBoolean();
+                int prefixLength;
+                if (v4) {
+                    prefixLength = randomInt(32);
+                } else {
+                    prefixLength = randomInt(128);
+                }
+                factory.addMaskRange(key, randomIp(v4) + "/" + prefixLength);
+                break;
+            case 1:
+                factory.addUnboundedFrom(key, randomIp(randomBoolean()));
+                break;
+            case 2:
+                factory.addUnboundedTo(key, randomIp(randomBoolean()));
+                break;
+            case 3:
+                factory.addRange(key, randomIp(randomBoolean()), randomIp(randomBoolean()));
+                break;
+            default:
+                fail();
+            }
+        }
+        factory.field(IP_FIELD_NAME);
+        if (randomBoolean()) {
+            factory.keyed(randomBoolean());
+        }
+        if (randomBoolean()) {
+            factory.missing(randomIp(randomBoolean()));
+        }
+        return factory;
+    }
+
+}

+ 142 - 0
core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java

@@ -0,0 +1,142 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations.bucket.range;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.TestUtil;
+import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator.SortedSetRangeLeafCollector;
+import org.elasticsearch.test.ESTestCase;
+
+import com.carrotsearch.hppc.LongHashSet;
+
+public class BinaryRangeAggregatorTests extends ESTestCase {
+
+    private static class FakeSortedSetDocValues extends SortedSetDocValues {
+
+        private final BytesRef[] terms;
+        long[] ords;
+        private int i;
+
+        FakeSortedSetDocValues(BytesRef[] terms) {
+            this.terms = terms;
+        }
+
+        @Override
+        public void setDocument(int docID) {
+            i = 0;
+        }
+
+        @Override
+        public long nextOrd() {
+            if (i == ords.length) {
+                return NO_MORE_ORDS;
+            }
+            return ords[i++];
+        }
+
+        @Override
+        public BytesRef lookupOrd(long ord) {
+            return terms[(int) ord];
+        }
+
+        @Override
+        public long getValueCount() {
+            return terms.length;
+        }
+
+    }
+
+    private void doTestSortedSetRangeLeafCollector(int maxNumValuesPerDoc) throws Exception {
+        final Set<BytesRef> termSet = new HashSet<>();
+        final int numTerms = TestUtil.nextInt(random(), maxNumValuesPerDoc, 100);
+        while (termSet.size() < numTerms) {
+            termSet.add(new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))));
+        }
+        final BytesRef[] terms = termSet.toArray(new BytesRef[0]);
+        Arrays.sort(terms);
+
+        final int numRanges = randomIntBetween(1, 10);
+        BinaryRangeAggregator.Range[] ranges = new BinaryRangeAggregator.Range[numRanges];
+        for (int i = 0; i < numRanges; ++i) {
+            ranges[i] = new BinaryRangeAggregator.Range(Integer.toString(i),
+                    randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))),
+                    randomBoolean() ? null : new BytesRef(TestUtil.randomSimpleString(random(), randomInt(2))));
+        }
+        Arrays.sort(ranges, BinaryRangeAggregator.RANGE_COMPARATOR);
+
+        FakeSortedSetDocValues values = new FakeSortedSetDocValues(terms);
+        final int[] counts = new int[ranges.length];
+        SortedSetRangeLeafCollector collector = new SortedSetRangeLeafCollector(values, ranges, null) {
+            @Override
+            protected void doCollect(LeafBucketCollector sub, int doc, long bucket) throws IOException {
+                counts[(int) bucket]++;
+            }
+        };
+
+        final int[] expectedCounts = new int[ranges.length];
+        final int maxDoc = randomIntBetween(5, 10);
+        for (int doc = 0; doc < maxDoc; ++doc) {
+            LongHashSet ordinalSet = new LongHashSet();
+            final int numValues = randomInt(maxNumValuesPerDoc);
+            while (ordinalSet.size() < numValues) {
+                ordinalSet.add(random().nextInt(terms.length));
+            }
+            final long[] ords = ordinalSet.toArray();
+            Arrays.sort(ords);
+            values.ords = ords;
+
+            // simulate aggregation
+            collector.collect(doc);
+
+            // now do it the naive way
+            for (int i = 0; i < ranges.length; ++i) {
+                for (long ord : ords) {
+                    BytesRef term = terms[(int) ord];
+                    if ((ranges[i].from == null || ranges[i].from.compareTo(term) <= 0)
+                            && (ranges[i].to == null || ranges[i].to.compareTo(term) > 0)) {
+                        expectedCounts[i]++;
+                        break;
+                    }
+                }
+            }
+        }
+        assertArrayEquals(expectedCounts, counts);
+    }
+
+    public void testSortedSetRangeLeafCollectorSingleValued() throws Exception {
+        final int iters = randomInt(10);
+        for (int i = 0; i < iters; ++i) {
+            doTestSortedSetRangeLeafCollector(1);
+        }
+    }
+
+    public void testSortedSetRangeLeafCollectorMultiValued() throws Exception {
+        final int iters = randomInt(10);
+        for (int i = 0; i < iters; ++i) {
+            doTestSortedSetRangeLeafCollector(5);
+        }
+    }
+}

+ 2 - 2
docs/java-api/aggregations/bucket/iprange-aggregation.asciidoc

@@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request:
 
 [source,java]
 --------------------------------------------------
-AggregationBuilder aggregation =
+AggregatorBuilder<?> aggregation =
         AggregationBuilders
                 .ipRange("agg")
                 .field("ip")
@@ -25,7 +25,7 @@ Note that you could also use ip masks as ranges:
 
 [source,java]
 --------------------------------------------------
-AggregationBuilder aggregation =
+AggregatorBuilder<?> aggregation =
         AggregationBuilders
                 .ipRange("agg")
                 .field("ip")

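As a rough sketch of the mask-based variant referenced above (the method names are taken from the new IpRangeIT test earlier in this commit; the CIDR values and field name are illustrative):

[source,java]
--------------------------------------------------
// Sketch only: addMaskRange takes a CIDR string, which also becomes the bucket key.
AggregatorBuilder<?> aggregation =
        AggregationBuilders
                .ipRange("agg")
                .field("ip")
                .addMaskRange("0.0.0.0/0")       // every IPv4 address
                .addMaskRange("2001:db8::/64");  // an IPv6 subnet
--------------------------------------------------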
+ 9 - 15
docs/reference/aggregations/bucket/iprange-aggregation.asciidoc

@@ -1,7 +1,7 @@
 [[search-aggregations-bucket-iprange-aggregation]]
-=== IPv4 Range Aggregation
+=== IP Range Aggregation
 
-Just like the dedicated <<search-aggregations-bucket-daterange-aggregation,date>> range aggregation, there is also a dedicated range aggregation for IPv4 typed fields:
+Just like the dedicated <<search-aggregations-bucket-daterange-aggregation,date>> range aggregation, there is also a dedicated range aggregation for IP typed fields:
 
 Example:
 
@@ -33,13 +33,11 @@ Response:
         "ip_ranges": {
             "buckets" : [
                 {
-                    "to": 167772165,
-                    "to_as_string": "10.0.0.5",
+                    "to": "10.0.0.5",
                     "doc_count": 4
                 },
                 {
-                    "from": 167772165,
-                    "from_as_string": "10.0.0.5",
+                    "from": "10.0.0.5",
                     "doc_count": 6
                 }
             ]
@@ -77,22 +75,18 @@ Response:
             "buckets": [
                 {
                     "key": "10.0.0.0/25",
-                    "from": 1.6777216E+8,
-                    "from_as_string": "10.0.0.0",
-                    "to": 167772287,
-                    "to_as_string": "10.0.0.127",
+                    "from": "10.0.0.0",
+                    "to": "10.0.0.127",
                     "doc_count": 127
                 },
                 {
                     "key": "10.0.0.127/25",
-                    "from": 1.6777216E+8,
-                    "from_as_string": "10.0.0.0",
-                    "to": 167772287,
-                    "to_as_string": "10.0.0.127",
+                    "from": "10.0.0.0",
+                    "to": "10.0.0.127",
                     "doc_count": 127
                 }
             ]
         }
     }
 }
---------------------------------------------------
+--------------------------------------------------

+ 7 - 0
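The request bodies for these examples are not part of the hunks above. As a minimal sketch of a request that would produce the first response (the aggregation name matches the response shown; the field name `ip` is illustrative, and the `from`/`to` range keys follow the range parsers in this commit):

[source,js]
--------------------------------------------------
{
    "aggs" : {
        "ip_ranges" : {
            "ip_range" : {
                "field" : "ip",
                "ranges" : [
                    { "to" : "10.0.0.5" },
                    { "from" : "10.0.0.5" }
                ]
            }
        }
    }
}
--------------------------------------------------

The CIDR variant replaces `from`/`to` with a `mask` key (for example `{ "mask" : "10.0.0.0/25" }`); the mask doubles as the bucket key, which is how the second response gets its `"key"` values.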
docs/reference/migration/migrate_5_0/aggregations.asciidoc

@@ -13,3 +13,10 @@ It is recommended to use <<keyword,`keyword`>> fields instead, either directly
 or through a <<multi-fields,multi-field>> if the numeric representation is
 still needed for sorting, range queries or numeric aggregations like
 <<search-aggregations-metrics-stats-aggregation,`stats` aggregations>>.
+
+==== `ip_range` aggregations
+
+Now that Elasticsearch supports IPv6, `ip` addresses are encoded in the index
+using a binary representation rather than a numeric representation. As a
+consequence, the output of `ip_range` aggregations no longer contains numeric
+values for `from` and `to`.

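In practice, as the reference documentation diff above shows, a bucket that used to be rendered with numeric bounds plus `*_as_string` companions now carries the IP-formatted strings directly (values mirror that diff):

[source,js]
--------------------------------------------------
// before (numeric representation)
{ "to": 167772165, "to_as_string": "10.0.0.5", "doc_count": 4 }
// after (binary representation used by 5.x `ip` fields)
{ "to": "10.0.0.5", "doc_count": 4 }
--------------------------------------------------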
+ 0 - 717
modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java

@@ -1,717 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.messy.tests;
-
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.index.mapper.ip.LegacyIpFieldMapper;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.groovy.GroovyPlugin;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.range.Range;
-import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
-import org.elasticsearch.search.aggregations.metrics.sum.Sum;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.hamcrest.Matchers;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.ipRange;
-import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.core.IsNull.notNullValue;
-import static org.hamcrest.core.IsNull.nullValue;
-
-/**
- *
- */
-@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/17700")
-@ESIntegTestCase.SuiteScopeTestCase
-public class IPv4RangeTests extends ESIntegTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(GroovyPlugin.class);
-    }
-
-    @Override
-    public void setupSuiteScopeCluster() throws Exception {
-        {
-            assertAcked(prepareCreate("idx")
-                    .addMapping("type", "ip", "type=ip", "ips", "type=ip"));
-            IndexRequestBuilder[] builders = new IndexRequestBuilder[255]; // TODO randomize the size?
-            // TODO randomize the values in the docs?
-            for (int i = 0; i < builders.length; i++) {
-                builders[i] = client().prepareIndex("idx", "type").setSource(jsonBuilder()
-                        .startObject()
-                        .field("ip", "10.0.0." + (i))
-                        .startArray("ips").value("10.0.0." + i).value("10.0.0." + (i + 1)).endArray()
-                        .field("value", (i < 100 ? 1 : i < 200 ? 2 : 3))        // 100 1's, 100 2's, and 55 3's
-                        .endObject());
-            }
-            indexRandom(true, builders);
-            createIndex("idx_unmapped");
-        }
-        {
-            assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "ip", "type=ip"));
-            List<IndexRequestBuilder> builders = new ArrayList<>();
-            for (int i = 0; i < 2; i++) {
-                builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
-                        .startObject()
-                        .field("value", i * 2)
-                        .field("ip", "10.0.0.5")
-                        .endObject()));
-            }
-            indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()]));
-        }
-        {
-            assertAcked(prepareCreate("range_idx")
-                    .addMapping("type", "ip", "type=ip", "ips", "type=ip"));
-            IndexRequestBuilder[] builders = new IndexRequestBuilder[4];
-
-            builders[0] = client().prepareIndex("range_idx", "type").setSource(jsonBuilder()
-                    .startObject()
-                    .field("ip", "0.0.0.0")
-                    .endObject());
-
-            builders[1] = client().prepareIndex("range_idx", "type").setSource(jsonBuilder()
-                    .startObject()
-                    .field("ip", "0.0.0.255")
-                    .endObject());
-
-            builders[2] = client().prepareIndex("range_idx", "type").setSource(jsonBuilder()
-                    .startObject()
-                    .field("ip", "255.255.255.0")
-                    .endObject());
-
-            builders[3] = client().prepareIndex("range_idx", "type").setSource(jsonBuilder()
-                    .startObject()
-                    .field("ip", "255.255.255.255")
-                    .endObject());
-
-            indexRandom(true, builders);
-        }
-        ensureSearchable();
-    }
-
-    public void testSingleValueField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addUnboundedTo("10.0.0.100")
-                        .addRange("10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("10.0.0.200"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(buckets.size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-    }
-
-    public void testSingleValueFieldWithMaskRange() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addMaskRange("10.0.0.0/25")
-                        .addMaskRange("10.0.0.128/25"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(2));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.0/25"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.0")));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.0"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.128")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.128"));
-        assertThat(bucket.getDocCount(), equalTo(128L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.128/25"));
-        assertThat((long) ((Number) bucket.getFrom()).doubleValue(), equalTo(LegacyIpFieldMapper.ipToLong("10.0.0.128")));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.128"));
-        assertThat((long) ((Number) bucket.getTo()).doubleValue(), equalTo(LegacyIpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side
-        assertThat(bucket.getToAsString(), equalTo("10.0.1.0"));
-        assertThat(bucket.getDocCount(), equalTo(127L)); // include 10.0.0.128
-    }
-
-    public void testSingleValueFieldWithCustomKey() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addUnboundedTo("r1", "10.0.0.100")
-                        .addRange("r2", "10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("r3", "10.0.0.200"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("r1"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("r2"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("r3"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-    }
-
-    public void testSingleValuedFieldWithSubAggregation() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addUnboundedTo("10.0.0.100")
-                        .addRange("10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("10.0.0.200")
-                        .subAggregation(sum("sum").field("value")))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-        Object[] propertiesKeys = (Object[]) range.getProperty("_key");
-        Object[] propertiesDocCounts = (Object[]) range.getProperty("_count");
-        Object[] propertiesCounts = (Object[]) range.getProperty("sum.value");
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-        Sum sum = bucket.getAggregations().get("sum");
-        assertThat(sum, notNullValue());
-        assertThat(sum.getValue(), equalTo((double) 100));
-        assertThat((String) propertiesKeys[0], equalTo("*-10.0.0.100"));
-        assertThat((long) propertiesDocCounts[0], equalTo(100L));
-        assertThat((double) propertiesCounts[0], equalTo((double) 100));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-        sum = bucket.getAggregations().get("sum");
-        assertThat(sum, notNullValue());
-        assertThat(sum.getValue(), equalTo((double) 200));
-        assertThat((String) propertiesKeys[1], equalTo("10.0.0.100-10.0.0.200"));
-        assertThat((long) propertiesDocCounts[1], equalTo(100L));
-        assertThat((double) propertiesCounts[1], equalTo((double) 200));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-        sum = bucket.getAggregations().get("sum");
-        assertThat(sum, notNullValue());
-        assertThat(sum.getValue(), equalTo((double) 55*3));
-        assertThat((String) propertiesKeys[2], equalTo("10.0.0.200-*"));
-        assertThat((long) propertiesDocCounts[2], equalTo(55L));
-        assertThat((double) propertiesCounts[2], equalTo((double) 55 * 3));
-    }
-
-    public void testSingleValuedFieldWithValueScript() throws Exception {
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        ipRange("range").field("ip").script(new Script("_value")).addUnboundedTo("10.0.0.100")
-                                .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();
-
-        assertSearchResponse(response);
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-    }
-
-    /*
-    Values of the multi-valued `ips` field, one pair per document. Each document
-    holds two consecutive addresses, so a document sitting on a range boundary is
-    counted in two buckets -- hence the off-by-one doc counts in the multi-valued
-    tests below compared to the single-valued ones:
-    [0, 1]
-    [1, 2]
-    [2, 3]
-    ...
-    [99, 100]
-    [100, 101]
-    [101, 102]
-    ...
-    [199, 200]
-    [200, 201]
-    [201, 202]
-    ...
-    [254, 255]
-    [255, 256]
-     */
-
-    public void testMultiValuedField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ips")
-                        .addUnboundedTo("10.0.0.100")
-                        .addRange("10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("10.0.0.200"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(101L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(56L));
-    }
-
-    public void testMultiValuedFieldWithValueScript() throws Exception {
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        ipRange("range").field("ips").script(new Script("_value")).addUnboundedTo("10.0.0.100")
-                                .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(101L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(56L));
-    }
-
-    public void testScriptSingleValue() throws Exception {
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        ipRange("range").script(new Script("doc['ip'].value")).addUnboundedTo("10.0.0.100")
-                                .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-    }
-
-    public void testScriptMultiValued() throws Exception {
-        SearchResponse response = client()
-                .prepareSearch("idx")
-                .addAggregation(
-                        ipRange("range").script(new Script("doc['ips'].values")).addUnboundedTo("10.0.0.100")
-                                .addRange("10.0.0.100", "10.0.0.200").addUnboundedFrom("10.0.0.200")).execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(101L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(56L));
-    }
-
-    public void testUnmapped() throws Exception {
-        SearchResponse response = client().prepareSearch("idx_unmapped")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addUnboundedTo("10.0.0.100")
-                        .addRange("10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("10.0.0.200"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(0L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(0L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(0L));
-    }
-
-    public void testPartiallyUnmapped() throws Exception {
-        SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addUnboundedTo("10.0.0.100")
-                        .addRange("10.0.0.100", "10.0.0.200")
-                        .addUnboundedFrom("10.0.0.200"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(3));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(1);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.100")));
-        assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(bucket.getDocCount(), equalTo(100L));
-
-        bucket = buckets.get(2);
-        assertThat(bucket, notNullValue());
-        assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
-        assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
-        assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) LegacyIpFieldMapper.ipToLong("10.0.0.200")));
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(bucket.getDocCount(), equalTo(55L));
-    }
-
-    public void testEmptyAggregation() throws Exception {
-        SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
-                .setQuery(matchAllQuery())
-                .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
-                        .subAggregation(ipRange("ip_range").field("ip").addRange("r1", "10.0.0.1", "10.0.0.10")))
-                .execute().actionGet();
-
-        assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
-        Histogram histo = searchResponse.getAggregations().get("histo");
-        assertThat(histo, Matchers.notNullValue());
-        Histogram.Bucket bucket = histo.getBuckets().get(1);
-        assertThat(bucket, Matchers.notNullValue());
-
-        Range range = bucket.getAggregations().get("ip_range");
-        // TODO: use diamond once JI-9019884 is fixed
-        List<Range.Bucket> buckets = new ArrayList<Range.Bucket>(range.getBuckets());
-        assertThat(range, Matchers.notNullValue());
-        assertThat(range.getName(), equalTo("ip_range"));
-        assertThat(buckets.size(), is(1));
-        assertThat((String) buckets.get(0).getKey(), equalTo("r1"));
-        assertThat(buckets.get(0).getFromAsString(), equalTo("10.0.0.1"));
-        assertThat(buckets.get(0).getToAsString(), equalTo("10.0.0.10"));
-        assertThat(buckets.get(0).getDocCount(), equalTo(0L));
-    }
-
-    public void testMask0() {
-        SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addMaskRange("0.0.0.0/0"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-        Range range = response.getAggregations().get("range");
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(1));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertThat((String) bucket.getKey(), equalTo("0.0.0.0/0"));
-        assertThat(bucket.getFromAsString(), nullValue());
-        assertThat(bucket.getToAsString(), nullValue());
-        assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
-        assertEquals(255L, bucket.getDocCount());
-    }
-
-    public void testMask0SpecialIps() {
-        SearchResponse response = client().prepareSearch("range_idx")
-                .addAggregation(ipRange("range")
-                        .field("ip")
-                        .addMaskRange("0.0.0.0/0"))
-                .execute().actionGet();
-
-        assertSearchResponse(response);
-
-        Range range = response.getAggregations().get("range");
-
-        assertThat(range, notNullValue());
-        assertThat(range.getName(), equalTo("range"));
-        List<? extends Bucket> buckets = range.getBuckets();
-        assertThat(range.getBuckets().size(), equalTo(1));
-
-        Range.Bucket bucket = buckets.get(0);
-        assertEquals(4L, bucket.getDocCount());
-    }
-}
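
The removed IPv4RangeTests above exercised the old long-based IPv4 range aggregation through its fluent builder. For orientation only, here is a minimal, hypothetical sketch of how the same kind of request could be issued against the replacement `ip`-field range aggregation. It reuses the fluent calls exactly as they appear in the removed test (`ipRange`, `field`, `addUnboundedTo`, `addRange`, `addUnboundedFrom`, `addMaskRange`, `prepareSearch`, `addAggregation`); the surrounding class, method, bucket keys and index name (`idx`) are illustrative assumptions and are not part of this change set.

import static org.elasticsearch.search.aggregations.AggregationBuilders.ipRange;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.bucket.range.Range;

// Hypothetical usage sketch; not code from this commit.
public class IpRangeUsageSketch {

    static void printBuckets(Client client) {
        SearchResponse response = client.prepareSearch("idx")
                .addAggregation(ipRange("range")
                        .field("ip")                                   // a field mapped as `ip`
                        .addUnboundedTo("low", "10.0.0.100")           // *-10.0.0.100
                        .addRange("mid", "10.0.0.100", "10.0.0.200")   // from inclusive, to exclusive
                        .addUnboundedFrom("high", "10.0.0.200")        // 10.0.0.200-*
                        .addMaskRange("10.0.0.0/25"))                  // CIDR-style bucket
                .execute().actionGet();

        Range range = response.getAggregations().get("range");
        for (Range.Bucket bucket : range.getBuckets()) {
            System.out.println(bucket.getKey() + " -> " + bucket.getDocCount());
        }
    }
}

Bucket keys and doc counts can then be asserted in the same style the removed test used.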