
Purge Rollup doc ID upgrade code from 8.0 (#41227)

This is no longer needed in 8.x: all jobs moving from 6.x to 7.x are forced to
upgrade their rollup IDs if they haven't already, so by the time we get to 8.x
every job will be on the new scheme.

This removes the old CRC ID generator and all the flags and state checking used
to manage it. We do need to keep the serialization check, since a mixed cluster
will have 7.x nodes sending/receiving the flag; that value is simply hardcoded
until 9.0, when it can be removed.
Zachary Tong · 6 years ago · commit 29bca00604
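
The serialization check the message refers to boils down to a version gate in RollupJobStatus: read and discard the legacy flag coming from pre-8.0 peers, and write a hardcoded `true` back to them. A minimal sketch of that pattern, simplified and paraphrased from the RollupJobStatus diff below (not the verbatim source; unrelated serialization is elided in comments):

    public RollupJobStatus(StreamInput in) throws IOException {
        state = IndexerState.fromStream(in);
        currentPosition = in.readBoolean() ? new TreeMap<>(in.readMap()) : null;
        if (in.getVersion().before(Version.V_8_0_0)) {
            // 7.x peers still send the legacy `upgradedDocumentID` flag; read and discard it.
            // This branch can be removed entirely in 9.0.
            in.readBoolean();
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // ... state and current position are written exactly as before ...
        if (out.getVersion().before(Version.V_8_0_0)) {
            // 7.x peers still expect the flag; always tell them the job is upgraded.
            // This branch can be removed entirely in 9.0.
            out.writeBoolean(true);
        }
    }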
20 changed files with 157 additions and 396 deletions
  1. +7 -19    client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java
  2. +0 -1     client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java
  3. +1 -5     client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java
  4. +3 -6     docs/reference/rollup/apis/get-job.asciidoc
  5. +16 -28   x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java
  6. +1 -1     x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java
  7. +1 -1     x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java
  8. +3 -10    x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java
  9. +58 -128  x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java
  10. +5 -20   x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java
  11. +10 -32  x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java
  12. +11 -58  x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
  13. +15 -17  x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
  14. +7 -19   x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java
  15. +18 -33  x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java
  16. +0 -3    x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml
  17. +0 -3    x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml
  18. +0 -1    x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml
  19. +1 -1    x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java
  20. +0 -10   x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java

+ 7 - 19
client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/GetRollupJobResponse.java

@@ -48,7 +48,7 @@ public class GetRollupJobResponse {
     static final ParseField STATE = new ParseField("job_state");
     static final ParseField CURRENT_POSITION = new ParseField("current_position");
     static final ParseField ROLLUPS_INDEXED = new ParseField("rollups_indexed");
-    static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");
+    private static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");
 
     private List<JobWrapper> jobs;
 
@@ -207,12 +207,10 @@ public class GetRollupJobResponse {
     public static class RollupJobStatus {
         private final IndexerState state;
         private final Map<String, Object> currentPosition;
-        private final boolean upgradedDocumentId;
 
-        RollupJobStatus(IndexerState state, Map<String, Object> position, boolean upgradedDocumentId) {
+        RollupJobStatus(IndexerState state, Map<String, Object> position) {
             this.state = state;
             this.currentPosition = position;
-            this.upgradedDocumentId = upgradedDocumentId;
         }
 
         /**
@@ -227,13 +225,6 @@ public class GetRollupJobResponse {
         public Map<String, Object> getCurrentPosition() {
             return currentPosition;
         }
-        /**
-         * Flag holds the state of the ID scheme, e.g. if it has been upgraded
-         * to the concatenation scheme.
-         */
-        public boolean getUpgradedDocumentId() {
-            return upgradedDocumentId;
-        }
 
         private static final ConstructingObjectParser<RollupJobStatus, Void> PARSER = new ConstructingObjectParser<>(
                 STATUS.getPreferredName(),
@@ -242,8 +233,7 @@ public class GetRollupJobResponse {
                     IndexerState state = (IndexerState) args[0];
                     @SuppressWarnings("unchecked") // We're careful of the contents
                     Map<String, Object> currentPosition = (Map<String, Object>) args[1];
-                    Boolean upgradedDocumentId = (Boolean) args[2];
-                    return new RollupJobStatus(state, currentPosition, upgradedDocumentId == null ? false : upgradedDocumentId);
+                    return new RollupJobStatus(state, currentPosition);
                 });
         static {
             PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
@@ -257,7 +247,7 @@ public class GetRollupJobResponse {
                 throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
             }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);
 
-            // Optional to accommodate old versions of state
+            // Optional to accommodate old versions of state, not used
             PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID);
         }
 
@@ -267,20 +257,18 @@ public class GetRollupJobResponse {
             if (other == null || getClass() != other.getClass()) return false;
             RollupJobStatus that = (RollupJobStatus) other;
             return Objects.equals(state, that.state)
-                    && Objects.equals(currentPosition, that.currentPosition)
-                    && upgradedDocumentId == that.upgradedDocumentId;
+                    && Objects.equals(currentPosition, that.currentPosition);
         }
 
         @Override
         public int hashCode() {
-            return Objects.hash(state, currentPosition, upgradedDocumentId);
+            return Objects.hash(state, currentPosition);
         }
 
         @Override
         public final String toString() {
             return "{stats=" + state
-                    + ", currentPosition=" + currentPosition
-                    + ", upgradedDocumentId=" + upgradedDocumentId + "}";
+                    + ", currentPosition=" + currentPosition + "}";
         }
     }
 }

+ 0 - 1
client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java

@@ -233,7 +233,6 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
         assertEquals(1, job.getStats().getOutputDocuments());
         assertThat(job.getStatus().getState(), either(equalTo(IndexerState.STARTED)).or(equalTo(IndexerState.INDEXING)));
         assertThat(job.getStatus().getCurrentPosition(), hasKey("date.date_histogram"));
-        assertEquals(true, job.getStatus().getUpgradedDocumentId());
 
         // stop the job
         StopRollupJobRequest stopRequest = new StopRollupJobRequest(id);

+ 1 - 5
client/rest-high-level/src/test/java/org/elasticsearch/client/rollup/GetRollupJobResponseTests.java

@@ -74,10 +74,7 @@ public class GetRollupJobResponseTests extends ESTestCase {
         while (currentPosition.size() < positions) {
             currentPosition.put(randomAlphaOfLength(2), randomAlphaOfLength(2));
         }
-        return new RollupJobStatus(
-            randomFrom(IndexerState.values()),
-            currentPosition,
-            randomBoolean());
+        return new RollupJobStatus(randomFrom(IndexerState.values()), currentPosition);
     }
 
     private void toXContent(GetRollupJobResponse response, XContentBuilder builder) throws IOException {
@@ -108,7 +105,6 @@ public class GetRollupJobResponseTests extends ESTestCase {
         if (status.getCurrentPosition() != null) {
             builder.field(GetRollupJobResponse.CURRENT_POSITION.getPreferredName(), status.getCurrentPosition());
         }
-        builder.field(GetRollupJobResponse.UPGRADED_DOC_ID.getPreferredName(), status.getUpgradedDocumentId());
         builder.endObject();
     }
 

+ 3 - 6
docs/reference/rollup/apis/get-job.asciidoc

@@ -94,8 +94,7 @@ Which will yield the following response:
             "page_size" : 1000
           },
           "status" : {
-            "job_state" : "stopped",
-            "upgraded_doc_id": true
+            "job_state" : "stopped"
           },
           "stats" : {
             "pages_processed" : 0,
@@ -220,8 +219,7 @@ Which will yield the following response:
             "page_size" : 1000
           },
           "status" : {
-            "job_state" : "stopped",
-            "upgraded_doc_id": true
+            "job_state" : "stopped"
           },
           "stats" : {
             "pages_processed" : 0,
@@ -275,8 +273,7 @@ Which will yield the following response:
             "page_size" : 1000
           },
           "status" : {
-            "job_state" : "stopped",
-            "upgraded_doc_id": true
+            "job_state" : "stopped"
           },
           "stats" : {
             "pages_processed" : 0,

+ 16 - 28
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java

@@ -41,19 +41,13 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
     @Nullable
     private final TreeMap<String, Object> currentPosition;
 
-    // Flag holds the state of the ID scheme, e.g. if it has been upgraded to the
-    // concatenation scheme.  See #32372 for more details
-    private boolean upgradedDocumentID;
-
     private static final ParseField STATE = new ParseField("job_state");
     private static final ParseField CURRENT_POSITION = new ParseField("current_position");
-    private static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");
+    private static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id"); // This can be removed in 9.0
 
     public static final ConstructingObjectParser<RollupJobStatus, Void> PARSER =
             new ConstructingObjectParser<>(NAME,
-                    args -> new RollupJobStatus((IndexerState) args[0],
-                        (HashMap<String, Object>) args[1],
-                        (Boolean)args[2]));
+                    args -> new RollupJobStatus((IndexerState) args[0], (HashMap<String, Object>) args[1]));
 
     static {
         PARSER.declareField(constructorArg(), p -> {
@@ -72,26 +66,23 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
             throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
         }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);
 
-        // Optional to accommodate old versions of state
+        // Optional to accommodate old versions of state, not used in ctor
         PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID);
     }
 
-    public RollupJobStatus(IndexerState state, @Nullable Map<String, Object> position,
-                           @Nullable Boolean upgradedDocumentID) {
+    public RollupJobStatus(IndexerState state, @Nullable Map<String, Object> position) {
         this.state = state;
         this.currentPosition = position == null ? null : new TreeMap<>(position);
-        this.upgradedDocumentID = upgradedDocumentID != null ? upgradedDocumentID : false;  //default to false if missing
     }
 
     public RollupJobStatus(StreamInput in) throws IOException {
         state = IndexerState.fromStream(in);
         currentPosition = in.readBoolean() ? new TreeMap<>(in.readMap()) : null;
-        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
-            upgradedDocumentID = in.readBoolean();
-        } else {
-            // If we're getting this job from a pre-6.4.0 node,
-            // it is using the old ID scheme
-            upgradedDocumentID = false;
+        if (in.getVersion().before(Version.V_8_0_0)) {
+            // 7.x nodes serialize `upgradedDocumentID` flag.  We don't need it anymore, but
+            // we need to pull it off the stream
+            // This can go away completely in 9.0
+            in.readBoolean();
         }
     }
 
@@ -103,10 +94,6 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
         return currentPosition;
     }
 
-    public boolean isUpgradedDocumentID() {
-        return upgradedDocumentID;
-    }
-
     public static RollupJobStatus fromXContent(XContentParser parser) {
         try {
             return PARSER.parse(parser, null);
@@ -122,7 +109,6 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
         if (currentPosition != null) {
             builder.field(CURRENT_POSITION.getPreferredName(), currentPosition);
         }
-        builder.field(UPGRADED_DOC_ID.getPreferredName(), upgradedDocumentID);
         builder.endObject();
         return builder;
     }
@@ -139,8 +125,11 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
         if (currentPosition != null) {
             out.writeMap(currentPosition);
         }
-        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
-            out.writeBoolean(upgradedDocumentID);
+        if (out.getVersion().before(Version.V_8_0_0)) {
+            // 7.x nodes expect a boolean `upgradedDocumentID` flag. We don't have it anymore,
+            // but we need to tell them we are upgraded in case there is a mixed cluster
+            // This can go away completely in 9.0
+            out.writeBoolean(true);
         }
     }
 
@@ -157,12 +146,11 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
         RollupJobStatus that = (RollupJobStatus) other;
 
         return Objects.equals(this.state, that.state)
-            && Objects.equals(this.currentPosition, that.currentPosition)
-            && Objects.equals(this.upgradedDocumentID, that.upgradedDocumentID);
+            && Objects.equals(this.currentPosition, that.currentPosition);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(state, currentPosition, upgradedDocumentID);
+        return Objects.hash(state, currentPosition);
     }
 }

+ 1 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java

@@ -44,6 +44,6 @@ public class JobWrapperSerializingTests extends AbstractSerializingTestCase<GetR
                 new RollupIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(),
                     randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
                     randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()),
-                new RollupJobStatus(state, Collections.emptyMap(), randomBoolean()));
+                new RollupJobStatus(state, Collections.emptyMap()));
     }
 }

+ 1 - 1
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java

@@ -34,7 +34,7 @@ public class RollupJobStatusTests extends AbstractSerializingTestCase<RollupJobS
 
     @Override
     protected RollupJobStatus createTestInstance() {
-        return new RollupJobStatus(randomFrom(IndexerState.values()), randomPosition(), randomBoolean());
+        return new RollupJobStatus(randomFrom(IndexerState.values()), randomPosition());
     }
 
     @Override

+ 3 - 10
x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java

@@ -43,11 +43,10 @@ class IndexerUtils {
      * @param stats            The stats accumulator for this job's task
      * @param groupConfig      The grouping configuration for the job
      * @param jobId            The ID for the job
-     * @param isUpgradedDocID  `true` if this job is using the new ID scheme
      * @return             A list of rolled documents derived from the response
      */
     static List<IndexRequest> processBuckets(CompositeAggregation agg, String rollupIndex, RollupIndexerJobStats stats,
-                                             GroupConfig groupConfig, String jobId, boolean isUpgradedDocID) {
+                                             GroupConfig groupConfig, String jobId) {
 
         logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]");
         return agg.getBuckets().stream().map(b ->{
@@ -58,20 +57,14 @@ class IndexerUtils {
             TreeMap<String, Object> keys = new TreeMap<>(b.getKey());
             List<Aggregation> metrics = b.getAggregations().asList();
 
-            RollupIDGenerator idGenerator;
-            if (isUpgradedDocID) {
-                idGenerator = new RollupIDGenerator.Murmur3(jobId);
-            } else  {
-                idGenerator = new RollupIDGenerator.CRC();
-            }
+            RollupIDGenerator idGenerator  = new RollupIDGenerator(jobId);
             Map<String, Object> doc = new HashMap<>(keys.size() + metrics.size());
 
             processKeys(keys, doc, b.getDocCount(), groupConfig, idGenerator);
             idGenerator.add(jobId);
             processMetrics(metrics, doc);
 
-            doc.put(RollupField.ROLLUP_META + "." + RollupField.VERSION_FIELD,
-                isUpgradedDocID ? Rollup.CURRENT_ROLLUP_VERSION : Rollup.ROLLUP_VERSION_V1);
+            doc.put(RollupField.ROLLUP_META + "." + RollupField.VERSION_FIELD, Rollup.CURRENT_ROLLUP_VERSION );
             doc.put(RollupField.ROLLUP_META + "." + RollupField.ID.getPreferredName(), jobId);
 
             IndexRequest request = new IndexRequest(rollupIndex, RollupField.TYPE_NAME, idGenerator.getID());

+ 58 - 128
x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIDGenerator.java

@@ -12,7 +12,6 @@ import org.elasticsearch.common.hash.MurmurHash3;
 
 import java.nio.charset.StandardCharsets;
 import java.util.Base64;
-import java.util.zip.CRC32;
 
 /**
  * The ID Generator creates a deterministic document ID to be used for rollup docs.
@@ -25,154 +24,85 @@ import java.util.zip.CRC32;
  *
  * NOTE: this class is not thread safe; there is no synchronization on the "generated"
  * flag and it is unsafe to use from multiple threads.
+ *
+ *  A 128 bit Murmur3 hash of all the keys concatenated together, base64-encoded, then
+ *  prepended with the job ID and a `$` delimiter
+ *
+ *  Null values are hashed as a (hopefully) unique string `__NULL_PLACEHOLDER__830f1de2__`
  */
-public abstract class RollupIDGenerator {
-    public abstract void add(Integer v);
-    public abstract void add(Long v);
-    public abstract void add(Double v);
-    public abstract void add(String v);
-    public abstract void addNull();
-    public abstract String getID();
-
+public class RollupIDGenerator {
     private boolean generated = false;
-
-    final boolean check(Object v) {
-        if (generated) {
-            throw new RuntimeException("Cannot update ID as it has already been generated.");
-        }
-        if (v == null) {
-            addNull();
-            return false;
-        }
-        return true;
+    private static final long SEED = 19;
+    private static final BytesRef DELIM = new BytesRef("$");
+    private static final BytesRef NULL_PLACEHOLDER = new BytesRef("__NULL_PLACEHOLDER__830f1de2__");
+    private final BytesRefBuilder id = new BytesRefBuilder();
+    private final String jobId;
+
+    RollupIDGenerator(String jobId) {
+        this.jobId = jobId;
     }
 
-    final void setFlag() {
-        if (generated) {
-            throw new RuntimeException("Cannot generate ID as it has already been generated.");
+    public void add(Integer v) {
+        if (check(v)) {
+            update(Numbers.intToBytes(v));
         }
-        generated = true;
     }
 
-    /**
-     * The "old" style ID used in Rollup V1.  A rolling 32 bit CRC.
-     *
-     * Null values are hashed as (int)19.
-     */
-    @Deprecated
-    public static class CRC extends RollupIDGenerator {
-        private final CRC32 crc = new CRC32();
-
-        @Override
-        public void add(Integer v) {
-            if (check(v)) {
-                crc.update(v);
-            }
-        }
-
-        @Override
-        public void add(Long v) {
-            if (check(v)) {
-                crc.update(Numbers.longToBytes(v), 0, 8);
-            }
-        }
-
-        @Override
-        public void add(Double v) {
-            if (check(v)) {
-                crc.update(Numbers.doubleToBytes(v), 0, 8);
-            }
-        }
-
-        @Override
-        public void add(String v) {
-            if (check(v)) {
-                byte[] vs = (v).getBytes(StandardCharsets.UTF_8);
-                crc.update(vs, 0, vs.length);
-            }
-        }
-
-        @Override
-        public void addNull() {
-            // Old ID scheme used (int)19 as the null placeholder.
-            // Not great but we're stuck with it :(
-            crc.update(19);
-        }
-
-        @Override
-        public String getID() {
-            setFlag();
-            return String.valueOf(crc.getValue());
+    public void add(Long v) {
+        if (check(v)) {
+            update(Numbers.longToBytes(v));
         }
     }
 
-    /**
-     * The "new" style ID, used in Rollup V2.  A 128 bit Murmur3 hash of
-     * all the keys concatenated together, base64-encoded, then prepended
-     * with the job ID and a `$` delimiter
-     *
-     * Null values are hashed as a (hopefully) unique string `__NULL_PLACEHOLDER__830f1de2__`
-     */
-    public static class Murmur3 extends RollupIDGenerator {
-        private static final long SEED = 19;
-        private static final BytesRef DELIM = new BytesRef("$");
-        private static final BytesRef NULL_PLACEHOLDER = new BytesRef("__NULL_PLACEHOLDER__830f1de2__");
-        private final BytesRefBuilder id = new BytesRefBuilder();
-        private final String jobId;
-
-        Murmur3(String jobId) {
-            this.jobId = jobId;
+    public void add(Double v) {
+        if (check(v)) {
+            update(Numbers.doubleToBytes(v));
         }
+    }
 
-        @Override
-        public void add(Integer v) {
-            if (check(v)) {
-                update(Numbers.intToBytes(v));
-            }
+    public void add(String v) {
+        if (check(v)) {
+            update((v).getBytes(StandardCharsets.UTF_8));
         }
+    }
 
-        @Override
-        public void add(Long v) {
-            if (check(v)) {
-                update(Numbers.longToBytes(v));
-            }
-        }
+    void addNull() {
+        // New ID scheme uses a (hopefully) unique placeholder for null
+        update(NULL_PLACEHOLDER.bytes);
+    }
 
-        @Override
-        public void add(Double v) {
-            if (check(v)) {
-                update(Numbers.doubleToBytes(v));
-            }
-        }
+    private void update(byte[] v) {
+        id.append(v, 0, v.length);
+        id.append(DELIM);
+    }
 
-        @Override
-        public void add(String v) {
-            if (check(v)) {
-                update((v).getBytes(StandardCharsets.UTF_8));
-            }
+    private boolean check(Object v) {
+        if (generated) {
+            throw new RuntimeException("Cannot update ID as it has already been generated.");
         }
-
-        @Override
-        public void addNull() {
-            // New ID scheme uses a (hopefully) unique placeholder for null
-            update(NULL_PLACEHOLDER.bytes);
+        if (v == null) {
+            addNull();
+            return false;
         }
+        return true;
+    }
 
-        private void update(byte[] v) {
-            id.append(v, 0, v.length);
-            id.append(DELIM);
+    private void setFlag() {
+        if (generated) {
+            throw new RuntimeException("Cannot generate ID as it has already been generated.");
         }
+        generated = true;
+    }
 
-        @Override
-        public String getID() {
-            setFlag();
-            MurmurHash3.Hash128 hasher
-                = MurmurHash3.hash128(id.bytes(), 0, id.length(), SEED, new MurmurHash3.Hash128());
-            byte[] hashedBytes = new byte[16];
-            System.arraycopy(Numbers.longToBytes(hasher.h1), 0, hashedBytes, 0, 8);
-            System.arraycopy(Numbers.longToBytes(hasher.h2), 0, hashedBytes, 8, 8);
-            return jobId + "$" + Base64.getUrlEncoder().withoutPadding().encodeToString(hashedBytes);
+    public String getID() {
+        setFlag();
+        MurmurHash3.Hash128 hasher
+            = MurmurHash3.hash128(id.bytes(), 0, id.length(), SEED, new MurmurHash3.Hash128());
+        byte[] hashedBytes = new byte[16];
+        System.arraycopy(Numbers.longToBytes(hasher.h1), 0, hashedBytes, 0, 8);
+        System.arraycopy(Numbers.longToBytes(hasher.h2), 0, hashedBytes, 8, 8);
+        return jobId + "$" + Base64.getUrlEncoder().withoutPadding().encodeToString(hashedBytes);
 
-        }
     }
+
 }
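
With the CRC variant gone, callers construct the single Murmur3-based generator directly. Roughly how IndexerUtils drives it per bucket (condensed from the IndexerUtils hunk above; illustrative, not verbatim):

    // One generator per rolled-up bucket, seeded with the job ID.
    RollupIDGenerator idGenerator = new RollupIDGenerator(jobId);

    // Each grouping key is fed to the generator (nulls become the fixed placeholder),
    // then the job ID itself is appended as the final key component.
    processKeys(keys, doc, bucket.getDocCount(), groupConfig, idGenerator);
    idGenerator.add(jobId);

    // The resulting ID is "<jobId>$" + base64url(murmur3_128(concatenated keys)),
    // used directly as the rollup document ID.
    IndexRequest request = new IndexRequest(rollupIndex, RollupField.TYPE_NAME, idGenerator.getID());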

+ 5 - 20
x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java

@@ -49,7 +49,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Executor;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.xpack.core.rollup.RollupField.formatFieldName;
@@ -61,7 +60,6 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer<Map<String, Obj
     static final String AGGREGATION_NAME = RollupField.NAME;
 
     private final RollupJob job;
-    protected final AtomicBoolean upgradedDocumentID;
     private final CompositeAggregationBuilder compositeBuilder;
     private long maxBoundary;
 
@@ -71,11 +69,9 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer<Map<String, Obj
      * @param job The rollup job
      * @param initialState Initial state for the indexer
      * @param initialPosition The last indexed bucket of the task
-     * @param upgradedDocumentID whether job has updated IDs (for BWC)
      */
-    RollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState, Map<String, Object> initialPosition,
-            AtomicBoolean upgradedDocumentID) {
-        this(executor, job, initialState, initialPosition, upgradedDocumentID, new RollupIndexerJobStats());
+    RollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState, Map<String, Object> initialPosition) {
+        this(executor, job, initialState, initialPosition, new RollupIndexerJobStats());
     }
 
     /**
@@ -84,22 +80,13 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer<Map<String, Obj
      * @param job The rollup job
      * @param initialState Initial state for the indexer
      * @param initialPosition The last indexed bucket of the task
-     * @param upgradedDocumentID whether job has updated IDs (for BWC)
      * @param jobStats jobstats instance for collecting stats
      */
-    RollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState, Map<String, Object> initialPosition,
-            AtomicBoolean upgradedDocumentID, RollupIndexerJobStats jobStats) {
+    RollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
+                  Map<String, Object> initialPosition, RollupIndexerJobStats jobStats) {
         super(executor, initialState, initialPosition, jobStats);
         this.job = job;
         this.compositeBuilder = createCompositeBuilder(job.getConfig());
-        this.upgradedDocumentID = upgradedDocumentID;
-    }
-
-    /**
-     * Returns if this job has upgraded it's ID scheme yet or not
-     */
-    public boolean isUpgradedDocumentID() {
-        return upgradedDocumentID.get();
     }
 
     @Override
@@ -127,8 +114,6 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer<Map<String, Obj
 
     @Override
     protected SearchRequest buildSearchRequest() {
-            // Indexer is single-threaded, and only place that the ID scheme can get upgraded is doSaveState(), so
-            // we can pass down the boolean value rather than the atomic here
         final Map<String, Object> position = getPosition();
         SearchSourceBuilder searchSource = new SearchSourceBuilder()
                 .size(0)
@@ -145,7 +130,7 @@ public abstract class RollupIndexer extends AsyncTwoPhaseIndexer<Map<String, Obj
 
         return new IterationResult<>(
                 IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), getStats(),
-                        job.getConfig().getGroupConfig(), job.getConfig().getId(), upgradedDocumentID.get()),
+                        job.getConfig().getGroupConfig(), job.getConfig().getId()),
                 response.afterKey(), response.getBuckets().isEmpty());
     }
 

+ 10 - 32
x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java

@@ -36,7 +36,6 @@ import org.elasticsearch.xpack.core.scheduler.SchedulerEngine;
 import org.elasticsearch.xpack.rollup.Rollup;
 
 import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
@@ -98,9 +97,8 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
         private final RollupJob job;
 
         ClientRollupPageManager(RollupJob job, IndexerState initialState, Map<String, Object> initialPosition,
-                                Client client, AtomicBoolean upgradedDocumentID) {
-            super(threadPool.executor(ThreadPool.Names.GENERIC), job, new AtomicReference<>(initialState),
-                initialPosition, upgradedDocumentID);
+                                Client client) {
+            super(threadPool.executor(ThreadPool.Names.GENERIC), job, new AtomicReference<>(initialState), initialPosition);
             this.client = client;
             this.job = job;
         }
@@ -124,16 +122,9 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
                 next.run();
             } else {
                 // Otherwise, attempt to persist our state
-                // Upgrade to the new ID scheme while we are at it
-                boolean oldState = upgradedDocumentID.getAndSet(true);
-                final RollupJobStatus state = new RollupJobStatus(indexerState, getPosition(), upgradedDocumentID.get());
+                final RollupJobStatus state = new RollupJobStatus(indexerState, getPosition());
                 logger.debug("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + indexerState.toString() + "]");
-                updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> {
-                    // We failed to update the persistent task for some reason,
-                    // set our flag back to what it was before
-                    upgradedDocumentID.set(oldState);
-                    next.run();
-                }));
+                updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> next.run()));
             }
         }
 
@@ -158,7 +149,6 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
     private final SchedulerEngine schedulerEngine;
     private final ThreadPool threadPool;
     private final RollupIndexer indexer;
-    private AtomicBoolean upgradedDocumentID;
 
     RollupJobTask(long id, String type, String action, TaskId parentTask, RollupJob job, RollupJobStatus state,
                   Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map<String, String> headers) {
@@ -167,9 +157,6 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
         this.schedulerEngine = schedulerEngine;
         this.threadPool = threadPool;
 
-        // We can assume the new ID scheme only for new jobs
-        this.upgradedDocumentID = new AtomicBoolean(true);
-
         // If status is not null, we are resuming rather than starting fresh.
         Map<String, Object> initialPosition = null;
         IndexerState initialState = IndexerState.STOPPED;
@@ -196,22 +183,14 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
             }
             initialPosition = state.getPosition();
 
-            // Since we have state, we are resuming a job/checkpoint.  Although we are resuming
-            // from something that was checkpointed, we can't guarantee it was the _final_ checkpoint
-            // before the job ended (e.g. it could have been STOPPING, still indexing and killed, leaving
-            // us with an interval of time partially indexed).
-            //
-            // To be safe, if we are resuming any job, use it's ID upgrade status.  It will only
-            // be true if it actually finished a full checkpoint.
-            this.upgradedDocumentID.set(state.isUpgradedDocumentID());
         }
         this.indexer = new ClientRollupPageManager(job, initialState, initialPosition,
-                new ParentTaskAssigningClient(client, new TaskId(getPersistentTaskId())), upgradedDocumentID);
+                new ParentTaskAssigningClient(client, new TaskId(getPersistentTaskId())));
     }
 
     @Override
     public Status getStatus() {
-        return new RollupJobStatus(indexer.getState(), indexer.getPosition(), upgradedDocumentID.get());
+        return new RollupJobStatus(indexer.getState(), indexer.getPosition());
     }
 
     /**
@@ -256,7 +235,7 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
         }
 
 
-        final RollupJobStatus state = new RollupJobStatus(IndexerState.STARTED, indexer.getPosition(), upgradedDocumentID.get());
+        final RollupJobStatus state = new RollupJobStatus(IndexerState.STARTED, indexer.getPosition());
         logger.debug("Updating state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "][" +
                 state.getPosition() + "]");
         updatePersistentTaskState(state,
@@ -300,11 +279,10 @@ public class RollupJobTask extends AllocatedPersistentTask implements SchedulerE
             case STOPPING:
                 // update the persistent state to STOPPED.  There are two scenarios and both are safe:
                 // 1. we persist STOPPED now, indexer continues a bit then sees the flag and checkpoints another
-                //    STOPPED with the more recent position.  That will also upgrade the ID scheme
+                //    STOPPED with the more recent position.
                 // 2. we persist STOPPED now, indexer continues a bit but then dies.  When/if we resume we'll pick up
-                //    at last checkpoint, overwrite some docs and eventually checkpoint.  At that time we'll also
-                //    upgrade the ID scheme
-                RollupJobStatus state = new RollupJobStatus(IndexerState.STOPPED, indexer.getPosition(), upgradedDocumentID.get());
+                //    at last checkpoint, overwrite some docs and eventually checkpoint.
+                RollupJobStatus state = new RollupJobStatus(IndexerState.STOPPED, indexer.getPosition());
                 updatePersistentTaskState(state,
                         ActionListener.wrap(
                                 (task) -> {

+ 11 - 58
x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java

@@ -32,8 +32,8 @@ import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceB
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
 import org.elasticsearch.xpack.core.rollup.RollupField;
@@ -119,7 +119,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         directory.close();
 
         final GroupConfig groupConfig = randomGroupConfig(random());
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo");
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -187,7 +187,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         directory.close();
 
         final GroupConfig groupConfig = randomGroupConfig(random());
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo");
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -243,7 +243,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         directory.close();
 
         final GroupConfig groupConfig = randomGroupConfig(random());
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo");
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -310,7 +310,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         directory.close();
 
         final GroupConfig groupConfig = randomGroupConfig(random());
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo");
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -321,53 +321,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         }
     }
 
-    public void testKeyOrderingOldID() {
-        CompositeAggregation composite = mock(CompositeAggregation.class);
-
-        when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
-            List<CompositeAggregation.Bucket> foos = new ArrayList<>();
-
-            CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class);
-            LinkedHashMap<String, Object> keys = new LinkedHashMap<>(3);
-            keys.put("foo.date_histogram", 123L);
-            keys.put("bar.terms", "baz");
-            keys.put("abc.histogram", 1.9);
-            keys = shuffleMap(keys, Collections.emptySet());
-            when(bucket.getKey()).thenReturn(keys);
-
-            List<Aggregation> list = new ArrayList<>(3);
-            InternalNumericMetricsAggregation.SingleValue mockAgg = mock(InternalNumericMetricsAggregation.SingleValue.class);
-            when(mockAgg.getName()).thenReturn("123");
-            list.add(mockAgg);
-
-            InternalNumericMetricsAggregation.SingleValue mockAgg2 = mock(InternalNumericMetricsAggregation.SingleValue.class);
-            when(mockAgg2.getName()).thenReturn("abc");
-            list.add(mockAgg2);
-
-            InternalNumericMetricsAggregation.SingleValue mockAgg3 = mock(InternalNumericMetricsAggregation.SingleValue.class);
-            when(mockAgg3.getName()).thenReturn("yay");
-            list.add(mockAgg3);
-
-            Collections.shuffle(list, random());
-
-            Aggregations aggs = new Aggregations(list);
-            when(bucket.getAggregations()).thenReturn(aggs);
-            when(bucket.getDocCount()).thenReturn(1L);
-
-            foos.add(bucket);
-
-            return foos;
-        });
-
-        // The content of the config don't actually matter for this test
-        // because the test is just looking at agg keys
-        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(123L, "abc"), null);
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo", false);
-        assertThat(docs.size(), equalTo(1));
-        assertThat(docs.get(0).id(), equalTo("1237859798"));
-    }
-
-    public void testKeyOrderingNewID() {
+    public void testKeyOrdering() {
         CompositeAggregation composite = mock(CompositeAggregation.class);
 
         when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
@@ -406,7 +360,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         });
 
         GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1L, "abc"), null);
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo", true);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo");
         assertThat(docs.size(), equalTo(1));
         assertThat(docs.get(0).id(), equalTo("foo$c9LcrFqeFW92uN_Z7sv1hA"));
     }
@@ -414,7 +368,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
     /*
         A test to make sure very long keys don't break the hash
      */
-    public void testKeyOrderingNewIDLong() {
+    public void testKeyOrderingLong() {
         CompositeAggregation composite = mock(CompositeAggregation.class);
 
         when(composite.getBuckets()).thenAnswer((Answer<List<CompositeAggregation.Bucket>>) invocationOnMock -> {
@@ -456,7 +410,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         });
 
         GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1, "abc"), null);
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo", true);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo");
         assertThat(docs.size(), equalTo(1));
         assertThat(docs.get(0).id(), equalTo("foo$VAFKZpyaEqYRPLyic57_qw"));
     }
@@ -483,8 +437,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         });
 
         GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), randomHistogramGroupConfig(random()), null);
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(),
-            groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupIndexerJobStats(), groupConfig, "foo");
         assertThat(docs.size(), equalTo(1));
         assertFalse(Strings.isNullOrEmpty(docs.get(0).id()));
     }
@@ -548,7 +501,7 @@ public class IndexerUtilsTests extends AggregatorTestCase {
         directory.close();
 
         final GroupConfig groupConfig = randomGroupConfig(random());
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo");
 
         assertThat(docs.size(), equalTo(6));
         for (IndexRequest doc : docs) {

+ 15 - 17
x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java

@@ -72,7 +72,6 @@ import java.util.concurrent.Executor;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;
 
@@ -83,7 +82,6 @@ import static org.hamcrest.number.OrderingComparison.greaterThan;
 public class RollupIndexerIndexingTests extends AggregatorTestCase {
     private QueryShardContext queryShardContext;
     private IndexSettings settings;
-    private final boolean newIDScheme = randomBoolean();
 
     @Before
     private void setup() {
@@ -113,7 +111,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", 3,
                             "the_histo.date_histogram.interval", "1ms",
                             "the_histo.date_histogram._count", 2,
@@ -126,7 +124,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", 7,
                             "the_histo.date_histogram.interval", "1ms",
                             "the_histo.date_histogram._count", 1,
@@ -171,7 +169,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1h",
                             "the_histo.date_histogram._count", 3,
@@ -189,7 +187,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T04:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1h",
                             "the_histo.date_histogram._count", 3,
@@ -207,7 +205,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T05:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1h",
                             "the_histo.date_histogram._count", 4,
@@ -225,7 +223,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T06:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1h",
                             "the_histo.date_histogram._count", 3,
@@ -243,7 +241,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T07:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1h",
                             "the_histo.date_histogram._count", 3,
@@ -289,7 +287,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueHours(5).getMillis()),
                             "the_histo.date_histogram.interval", "1m",
                             "the_histo.date_histogram._count", 2,
@@ -302,7 +300,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueMinutes(75).getMillis()),
                             "the_histo.date_histogram.interval", "1m",
                             "the_histo.date_histogram._count", 2,
@@ -315,7 +313,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", rounding.round(now - TimeValue.timeValueMinutes(61).getMillis()),
                             "the_histo.date_histogram.interval", "1m",
                             "the_histo.date_histogram._count", 1,
@@ -354,7 +352,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
                     assertThat(request.type(), equalTo("_doc"));
                     assertThat(request.sourceAsMap(), equalTo(
                             asMap(
-                                    "_rollup.version", newIDScheme ? 2 : 1,
+                                    "_rollup.version", 2,
                                     "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"),
                                     "the_histo.date_histogram.interval", "1d",
                                     "the_histo.date_histogram._count", 2,
@@ -373,7 +371,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-03-31T03:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1d",
                             "the_histo.date_histogram._count", 2,
@@ -386,7 +384,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
             assertThat(request.type(), equalTo("_doc"));
             assertThat(request.sourceAsMap(), equalTo(
                     asMap(
-                            "_rollup.version", newIDScheme ? 2 : 1,
+                            "_rollup.version", 2,
                             "the_histo.date_histogram.timestamp", asLong("2015-04-01T03:00:00.000Z"),
                             "the_histo.date_histogram.interval", "1d",
                             "the_histo.date_histogram._count", 5,
@@ -425,7 +423,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
 
                 Map<String, Object> source = ((IndexRequest) request).sourceAsMap();
 
-                assertThat(source.get("_rollup.version"), equalTo(newIDScheme ? 2 : 1));
+                assertThat(source.get("_rollup.version"), equalTo(2));
                 assertThat(source.get("ts.date_histogram.interval"), equalTo(timeInterval.toString()));
                 assertNotNull(source.get("the_avg.avg._count"));
                 assertNotNull(source.get("the_avg.avg.value"));
@@ -574,7 +572,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
 
         SyncRollupIndexer(Executor executor, RollupJob job, IndexSearcher searcher,
                           MappedFieldType[] fieldTypes, MappedFieldType timestampField) {
-            super(executor, job, new AtomicReference<>(IndexerState.STARTED), null, new AtomicBoolean(newIDScheme));
+            super(executor, job, new AtomicReference<>(IndexerState.STARTED), null);
             this.searcher = searcher;
             this.fieldTypes = fieldTypes;
             this.timestampField = timestampField;

+ 7 - 19
x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java

@@ -53,21 +53,15 @@ import static org.mockito.Mockito.spy;
 public class RollupIndexerStateTests extends ESTestCase {
     private static class EmptyRollupIndexer extends RollupIndexer {
         EmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
-                Map<String, Object> initialPosition, boolean upgraded, RollupIndexerJobStats stats) {
-            super(executor, job, initialState, initialPosition, new AtomicBoolean(upgraded), stats);
-        }
-
-        EmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
-                           Map<String, Object> initialPosition, boolean upgraded) {
-            super(executor, job, initialState, initialPosition, new AtomicBoolean(upgraded));
+                Map<String, Object> initialPosition, RollupIndexerJobStats stats) {
+            super(executor, job, initialState, initialPosition, stats);
         }
 
         EmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
                            Map<String, Object> initialPosition) {
-            this(executor, job, initialState, initialPosition, randomBoolean());
+            super(executor, job, initialState, initialPosition);
         }
 
-
         @Override
         protected void doNextSearch(SearchRequest request, ActionListener<SearchResponse> nextPhase) {
             // TODO Should use InternalComposite constructor but it is package protected in core.
@@ -140,19 +134,14 @@ public class RollupIndexerStateTests extends ESTestCase {
     private static class DelayedEmptyRollupIndexer extends EmptyRollupIndexer {
         protected CountDownLatch latch;
 
-        DelayedEmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
-                                  Map<String, Object> initialPosition, boolean upgraded) {
-            super(executor, job, initialState, initialPosition, upgraded);
-        }
-
         DelayedEmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
                                   Map<String, Object> initialPosition) {
-            super(executor, job, initialState, initialPosition, randomBoolean());
+            super(executor, job, initialState, initialPosition);
         }
 
         DelayedEmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
                 Map<String, Object> initialPosition, RollupIndexerJobStats stats) {
-            super(executor, job, initialState, initialPosition, randomBoolean(), stats);
+            super(executor, job, initialState, initialPosition, stats);
         }
 
         private CountDownLatch newLatch() {
@@ -180,7 +169,7 @@ public class RollupIndexerStateTests extends ESTestCase {
         NonEmptyRollupIndexer(Executor executor, RollupJob job, AtomicReference<IndexerState> initialState,
                               Map<String, Object> initialPosition, Function<SearchRequest, SearchResponse> searchFunction,
                               Function<BulkRequest, BulkResponse> bulkFunction, Consumer<Exception> failureConsumer) {
-            super(executor, job, initialState, initialPosition, new AtomicBoolean(randomBoolean()));
+            super(executor, job, initialState, initialPosition);
             this.searchFunction = searchFunction;
             this.bulkFunction = bulkFunction;
             this.failureConsumer = failureConsumer;
@@ -245,8 +234,7 @@ public class RollupIndexerStateTests extends ESTestCase {
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
-            RollupIndexer indexer = new EmptyRollupIndexer(executor, job, state, null, true);
-            assertTrue(indexer.isUpgradedDocumentID());
+            RollupIndexer indexer = new EmptyRollupIndexer(executor, job, state, null);
             indexer.start();
             assertThat(indexer.getState(), equalTo(IndexerState.STARTED));
             assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));

+ 18 - 33
x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java

@@ -63,7 +63,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testInitialStatusStopped() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -76,7 +76,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testInitialStatusAborting() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.ABORTING, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.ABORTING, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -89,7 +89,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testInitialStatusStopping() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPING, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPING, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -102,7 +102,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testInitialStatusStarted() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -115,7 +115,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testInitialStatusIndexingOldID() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"), false);
+        RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -124,12 +124,11 @@ public class RollupJobTaskTests extends ESTestCase {
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
         assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1));
         assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo"));
-        assertFalse(((RollupJobStatus) task.getStatus()).isUpgradedDocumentID());
     }
 
     public void testInitialStatusIndexingNewID() {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"), true);
+        RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -138,7 +137,6 @@ public class RollupJobTaskTests extends ESTestCase {
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
         assertThat(((RollupJobStatus)task.getStatus()).getPosition().size(), equalTo(1));
         assertTrue(((RollupJobStatus)task.getStatus()).getPosition().containsKey("foo"));
-        assertTrue(((RollupJobStatus) task.getStatus()).isUpgradedDocumentID());
     }
 
     public void testNoInitialStatus() {
@@ -150,12 +148,11 @@ public class RollupJobTaskTests extends ESTestCase {
             null, client, schedulerEngine, pool, Collections.emptyMap());
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED));
         assertNull(((RollupJobStatus)task.getStatus()).getPosition());
-        assertTrue(((RollupJobStatus) task.getStatus()).isUpgradedDocumentID());
     }
 
     public void testStartWhenStarted() throws InterruptedException {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -263,7 +260,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testStartWhenStopped() throws InterruptedException {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = mock(SchedulerEngine.class);
@@ -301,7 +298,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testTriggerUnrelated() throws InterruptedException {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"));
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = mock(SchedulerEngine.class);
@@ -426,10 +423,6 @@ public class RollupJobTaskTests extends ESTestCase {
                     listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1,
                         new PersistentTasksCustomMetaData.Assignment("foo", "foo")));
                 } else if (counterValue == 1) {
-                    // When we get here, doSaveState() was just invoked so we will have
-                    // have upgraded IDs
-                    RollupJobStatus s = (RollupJobStatus)this.getStatus();
-                    assertTrue(s.isUpgradedDocumentID());
                     finished.set(true);
                 }
 
@@ -438,11 +431,11 @@ public class RollupJobTaskTests extends ESTestCase {
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED));
         assertNull(((RollupJobStatus)task.getStatus()).getPosition());
 
-        task.start(new ActionListener<StartRollupJobAction.Response>() {
+        task.start(new ActionListener<>() {
             @Override
             public void onResponse(StartRollupJobAction.Response response) {
                 assertTrue(response.isStarted());
-                assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
+                assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
                 started.set(true);
             }
 
@@ -514,10 +507,6 @@ public class RollupJobTaskTests extends ESTestCase {
                     listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1,
                         new PersistentTasksCustomMetaData.Assignment("foo", "foo")));
                 } else if (counterValue == 1) {
-                    // When we get here, doSaveState() was just invoked so we will have
-                    // have upgraded IDs
-                    RollupJobStatus s = (RollupJobStatus)this.getStatus();
-                    assertTrue(s.isUpgradedDocumentID());
                     finished.set(true);
                 }
 
@@ -526,11 +515,11 @@ public class RollupJobTaskTests extends ESTestCase {
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED));
         assertNull(((RollupJobStatus)task.getStatus()).getPosition());
 
-        task.start(new ActionListener<StartRollupJobAction.Response>() {
+        task.start(new ActionListener<>() {
             @Override
             public void onResponse(StartRollupJobAction.Response response) {
                 assertTrue(response.isStarted());
-                assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
+                assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
                 started.set(true);
             }
 
@@ -590,7 +579,7 @@ public class RollupJobTaskTests extends ESTestCase {
         }).when(client).execute(anyObject(), anyObject(), anyObject());
 
         SchedulerEngine schedulerEngine = mock(SchedulerEngine.class);
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null, false);
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null);
         RollupJobTask task = new RollupJobTask(1, "type", "action", new TaskId("node", 123), job,
             status, client, schedulerEngine, pool, Collections.emptyMap()) {
             @Override
@@ -603,10 +592,6 @@ public class RollupJobTaskTests extends ESTestCase {
                     listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1,
                         new PersistentTasksCustomMetaData.Assignment("foo", "foo")));
                 } else if (counterValue == 1) {
-                    // When we get here, doSaveState() was just invoked so we will have
-                    // have upgraded IDs
-                    RollupJobStatus s = (RollupJobStatus)this.getStatus();
-                    assertTrue(s.isUpgradedDocumentID());
                     finished.set(true);
                 }
 
@@ -615,11 +600,11 @@ public class RollupJobTaskTests extends ESTestCase {
         assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STOPPED));
         assertNull(((RollupJobStatus)task.getStatus()).getPosition());
 
-        task.start(new ActionListener<StartRollupJobAction.Response>() {
+        task.start(new ActionListener<>() {
             @Override
             public void onResponse(StartRollupJobAction.Response response) {
                 assertTrue(response.isStarted());
-                assertThat(((RollupJobStatus)task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
+                assertThat(((RollupJobStatus) task.getStatus()).getIndexerState(), equalTo(IndexerState.STARTED));
                 started.set(true);
             }
 
@@ -642,7 +627,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testStopWhenStopped() throws InterruptedException {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null, randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
@@ -749,7 +734,7 @@ public class RollupJobTaskTests extends ESTestCase {
 
     public void testStopWhenAborting() throws InterruptedException {
         RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
-        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null, randomBoolean());
+        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(SETTINGS, Clock.systemUTC());
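For reference, after this change the job status carries only the indexer state and the current position; the upgraded-doc-ID boolean is gone from the constructor entirely. Below is a minimal usage sketch based on the calls visible in the diff above (the RollupJobStatus package comes from the file paths in this commit; the import location of IndexerState is assumed):

import java.util.Collections;
import java.util.Map;

import org.elasticsearch.xpack.core.indexing.IndexerState;      // assumed package for IndexerState
import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; // package taken from the diff paths

public class RollupJobStatusSketch {
    public static void main(String[] args) {
        // Only state and position remain; no third "upgraded" flag argument.
        Map<String, Object> position = Collections.singletonMap("foo", "bar");
        RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, position);

        // Accessors shown in the tests above.
        System.out.println(status.getIndexerState() + " at " + status.getPosition());
    }
}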

+ 0 - 3
x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml

@@ -77,7 +77,6 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 
   - do:
       rollup.delete_job:
@@ -130,7 +129,6 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 
   - do:
       rollup.delete_job:
@@ -183,7 +181,6 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 
   - do:
       rollup.start_job:

+ 0 - 3
x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml

@@ -78,7 +78,6 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 ---
 "Test get with no jobs":
 
@@ -189,7 +188,6 @@ setup:
             trigger_count: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
         - config:
             id: "bar"
             index_pattern: "bar"
@@ -221,6 +219,5 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 
 

+ 0 - 1
x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml

@@ -78,7 +78,6 @@ setup:
             search_total: 0
           status:
             job_state: "stopped"
-            upgraded_doc_id: true
 
 ---
 "Test put_job with existing name":

+ 1 - 1
x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java

@@ -14,13 +14,13 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.common.xcontent.ObjectPath;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.rest.ESRestTestCase;
-import org.elasticsearch.common.xcontent.ObjectPath;
 
 import java.io.IOException;
 import java.time.Instant;

+ 0 - 10
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RollupIDUpgradeIT.java

@@ -1,10 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.upgrades;
-
-public class RollupIDUpgradeIT extends AbstractUpgradeTestCase {
-
-}