|
@@ -148,15 +148,6 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- // Used for specially crafted gap tests
|
|
|
- builders.add(client().prepareIndex("idx", "gap_type").setSource(jsonBuilder().startObject()
|
|
|
- .field(INTERVAL_FIELD, 0)
|
|
|
- .field(GAP_FIELD, 1).endObject()));
|
|
|
-
|
|
|
- builders.add(client().prepareIndex("idx", "gap_type").setSource(jsonBuilder().startObject()
|
|
|
- .field(INTERVAL_FIELD, 49)
|
|
|
- .field(GAP_FIELD, 1).endObject()));
|
|
|
-
|
|
|
for (int i = -10; i < 10; i++) {
|
|
|
builders.add(client().prepareIndex("neg_idx", "type").setSource(
|
|
|
jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD, 10).endObject()));
|
|
@@ -204,31 +195,36 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
metricValue = target.equals(MetricTarget.VALUE) ? PipelineAggregationHelperTests.calculateMetric(docValues, metric) : mockBucket.count;
|
|
|
}
|
|
|
|
|
|
- window.offer(metricValue);
|
|
|
- switch (type) {
|
|
|
- case SIMPLE:
|
|
|
- values.add(simple(window));
|
|
|
- break;
|
|
|
- case LINEAR:
|
|
|
- values.add(linear(window));
|
|
|
- break;
|
|
|
- case EWMA:
|
|
|
- values.add(ewma(window));
|
|
|
- break;
|
|
|
- case HOLT:
|
|
|
- values.add(holt(window));
|
|
|
- break;
|
|
|
- case HOLT_WINTERS:
|
|
|
- // HW needs at least 2 periods of data to start
|
|
|
- if (window.size() >= period * 2) {
|
|
|
- values.add(holtWinters(window));
|
|
|
- } else {
|
|
|
- values.add(null);
|
|
|
- }
|
|
|
-
|
|
|
- break;
|
|
|
+ if (window.size() > 0) {
|
|
|
+ switch (type) {
|
|
|
+ case SIMPLE:
|
|
|
+ values.add(simple(window));
|
|
|
+ break;
|
|
|
+ case LINEAR:
|
|
|
+ values.add(linear(window));
|
|
|
+ break;
|
|
|
+ case EWMA:
|
|
|
+ values.add(ewma(window));
|
|
|
+ break;
|
|
|
+ case HOLT:
|
|
|
+ values.add(holt(window));
|
|
|
+ break;
|
|
|
+ case HOLT_WINTERS:
|
|
|
+ // HW needs at least 2 periods of data to start
|
|
|
+ if (window.size() >= period * 2) {
|
|
|
+ values.add(holtWinters(window));
|
|
|
+ } else {
|
|
|
+ values.add(null);
|
|
|
+ }
|
|
|
+
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ } else {
|
|
|
+ values.add(null);
|
|
|
}
|
|
|
|
|
|
+ window.offer(metricValue);
|
|
|
+
|
|
|
}
|
|
|
testValues.put(type.toString() + "_" + target.toString(), values);
|
|
|
}
|
|
@@ -685,7 +681,10 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(25));
|
|
|
|
|
|
- for (int i = 0; i < 20; i++) {
|
|
|
+ SimpleValue current = buckets.get(0).getAggregations().get("movavg_values");
|
|
|
+ assertThat(current, nullValue());
|
|
|
+
|
|
|
+ for (int i = 1; i < 20; i++) {
|
|
|
Bucket bucket = buckets.get(i);
|
|
|
assertThat(bucket, notNullValue());
|
|
|
assertThat((long) bucket.getKey(), equalTo((long) i - 10));
|
|
@@ -699,7 +698,6 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
}
|
|
|
|
|
|
for (int i = 20; i < 25; i++) {
|
|
|
- System.out.println(i);
|
|
|
Bucket bucket = buckets.get(i);
|
|
|
assertThat(bucket, notNullValue());
|
|
|
assertThat((long) bucket.getKey(), equalTo((long) i - 10));
|
|
@@ -877,350 +875,6 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- /**
|
|
|
- * This test uses the "gap" dataset, which is simply a doc at the beginning and end of
|
|
|
- * the INTERVAL_FIELD range. These docs have a value of 1 in GAP_FIELD.
|
|
|
- * This test verifies that large gaps don't break things, and that the mov avg roughly works
|
|
|
- * in the correct manner (checks direction of change, but not actual values)
|
|
|
- */
|
|
|
- @Test
|
|
|
- public void testGiantGap() {
|
|
|
-
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(min("the_metric").field(GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric"))
|
|
|
- ).execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50));
|
|
|
-
|
|
|
- double lastValue = ((SimpleValue)(buckets.get(0).getAggregations().get("movavg_values"))).value();
|
|
|
- assertThat(Double.compare(lastValue, 0.0d), greaterThanOrEqualTo(0));
|
|
|
-
|
|
|
- double currentValue;
|
|
|
- for (int i = 1; i < 49; i++) {
|
|
|
- SimpleValue current = buckets.get(i).getAggregations().get("movavg_values");
|
|
|
- if (current != null) {
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- // Since there are only two values in this test, at the beginning and end, the moving average should
|
|
|
- // decrease every step (until it reaches zero). Crude way to check that it's doing the right thing
|
|
|
- // without actually verifying the computed values. Should work for all types of moving avgs and
|
|
|
- // gap policies
|
|
|
- assertThat(Double.compare(lastValue, currentValue), greaterThanOrEqualTo(0));
|
|
|
- lastValue = currentValue;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
-
|
|
|
- SimpleValue current = buckets.get(49).getAggregations().get("movavg_values");
|
|
|
- assertThat(current, notNullValue());
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- if (gapPolicy.equals(BucketHelpers.GapPolicy.SKIP)) {
|
|
|
- // if we are ignoring, movavg could go up (holt) or stay the same (simple, linear, ewma)
|
|
|
- assertThat(Double.compare(lastValue, currentValue), lessThanOrEqualTo(0));
|
|
|
- } else if (gapPolicy.equals(BucketHelpers.GapPolicy.INSERT_ZEROS)) {
|
|
|
- // If we insert zeros, this should always increase the moving avg since the last bucket has a real value
|
|
|
- assertThat(Double.compare(lastValue, currentValue), equalTo(-1));
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- /**
|
|
|
- * Big gap, but with prediction at the end.
|
|
|
- */
|
|
|
- @Test
|
|
|
- public void testGiantGapWithPredict() {
|
|
|
- int numPredictions = randomIntBetween(1, 10);
|
|
|
-
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(min("the_metric").field(GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric")
|
|
|
- .predict(numPredictions))
|
|
|
- ).execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = response.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
-
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50 + numPredictions));
|
|
|
-
|
|
|
-
|
|
|
- double lastValue = ((SimpleValue)(buckets.get(0).getAggregations().get("movavg_values"))).value();
|
|
|
- assertThat(Double.compare(lastValue, 0.0d), greaterThanOrEqualTo(0));
|
|
|
-
|
|
|
- double currentValue;
|
|
|
- for (int i = 1; i < 49; i++) {
|
|
|
- SimpleValue current = buckets.get(i).getAggregations().get("movavg_values");
|
|
|
- if (current != null) {
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- // Since there are only two values in this test, at the beginning and end, the moving average should
|
|
|
- // decrease every step (until it reaches zero). Crude way to check that it's doing the right thing
|
|
|
- // without actually verifying the computed values. Should work for all types of moving avgs and
|
|
|
- // gap policies
|
|
|
- assertThat(Double.compare(lastValue, currentValue), greaterThanOrEqualTo(0));
|
|
|
- lastValue = currentValue;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- SimpleValue current = buckets.get(49).getAggregations().get("movavg_values");
|
|
|
- assertThat(current, notNullValue());
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- if (gapPolicy.equals(BucketHelpers.GapPolicy.SKIP)) {
|
|
|
- // if we are ignoring, movavg could go up (holt) or stay the same (simple, linear, ewma)
|
|
|
- assertThat(Double.compare(lastValue, currentValue), lessThanOrEqualTo(0));
|
|
|
- } else if (gapPolicy.equals(BucketHelpers.GapPolicy.INSERT_ZEROS)) {
|
|
|
- // If we insert zeros, this should always increase the moving avg since the last bucket has a real value
|
|
|
- assertThat(Double.compare(lastValue, currentValue), equalTo(-1));
|
|
|
- }
|
|
|
-
|
|
|
- // Now check predictions
|
|
|
- for (int i = 50; i < 50 + numPredictions; i++) {
|
|
|
- // Unclear at this point which direction the predictions will go, just verify they are
|
|
|
- // not null, and that we don't have the_metric anymore
|
|
|
- assertThat((buckets.get(i).getAggregations().get("movavg_values")), notNullValue());
|
|
|
- assertThat((buckets.get(i).getAggregations().get("the_metric")), nullValue());
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- /**
|
|
|
- * This test filters the "gap" data so that the first doc is excluded. This leaves a long stretch of empty
|
|
|
- * buckets until the final bucket. The moving avg should be zero up until the last bucket, and should work
|
|
|
- * regardless of mov avg type or gap policy.
|
|
|
- */
|
|
|
- @Test
|
|
|
- public void testLeftGap() {
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- filter("filtered").filter(new RangeQueryBuilder(INTERVAL_FIELD).from(1)).subAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(randomMetric("the_metric", GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric"))
|
|
|
- ))
|
|
|
- .execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalFilter filtered = response.getAggregations().get("filtered");
|
|
|
- assertThat(filtered, notNullValue());
|
|
|
- assertThat(filtered.getName(), equalTo("filtered"));
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = filtered.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50));
|
|
|
-
|
|
|
- double lastValue = 0;
|
|
|
-
|
|
|
- double currentValue;
|
|
|
- for (int i = 0; i < 50; i++) {
|
|
|
- SimpleValue current = buckets.get(i).getAggregations().get("movavg_values");
|
|
|
- if (current != null) {
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- assertThat(Double.compare(lastValue, currentValue), lessThanOrEqualTo(0));
|
|
|
- lastValue = currentValue;
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- @Test
|
|
|
- public void testLeftGapWithPredict() {
|
|
|
- int numPredictions = randomIntBetween(1, 10);
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- filter("filtered").filter(new RangeQueryBuilder(INTERVAL_FIELD).from(1)).subAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(randomMetric("the_metric", GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric")
|
|
|
- .predict(numPredictions))
|
|
|
- ))
|
|
|
- .execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalFilter filtered = response.getAggregations().get("filtered");
|
|
|
- assertThat(filtered, notNullValue());
|
|
|
- assertThat(filtered.getName(), equalTo("filtered"));
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = filtered.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
-
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50 + numPredictions));
|
|
|
-
|
|
|
-
|
|
|
- double lastValue = 0;
|
|
|
-
|
|
|
- double currentValue;
|
|
|
- for (int i = 0; i < 50; i++) {
|
|
|
- SimpleValue current = buckets.get(i).getAggregations().get("movavg_values");
|
|
|
- if (current != null) {
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- assertThat(Double.compare(lastValue, currentValue), lessThanOrEqualTo(0));
|
|
|
- lastValue = currentValue;
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- // Now check predictions
|
|
|
- for (int i = 50; i < 50 + numPredictions; i++) {
|
|
|
- // Unclear at this point which direction the predictions will go, just verify they are
|
|
|
- // not null, and that we don't have the_metric anymore
|
|
|
- assertThat((buckets.get(i).getAggregations().get("movavg_values")), notNullValue());
|
|
|
- assertThat((buckets.get(i).getAggregations().get("the_metric")), nullValue());
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- /**
|
|
|
- * This test filters the "gap" data so that the last doc is excluded. This leaves a long stretch of empty
|
|
|
- * buckets after the first bucket.
|
|
|
- */
|
|
|
- @Test
|
|
|
- public void testRightGap() {
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- filter("filtered").filter(new RangeQueryBuilder(INTERVAL_FIELD).to(1)).subAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(randomMetric("the_metric", GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric"))
|
|
|
- ))
|
|
|
- .execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalFilter filtered = response.getAggregations().get("filtered");
|
|
|
- assertThat(filtered, notNullValue());
|
|
|
- assertThat(filtered.getName(), equalTo("filtered"));
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = filtered.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50));
|
|
|
-
|
|
|
-
|
|
|
- SimpleValue current = buckets.get(0).getAggregations().get("movavg_values");
|
|
|
- assertThat(current, notNullValue());
|
|
|
-
|
|
|
- double lastValue = current.value();
|
|
|
-
|
|
|
- double currentValue;
|
|
|
- for (int i = 1; i < 50; i++) {
|
|
|
- current = buckets.get(i).getAggregations().get("movavg_values");
|
|
|
- if (current != null) {
|
|
|
- currentValue = current.value();
|
|
|
-
|
|
|
- assertThat(Double.compare(lastValue, currentValue), greaterThanOrEqualTo(0));
|
|
|
- lastValue = currentValue;
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- @Test
|
|
|
- public void testRightGapWithPredict() {
|
|
|
- int numPredictions = randomIntBetween(1, 10);
|
|
|
- SearchResponse response = client()
|
|
|
- .prepareSearch("idx").setTypes("gap_type")
|
|
|
- .addAggregation(
|
|
|
- filter("filtered").filter(new RangeQueryBuilder(INTERVAL_FIELD).to(1)).subAggregation(
|
|
|
- histogram("histo").field(INTERVAL_FIELD).interval(1).extendedBounds(0L, 49L)
|
|
|
- .subAggregation(randomMetric("the_metric", GAP_FIELD))
|
|
|
- .subAggregation(movingAvg("movavg_values")
|
|
|
- .window(windowSize)
|
|
|
- .modelBuilder(randomModelBuilder())
|
|
|
- .gapPolicy(gapPolicy)
|
|
|
- .setBucketsPaths("the_metric")
|
|
|
- .predict(numPredictions))
|
|
|
- ))
|
|
|
- .execute().actionGet();
|
|
|
-
|
|
|
- assertSearchResponse(response);
|
|
|
-
|
|
|
- InternalFilter filtered = response.getAggregations().get("filtered");
|
|
|
- assertThat(filtered, notNullValue());
|
|
|
- assertThat(filtered.getName(), equalTo("filtered"));
|
|
|
-
|
|
|
- InternalHistogram<Bucket> histo = filtered.getAggregations().get("histo");
|
|
|
- assertThat(histo, notNullValue());
|
|
|
- assertThat(histo.getName(), equalTo("histo"));
|
|
|
- List<? extends Bucket> buckets = histo.getBuckets();
|
|
|
-
|
|
|
- // If we are skipping, there will only be predictions at the very beginning and won't append any new buckets
|
|
|
- if (gapPolicy.equals(BucketHelpers.GapPolicy.SKIP)) {
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50));
|
|
|
- } else {
|
|
|
- assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(50 + numPredictions));
|
|
|
- }
|
|
|
-
|
|
|
- // Unlike left-gap tests, we cannot check the slope of prediction for right-gap. E.g. linear will
|
|
|
- // converge on zero, but holt-linear may trend upwards based on the first value
|
|
|
- // Just check for non-nullness
|
|
|
- SimpleValue current = buckets.get(0).getAggregations().get("movavg_values");
|
|
|
- assertThat(current, notNullValue());
|
|
|
-
|
|
|
- // If we are skipping, there will only be predictions at the very beginning and won't append any new buckets
|
|
|
- if (gapPolicy.equals(BucketHelpers.GapPolicy.SKIP)) {
|
|
|
- // Now check predictions
|
|
|
- for (int i = 1; i < 1 + numPredictions; i++) {
|
|
|
- // Unclear at this point which direction the predictions will go, just verify they are
|
|
|
- // not null
|
|
|
- assertThat(buckets.get(i).getDocCount(), equalTo(0L));
|
|
|
- assertThat((buckets.get(i).getAggregations().get("movavg_values")), notNullValue());
|
|
|
- }
|
|
|
- } else {
|
|
|
- // Otherwise we'll have some predictions at the end
|
|
|
- for (int i = 50; i < 50 + numPredictions; i++) {
|
|
|
- // Unclear at this point which direction the predictions will go, just verify they are
|
|
|
- // not null
|
|
|
- assertThat(buckets.get(i).getDocCount(), equalTo(0L));
|
|
|
- assertThat((buckets.get(i).getAggregations().get("movavg_values")), notNullValue());
|
|
|
- }
|
|
|
- }
|
|
|
-
|
|
|
- }
|
|
|
-
|
|
|
@Test
|
|
|
public void testHoltWintersNotEnoughData() {
|
|
|
try {
|
|
@@ -1288,8 +942,7 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
assertThat(avgAgg.value(), equalTo(10d));
|
|
|
|
|
|
SimpleValue movAvgAgg = bucket.getAggregations().get("avg_movavg");
|
|
|
- assertThat(movAvgAgg, notNullValue());
|
|
|
- assertThat(movAvgAgg.value(), equalTo(10d));
|
|
|
+ assertThat(movAvgAgg, nullValue());
|
|
|
|
|
|
Derivative deriv = bucket.getAggregations().get("deriv");
|
|
|
assertThat(deriv, nullValue());
|
|
@@ -1297,7 +950,28 @@ public class MovAvgTests extends ElasticsearchIntegrationTest {
|
|
|
SimpleValue derivMovAvg = bucket.getAggregations().get("deriv_movavg");
|
|
|
assertThat(derivMovAvg, nullValue());
|
|
|
|
|
|
- for (int i = 1; i < 12; i++) {
|
|
|
+ // Second bucket
|
|
|
+ bucket = buckets.get(1);
|
|
|
+ assertThat(bucket, notNullValue());
|
|
|
+ assertThat((long) bucket.getKey(), equalTo(1L));
|
|
|
+ assertThat(bucket.getDocCount(), equalTo(1L));
|
|
|
+
|
|
|
+ avgAgg = bucket.getAggregations().get("avg");
|
|
|
+ assertThat(avgAgg, notNullValue());
|
|
|
+ assertThat(avgAgg.value(), equalTo(10d));
|
|
|
+
|
|
|
+ deriv = bucket.getAggregations().get("deriv");
|
|
|
+ assertThat(deriv, notNullValue());
|
|
|
+ assertThat(deriv.value(), equalTo(0d));
|
|
|
+
|
|
|
+ movAvgAgg = bucket.getAggregations().get("avg_movavg");
|
|
|
+ assertThat(movAvgAgg, notNullValue());
|
|
|
+ assertThat(movAvgAgg.value(), equalTo(10d));
|
|
|
+
|
|
|
+ derivMovAvg = bucket.getAggregations().get("deriv_movavg");
|
|
|
+ assertThat(derivMovAvg, nullValue()); // still null because of movavg delay
|
|
|
+
|
|
|
+ for (int i = 2; i < 12; i++) {
|
|
|
bucket = buckets.get(i);
|
|
|
assertThat(bucket, notNullValue());
|
|
|
assertThat((long) bucket.getKey(), equalTo((long) i));
|