@@ -6,13 +6,15 @@
 */

package org.elasticsearch.xpack.rollup.v2;

-import org.apache.lucene.tests.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.cluster.stats.MappingVisitor;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
+import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
+import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
@@ -22,15 +24,15 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.datastreams.CreateDataStreamAction;
import org.elasticsearch.action.datastreams.GetDataStreamAction;
-import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.datastreams.DataStreamsPlugin;
@@ -42,17 +44,26 @@ import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesParams;
+import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite;
-import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.aggregations.Aggregation;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
+import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
+import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
+import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
+import org.elasticsearch.search.sort.SortBuilders;
+import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
@@ -72,17 +83,22 @@ import org.junit.Before;
import java.io.IOException;
import java.time.Instant;
import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;

+import static org.elasticsearch.index.mapper.TimeSeriesParams.TIME_SERIES_METRIC_PARAM;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;

public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {

@@ -92,6 +108,18 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
    public static final String FIELD_DIMENSION_2 = "dimension_long";
    public static final String FIELD_NUMERIC_1 = "numeric_1";
    public static final String FIELD_NUMERIC_2 = "numeric_2";
+    public static final String FIELD_METRIC_LABEL_DOUBLE = "metric_label_double";
+    public static final String FIELD_LABEL_DOUBLE = "label_double";
+    public static final String FIELD_LABEL_INTEGER = "label_integer";
+    public static final String FIELD_LABEL_KEYWORD = "label_keyword";
+    public static final String FIELD_LABEL_TEXT = "label_text";
+    public static final String FIELD_LABEL_BOOLEAN = "label_boolean";
+    public static final String FIELD_LABEL_IPv4_ADDRESS = "label_ipv4_address";
+    public static final String FIELD_LABEL_IPv6_ADDRESS = "label_ipv6_address";
+    public static final String FIELD_LABEL_DATE = "label_date";
+    public static final String FIELD_LABEL_UNMAPPED = "label_unmapped";
+    public static final String FIELD_LABEL_KEYWORD_ARRAY = "label_keyword_array";
+    public static final String FIELD_LABEL_DOUBLE_ARRAY = "label_double_array";

    private static final int MAX_DIM_VALUES = 5;
    private static final long MAX_NUM_BUCKETS = 10;
@@ -128,6 +156,13 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
            dimensionValues.add(randomAlphaOfLength(6));
        }

+    /**
+     * NOTE: here we map each numeric label field also as a (counter) metric.
+     * This is done for testing purposes only: there is no direct way to assert
+     * that labels are collected using their last value. The idea is to check
+     * that the value of the label (its last value) matches the value of the
+     * corresponding counter metric, which is also rolled up using its last value.
+     */
        client().admin()
            .indices()
            .prepareCreate(sourceIndex)
@@ -137,7 +172,10 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
                    .put("index.number_of_replicas", numOfReplicas)
                    .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES)
                    .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), List.of(FIELD_DIMENSION_1))
-                    .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), Instant.ofEpochMilli(startTime).toString())
+                    .put(
+                        IndexSettings.TIME_SERIES_START_TIME.getKey(),
+                        DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(Instant.ofEpochMilli(startTime).toEpochMilli())
+                    )
                    .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z")
                    .build()
            )
@@ -151,7 +189,29 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
                FIELD_NUMERIC_1,
                "type=long,time_series_metric=gauge",
                FIELD_NUMERIC_2,
-                "type=double,time_series_metric=counter"
+                "type=double,time_series_metric=counter",
+                FIELD_LABEL_DOUBLE,
+                "type=double",
+                FIELD_LABEL_INTEGER,
+                "type=integer",
+                FIELD_LABEL_KEYWORD,
+                "type=keyword",
+                FIELD_LABEL_TEXT,
+                "type=text",
+                FIELD_LABEL_BOOLEAN,
+                "type=boolean",
+                FIELD_METRIC_LABEL_DOUBLE, /* numeric label indexed as a metric */
+                "type=double,time_series_metric=counter",
+                FIELD_LABEL_IPv4_ADDRESS,
+                "type=ip",
+                FIELD_LABEL_IPv6_ADDRESS,
+                "type=ip",
+                FIELD_LABEL_DATE,
+                "type=date,format=date_optional_time",
+                FIELD_LABEL_KEYWORD_ARRAY,
+                "type=keyword",
+                FIELD_LABEL_DOUBLE_ARRAY,
+                "type=double"
            )
            .get();
    }
@@ -160,6 +220,22 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        RollupActionConfig config = new RollupActionConfig(randomInterval());
        SourceSupplier sourceSupplier = () -> {
            String ts = randomDateForInterval(config.getInterval());
+            double labelDoubleValue = DATE_FORMATTER.parseMillis(ts);
+            int labelIntegerValue = randomInt();
+            long labelLongValue = randomLong();
+            String labelIpv4Address = NetworkAddress.format(randomIp(true));
+            String labelIpv6Address = NetworkAddress.format(randomIp(false));
+            Date labelDateValue = randomDate();
+            int keywordArraySize = randomIntBetween(3, 10);
+            String[] keywordArray = new String[keywordArraySize];
+            for (int i = 0; i < keywordArraySize; ++i) {
+                keywordArray[i] = randomAlphaOfLength(10);
+            }
+            int doubleArraySize = randomIntBetween(3, 10);
+            double[] doubleArray = new double[doubleArraySize];
+            for (int i = 0; i < doubleArraySize; ++i) {
+                doubleArray[i] = randomDouble();
+            }
            return XContentFactory.jsonBuilder()
                .startObject()
                .field(FIELD_TIMESTAMP, ts)
@@ -167,6 +243,18 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
                // .field(FIELD_DIMENSION_2, randomIntBetween(1, 10)) //TODO: Fix _tsid format issue and then enable this
                .field(FIELD_NUMERIC_1, randomInt())
                .field(FIELD_NUMERIC_2, DATE_FORMATTER.parseMillis(ts))
+                .field(FIELD_LABEL_DOUBLE, labelDoubleValue)
+                .field(FIELD_METRIC_LABEL_DOUBLE, labelDoubleValue)
+                .field(FIELD_LABEL_INTEGER, labelIntegerValue)
+                .field(FIELD_LABEL_KEYWORD, ts)
+                .field(FIELD_LABEL_UNMAPPED, randomBoolean() ? labelLongValue : labelDoubleValue)
+                .field(FIELD_LABEL_TEXT, ts)
+                .field(FIELD_LABEL_BOOLEAN, randomBoolean())
+                .field(FIELD_LABEL_IPv4_ADDRESS, labelIpv4Address)
+                .field(FIELD_LABEL_IPv6_ADDRESS, labelIpv6Address)
+                .field(FIELD_LABEL_DATE, labelDateValue)
+                .field(FIELD_LABEL_KEYWORD_ARRAY, keywordArray)
+                .field(FIELD_LABEL_DOUBLE_ARRAY, doubleArray)
                .endObject();
        };
        bulkIndex(sourceSupplier);
@@ -175,6 +263,19 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        assertRollupIndex(sourceIndex, rollupIndex, config);
    }

+    private Date randomDate() {
+        int randomYear = randomIntBetween(1970, 2020);
+        int randomMonth = randomIntBetween(1, 12);
+        int randomDayOfMonth = randomIntBetween(1, 28);
+        int randomHour = randomIntBetween(0, 23);
+        int randomMinute = randomIntBetween(0, 59);
+        int randomSecond = randomIntBetween(0, 59);
+        return Date.from(
+            ZonedDateTime.of(randomYear, randomMonth, randomDayOfMonth, randomHour, randomMinute, randomSecond, 0, ZoneOffset.UTC)
+                .toInstant()
+        );
+    }
+
    public void testCopyIndexSettings() throws IOException {
        Settings settings = Settings.builder()
            .put(LifecycleSettings.LIFECYCLE_NAME, randomAlphaOfLength(5))
@@ -233,7 +334,6 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        assertThat(exception.getMessage(), containsString("rollup configuration is missing"));
    }

-    @LuceneTestCase.AwaitsFix(bugUrl = "TODO: Fix this")
    public void testRollupSparseMetrics() throws IOException {
        RollupActionConfig config = new RollupActionConfig(randomInterval());
        SourceSupplier sourceSupplier = () -> {
@@ -268,7 +368,7 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        assertThat(exception.getMessage(), containsString(rollupIndex));
    }

-    public void testRollupEmptyIndex() {
+    public void testRollupEmptyIndex() throws IOException {
        RollupActionConfig config = new RollupActionConfig(randomInterval());
        // Source index has been created in the setup() method
        prepareSourceIndex(sourceIndex);
@@ -439,69 +539,180 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        return response;
    }

+    private Aggregations aggregate(final String index, AggregationBuilder aggregationBuilder) {
+        return client().prepareSearch(index).addAggregation(aggregationBuilder).get().getAggregations();
+    }
+
    @SuppressWarnings("unchecked")
-    private void assertRollupIndex(String sourceIndex, String rollupIndex, RollupActionConfig config) {
+    private void assertRollupIndex(String sourceIndex, String rollupIndex, RollupActionConfig config) throws IOException {
        // Retrieve field information for the metric fields
-        FieldCapabilitiesResponse fieldCapsResponse = client().prepareFieldCaps(sourceIndex).setFields("*").get();
-        Map<String, TimeSeriesParams.MetricType> metricFields = fieldCapsResponse.get()
+        final GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(sourceIndex).get();
+        final Map<String, Object> sourceIndexMappings = getMappingsResponse.mappings()
            .entrySet()
            .stream()
-            .filter(e -> e.getValue().values().iterator().next().getMetricType() != null)
-            .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().values().iterator().next().getMetricType()));
-
-        final CompositeAggregationBuilder aggregation = buildCompositeAggs("resp", config, metricFields);
-        long numBuckets = 0;
-        InternalComposite origResp = client().prepareSearch(sourceIndex).addAggregation(aggregation).get().getAggregations().get("resp");
-        InternalComposite rollupResp = client().prepareSearch(rollupIndex).addAggregation(aggregation).get().getAggregations().get("resp");
-        while (origResp.afterKey() != null) {
-            numBuckets += origResp.getBuckets().size();
-            assertEquals(origResp, rollupResp);
-            aggregation.aggregateAfter(origResp.afterKey());
-            origResp = client().prepareSearch(sourceIndex).addAggregation(aggregation).get().getAggregations().get("resp");
-            rollupResp = client().prepareSearch(rollupIndex).addAggregation(aggregation).get().getAggregations().get("resp");
-        }
-        assertEquals(origResp, rollupResp);
+            .filter(entry -> sourceIndex.equals(entry.getKey()))
+            .findFirst()
+            .map(mappingMetadata -> mappingMetadata.getValue().sourceAsMap())
+            .orElseThrow(() -> new IllegalArgumentException("No mapping found for rollup source index [" + sourceIndex + "]"));
+
+        IndexMetadata indexMetadata = client().admin().cluster().prepareState().get().getState().getMetadata().index(sourceIndex);
+        TimeseriesFieldTypeHelper helper = new TimeseriesFieldTypeHelper.Builder(
+            getInstanceFromNode(IndicesService.class),
+            sourceIndexMappings,
+            indexMetadata
+        ).build(config.getTimestampField());
+        Map<String, TimeSeriesParams.MetricType> metricFields = new HashMap<>();
+        Map<String, String> labelFields = new HashMap<>();
+        MappingVisitor.visitMapping(sourceIndexMappings, (field, fieldMapping) -> {
+            if (helper.isTimeSeriesMetric(field, fieldMapping)) {
+                metricFields.put(field, TimeSeriesParams.MetricType.valueOf(fieldMapping.get(TIME_SERIES_METRIC_PARAM).toString()));
+            } else if (helper.isTimeSeriesLabel(field, fieldMapping)) {
+                labelFields.put(field, fieldMapping.get("type").toString());
+            }
+        });

-        SearchResponse resp = client().prepareSearch(rollupIndex).setTrackTotalHits(true).get();
-        assertThat(resp.getHits().getTotalHits().value, equalTo(numBuckets));
+        assertRollupIndexAggregations(sourceIndex, rollupIndex, config, metricFields, labelFields);

        GetIndexResponse indexSettingsResp = client().admin().indices().prepareGetIndex().addIndices(sourceIndex, rollupIndex).get();
-        // Assert rollup metadata are set in index settings
-        assertEquals("success", indexSettingsResp.getSetting(rollupIndex, "index.rollup.status"));
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "index.uuid"),
-            indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.uuid")
-        );
-        assertEquals(sourceIndex, indexSettingsResp.getSetting(rollupIndex, "index.rollup.source.name"));
-        assertEquals(indexSettingsResp.getSetting(sourceIndex, "index.mode"), indexSettingsResp.getSetting(rollupIndex, "index.mode"));
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "time_series.start_time"),
-            indexSettingsResp.getSetting(rollupIndex, "time_series.start_time")
-        );
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "time_series.end_time"),
-            indexSettingsResp.getSetting(rollupIndex, "time_series.end_time")
-        );
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "index.routing_path"),
-            indexSettingsResp.getSetting(rollupIndex, "index.routing_path")
-        );
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "index.number_of_shards"),
-            indexSettingsResp.getSetting(rollupIndex, "index.number_of_shards")
-        );
-        assertEquals(
-            indexSettingsResp.getSetting(sourceIndex, "index.number_of_replicas"),
-            indexSettingsResp.getSetting(rollupIndex, "index.number_of_replicas")
-        );
-        assertEquals("true", indexSettingsResp.getSetting(rollupIndex, "index.blocks.write"));
+        assertRollupIndexSettings(sourceIndex, rollupIndex, indexSettingsResp);

-        // Assert field mappings
        Map<String, Map<String, Object>> mappings = (Map<String, Map<String, Object>>) indexSettingsResp.getMappings()
            .get(rollupIndex)
            .getSourceAsMap()
            .get("properties");

+        assertFieldMappings(config, metricFields, mappings);
+
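+        // NOTE: label fields should keep the same mapping in the rollup index as in the source index.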
+        GetMappingsResponse indexMappings = client().admin()
+            .indices()
+            .getMappings(new GetMappingsRequest().indices(rollupIndex, sourceIndex))
+            .actionGet();
+        Map<String, String> rollupIndexProperties = (Map<String, String>) indexMappings.mappings()
+            .get(rollupIndex)
+            .sourceAsMap()
+            .get("properties");
+        Map<String, String> sourceIndexCloneProperties = (Map<String, String>) indexMappings.mappings()
+            .get(sourceIndex)
+            .sourceAsMap()
+            .get("properties");
+        List<Map.Entry<String, String>> labelFieldRollupIndexCloneProperties = (rollupIndexProperties.entrySet()
+            .stream()
+            .filter(entry -> labelFields.containsKey(entry.getKey()))
+            .toList());
+        List<Map.Entry<String, String>> labelFieldSourceIndexProperties = (sourceIndexCloneProperties.entrySet()
+            .stream()
+            .filter(entry -> labelFields.containsKey(entry.getKey()))
+            .toList());
+        assertEquals(labelFieldRollupIndexCloneProperties, labelFieldSourceIndexProperties);
+    }
+
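+    // NOTE: the same aggregation tree (terms on _tsid -> date_histogram -> per-field sub-aggregations)
+    // is run against both the source index and the rollup index, and the two responses are compared
+    // bucket by bucket.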
+    private void assertRollupIndexAggregations(
+        String sourceIndex,
+        String rollupIndex,
+        RollupActionConfig config,
+        Map<String, TimeSeriesParams.MetricType> metricFields,
+        Map<String, String> labelFields
+    ) {
+        final AggregationBuilder aggregations = buildAggregations(config, metricFields, labelFields, config.getTimestampField());
+        Aggregations origResp = aggregate(sourceIndex, aggregations);
+        Aggregations rollupResp = aggregate(rollupIndex, aggregations);
+        assertEquals(origResp.asMap().keySet(), rollupResp.asMap().keySet());
+
+        StringTerms originalTsIdTermsAggregation = (StringTerms) origResp.getAsMap().values().stream().toList().get(0);
+        StringTerms rollupTsIdTermsAggregation = (StringTerms) rollupResp.getAsMap().values().stream().toList().get(0);
+        originalTsIdTermsAggregation.getBuckets().forEach(originalBucket -> {
+
+            StringTerms.Bucket rollupBucket = rollupTsIdTermsAggregation.getBucketByKey(originalBucket.getKeyAsString());
+            assertEquals(originalBucket.getAggregations().asList().size(), rollupBucket.getAggregations().asList().size());
+
+            InternalDateHistogram originalDateHistogram = (InternalDateHistogram) originalBucket.getAggregations().asList().get(0);
+            InternalDateHistogram rollupDateHistogram = (InternalDateHistogram) rollupBucket.getAggregations().asList().get(0);
+            List<InternalDateHistogram.Bucket> originalDateHistogramBuckets = originalDateHistogram.getBuckets();
+            List<InternalDateHistogram.Bucket> rollupDateHistogramBuckets = rollupDateHistogram.getBuckets();
+            assertEquals(originalDateHistogramBuckets.size(), rollupDateHistogramBuckets.size());
+            assertEquals(
+                originalDateHistogramBuckets.stream().map(InternalDateHistogram.Bucket::getKeyAsString).collect(Collectors.toList()),
+                rollupDateHistogramBuckets.stream().map(InternalDateHistogram.Bucket::getKeyAsString).collect(Collectors.toList())
+            );
+
+            for (int i = 0; i < originalDateHistogramBuckets.size(); ++i) {
+                InternalDateHistogram.Bucket originalDateHistogramBucket = originalDateHistogramBuckets.get(i);
+                InternalDateHistogram.Bucket rollupDateHistogramBucket = rollupDateHistogramBuckets.get(i);
+                assertEquals(originalDateHistogramBucket.getKeyAsString(), rollupDateHistogramBucket.getKeyAsString());
+
+                Aggregations originalAggregations = originalDateHistogramBucket.getAggregations();
+                Aggregations rollupAggregations = rollupDateHistogramBucket.getAggregations();
+                assertEquals(originalAggregations.asList().size(), rollupAggregations.asList().size());
+
+                List<Aggregation> nonTopHitsOriginalAggregations = originalAggregations.asList()
+                    .stream()
+                    .filter(agg -> agg.getType().equals("top_hits") == false)
+                    .toList();
+                List<Aggregation> nonTopHitsRollupAggregations = rollupAggregations.asList()
+                    .stream()
+                    .filter(agg -> agg.getType().equals("top_hits") == false)
+                    .toList();
+                assertEquals(nonTopHitsOriginalAggregations, nonTopHitsRollupAggregations);
+
+                List<Aggregation> topHitsOriginalAggregations = originalAggregations.asList()
+                    .stream()
+                    .filter(agg -> agg.getType().equals("top_hits"))
+                    .toList();
+                List<Aggregation> topHitsRollupAggregations = rollupAggregations.asList()
+                    .stream()
+                    .filter(agg -> agg.getType().equals("top_hits"))
+                    .toList();
+                assertEquals(topHitsOriginalAggregations.size(), topHitsRollupAggregations.size());
+
+                for (int j = 0; j < topHitsRollupAggregations.size(); ++j) {
+                    InternalTopHits originalTopHits = (InternalTopHits) topHitsOriginalAggregations.get(j);
+                    InternalTopHits rollupTopHits = (InternalTopHits) topHitsRollupAggregations.get(j);
+                    SearchHit[] originalHits = originalTopHits.getHits().getHits();
+                    SearchHit[] rollupHits = rollupTopHits.getHits().getHits();
+                    assertEquals(originalHits.length, rollupHits.length);
+
+                    for (int k = 0; k < originalHits.length; ++k) {
+                        SearchHit originalHit = originalHits[k];
+                        SearchHit rollupHit = rollupHits[k];
+
+                        Map<String, DocumentField> originalHitDocumentFields = originalHit.getDocumentFields();
+                        Map<String, DocumentField> rollupHitDocumentFields = rollupHit.getDocumentFields();
+                        List<DocumentField> originalFields = originalHitDocumentFields.values().stream().toList();
+                        List<DocumentField> rollupFields = rollupHitDocumentFields.values().stream().toList();
+                        List<Object> originalFieldsList = originalFields.stream().flatMap(x -> x.getValues().stream()).toList();
+                        List<Object> rollupFieldsList = rollupFields.stream().flatMap(x -> x.getValues().stream()).toList();
+                        if (originalFieldsList.isEmpty() == false && rollupFieldsList.isEmpty() == false) {
+                            // NOTE: here we take advantage of the fact that a label field is indexed also as a metric of type
+                            // `counter`. This way we can actually check that the label value stored in the rollup index
+                            // is the last value (which is what we store for a metric of type counter) by comparing the metric
+                            // field value to the label field value.
+                            originalFieldsList.forEach(field -> assertTrue(rollupFieldsList.contains(field)));
+                            rollupFieldsList.forEach(field -> assertTrue(originalFieldsList.contains(field)));
+                            Object originalLabelValue = originalHit.getDocumentFields().values().stream().toList().get(0).getValue();
+                            Object rollupLabelValue = rollupHit.getDocumentFields().values().stream().toList().get(0).getValue();
+                            Optional<Aggregation> labelAsMetric = nonTopHitsOriginalAggregations.stream()
+                                .filter(agg -> agg.getName().equals("metric_" + rollupTopHits.getName()))
+                                .findFirst();
+                            // NOTE: this check is possible only if the label can be indexed as a metric (the label is a numeric field)
+                            if (labelAsMetric.isPresent()) {
+                                double metricValue = ((Max) labelAsMetric.get()).value();
+                                assertEquals(metricValue, rollupLabelValue);
+                                assertEquals(metricValue, originalLabelValue);
+                            }
+                        }
+                    }
+                }
+            }
+        });
+    }
+
+    @SuppressWarnings("unchecked")
+    private void assertFieldMappings(
+        RollupActionConfig config,
+        Map<String, TimeSeriesParams.MetricType> metricFields,
+        Map<String, Map<String, Object>> mappings
+    ) {
+        // Assert field mappings
        assertEquals(DateFieldMapper.CONTENT_TYPE, mappings.get(config.getTimestampField()).get("type"));
        Map<String, Object> dateTimeMeta = (Map<String, Object>) mappings.get(config.getTimestampField()).get("meta");
        assertEquals(config.getTimeZone(), dateTimeMeta.get("time_zone"));
@@ -517,38 +728,107 @@ public class RollupActionSingleNodeTests extends ESSingleNodeTestCase {
        });
    }

-    private CompositeAggregationBuilder buildCompositeAggs(
-        String name,
-        RollupActionConfig config,
-        Map<String, TimeSeriesParams.MetricType> metricFields
+    private void assertRollupIndexSettings(String sourceIndex, String rollupIndex, GetIndexResponse indexSettingsResp) {
+        // Assert rollup metadata are set in index settings
+        assertEquals("success", indexSettingsResp.getSetting(rollupIndex, IndexMetadata.INDEX_ROLLUP_STATUS_KEY));
+
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_INDEX_UUID));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, IndexMetadata.INDEX_ROLLUP_SOURCE_UUID_KEY));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_INDEX_UUID),
+            indexSettingsResp.getSetting(rollupIndex, IndexMetadata.INDEX_ROLLUP_SOURCE_UUID_KEY)
+        );
+
+        assertEquals(sourceIndex, indexSettingsResp.getSetting(rollupIndex, IndexMetadata.INDEX_ROLLUP_SOURCE_NAME_KEY));
+        assertEquals(indexSettingsResp.getSetting(sourceIndex, "index.mode"), indexSettingsResp.getSetting(rollupIndex, "index.mode"));
+
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, IndexSettings.TIME_SERIES_START_TIME.getKey()));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, IndexSettings.TIME_SERIES_START_TIME.getKey()));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, IndexSettings.TIME_SERIES_START_TIME.getKey()),
+            indexSettingsResp.getSetting(rollupIndex, IndexSettings.TIME_SERIES_START_TIME.getKey())
+        );
+
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, IndexSettings.TIME_SERIES_END_TIME.getKey()));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, IndexSettings.TIME_SERIES_END_TIME.getKey()));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, IndexSettings.TIME_SERIES_END_TIME.getKey()),
+            indexSettingsResp.getSetting(rollupIndex, IndexSettings.TIME_SERIES_END_TIME.getKey())
+        );
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, "index.routing_path"));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, "index.routing_path"));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, "index.routing_path"),
+            indexSettingsResp.getSetting(rollupIndex, "index.routing_path")
+        );
+
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS),
+            indexSettingsResp.getSetting(rollupIndex, IndexMetadata.SETTING_NUMBER_OF_SHARDS)
+        );
+
+        assertNotNull(indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS));
+        assertNotNull(indexSettingsResp.getSetting(rollupIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS));
+        assertEquals(
+            indexSettingsResp.getSetting(sourceIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS),
+            indexSettingsResp.getSetting(rollupIndex, IndexMetadata.SETTING_NUMBER_OF_REPLICAS)
+        );
+        assertEquals("true", indexSettingsResp.getSetting(rollupIndex, "index.blocks.write"));
+    }
+
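+    // Builds the aggregation used to compare the source and rollup indices: a terms aggregation on _tsid
+    // with a date_histogram sub-aggregation, plus one sub-aggregation per supported metric aggregation and
+    // a top_hits (latest document by timestamp) sub-aggregation per label field.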
+    private AggregationBuilder buildAggregations(
+        final RollupActionConfig config,
+        final Map<String, TimeSeriesParams.MetricType> metrics,
+        final Map<String, String> labels,
+        final String timestampField
    ) {
-        List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>();
-        // For time series indices, we use the _tsid field for the terms aggregation
-        sources.add(new TermsValuesSourceBuilder("tsid").field(TimeSeriesIdFieldMapper.NAME));
-
-        DateHistogramValuesSourceBuilder dateHisto = new DateHistogramValuesSourceBuilder(config.getTimestampField());
-        dateHisto.field(config.getTimestampField());
+        final TermsAggregationBuilder tsidAggregation = new TermsAggregationBuilder("tsid").field(TimeSeriesIdFieldMapper.NAME)
+            .size(10_000);
+        final DateHistogramAggregationBuilder dateHistogramAggregation = new DateHistogramAggregationBuilder("timestamp").field(
+            config.getTimestampField()
+        ).fixedInterval(config.getInterval());
        if (config.getTimeZone() != null) {
-            dateHisto.timeZone(ZoneId.of(config.getTimeZone()));
+            dateHistogramAggregation.timeZone(ZoneId.of(config.getTimeZone()));
        }
-        dateHisto.fixedInterval(config.getInterval());
-        sources.add(dateHisto);
-
-        final CompositeAggregationBuilder composite = new CompositeAggregationBuilder(name, sources).size(10);
-        metricFields.forEach((fieldname, metricType) -> {
-            for (String agg : metricType.supportedAggs()) {
-                switch (agg) {
-                    case "min" -> composite.subAggregation(new MinAggregationBuilder(fieldname + "_" + agg).field(fieldname));
-                    case "max", "last_value" -> composite.subAggregation(new MaxAggregationBuilder(fieldname + "_" + agg).field(fieldname));
-                    case "sum" -> composite.subAggregation(new SumAggregationBuilder(fieldname + "_" + agg).field(fieldname));
-                    case "value_count" -> composite.subAggregation(
-                        new ValueCountAggregationBuilder(fieldname + "_" + agg).field(fieldname)
+        tsidAggregation.subAggregation(dateHistogramAggregation);
+
+        metrics.forEach((fieldName, metricType) -> {
+            for (final String supportedAggregation : metricType.supportedAggs()) {
+                switch (supportedAggregation) {
+                    case "min" -> dateHistogramAggregation.subAggregation(
+                        new MinAggregationBuilder(fieldName + "_" + supportedAggregation).field(fieldName)
+                    );
+                    case "max" -> dateHistogramAggregation.subAggregation(
+                        new MaxAggregationBuilder(fieldName + "_" + supportedAggregation).field(fieldName)
+                    );
+                    case "last_value" -> dateHistogramAggregation.subAggregation(
+                        new TopHitsAggregationBuilder(fieldName + "_" + supportedAggregation).sort(
+                            SortBuilders.fieldSort(timestampField).order(SortOrder.DESC)
+                        ).size(1).fetchField(fieldName)
                    );
-                    default -> throw new IllegalArgumentException("Unsupported metric type [" + agg + "]");
+                    case "sum" -> dateHistogramAggregation.subAggregation(
+                        new SumAggregationBuilder(fieldName + "_" + supportedAggregation).field(fieldName)
+                    );
+                    case "value_count" -> dateHistogramAggregation.subAggregation(
+                        new ValueCountAggregationBuilder(fieldName + "_" + supportedAggregation).field(fieldName)
+                    );
+                    default -> throw new IllegalArgumentException("Unsupported metric type [" + supportedAggregation + "]");
                }
            }
        });
-        return composite;
+
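+        // NOTE: labels are rolled up using their last value, so a top_hits aggregation sorted by
+        // timestamp (descending) with size 1 is used to fetch the expected label value.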
+        labels.forEach((fieldName, type) -> {
+            dateHistogramAggregation.subAggregation(
+                new TopHitsAggregationBuilder(fieldName + "_last_value").sort(SortBuilders.fieldSort(timestampField).order(SortOrder.DESC))
+                    .size(1)
+                    .fetchField(fieldName)
+            );
+        });
+
+        return tsidAggregation;
    }

    @FunctionalInterface