@@ -19,6 +19,12 @@
 
 package org.elasticsearch.index.get;
 
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.IndexableFieldType;
+import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Term;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Nullable;
@@ -37,24 +43,29 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.Engine;
+import org.elasticsearch.index.engine.TranslogLeafReader;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
 import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
+import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.shard.AbstractIndexShardComponent;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
 
 import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
 import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
@@ -81,16 +92,16 @@ public final class ShardGetService extends AbstractIndexShardComponent {
     public GetResult get(String id, String[] gFields, boolean realtime, long version,
                          VersionType versionType, FetchSourceContext fetchSourceContext) {
         return
-            get(id, gFields, realtime, version, versionType, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, fetchSourceContext, false);
+            get(id, gFields, realtime, version, versionType, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM, fetchSourceContext);
     }
 
     private GetResult get(String id, String[] gFields, boolean realtime, long version, VersionType versionType,
-                          long ifSeqNo, long ifPrimaryTerm, FetchSourceContext fetchSourceContext, boolean readFromTranslog) {
+                          long ifSeqNo, long ifPrimaryTerm, FetchSourceContext fetchSourceContext) {
         currentMetric.inc();
         try {
             long now = System.nanoTime();
             GetResult getResult =
-                innerGet(id, gFields, realtime, version, versionType, ifSeqNo, ifPrimaryTerm, fetchSourceContext, readFromTranslog);
+                innerGet(id, gFields, realtime, version, versionType, ifSeqNo, ifPrimaryTerm, fetchSourceContext);
 
             if (getResult.isExists()) {
                 existsMetric.inc(System.nanoTime() - now);
@@ -105,7 +116,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
 
     public GetResult getForUpdate(String id, long ifSeqNo, long ifPrimaryTerm) {
         return get(id, new String[]{RoutingFieldMapper.NAME}, true,
-            Versions.MATCH_ANY, VersionType.INTERNAL, ifSeqNo, ifPrimaryTerm, FetchSourceContext.FETCH_SOURCE, true);
+            Versions.MATCH_ANY, VersionType.INTERNAL, ifSeqNo, ifPrimaryTerm, FetchSourceContext.FETCH_SOURCE);
     }
 
     /**
@@ -156,13 +167,13 @@ public final class ShardGetService extends AbstractIndexShardComponent {
     }
 
     private GetResult innerGet(String id, String[] gFields, boolean realtime, long version, VersionType versionType,
-                               long ifSeqNo, long ifPrimaryTerm, FetchSourceContext fetchSourceContext, boolean readFromTranslog) {
+                               long ifSeqNo, long ifPrimaryTerm, FetchSourceContext fetchSourceContext) {
         fetchSourceContext = normalizeFetchSourceContent(fetchSourceContext, gFields);
 
         Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
-        Engine.GetResult get = indexShard.get(new Engine.Get(realtime, readFromTranslog, id, uidTerm)
+        Engine.GetResult get = indexShard.get(new Engine.Get(realtime, realtime, id, uidTerm)
             .version(version).versionType(versionType).setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm));
-        assert get.isFromTranslog() == false || readFromTranslog : "should only read from translog if explicitly enabled";
+        assert get.isFromTranslog() == false || realtime : "should only read from translog if realtime enabled";
         if (get.exists() == false) {
             get.close();
         }
@@ -179,13 +190,33 @@
         }
     }
 
-    private GetResult innerGetLoadFromStoredFields(String id, String[] gFields, FetchSourceContext fetchSourceContext,
-                                                   Engine.GetResult get, MapperService mapperService) {
+    private GetResult innerGetLoadFromStoredFields(String id, String[] storedFields, FetchSourceContext fetchSourceContext,
+                                                   Engine.GetResult get, MapperService mapperService) {
+        assert get.exists() : "method should only be called if document could be retrieved";
+
+        // check first if stored fields to be loaded don't contain an object field
+        DocumentMapper docMapper = mapperService.documentMapper();
+        if (storedFields != null) {
+            for (String field : storedFields) {
+                Mapper fieldMapper = docMapper.mappers().getMapper(field);
+                if (fieldMapper == null) {
+                    if (docMapper.objectMappers().get(field) != null) {
+                        // Only fail if we know it is a object field, missing paths / fields shouldn't fail.
+                        throw new IllegalArgumentException("field [" + field + "] isn't a leaf field");
+                    }
+                }
+            }
+        }
+
         Map<String, DocumentField> documentFields = null;
         Map<String, DocumentField> metaDataFields = null;
         BytesReference source = null;
         DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
-        FieldsVisitor fieldVisitor = buildFieldsVisitors(gFields, fetchSourceContext);
+        // force fetching source if we read from translog and need to recreate stored fields
+        boolean forceSourceForComputingTranslogStoredFields = get.isFromTranslog() && storedFields != null &&
+            Stream.of(storedFields).anyMatch(f -> TranslogLeafReader.ALL_FIELD_NAMES.contains(f) == false);
+        FieldsVisitor fieldVisitor = buildFieldsVisitors(storedFields,
+            forceSourceForComputingTranslogStoredFields ? FetchSourceContext.FETCH_SOURCE : fetchSourceContext);
         if (fieldVisitor != null) {
             try {
                 docIdAndVersion.reader.document(docIdAndVersion.docId, fieldVisitor);
@@ -194,6 +225,54 @@
             }
             source = fieldVisitor.source();
 
+            // in case we read from translog, some extra steps are needed to make _source consistent and to load stored fields
+            if (get.isFromTranslog()) {
+                // Fast path: if only asked for the source or stored fields that have been already provided by TranslogLeafReader,
+                // just make source consistent by reapplying source filters from mapping (possibly also nulling the source)
+                if (forceSourceForComputingTranslogStoredFields == false) {
+                    try {
+                        source = indexShard.mapperService().documentMapper().sourceMapper().applyFilters(source, null);
+                    } catch (IOException e) {
+                        throw new ElasticsearchException("Failed to reapply filters for [" + id + "] after reading from translog", e);
+                    }
+                } else {
+                    // Slow path: recreate stored fields from original source
+                    assert source != null : "original source in translog must exist";
+                    SourceToParse sourceToParse = new SourceToParse(shardId.getIndexName(), id, source, XContentHelper.xContentType(source),
+                        fieldVisitor.routing());
+                    ParsedDocument doc = indexShard.mapperService().documentMapper().parse(sourceToParse);
+                    assert doc.dynamicMappingsUpdate() == null : "mapping updates should not be required on already-indexed doc";
+                    // update special fields
+                    doc.updateSeqID(docIdAndVersion.seqNo, docIdAndVersion.primaryTerm);
+                    doc.version().setLongValue(docIdAndVersion.version);
+
+                    // retrieve stored fields from parsed doc
+                    fieldVisitor = buildFieldsVisitors(storedFields, fetchSourceContext);
+                    for (IndexableField indexableField : doc.rootDoc().getFields()) {
+                        IndexableFieldType fieldType = indexableField.fieldType();
+                        if (fieldType.stored()) {
+                            FieldInfo fieldInfo = new FieldInfo(indexableField.name(), 0, false, false, false, IndexOptions.NONE,
+                                DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0, 0, false);
+                            StoredFieldVisitor.Status status = fieldVisitor.needsField(fieldInfo);
+                            if (status == StoredFieldVisitor.Status.YES) {
+                                if (indexableField.binaryValue() != null) {
+                                    fieldVisitor.binaryField(fieldInfo, indexableField.binaryValue());
+                                } else if (indexableField.stringValue() != null) {
+                                    fieldVisitor.objectField(fieldInfo, indexableField.stringValue());
+                                } else if (indexableField.numericValue() != null) {
+                                    fieldVisitor.objectField(fieldInfo, indexableField.numericValue());
+                                }
+                            } else if (status == StoredFieldVisitor.Status.STOP) {
+                                break;
+                            }
+                        }
+                    }
+                    // retrieve source (with possible transformations, e.g. source filters
+                    source = fieldVisitor.source();
+                }
+            }
+
+            // put stored fields into result objects
             if (!fieldVisitor.fields().isEmpty()) {
                 fieldVisitor.postProcess(mapperService);
                 documentFields = new HashMap<>();
@@ -208,47 +287,26 @@
             }
         }
 
-        DocumentMapper docMapper = mapperService.documentMapper();
-
-        if (gFields != null && gFields.length > 0) {
-            for (String field : gFields) {
-                Mapper fieldMapper = docMapper.mappers().getMapper(field);
-                if (fieldMapper == null) {
-                    if (docMapper.objectMappers().get(field) != null) {
-                        // Only fail if we know it is a object field, missing paths / fields shouldn't fail.
-                        throw new IllegalArgumentException("field [" + field + "] isn't a leaf field");
-                    }
+        if (source != null) {
+            // apply request-level source filtering
+            if (fetchSourceContext.fetchSource() == false) {
+                source = null;
+            } else if (fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0) {
+                Map<String, Object> sourceAsMap;
+                // TODO: The source might be parsed and available in the sourceLookup but that one uses unordered maps so different.
+                //  Do we care?
+                Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source, true);
+                XContentType sourceContentType = typeMapTuple.v1();
+                sourceAsMap = typeMapTuple.v2();
+                sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
+                try {
+                    source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap));
+                } catch (IOException e) {
+                    throw new ElasticsearchException("Failed to get id [" + id + "] with includes/excludes set", e);
                 }
             }
         }
 
-        if (!fetchSourceContext.fetchSource()) {
-            source = null;
-        }
-
-        if (source != null && get.isFromTranslog()) {
-            // reapply source filters from mapping (possibly also nulling the source)
-            try {
-                source = docMapper.sourceMapper().applyFilters(source, null);
-            } catch (IOException e) {
-                throw new ElasticsearchException("Failed to reapply filters for [" + id + "] after reading from translog", e);
-            }
-        }
-
-        if (source != null && (fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0)) {
-            Map<String, Object> sourceAsMap;
-            // TODO: The source might parsed and available in the sourceLookup but that one uses unordered maps so different. Do we care?
-            Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source, true);
-            XContentType sourceContentType = typeMapTuple.v1();
-            sourceAsMap = typeMapTuple.v2();
-            sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
-            try {
-                source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap));
-            } catch (IOException e) {
-                throw new ElasticsearchException("Failed to get id [" + id + "] with includes/excludes set", e);
-            }
-        }
-
         return new GetResult(shardId.getIndexName(), id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm,
             get.version(), get.exists(), source, documentFields, metaDataFields);
     }
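
For illustration only, and not part of the change above: the slow path works because a StoredFieldVisitor does not care whether stored fields come from a Lucene segment or from a freshly parsed document, so the parsed translog _source can be "replayed" through the same visitor machinery used for segment reads. The following minimal, self-contained sketch shows that replay pattern against plain Lucene types. The class, field names, and values are invented for the example, a simple collecting visitor stands in for Elasticsearch's FieldsVisitor, and it assumes the same Lucene 8.x FieldInfo constructor used in the diff.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StoredFieldVisitor;

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class ReplayStoredFieldsSketch {

    /** Collects the stored fields it is offered, playing the role FieldsVisitor plays in the diff. */
    static final class CollectingVisitor extends StoredFieldVisitor {
        final Map<String, Object> values = new HashMap<>();

        @Override
        public Status needsField(FieldInfo fieldInfo) {
            return Status.YES; // a real visitor would filter by the requested field names
        }

        @Override
        public void stringField(FieldInfo fieldInfo, byte[] value) {
            values.put(fieldInfo.name, new String(value, StandardCharsets.UTF_8));
        }

        @Override
        public void longField(FieldInfo fieldInfo, long value) {
            values.put(fieldInfo.name, value);
        }
    }

    public static void main(String[] args) {
        // Stand-in for ParsedDocument#rootDoc(): an in-memory Lucene document holding stored fields.
        Document doc = new Document();
        doc.add(new StoredField("field1", "some value"));
        doc.add(new StoredField("counter", 42L));

        CollectingVisitor visitor = new CollectingVisitor();
        for (IndexableField field : doc.getFields()) {
            if (field.fieldType().stored() == false) {
                continue;
            }
            // Same synthetic FieldInfo construction as in the change above (Lucene 8.x constructor).
            FieldInfo fieldInfo = new FieldInfo(field.name(), 0, false, false, false, IndexOptions.NONE,
                DocValuesType.NONE, -1, Collections.emptyMap(), 0, 0, 0, false);
            if (visitor.needsField(fieldInfo) == StoredFieldVisitor.Status.YES) {
                if (field.stringValue() != null) {
                    visitor.stringField(fieldInfo, field.stringValue().getBytes(StandardCharsets.UTF_8));
                } else if (field.numericValue() != null) {
                    visitor.longField(fieldInfo, field.numericValue().longValue());
                }
            }
        }
        System.out.println(visitor.values); // e.g. {field1=some value, counter=42}
    }
}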