
Optimized field visitor for ES|QL loading from _ignored_source (#132428)

This PR builds on the work in #132142 to optimize loading values from
_ignored_source by stopping the StoredFieldVisitor early once all required
fields have been visited.

Relates to #130886.
Jordan Powers, 1 month ago
commit 6ab73a1541
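
The optimization hinges on Lucene's StoredFieldVisitor contract: needsField may return Status.STOP, which tells the stored-fields reader to skip the remaining fields of the current document. The following is a minimal sketch of that early-stop pattern, not part of the commit; the field names are hypothetical, and the commit's actual visitor is IgnoredSourceFieldLoader.SFV below.

import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.StoredFieldVisitor;

import java.util.HashSet;
import java.util.Set;

// Sketch only: stop visiting stored fields once every required field has been seen.
class EarlyStopVisitor extends StoredFieldVisitor {
    private final Set<String> remaining = new HashSet<>(Set.of("_ignored_source.foo", "_ignored_source.bar"));

    @Override
    public Status needsField(FieldInfo fieldInfo) {
        if (remaining.isEmpty()) {
            return Status.STOP; // all required fields visited: skip the rest of the document
        }
        return remaining.remove(fieldInfo.name) ? Status.YES : Status.NO;
    }

    @Override
    public void binaryField(FieldInfo fieldInfo, byte[] value) {
        // collect the value for fieldInfo.name ...
    }
}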

+ 138 - 0
server/src/main/java/org/elasticsearch/index/fieldvisitor/IgnoredSourceFieldLoader.java

@@ -0,0 +1,138 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.fieldvisitor;
+
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.StoredFieldVisitor;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
+import org.elasticsearch.search.fetch.StoredFieldsSpec;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+class IgnoredSourceFieldLoader extends StoredFieldLoader {
+    private final boolean forceSequentialReader;
+    private final Map<String, Set<String>> potentialFieldsInIgnoreSource;
+    private final Set<String> fieldNames;
+
+    IgnoredSourceFieldLoader(StoredFieldsSpec spec, boolean forceSequentialReader) {
+        assert IgnoredSourceFieldLoader.supports(spec);
+
+        fieldNames = new HashSet<>(spec.ignoredFieldsSpec().requiredIgnoredFields());
+        this.forceSequentialReader = forceSequentialReader;
+
+        HashMap<String, Set<String>> potentialFieldsInIgnoreSource = new HashMap<>();
+        for (String requiredIgnoredField : spec.ignoredFieldsSpec().requiredIgnoredFields()) {
+            for (String potentialStoredField : spec.ignoredFieldsSpec().format().requiredStoredFields(requiredIgnoredField)) {
+                potentialFieldsInIgnoreSource.computeIfAbsent(potentialStoredField, k -> new HashSet<>()).add(requiredIgnoredField);
+            }
+        }
+        this.potentialFieldsInIgnoreSource = potentialFieldsInIgnoreSource;
+    }
+
+    @Override
+    public LeafStoredFieldLoader getLoader(LeafReaderContext ctx, int[] docs) throws IOException {
+        var reader = forceSequentialReader ? sequentialReader(ctx) : reader(ctx, docs);
+        var visitor = new SFV(fieldNames, potentialFieldsInIgnoreSource);
+        return new LeafStoredFieldLoader() {
+
+            private int doc = -1;
+
+            @Override
+            public void advanceTo(int doc) throws IOException {
+                if (doc != this.doc) {
+                    visitor.reset();
+                    reader.accept(doc, visitor);
+                    this.doc = doc;
+                }
+            }
+
+            @Override
+            public BytesReference source() {
+                assert false : "source() is not supported by IgnoredSourceFieldLoader";
+                return null;
+            }
+
+            @Override
+            public String id() {
+                assert false : "id() is not supported by IgnoredSourceFieldLoader";
+                return null;
+            }
+
+            @Override
+            public String routing() {
+                assert false : "routing() is not supported by IgnoredSourceFieldLoader";
+                return null;
+            }
+
+            @Override
+            public Map<String, List<Object>> storedFields() {
+                return visitor.values;
+            }
+        };
+    }
+
+    @Override
+    public List<String> fieldsToLoad() {
+        return potentialFieldsInIgnoreSource.keySet().stream().toList();
+    }
+
+    static class SFV extends StoredFieldVisitor {
+        final Map<String, List<Object>> values = new HashMap<>();
+        final Set<String> fieldNames;
+        private final Set<String> unvisitedFields;
+        final Map<String, Set<String>> potentialFieldsInIgnoreSource;
+
+        SFV(Set<String> fieldNames, Map<String, Set<String>> potentialFieldsInIgnoreSource) {
+            this.fieldNames = fieldNames;
+            this.unvisitedFields = new HashSet<>(fieldNames);
+            this.potentialFieldsInIgnoreSource = potentialFieldsInIgnoreSource;
+        }
+
+        @Override
+        public Status needsField(FieldInfo fieldInfo) throws IOException {
+            if (unvisitedFields.isEmpty()) {
+                return Status.STOP;
+            }
+
+            Set<String> foundFields = potentialFieldsInIgnoreSource.get(fieldInfo.name);
+            if (foundFields == null) {
+                return Status.NO;
+            }
+
+            unvisitedFields.removeAll(foundFields);
+            return Status.YES;
+        }
+
+        @Override
+        public void binaryField(FieldInfo fieldInfo, byte[] value) {
+            values.computeIfAbsent(fieldInfo.name, k -> new ArrayList<>()).add(new BytesRef(value));
+        }
+
+        void reset() {
+            values.clear();
+            unvisitedFields.addAll(fieldNames);
+        }
+
+    }
+
+    static boolean supports(StoredFieldsSpec spec) {
+        return spec.onlyRequiresIgnoredFields()
+            && spec.ignoredFieldsSpec().format() == IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE;
+    }
+}
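
For context (not part of the diff): under PER_FIELD_IGNORED_SOURCE a required ignored field may live in several stored fields, one per path prefix, so the loader builds the inverse map potentialFieldsInIgnoreSource to translate a visited stored field back to the required fields it can satisfy. A rough sketch of that mapping, using a hypothetical stand-in for IgnoredSourceFormat#requiredStoredFields:

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Sketch only: invert required-field -> stored-fields into stored-field -> required-fields.
class PotentialFieldsSketch {
    // Hypothetical stand-in for PER_FIELD_IGNORED_SOURCE's requiredStoredFields:
    // every path prefix of the field gets its own _ignored_source.* stored field.
    static Set<String> requiredStoredFields(String fieldName) {
        Set<String> stored = new HashSet<>();
        StringBuilder path = new StringBuilder();
        for (String part : fieldName.split("\\.")) {
            if (path.length() > 0) {
                path.append('.');
            }
            path.append(part);
            stored.add("_ignored_source." + path);
        }
        return stored;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> potentialFieldsInIgnoreSource = new HashMap<>();
        for (String required : List.of("parent.foo", "bar")) {
            for (String storedField : requiredStoredFields(required)) {
                potentialFieldsInIgnoreSource.computeIfAbsent(storedField, k -> new HashSet<>()).add(required);
            }
        }
        // {_ignored_source.parent=[parent.foo], _ignored_source.parent.foo=[parent.foo], _ignored_source.bar=[bar]}
        System.out.println(potentialFieldsInIgnoreSource);
    }
}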

+ 28 - 27
server/src/main/java/org/elasticsearch/index/fieldvisitor/StoredFieldLoader.java

@@ -12,6 +12,7 @@ package org.elasticsearch.index.fieldvisitor;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.StoredFields;
 import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -49,10 +50,26 @@ public abstract class StoredFieldLoader {
      * Creates a new StoredFieldLoader using a StoredFieldsSpec
      */
     public static StoredFieldLoader fromSpec(StoredFieldsSpec spec) {
+        return fromSpec(spec, false);
+    }
+
+    /**
+     * Creates a new StoredFieldLoader using a StoredFieldsSpec that is optimized
+     * for loading documents in order.
+     */
+    public static StoredFieldLoader fromSpecSequential(StoredFieldsSpec spec) {
+        return fromSpec(spec, true);
+    }
+
+    private static StoredFieldLoader fromSpec(StoredFieldsSpec spec, boolean forceSequentialReader) {
         if (spec.noRequirements()) {
             return StoredFieldLoader.empty();
         }
-        return create(spec.requiresSource(), spec.requiredStoredFields());
+
+        if (IgnoredSourceFieldLoader.supports(spec)) {
+            return new IgnoredSourceFieldLoader(spec, forceSequentialReader);
+        }
+        return create(spec.requiresSource(), spec.requiredStoredFields(), forceSequentialReader);
     }
 
     public static StoredFieldLoader create(boolean loadSource, Set<String> fields) {
@@ -85,28 +102,6 @@ public abstract class StoredFieldLoader {
         };
     }
 
-    /**
-     * Creates a new StoredFieldLoader using a StoredFieldsSpec that is optimized
-     * for loading documents in order.
-     */
-    public static StoredFieldLoader fromSpecSequential(StoredFieldsSpec spec) {
-        if (spec.noRequirements()) {
-            return StoredFieldLoader.empty();
-        }
-        List<String> fieldsToLoad = fieldsToLoad(spec.requiresSource(), spec.requiredStoredFields());
-        return new StoredFieldLoader() {
-            @Override
-            public LeafStoredFieldLoader getLoader(LeafReaderContext ctx, int[] docs) throws IOException {
-                return new ReaderStoredFieldLoader(sequentialReader(ctx), spec.requiresSource(), spec.requiredStoredFields());
-            }
-
-            @Override
-            public List<String> fieldsToLoad() {
-                return fieldsToLoad;
-            }
-        };
-    }
-
     /**
      * Creates a StoredFieldLoader tuned for sequential reads of _source
      */
@@ -141,7 +136,8 @@ public abstract class StoredFieldLoader {
         };
     }
 
-    private static CheckedBiConsumer<Integer, FieldsVisitor, IOException> reader(LeafReaderContext ctx, int[] docs) throws IOException {
+    protected static CheckedBiConsumer<Integer, StoredFieldVisitor, IOException> reader(LeafReaderContext ctx, int[] docs)
+        throws IOException {
         LeafReader leafReader = ctx.reader();
         if (docs != null && docs.length > 10 && hasSequentialDocs(docs)) {
             return sequentialReader(ctx);
@@ -150,7 +146,8 @@ public abstract class StoredFieldLoader {
         return storedFields::document;
     }
 
-    private static CheckedBiConsumer<Integer, FieldsVisitor, IOException> sequentialReader(LeafReaderContext ctx) throws IOException {
+    protected static CheckedBiConsumer<Integer, StoredFieldVisitor, IOException> sequentialReader(LeafReaderContext ctx)
+        throws IOException {
         LeafReader leafReader = ctx.reader();
         if (leafReader instanceof SequentialStoredFieldsLeafReader lf) {
             return lf.getSequentialStoredFieldsReader()::document;
@@ -201,7 +198,7 @@ public abstract class StoredFieldLoader {
 
     private static class ReaderStoredFieldLoader implements LeafStoredFieldLoader {
 
-        private final CheckedBiConsumer<Integer, FieldsVisitor, IOException> reader;
+        private final CheckedBiConsumer<Integer, StoredFieldVisitor, IOException> reader;
         private final CustomFieldsVisitor visitor;
         private int doc = -1;
 
@@ -221,7 +218,11 @@ public abstract class StoredFieldLoader {
             return new CustomFieldsVisitor(fields, loadSource);
         }
 
-        ReaderStoredFieldLoader(CheckedBiConsumer<Integer, FieldsVisitor, IOException> reader, boolean loadSource, Set<String> fields) {
+        ReaderStoredFieldLoader(
+            CheckedBiConsumer<Integer, StoredFieldVisitor, IOException> reader,
+            boolean loadSource,
+            Set<String> fields
+        ) {
             this.reader = reader;
             this.visitor = getFieldsVisitor(fields, loadSource);
         }

+ 1 - 9
server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java

@@ -23,7 +23,6 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.Stack;
-import java.util.stream.Collectors;
 
 /**
  * Block loader for fields that use fallback synthetic source implementation.
@@ -66,14 +65,7 @@ public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader
 
     @Override
     public StoredFieldsSpec rowStrideStoredFieldSpec() {
-        Set<String> ignoredFieldNames;
-        if (ignoredSourceFormat == IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE) {
-            ignoredFieldNames = fieldPaths.stream().map(IgnoredSourceFieldMapper::ignoredFieldName).collect(Collectors.toSet());
-        } else {
-            ignoredFieldNames = Set.of(IgnoredSourceFieldMapper.NAME);
-        }
-
-        return new StoredFieldsSpec(false, false, ignoredFieldNames);
+        return new StoredFieldsSpec(false, false, Set.of(), new IgnoredFieldsSpec(Set.of(fieldName), ignoredSourceFormat));
     }
 
     @Override

+ 60 - 0
server/src/main/java/org/elasticsearch/index/mapper/IgnoredFieldsSpec.java

@@ -0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.ElasticsearchException;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Defines which fields need to be loaded from _ignored_source during a fetch.
+ */
+public record IgnoredFieldsSpec(Set<String> requiredIgnoredFields, IgnoredSourceFieldMapper.IgnoredSourceFormat format) {
+    public static IgnoredFieldsSpec NONE = new IgnoredFieldsSpec(Set.of(), IgnoredSourceFieldMapper.IgnoredSourceFormat.NO_IGNORED_SOURCE);
+
+    public boolean noRequirements() {
+        return requiredIgnoredFields.isEmpty();
+    }
+
+    public IgnoredFieldsSpec merge(IgnoredFieldsSpec other) {
+        if (this.format == IgnoredSourceFieldMapper.IgnoredSourceFormat.NO_IGNORED_SOURCE) {
+            return other;
+        }
+        if (other.format == IgnoredSourceFieldMapper.IgnoredSourceFormat.NO_IGNORED_SOURCE) {
+            return this;
+        }
+        if (other.requiredIgnoredFields.isEmpty()) {
+            return this;
+        }
+        if (this.requiredIgnoredFields.isEmpty()) {
+            return other;
+        }
+
+        if (this.format != other.format) {
+            throw new ElasticsearchException(
+                "failed to merge IgnoredFieldsSpec with differing formats " + this.format.name() + "," + other.format.name()
+            );
+        }
+
+        Set<String> mergedFields = new HashSet<>(requiredIgnoredFields);
+        mergedFields.addAll(other.requiredIgnoredFields);
+        return new IgnoredFieldsSpec(mergedFields, format);
+    }
+
+    /**
+     * Get the set of stored fields required to load the specified fields from _ignored_source.
+     */
+    public Set<String> requiredStoredFields() {
+        return requiredIgnoredFields.stream().flatMap(field -> format.requiredStoredFields(field).stream()).collect(Collectors.toSet());
+
+    }
+}
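
For context (not part of the diff): merge absorbs an empty or NO_IGNORED_SOURCE spec and unions the field sets otherwise, so combining two non-empty specs is only legal when their formats agree. A small illustration with hypothetical field names:

IgnoredFieldsSpec a = new IgnoredFieldsSpec(Set.of("foo"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE);
IgnoredFieldsSpec b = new IgnoredFieldsSpec(Set.of("bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE);

assert a.merge(IgnoredFieldsSpec.NONE).equals(a);                          // the empty spec is absorbed
assert a.merge(b).requiredIgnoredFields().equals(Set.of("foo", "bar"));    // same format: field sets are unioned
// Merging two non-empty specs with different formats throws an ElasticsearchException.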

+ 24 - 0
server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java

@@ -41,6 +41,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 /**
@@ -277,6 +278,11 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper {
             public void writeIgnoredFields(Collection<NameValue> ignoredFieldValues) {
                 assert false : "cannot write " + ignoredFieldValues.size() + " values with format NO_IGNORED_SOURCE";
             }
+
+            @Override
+            public Set<String> requiredStoredFields(String fieldName) {
+                return Set.of();
+            }
         },
         SINGLE_IGNORED_SOURCE {
             @Override
@@ -327,6 +333,11 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper {
                     nameValue.doc().add(new StoredField(NAME, encode(nameValue)));
                 }
             }
+
+            @Override
+            public Set<String> requiredStoredFields(String fieldName) {
+                return Set.of(IgnoredSourceFieldMapper.NAME);
+            }
         },
         PER_FIELD_IGNORED_SOURCE {
             @Override
@@ -403,6 +414,14 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper {
                     }
                 }
             }
+
+            @Override
+            public Set<String> requiredStoredFields(String fieldName) {
+                return FallbackSyntheticSourceBlockLoader.splitIntoFieldPaths(fieldName)
+                    .stream()
+                    .map(IgnoredSourceFieldMapper::ignoredFieldName)
+                    .collect(Collectors.toSet());
+            }
         };
 
         public abstract Map<String, List<IgnoredSourceFieldMapper.NameValue>> loadAllIgnoredFields(
@@ -416,6 +435,11 @@ public class IgnoredSourceFieldMapper extends MetadataFieldMapper {
         );
 
         public abstract void writeIgnoredFields(Collection<NameValue> ignoredFieldValues);
+
+        /**
+         * Get the set of stored fields needed to retrieve the value for fieldName
+         */
+        public abstract Set<String> requiredStoredFields(String fieldName);
     }
 
     public IgnoredSourceFormat ignoredSourceFormat() {

+ 33 - 3
server/src/main/java/org/elasticsearch/search/fetch/StoredFieldsSpec.java

@@ -9,6 +9,8 @@
 
 package org.elasticsearch.search.fetch;
 
+import org.elasticsearch.index.mapper.IgnoredFieldsSpec;
+
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Set;
@@ -19,10 +21,25 @@ import java.util.function.Function;
  * @param requiresSource        should source be loaded
  * @param requiredStoredFields  a set of stored fields to load
  */
-public record StoredFieldsSpec(boolean requiresSource, boolean requiresMetadata, Set<String> requiredStoredFields) {
+public record StoredFieldsSpec(
+    boolean requiresSource,
+    boolean requiresMetadata,
+    Set<String> requiredStoredFields,
+    IgnoredFieldsSpec ignoredFieldsSpec
+) {
+    public StoredFieldsSpec(boolean requiresSource, boolean requiresMetadata, Set<String> requiredStoredFields) {
+        this(requiresSource, requiresMetadata, requiredStoredFields, IgnoredFieldsSpec.NONE);
+    }
 
     public boolean noRequirements() {
-        return requiresSource == false && requiresMetadata == false && requiredStoredFields.isEmpty();
+        return requiresSource == false && requiresMetadata == false && requiredStoredFields.isEmpty() && ignoredFieldsSpec.noRequirements();
+    }
+
+    public boolean onlyRequiresIgnoredFields() {
+        return requiresSource == false
+            && requiresMetadata == false
+            && requiredStoredFields.isEmpty()
+            && ignoredFieldsSpec.noRequirements() == false;
     }
 
     /**
@@ -56,10 +73,23 @@ public record StoredFieldsSpec(boolean requiresSource, boolean requiresMetadata,
         return new StoredFieldsSpec(
             this.requiresSource || other.requiresSource,
             this.requiresMetadata || other.requiresMetadata,
-            mergedFields
+            mergedFields,
+            ignoredFieldsSpec.merge(other.ignoredFieldsSpec)
         );
     }
 
+    public Set<String> requiredStoredFields() {
+        if (ignoredFieldsSpec.noRequirements()) {
+            return requiredStoredFields;
+        }
+        if (requiredStoredFields.isEmpty()) {
+            return ignoredFieldsSpec.requiredStoredFields();
+        }
+        Set<String> mergedFields = new HashSet<>(requiredStoredFields);
+        mergedFields.addAll(ignoredFieldsSpec.requiredStoredFields());
+        return mergedFields;
+    }
+
     public static <T> StoredFieldsSpec build(Collection<T> sources, Function<T, StoredFieldsSpec> converter) {
         StoredFieldsSpec storedFieldsSpec = StoredFieldsSpec.NO_REQUIREMENTS;
         for (T source : sources) {

+ 142 - 0
server/src/test/java/org/elasticsearch/index/fieldvisitor/IgnoredSourceFieldLoaderTests.java

@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.fieldvisitor;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.index.mapper.IgnoredFieldsSpec;
+import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
+import org.elasticsearch.search.fetch.StoredFieldsSpec;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.not;
+
+/**
+ * Test that the {@link IgnoredSourceFieldLoader} loads the correct stored values.
+ */
+public class IgnoredSourceFieldLoaderTests extends ESTestCase {
+    public void testSupports() {
+        assertTrue(
+            IgnoredSourceFieldLoader.supports(
+                new StoredFieldsSpec(
+                    false,
+                    false,
+                    Set.of(),
+                    new IgnoredFieldsSpec(Set.of("foo"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE)
+                )
+            )
+        );
+
+        assertFalse(
+            IgnoredSourceFieldLoader.supports(
+                new StoredFieldsSpec(
+                    false,
+                    false,
+                    Set.of(),
+                    new IgnoredFieldsSpec(Set.of(), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE)
+                )
+            )
+        );
+
+        assertFalse(
+            IgnoredSourceFieldLoader.supports(
+                new StoredFieldsSpec(
+                    true,
+                    false,
+                    Set.of(),
+                    new IgnoredFieldsSpec(Set.of("foo"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE)
+                )
+            )
+        );
+
+        assertFalse(IgnoredSourceFieldLoader.supports(StoredFieldsSpec.NO_REQUIREMENTS));
+    }
+
+    public void testLoadSingle() throws IOException {
+        // Note: normally the stored value is encoded in the ignored source format
+        // (see IgnoredSourceFieldMapper#encodeMultipleValuesForField), but these tests are only verifying the loader, not the encoding.
+        BytesRef value = new BytesRef("lorem ipsum");
+        Document doc = new Document();
+        doc.add(new StoredField("_ignored_source.foo", value));
+        testLoader(doc, Set.of("foo"), storedFields -> {
+            assertThat(storedFields, hasEntry(equalTo("_ignored_source.foo"), containsInAnyOrder(value)));
+        });
+    }
+
+    public void testLoadMultiple() throws IOException {
+        BytesRef fooValue = new BytesRef("lorem ipsum");
+        BytesRef barValue = new BytesRef("dolor sit amet");
+        Document doc = new Document();
+        doc.add(new StoredField("_ignored_source.foo", fooValue));
+        doc.add(new StoredField("_ignored_source.bar", barValue));
+        testLoader(doc, Set.of("foo", "bar"), storedFields -> {
+            assertThat(storedFields, hasEntry(equalTo("_ignored_source.foo"), containsInAnyOrder(fooValue)));
+            assertThat(storedFields, hasEntry(equalTo("_ignored_source.bar"), containsInAnyOrder(barValue)));
+        });
+    }
+
+    public void testLoadSubset() throws IOException {
+        BytesRef fooValue = new BytesRef("lorem ipsum");
+        BytesRef barValue = new BytesRef("dolor sit amet");
+
+        Document doc = new Document();
+        doc.add(new StoredField("_ignored_source.foo", fooValue));
+        doc.add(new StoredField("_ignored_source.bar", barValue));
+
+        testLoader(doc, Set.of("foo"), storedFields -> {
+            assertThat(storedFields, hasEntry(equalTo("_ignored_source.foo"), containsInAnyOrder(fooValue)));
+            assertThat(storedFields, not(hasKey("_ignored_source.bar")));
+        });
+    }
+
+    public void testLoadFromParent() throws IOException {
+        BytesRef fooValue = new BytesRef("lorem ipsum");
+        Document doc = new Document();
+        doc.add(new StoredField("_ignored_source.parent", fooValue));
+        testLoader(doc, Set.of("parent.foo"), storedFields -> {
+            assertThat(storedFields, hasEntry(equalTo("_ignored_source.parent"), containsInAnyOrder(fooValue)));
+        });
+    }
+
+    private void testLoader(Document doc, Set<String> fieldsToLoad, Consumer<Map<String, List<Object>>> storedFieldsTest)
+        throws IOException {
+        try (Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(Lucene.STANDARD_ANALYZER))) {
+            StoredFieldsSpec spec = new StoredFieldsSpec(
+                false,
+                false,
+                Set.of(),
+                new IgnoredFieldsSpec(fieldsToLoad, IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE)
+            );
+            assertTrue(IgnoredSourceFieldLoader.supports(spec));
+            iw.addDocument(doc);
+            try (DirectoryReader reader = DirectoryReader.open(iw)) {
+                IgnoredSourceFieldLoader loader = new IgnoredSourceFieldLoader(spec, false);
+                var leafLoader = loader.getLoader(reader.leaves().getFirst(), new int[] { 0 });
+                leafLoader.advanceTo(0);
+                storedFieldsTest.accept(leafLoader.storedFields());
+            }
+        }
+    }
+}

+ 190 - 0
server/src/test/java/org/elasticsearch/index/fieldvisitor/StoredFieldLoaderTests.java

@@ -0,0 +1,190 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.index.fieldvisitor;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.index.mapper.IgnoredFieldsSpec;
+import org.elasticsearch.index.mapper.IgnoredSourceFieldMapper;
+import org.elasticsearch.search.fetch.StoredFieldsSpec;
+import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.anEmptyMap;
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+
+public class StoredFieldLoaderTests extends ESTestCase {
+    private Document doc(String... values) {
+        assert values.length % 2 == 0;
+        Document doc = new Document();
+        for (int i = 0; i < values.length; i++) {
+            doc.add(new StoredField(values[i++], new BytesRef(values[i])));
+        }
+        return doc;
+    }
+
+    private StoredFieldsSpec fieldsSpec(
+        Set<String> storedFields,
+        Set<String> ignoredFields,
+        IgnoredSourceFieldMapper.IgnoredSourceFormat format
+    ) {
+        return new StoredFieldsSpec(false, false, storedFields, new IgnoredFieldsSpec(ignoredFields, format));
+    }
+
+    public void testEmpty() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum", "_ignored_source.bar", "dolor sit amet"),
+            fieldsSpec(Set.of(), Set.of(), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, anEmptyMap());
+            }
+        );
+    }
+
+    public void testSingleIgnoredSourceNewFormat() throws IOException {
+        testIgnoredSourceLoader(
+            doc("_ignored_source.foo", "lorem ipsum"),
+            fieldsSpec(Set.of(), Set.of("foo"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source.foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+            }
+        );
+    }
+
+    public void testSingleIgnoredSourceOldFormat() throws IOException {
+        testStoredFieldLoader(
+            doc("_ignored_source", "lorem ipsum"),
+            fieldsSpec(Set.of(), Set.of("foo"), IgnoredSourceFieldMapper.IgnoredSourceFormat.SINGLE_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+            }
+        );
+    }
+
+    public void testMultiValueIgnoredSourceNewFormat() throws IOException {
+        testIgnoredSourceLoader(
+            doc("_ignored_source.foo", "lorem ipsum", "_ignored_source.bar", "dolor sit amet"),
+            fieldsSpec(Set.of(), Set.of("foo", "bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source.foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source.bar"), containsInAnyOrder(new BytesRef("dolor sit amet"))));
+            }
+        );
+    }
+
+    public void testMultiValueIgnoredSourceOldFormat() throws IOException {
+        testStoredFieldLoader(
+            doc("_ignored_source", "lorem ipsum", "_ignored_source", "dolor sit amet"),
+            fieldsSpec(Set.of(), Set.of("foo", "bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.SINGLE_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(
+                    storedFields,
+                    hasEntry(equalTo("_ignored_source"), containsInAnyOrder(new BytesRef("lorem ipsum"), new BytesRef("dolor sit amet")))
+                );
+            }
+        );
+    }
+
+    public void testSingleStoredField() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum"),
+            fieldsSpec(Set.of("foo"), Set.of(), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+            }
+        );
+    }
+
+    public void testMultiValueStoredField() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum", "bar", "dolor sit amet"),
+            fieldsSpec(Set.of("foo", "bar"), Set.of(), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+                assertThat(storedFields, hasEntry(equalTo("bar"), containsInAnyOrder(new BytesRef("dolor sit amet"))));
+            }
+        );
+    }
+
+    public void testMixedStoredAndIgnoredFieldsNewFormat() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum", "_ignored_source.bar", "dolor sit amet"),
+            fieldsSpec(Set.of("foo"), Set.of("bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source.bar"), containsInAnyOrder(new BytesRef("dolor sit amet"))));
+            }
+        );
+    }
+
+    public void testMixedStoredAndIgnoredFieldsOldFormat() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum", "_ignored_source", "dolor sit amet"),
+            fieldsSpec(Set.of("foo"), Set.of("bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.SINGLE_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source"), containsInAnyOrder(new BytesRef("dolor sit amet"))));
+            }
+        );
+    }
+
+    public void testMixedStoredAndIgnoredFieldsLoadParent() throws IOException {
+        testStoredFieldLoader(
+            doc("foo", "lorem ipsum", "_ignored_source.parent", "dolor sit amet"),
+            fieldsSpec(Set.of("foo"), Set.of("parent.bar"), IgnoredSourceFieldMapper.IgnoredSourceFormat.PER_FIELD_IGNORED_SOURCE),
+            storedFields -> {
+                assertThat(storedFields, hasEntry(equalTo("foo"), containsInAnyOrder(new BytesRef("lorem ipsum"))));
+                assertThat(storedFields, hasEntry(equalTo("_ignored_source.parent"), containsInAnyOrder(new BytesRef("dolor sit amet"))));
+            }
+        );
+    }
+
+    private void testStoredFieldLoader(Document doc, StoredFieldsSpec spec, Consumer<Map<String, List<Object>>> storedFieldsTest)
+        throws IOException {
+        testLoader(doc, spec, StoredFieldLoader.class, storedFieldsTest);
+    }
+
+    private void testIgnoredSourceLoader(Document doc, StoredFieldsSpec spec, Consumer<Map<String, List<Object>>> storedFieldsTest)
+        throws IOException {
+        testLoader(doc, spec, IgnoredSourceFieldLoader.class, storedFieldsTest);
+    }
+
+    private void testLoader(
+        Document doc,
+        StoredFieldsSpec spec,
+        Class<? extends StoredFieldLoader> expectedLoaderClass,
+        Consumer<Map<String, List<Object>>> storedFieldsTest
+    ) throws IOException {
+        try (Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(Lucene.STANDARD_ANALYZER))) {
+            iw.addDocument(doc);
+            try (DirectoryReader reader = DirectoryReader.open(iw)) {
+                StoredFieldLoader loader = StoredFieldLoader.fromSpec(spec);
+                assertThat(loader, Matchers.isA(expectedLoaderClass));
+                var leafLoader = loader.getLoader(reader.leaves().getFirst(), new int[] { 0 });
+                leafLoader.advanceTo(0);
+                storedFieldsTest.accept(leafLoader.storedFields());
+            }
+        }
+    }
+
+}