Bläddra i källkod

Completion field to support multiple completion multi-fields (#83595)

The completion field supports multi-fields. In case the sub-field is another completion field, the whole object structure from the incoming document needs to be provided as input to the sub-field while parsing it. We have a special XContentParser for this scenario, but it does not handle returning the same object structure multiple times. That is why you can have only one completion sub-field within a completion field, and the error returned when this mechanism breaks is a general parsing error (that mentions a field called dummy_field) that makes users think they have done something wrong in their document.

This commit expands testing for this scenario and extends the parser to support it.

As part of this change, a new parser is created for each sub-field, which makes it possible to expose the same object structure multiple times, for instance in case a completion field has more than one completion sub-field.

Additionally, the wrapping of both the geo_point multi field parser and the completion multi field parser into a dummy_field object is removed in favour of returning the correct currentName of the main field we are parsing.

Additionally getTokenLocation is tweaked to return the location of the field we are parsing in the document, so that error messages become clearer when things go wrong.

Closes #83534
Luca Cavanna 3 år sedan
förälder
incheckning
a91e692779

+ 6 - 0
docs/changelog/83595.yaml

@@ -0,0 +1,6 @@
+pr: 83595
+summary: Completion field to support multiple completion multi-fields
+area: Mapping
+type: bug
+issues:
+ - 83534

+ 1 - 16
libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java

@@ -11,14 +11,12 @@ package org.elasticsearch.xcontent.support;
 import org.elasticsearch.xcontent.DeprecationHandler;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentLocation;
-import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentType;
 
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.CharBuffer;
-import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -28,23 +26,10 @@ import java.util.Map;
  */
 public class MapXContentParser extends AbstractXContentParser {
 
-    private XContentType xContentType;
+    private final XContentType xContentType;
     private TokenIterator iterator;
     private boolean closed;
 
-    public static XContentParser wrapObject(Object sourceMap) throws IOException {
-        XContentParser parser = new MapXContentParser(
-            NamedXContentRegistry.EMPTY,
-            DeprecationHandler.IGNORE_DEPRECATIONS,
-            Collections.singletonMap("dummy_field", sourceMap),
-            XContentType.JSON
-        );
-        parser.nextToken(); // start object
-        parser.nextToken(); // field name
-        parser.nextToken(); // field value
-        return parser;
-    }
-
     public MapXContentParser(
         NamedXContentRegistry xContentRegistry,
         DeprecationHandler deprecationHandler,

+ 16 - 1
server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java

@@ -13,7 +13,10 @@ import org.elasticsearch.common.geo.GeometryFormatterFactory;
 import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.xcontent.DeprecationHandler;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xcontent.support.MapXContentParser;
 
 import java.io.IOException;
@@ -53,7 +56,7 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {
             throws IOException;
 
         private void fetchFromSource(Object sourceMap, Consumer<T> consumer) {
-            try (XContentParser parser = MapXContentParser.wrapObject(sourceMap)) {
+            try (XContentParser parser = wrapObject(sourceMap)) {
                 parse(parser, v -> consumer.accept(normalizeFromSource(v)), e -> {}); /* ignore malformed */
             } catch (IOException e) {
                 throw new UncheckedIOException(e);
@@ -67,6 +70,18 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {
         // TODO: move geometry normalization to the geometry parser.
         public abstract T normalizeFromSource(T geometry);
 
+        private static XContentParser wrapObject(Object sourceMap) throws IOException {
+            XContentParser parser = new MapXContentParser(
+                NamedXContentRegistry.EMPTY,
+                DeprecationHandler.IGNORE_DEPRECATIONS,
+                Collections.singletonMap("dummy_field", sourceMap),
+                XContentType.JSON
+            );
+            parser.nextToken(); // start object
+            parser.nextToken(); // field name
+            parser.nextToken(); // field value
+            return parser;
+        }
     }
 
     public abstract static class AbstractGeometryFieldType<T> extends MappedFieldType {

+ 89 - 11
server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java

@@ -30,11 +30,15 @@ import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.search.suggest.completion.CompletionSuggester;
 import org.elasticsearch.search.suggest.completion.context.ContextMapping;
 import org.elasticsearch.search.suggest.completion.context.ContextMappings;
-import org.elasticsearch.xcontent.FilterXContentParser;
+import org.elasticsearch.xcontent.DelegatingXContentParser;
+import org.elasticsearch.xcontent.DeprecationHandler;
+import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContentLocation;
 import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentParser.NumberType;
 import org.elasticsearch.xcontent.XContentParser.Token;
+import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xcontent.support.MapXContentParser;
 
 import java.io.IOException;
@@ -425,8 +429,11 @@ public class CompletionFieldMapper extends FieldMapper {
 
         context.addToFieldNames(fieldType().name());
         for (CompletionInputMetadata metadata : inputMap.values()) {
-            DocumentParserContext externalValueContext = context.switchParser(new CompletionParser(metadata));
-            multiFields.parse(this, externalValueContext);
+            multiFields.parse(
+                this,
+                context,
+                () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation()))
+            );
         }
     }
 
@@ -586,19 +593,66 @@ public class CompletionFieldMapper extends FieldMapper {
         }
     }
 
-    private static class CompletionParser extends FilterXContentParser {
+    /**
+     * Parser that exposes the expected format depending on the type of multi-field that is consuming content.
+     * Completion fields can hold multi-fields, which can either parse a simple string value or an object in case of another completion
+     * field. This parser detects which of the two is parsing content and exposes the full object when needed (including input, weight
+     * and context if available), otherwise the input value only.
+     *
+     * A few assumptions are made that make this work:
+     * 1) only string values are supported for a completion field, hence only sub-fields that parse strings are supported
+     * 2) sub-fields that parse simple values only ever call {@link #textOrNull()} to do so. They may call {@link #currentToken()} only to
+     * check if there's a null value, which is irrelevant in the multi-fields scenario as null values are ignored in the parent field and
+     * don't lead to any field creation.
+     * 3) completion is the only sub-field type that may be parsing the object structure.
+     *
+     * The parser is set to expose by default simple value, unless {@link #nextToken()} is called which is what signals that the
+     * consumer supports the object structure.
+     */
+    // This parser changes behaviour depending on which methods are called by consumers, which is extremely delicate. This kind of works for
+    // our internal mappers, but what about mappers from plugins
+    static class MultiFieldParser extends DelegatingXContentParser {
+        private final String textValue;
+        private final String fieldName;
+        private final XContentLocation locationOffset;
+        private final XContentParser fullObjectParser;
+        // we assume that the consumer is parsing values, we will switch to exposing the object format if nextToken is called
+        private boolean parsingObject = false;
+
+        MultiFieldParser(CompletionInputMetadata metadata, String fieldName, XContentLocation locationOffset) {
+            this.fullObjectParser = new MapXContentParser(
+                NamedXContentRegistry.EMPTY,
+                DeprecationHandler.IGNORE_DEPRECATIONS,
+                metadata.toMap(),
+                XContentType.JSON
+            );
+            this.fieldName = fieldName;
+            this.locationOffset = locationOffset;
+            this.textValue = metadata.input;
+        }
 
-        boolean advanced = false;
-        final String textValue;
+        @Override
+        protected XContentParser delegate() {
+            // if consumers are only reading values, they should never go through delegate and rather call the
+            // overridden currentToken and textOrNull below that don't call super
+            assert parsingObject;
+            return fullObjectParser;
+        }
 
-        private CompletionParser(CompletionInputMetadata metadata) throws IOException {
-            super(MapXContentParser.wrapObject(metadata.toMap()));
-            this.textValue = metadata.input;
+        @Override
+        public Token currentToken() {
+            if (parsingObject == false) {
+                // nextToken has not been called, it may or may not be called at a later time.
+                // What we return does not really matter for mappers that support simple values, as they only check for VALUE_NULL.
+                // For mappers that do support objects, START_OBJECT is a good choice.
+                return Token.START_OBJECT;
+            }
+            return super.currentToken();
         }
 
         @Override
         public String textOrNull() throws IOException {
-            if (advanced == false) {
+            if (parsingObject == false) {
                 return textValue;
             }
             return super.textOrNull();
@@ -606,8 +660,32 @@ public class CompletionFieldMapper extends FieldMapper {
 
         @Override
         public Token nextToken() throws IOException {
-            advanced = true;
+            if (parsingObject == false) {
+                // a completion sub-field is parsing
+                parsingObject = true;
+                // move to START_OBJECT, currentToken has already returned START_OBJECT and we will advance one token further just below
+                this.fullObjectParser.nextToken();
+            }
             return super.nextToken();
         }
+
+        @Override
+        public String currentName() throws IOException {
+            if (parsingObject == false) {
+                return fieldName;
+            }
+            String currentName = super.currentName();
+            if (currentName == null && currentToken() == Token.END_OBJECT) {
+                return fieldName;
+            }
+            return currentName;
+        }
+
+        @Override
+        public XContentLocation getTokenLocation() {
+            // return fixed token location: it's not possible to match the token location while parsing through the object structure,
+            // because completion metadata have been rewritten hence they won't match the incoming document
+            return locationOffset;
+        }
     }
 }

+ 5 - 5
server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java

@@ -255,7 +255,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
                 valuePreview
             );
         }
-        multiFields.parse(this, context);
+        multiFields.parse(this, context, () -> context);
     }
 
     /**
@@ -449,7 +449,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
         return indexAnalyzers;
     }
 
-    public static class MultiFields implements Iterable<FieldMapper>, ToXContent {
+    public static final class MultiFields implements Iterable<FieldMapper>, ToXContent {
 
         private static final MultiFields EMPTY = new MultiFields(Collections.emptyMap());
 
@@ -507,16 +507,16 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
             this.mappers = mappers;
         }
 
-        public void parse(FieldMapper mainField, DocumentParserContext context) throws IOException {
+        public void parse(FieldMapper mainField, DocumentParserContext context, Supplier<DocumentParserContext> multiFieldContextSupplier)
+            throws IOException {
             // TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part
             // of the mappings
             if (mappers.isEmpty()) {
                 return;
             }
-
             context.path().add(mainField.simpleName());
             for (FieldMapper mapper : mappers.values()) {
-                mapper.parse(context);
+                mapper.parse(multiFieldContextSupplier.get());
             }
             context.path().remove();
         }

+ 33 - 2
server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java

@@ -42,9 +42,9 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 import org.elasticsearch.search.lookup.FieldValues;
 import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.search.runtime.GeoPointScriptFieldDistanceFeatureQuery;
+import org.elasticsearch.xcontent.FilterXContentParser;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.support.MapXContentParser;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -215,7 +215,38 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
             context.doc().add(new StoredField(fieldType().name(), geometry.toString()));
         }
         // TODO phase out geohash (which is currently used in the CompletionSuggester)
-        multiFields.parse(this, context.switchParser(MapXContentParser.wrapObject(geometry.geohash())));
+        // we only expose the geohash value and disallow advancing tokens, hence we can reuse the same parser throughout multiple sub-fields
+        DocumentParserContext parserContext = context.switchParser(new GeoHashMultiFieldParser(context.parser(), geometry.geohash()));
+        multiFields.parse(this, context, () -> parserContext);
+    }
+
+    /**
+     * Parser that pretends to be the main document parser, but exposes the provided geohash regardless of how the geopoint was provided
+     * in the incoming document. We rely on the fact that consumers are only ever call {@link XContentParser#textOrNull()} and never
+     * advance tokens, which is explicitly disallowed by this parser.
+     */
+    static class GeoHashMultiFieldParser extends FilterXContentParser {
+        private final String value;
+
+        GeoHashMultiFieldParser(XContentParser innerParser, String value) {
+            super(innerParser);
+            this.value = value;
+        }
+
+        @Override
+        public String textOrNull() throws IOException {
+            return value;
+        }
+
+        @Override
+        public Token currentToken() {
+            return Token.VALUE_STRING;
+        }
+
+        @Override
+        public Token nextToken() throws IOException {
+            throw new UnsupportedOperationException();
+        }
     }
 
     @Override

+ 180 - 58
server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java

@@ -29,6 +29,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.unit.Fuzziness;
+import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
@@ -38,6 +39,9 @@ import org.elasticsearch.index.codec.PerFieldMapperCodec;
 import org.elasticsearch.xcontent.ToXContent;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentLocation;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.hamcrest.FeatureMatcher;
 import org.hamcrest.Matcher;
@@ -45,7 +49,11 @@ import org.hamcrest.Matchers;
 import org.hamcrest.core.CombinableMatcher;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.function.Function;
 
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
@@ -135,7 +143,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testDefaultConfiguration() throws IOException {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
 
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
@@ -158,7 +165,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testCompletionAnalyzerSettings() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.field("analyzer", "simple");
@@ -192,7 +198,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
 
     @SuppressWarnings("unchecked")
     public void testTypeParsing() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.field("analyzer", "simple");
@@ -218,7 +223,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingMinimal() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -228,7 +232,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingFailure() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
 
         MapperParsingException e = expectThrows(
@@ -239,7 +242,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testKeywordWithSubCompletionAndContext() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "keyword");
             b.startObject("fields");
@@ -284,7 +286,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testCompletionWithContextAndSubCompletion() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.startArray("contexts");
@@ -366,8 +367,7 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         }
     }
 
-    public void testKeywordWithSubCompletionAndStringInsert() throws Exception {
-
+    public void testGeoHashWithSubCompletionAndStringInsert() throws Exception {
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "geo_point");
             b.startObject("fields");
@@ -386,13 +386,19 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         // unable to assert about geofield content, covered in a REST test
     }
 
-    public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exception {
+    public void testCompletionTypeWithSubfieldsAndStringInsert() throws Exception {
+        List<CheckedConsumer<XContentBuilder, IOException>> builders = new ArrayList<>();
+        builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("subsuggest1").field("type", "completion").endObject());
+        builders.add(b -> b.startObject("subsuggest2").field("type", "completion").endObject());
+        Collections.shuffle(builders, random());
 
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.startObject("fields");
-            {
-                b.startObject("subsuggest").field("type", "completion").endObject();
+            for (CheckedConsumer<XContentBuilder, IOException> builder : builders) {
+                builder.accept(b);
             }
             b.endObject();
         }));
@@ -401,42 +407,80 @@ public class CompletionFieldMapperTests extends MapperTestCase {
 
         LuceneDocument indexableFields = parsedDocument.rootDoc();
         assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion")));
-        assertThat(indexableFields.getFields("field.subsuggest"), arrayContainingInAnyOrder(suggestField("suggestion")));
+        assertThat(indexableFields.getFields("field.subsuggest1"), arrayContainingInAnyOrder(suggestField("suggestion")));
+        assertThat(indexableFields.getFields("field.subsuggest2"), arrayContainingInAnyOrder(suggestField("suggestion")));
+        assertThat(
+            indexableFields.getFields("field.analyzed1"),
+            arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion"))
+        );
+        assertThat(
+            indexableFields.getFields("field.analyzed2"),
+            arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion"))
+        );
     }
 
-    public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exception {
+    public void testCompletionTypeWithSubfieldsAndArrayInsert() throws Exception {
+        List<CheckedConsumer<XContentBuilder, IOException>> builders = new ArrayList<>();
+        builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject());
+        builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject());
+        Collections.shuffle(builders, random());
 
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.startObject("fields");
-            {
-                b.startObject("analyzed").field("type", "completion").endObject();
+            for (CheckedConsumer<XContentBuilder, IOException> builder : builders) {
+                builder.accept(b);
             }
             b.endObject();
         }));
 
-        ParsedDocument parsedDocument = defaultMapper.parse(source(b -> {
-            b.startObject("field");
-            {
-                b.array("input", "New York", "NY");
-                b.field("weight", 34);
-            }
-            b.endObject();
-        }));
+        ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.array("field", "New York", "NY")));
 
         LuceneDocument indexableFields = parsedDocument.rootDoc();
         assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")));
-        assertThat(indexableFields.getFields("field.analyzed"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")));
-        // unable to assert about weight, covered in a REST test
+        assertThat(
+            indexableFields.getFields("field.subcompletion1"),
+            arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))
+        );
+        assertThat(
+            indexableFields.getFields("field.subcompletion2"),
+            arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))
+        );
+        assertThat(
+            indexableFields.getFields("field.analyzed1"),
+            arrayContainingInAnyOrder(
+                keywordField("New York"),
+                sortedSetDocValuesField("New York"),
+                keywordField("NY"),
+                sortedSetDocValuesField("NY")
+            )
+        );
+        assertThat(
+            indexableFields.getFields("field.analyzed2"),
+            arrayContainingInAnyOrder(
+                keywordField("New York"),
+                sortedSetDocValuesField("New York"),
+                keywordField("NY"),
+                sortedSetDocValuesField("NY")
+            )
+        );
     }
 
-    public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Exception {
+    public void testCompletionTypeWithSubfieldsAndObjectInsert() throws Exception {
+        List<CheckedConsumer<XContentBuilder, IOException>> builders = new ArrayList<>();
+        builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject());
+        builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject());
+        builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject());
+        Collections.shuffle(builders, random());
 
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
             b.field("type", "completion");
             b.startObject("fields");
-            {
-                b.startObject("analyzed").field("type", "keyword").endObject();
+            for (CheckedConsumer<XContentBuilder, IOException> builder : builders) {
+                builder.accept(b);
             }
             b.endObject();
         }));
@@ -453,7 +497,15 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         LuceneDocument indexableFields = parsedDocument.rootDoc();
         assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")));
         assertThat(
-            indexableFields.getFields("field.analyzed"),
+            indexableFields.getFields("field.subcompletion1"),
+            arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))
+        );
+        assertThat(
+            indexableFields.getFields("field.subcompletion2"),
+            arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))
+        );
+        assertThat(
+            indexableFields.getFields("field.analyzed1"),
             arrayContainingInAnyOrder(
                 keywordField("New York"),
                 sortedSetDocValuesField("New York"),
@@ -461,32 +513,19 @@ public class CompletionFieldMapperTests extends MapperTestCase {
                 sortedSetDocValuesField("NY")
             )
         );
-        // unable to assert about weight, covered in a REST test
-    }
-
-    public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Exception {
-
-        DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> {
-            b.field("type", "completion");
-            b.startObject("fields");
-            {
-                b.startObject("analyzed").field("type", "keyword").endObject();
-            }
-            b.endObject();
-        }));
-
-        ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "suggestion")));
-
-        LuceneDocument indexableFields = parsedDocument.rootDoc();
-        assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion")));
         assertThat(
-            indexableFields.getFields("field.analyzed"),
-            arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion"))
+            indexableFields.getFields("field.analyzed2"),
+            arrayContainingInAnyOrder(
+                keywordField("New York"),
+                sortedSetDocValuesField("New York"),
+                keywordField("NY"),
+                sortedSetDocValuesField("NY")
+            )
         );
+        // unable to assert about weight, covered in a REST test
     }
 
     public void testParsingMultiValued() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -497,7 +536,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingWithWeight() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -515,7 +553,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingMultiValueWithWeight() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -536,7 +573,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingWithGeoFieldAlias() throws Exception {
-
         MapperService mapperService = createMapperService(mapping(b -> {
             b.startObject("completion");
             {
@@ -574,7 +610,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingFull() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -596,7 +631,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testParsingMixed() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
 
@@ -640,7 +674,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testNonContextEnabledParsingWithContexts() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         MapperParsingException e = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(source(b -> {
             b.startObject("field");
@@ -656,7 +689,6 @@ public class CompletionFieldMapperTests extends MapperTestCase {
     }
 
     public void testFieldValueValidation() throws Exception {
-
         DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping));
         CharsRefBuilder charsRefBuilder = new CharsRefBuilder();
         charsRefBuilder.append("sugg");
@@ -790,6 +822,96 @@ public class CompletionFieldMapperTests extends MapperTestCase {
         );
     }
 
+    private static CompletionFieldMapper.CompletionInputMetadata randomCompletionMetadata() {
+        Map<String, Set<String>> contexts = randomBoolean()
+            ? Collections.emptyMap()
+            : Collections.singletonMap("filter", Collections.singleton("value"));
+        return new CompletionFieldMapper.CompletionInputMetadata("text", contexts, 10);
+    }
+
+    private static XContentParser documentParser(CompletionFieldMapper.CompletionInputMetadata metadata) throws IOException {
+        XContentBuilder docBuilder = JsonXContent.contentBuilder();
+        if (randomBoolean()) {
+            docBuilder.prettyPrint();
+        }
+        docBuilder.startObject();
+        docBuilder.field("field");
+        docBuilder.map(metadata.toMap());
+        docBuilder.endObject();
+        String document = Strings.toString(docBuilder);
+        XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document);
+        docParser.nextToken();
+        docParser.nextToken();
+        assertEquals(XContentParser.Token.START_OBJECT, docParser.nextToken());
+        return docParser;
+    }
+
+    public void testMultiFieldParserSimpleValue() throws IOException {
+        CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata();
+        XContentParser documentParser = documentParser(metadata);
+        XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser(
+            metadata,
+            documentParser.currentName(),
+            documentParser.getTokenLocation()
+        );
+        // we don't check currentToken here because it returns START_OBJECT that is inconsistent with returning a value
+        assertEquals("text", multiFieldParser.textOrNull());
+        assertEquals(documentParser.getTokenLocation(), multiFieldParser.getTokenLocation());
+        assertEquals(documentParser.currentName(), multiFieldParser.currentName());
+    }
+
+    public void testMultiFieldParserCompletionSubfield() throws IOException {
+        CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata();
+        XContentParser documentParser = documentParser(metadata);
+        // compare the object structure with the original metadata, this implicitly verifies that the xcontent read is valid
+        XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder()
+            .copyCurrentStructure(
+                new CompletionFieldMapper.MultiFieldParser(metadata, documentParser.currentName(), documentParser.getTokenLocation())
+            );
+        XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap());
+        String jsonMetadata = Strings.toString(metadataBuilder);
+        assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder));
+        // advance token by token and verify currentName as well as getTokenLocation
+        XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser(
+            metadata,
+            documentParser.currentName(),
+            documentParser.getTokenLocation()
+        );
+        XContentParser expectedParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, jsonMetadata);
+        assertEquals(expectedParser.nextToken(), multiFieldParser.currentToken());
+        XContentLocation expectedTokenLocation = documentParser.getTokenLocation();
+        while (expectedParser.nextToken() != null) {
+            XContentParser.Token token = multiFieldParser.nextToken();
+            assertEquals(expectedParser.currentToken(), token);
+            assertEquals(expectedParser.currentToken(), multiFieldParser.currentToken());
+            assertEquals(expectedTokenLocation, multiFieldParser.getTokenLocation());
+            assertEquals(documentParser.nextToken(), multiFieldParser.currentToken());
+            assertEquals(documentParser.currentName(), multiFieldParser.currentName());
+        }
+        assertNull(multiFieldParser.nextToken());
+    }
+
+    public void testMultiFieldParserMixedSubfields() throws IOException {
+        CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata();
+        XContentParser documentParser = documentParser(metadata);
+        // simulate 10 sub-fields which may either read simple values or the full object structure
+        for (int i = 0; i < 10; i++) {
+            XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser(
+                metadata,
+                documentParser.currentName(),
+                documentParser.getTokenLocation()
+            );
+            if (randomBoolean()) {
+                assertEquals("text", multiFieldParser.textOrNull());
+            } else {
+                XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder().copyCurrentStructure(multiFieldParser);
+                XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap());
+                String jsonMetadata = Strings.toString(metadataBuilder);
+                assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder));
+            }
+        }
+    }
+
     private Matcher<IndexableField> suggestField(String value) {
         return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)), Matchers.instanceOf(SuggestField.class));
     }

+ 51 - 0
server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java

@@ -8,9 +8,13 @@
 package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.json.JsonXContent;
 import org.hamcrest.CoreMatchers;
 
 import java.io.IOException;
@@ -235,6 +239,53 @@ public class GeoPointFieldMapperTests extends MapperTestCase {
         assertThat(doc.getFields("field.geohash")[1].binaryValue().utf8ToString(), equalTo("s0fu7n0xng81"));
     }
 
+    public void testKeywordWithGeopointSubfield() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> {
+            b.field("type", "keyword").field("doc_values", false);
+            b.startObject("fields");
+            {
+                b.startObject("geopoint").field("type", "geo_point").field("doc_values", false).endObject();
+            }
+            b.endObject();
+        }));
+        LuceneDocument doc = mapper.parse(source(b -> b.array("field", "s093jd0k72s1"))).rootDoc();
+        assertThat(doc.getFields("field"), arrayWithSize(1));
+        assertEquals("s093jd0k72s1", doc.getFields("field")[0].binaryValue().utf8ToString());
+        assertThat(doc.getFields("field.geopoint"), arrayWithSize(1));
+        assertThat(doc.getField("field.geopoint"), hasToString(both(containsString("field.geopoint:2.999")).and(containsString("1.999"))));
+    }
+
+    private static XContentParser documentParser(String value, boolean prettyPrint) throws IOException {
+        XContentBuilder docBuilder = JsonXContent.contentBuilder();
+        if (prettyPrint) {
+            docBuilder.prettyPrint();
+        }
+        docBuilder.startObject();
+        docBuilder.field("field", value);
+        docBuilder.endObject();
+        String document = Strings.toString(docBuilder);
+        XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document);
+        docParser.nextToken();
+        docParser.nextToken();
+        assertEquals(XContentParser.Token.VALUE_STRING, docParser.nextToken());
+        return docParser;
+    }
+
+    public void testGeoHashMultiFieldParser() throws IOException {
+        boolean prettyPrint = randomBoolean();
+        XContentParser docParser = documentParser("POINT (2 3)", prettyPrint);
+        XContentParser expectedParser = documentParser("s093jd0k72s1", prettyPrint);
+        XContentParser parser = new GeoPointFieldMapper.GeoHashMultiFieldParser(docParser, "s093jd0k72s1");
+        for (int i = 0; i < 10; i++) {
+            assertEquals(expectedParser.currentToken(), parser.currentToken());
+            assertEquals(expectedParser.currentName(), parser.currentName());
+            assertEquals(expectedParser.getTokenLocation(), parser.getTokenLocation());
+            assertEquals(expectedParser.textOrNull(), parser.textOrNull());
+            expectThrows(UnsupportedOperationException.class, parser::nextToken);
+        }
+    }
+
     public void testNullValue() throws Exception {
         DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point")));
         Mapper fieldMapper = mapper.mappers().getMapper("field");