
Add ability to index prefixes on text fields (#28290)

This adds the ability to index term prefixes into a hidden subfield, enabling prefix queries to be run without multi-term query rewrites. The subfield reuses the analysis chain of its parent text field, appending an EdgeNGramTokenFilter, and can be configured with minimum and maximum ngram lengths. Query terms whose lengths fall outside this min-max range fall back to prefix queries against the parent text field.

The mapping looks like this:

"my_text_field" : {
"type" : "text",
"analyzer" : "english",
"index_prefix" : { "min_chars" : 1, "max_chars" : 10 }
}
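
For illustration only (not part of the commit), a `query_string` search with a trailing wildcard, the pattern exercised by the REST test included in this change, would then be answered from the hidden `my_text_field._index_prefix` subfield, since the four-character prefix falls inside the configured 1-10 range. The index name `my_index` is assumed:

GET my_index/_search
{
    "query" : {
        "query_string" : {
            "default_field" : "my_text_field",
            "query" : "shor*"
        }
    }
}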

Relates to #27049
Alan Woodward · 7 years ago
commit 424ecb3c7d

+ 36 - 0
docs/reference/mapping/types/text.asciidoc

@@ -89,6 +89,13 @@ The following parameters are accepted by `text` fields:
     What information should be stored in the index, for search and highlighting purposes.
     Defaults to `positions`.
 
+<<index-prefix-config,`index_prefix`>>::
+
+    If enabled, term prefixes of between 2 and 5 characters are indexed into a
+    separate field. This allows prefix searches to run more efficiently, at
+    the expense of a larger index. Accepts an
+    <<index-prefix-config,`index_prefix` configuration block>>.
+
 <<norms,`norms`>>::
 
     Whether field-length should be taken into account when scoring queries.
@@ -128,3 +135,32 @@ The following parameters are accepted by `text` fields:
     Whether term vectors should be stored for an <<mapping-index,`analyzed`>>
     field. Defaults to `no`.
 
+[[index-prefix-config]]
+==== Index Prefix configuration
+
+Text fields may also index term prefixes to speed up prefix searches. The `index_prefix`
+parameter is configured as shown below. Either or both of `min_chars` and `max_chars` may be omitted;
+both values are treated as inclusive.
+
+[source,js]
+--------------------------------
+PUT my_index
+{
+  "mappings": {
+    "_doc": {
+      "properties": {
+        "full_name": {
+          "type":  "text",
+          "index_prefix" : {
+            "min_chars" : 1,    <1>
+            "max_chars" : 10    <2>
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------------
+// CONSOLE
+<1> `min_chars` must be greater than zero, defaults to 2
+<2> `max_chars` must be greater than or equal to `min_chars` and less than 20, defaults to 5
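
For illustration (an editorial sketch, not part of the diff), a prefix search against the `full_name` field defined above could look like the request below. Assuming the standard `prefix` query resolves through the field type's `prefixQuery` method that this commit overrides, the nine-character term sits inside the configured 1-10 range and is answered from the hidden `full_name._index_prefix` subfield; terms longer than `max_chars` would fall back to an ordinary prefix query on `full_name`:

[source,js]
--------------------------------
GET my_index/_search
{
  "query": {
    "prefix" : { "full_name" : "elizabeth" }
  }
}
--------------------------------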

+ 59 - 0
rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml

@@ -0,0 +1,59 @@
+---
+"search with index prefixes":
+  - skip:
+      version: " - 6.99.99"
+      reason: index_prefix is only available as of 6.3.0
+  - do:
+      indices.create:
+        index:  test
+        body:
+          mappings:
+            test:
+              properties:
+                text:
+                  type: text
+                  index_prefix:
+                    min_chars: 1
+                    max_chars: 10
+
+  - do:
+      index:
+          index:  test
+          type:   test
+          id:     1
+          body:   { text: some short words and a stupendously long one }
+
+  - do:
+      indices.refresh:
+        index: [test]
+
+  - do:
+      search:
+        index: test
+        q: shor*
+        df: text
+
+  - match: {hits.total: 1}
+  - match: {hits.hits.0._score: 1}
+
+  - do:
+      search:
+        index: test
+        body:
+          query:
+            query_string:
+              default_field: text
+              query: shor*
+              boost: 2
+
+  - match: {hits.total: 1}
+  - match: {hits.hits.0._score: 2}
+
+  - do:
+      search:
+        index: test
+        q: stupendousl*
+        df: text
+
+  - match: {hits.total: 1}
+  - match: {hits.hits.0._score: 1}

+ 201 - 2
server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java

@@ -19,22 +19,31 @@
 
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.AnalyzerWrapper;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.NormsFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -52,6 +61,8 @@ public class TextFieldMapper extends FieldMapper {
         public static final double FIELDDATA_MIN_FREQUENCY = 0;
         public static final double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE;
         public static final int FIELDDATA_MIN_SEGMENT_SIZE = 0;
+        public static final int INDEX_PREFIX_MIN_CHARS = 2;
+        public static final int INDEX_PREFIX_MAX_CHARS = 5;
 
         public static final MappedFieldType FIELD_TYPE = new TextFieldType();
 
@@ -69,6 +80,7 @@ public class TextFieldMapper extends FieldMapper {
     public static class Builder extends FieldMapper.Builder<Builder, TextFieldMapper> {
 
         private int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER;
+        private PrefixFieldType prefixFieldType;
 
         public Builder(String name) {
             super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
@@ -113,6 +125,21 @@ public class TextFieldMapper extends FieldMapper {
             return builder;
         }
 
+        public Builder indexPrefixes(int minChars, int maxChars) {
+            if (minChars > maxChars) {
+                throw new IllegalArgumentException("min_chars [" + minChars + "] must be less than max_chars [" + maxChars + "]");
+            }
+            if (minChars < 1) {
+                throw new IllegalArgumentException("min_chars [" + minChars + "] must be greater than zero");
+            }
+            if (maxChars >= 20) {
+                throw new IllegalArgumentException("max_chars [" + maxChars + "] must be less than 20");
+            }
+            this.prefixFieldType = new PrefixFieldType(name() + "._index_prefix", minChars, maxChars);
+            fieldType().setPrefixFieldType(this.prefixFieldType);
+            return this;
+        }
+
         @Override
         public TextFieldMapper build(BuilderContext context) {
             if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
@@ -125,8 +152,13 @@ public class TextFieldMapper extends FieldMapper {
                 fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap));
             }
             setupFieldType(context);
+            if (prefixFieldType != null && fieldType().isSearchable() == false) {
+                throw new IllegalArgumentException("Cannot set index_prefix on unindexed field [" + name() + "]");
+            }
+            PrefixFieldMapper prefixMapper = prefixFieldType == null ? null
+                : new PrefixFieldMapper(prefixFieldType.setAnalyzer(fieldType.indexAnalyzer()), context.indexSettings());
             return new TextFieldMapper(
-                    name, fieldType, defaultFieldType, positionIncrementGap,
+                    name, fieldType, defaultFieldType, positionIncrementGap, prefixMapper,
                     context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
         }
     }
@@ -161,18 +193,143 @@ public class TextFieldMapper extends FieldMapper {
                     builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
                     DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
                     iterator.remove();
+                } else if (propName.equals("index_prefix")) {
+                    Map<?, ?> indexPrefix = (Map<?, ?>) propNode;
+                    int minChars = XContentMapValues.nodeIntegerValue(indexPrefix.remove("min_chars"),
+                        Defaults.INDEX_PREFIX_MIN_CHARS);
+                    int maxChars = XContentMapValues.nodeIntegerValue(indexPrefix.remove("max_chars"),
+                        Defaults.INDEX_PREFIX_MAX_CHARS);
+                    builder.indexPrefixes(minChars, maxChars);
+                    DocumentMapperParser.checkNoRemainingFields(propName, indexPrefix, parserContext.indexVersionCreated());
+                    iterator.remove();
                 }
             }
             return builder;
         }
     }
 
+    private static class PrefixWrappedAnalyzer extends AnalyzerWrapper {
+
+        private final int minChars;
+        private final int maxChars;
+        private final Analyzer delegate;
+
+        PrefixWrappedAnalyzer(Analyzer delegate, int minChars, int maxChars) {
+            super(delegate.getReuseStrategy());
+            this.delegate = delegate;
+            this.minChars = minChars;
+            this.maxChars = maxChars;
+        }
+
+        @Override
+        protected Analyzer getWrappedAnalyzer(String fieldName) {
+            return delegate;
+        }
+
+        @Override
+        protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
+            TokenFilter filter = new EdgeNGramTokenFilter(components.getTokenStream(), minChars, maxChars);
+            return new TokenStreamComponents(components.getTokenizer(), filter);
+        }
+    }
+
+    private static final class PrefixFieldType extends StringFieldType {
+
+        final int minChars;
+        final int maxChars;
+
+        PrefixFieldType(String name, int minChars, int maxChars) {
+            setTokenized(true);
+            setOmitNorms(true);
+            setIndexOptions(IndexOptions.DOCS);
+            setName(name);
+            this.minChars = minChars;
+            this.maxChars = maxChars;
+        }
+
+        PrefixFieldType setAnalyzer(NamedAnalyzer delegate) {
+            setIndexAnalyzer(new NamedAnalyzer(delegate.name(), AnalyzerScope.INDEX,
+                new PrefixWrappedAnalyzer(delegate.analyzer(), minChars, maxChars)));
+            return this;
+        }
+
+        boolean accept(int length) {
+            return length >= minChars && length <= maxChars;
+        }
+
+        void doXContent(XContentBuilder builder) throws IOException {
+            builder.startObject("index_prefix");
+            builder.field("min_chars", minChars);
+            builder.field("max_chars", maxChars);
+            builder.endObject();
+        }
+
+        @Override
+        public MappedFieldType clone() {
+            return new PrefixFieldType(name(), minChars, maxChars);
+        }
+
+        @Override
+        public String typeName() {
+            return "prefix";
+        }
+
+        @Override
+        public String toString() {
+            return super.toString() + ",prefixChars=" + minChars + ":" + maxChars;
+        }
+
+        @Override
+        public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
+            super.checkCompatibility(other, conflicts);
+            PrefixFieldType otherFieldType = (PrefixFieldType) other;
+            if (otherFieldType.minChars != this.minChars) {
+                conflicts.add("mapper [" + name() + "] has different min_chars values");
+            }
+            if (otherFieldType.maxChars != this.maxChars) {
+                conflicts.add("mapper [" + name() + "] has different max_chars values");
+            }
+        }
+
+        @Override
+        public Query existsQuery(QueryShardContext context) {
+            throw new UnsupportedOperationException();
+        }
+    }
+
+    private static final class PrefixFieldMapper extends FieldMapper {
+
+        protected PrefixFieldMapper(PrefixFieldType fieldType, Settings indexSettings) {
+            super(fieldType.name(), fieldType, fieldType, indexSettings, MultiFields.empty(), CopyTo.empty());
+        }
+
+        void addField(String value, List<IndexableField> fields) {
+            fields.add(new Field(fieldType().name(), value, fieldType()));
+        }
+
+        @Override
+        protected void parseCreateField(ParseContext context, List<IndexableField> fields) {
+            throw new UnsupportedOperationException();
+        }
+
+        @Override
+        protected String contentType() {
+            return "prefix";
+        }
+
+        @Override
+        public String toString() {
+            return fieldType().toString();
+        }
+    }
+
     public static final class TextFieldType extends StringFieldType {
 
         private boolean fielddata;
         private double fielddataMinFrequency;
         private double fielddataMaxFrequency;
         private int fielddataMinSegmentSize;
+        private PrefixFieldType prefixFieldType;
 
         public TextFieldType() {
             setTokenized(true);
@@ -248,11 +405,29 @@ public class TextFieldMapper extends FieldMapper {
             this.fielddataMinSegmentSize = fielddataMinSegmentSize;
         }
 
+        void setPrefixFieldType(PrefixFieldType prefixFieldType) {
+            checkIfFrozen();
+            this.prefixFieldType = prefixFieldType;
+        }
+
         @Override
         public String typeName() {
             return CONTENT_TYPE;
         }
 
+        @Override
+        public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) {
+            if (prefixFieldType == null || prefixFieldType.accept(value.length()) == false) {
+                return super.prefixQuery(value, method, context);
+            }
+            Query tq = prefixFieldType.termQuery(value, context);
+            if (method == null || method == MultiTermQuery.CONSTANT_SCORE_REWRITE
+                || method == MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE) {
+                return new ConstantScoreQuery(tq);
+            }
+            return tq;
+        }
+
         @Override
         public Query existsQuery(QueryShardContext context) {
             if (omitNorms()) {
@@ -282,9 +457,10 @@ public class TextFieldMapper extends FieldMapper {
     }
 
     private int positionIncrementGap;
+    private PrefixFieldMapper prefixFieldMapper;
 
     protected TextFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
-                                int positionIncrementGap,
+                                int positionIncrementGap, PrefixFieldMapper prefixFieldMapper,
                                 Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
         assert fieldType.tokenized();
@@ -293,6 +469,7 @@ public class TextFieldMapper extends FieldMapper {
             throw new IllegalArgumentException("Cannot enable fielddata on a [text] field that is not indexed: [" + name() + "]");
         }
         this.positionIncrementGap = positionIncrementGap;
+        this.prefixFieldMapper = prefixFieldMapper;
     }
 
     @Override
@@ -323,7 +500,18 @@ public class TextFieldMapper extends FieldMapper {
             if (fieldType().omitNorms()) {
                 createFieldNamesField(context, fields);
             }
+            if (prefixFieldMapper != null) {
+                prefixFieldMapper.addField(value, fields);
+            }
+        }
+    }
+
+    @Override
+    public Iterator<Mapper> iterator() {
+        if (prefixFieldMapper == null) {
+            return super.iterator();
         }
+        return Iterators.concat(super.iterator(), Collections.singleton(prefixFieldMapper).iterator());
     }
 
     @Override
@@ -334,6 +522,14 @@ public class TextFieldMapper extends FieldMapper {
     @Override
     protected void doMerge(Mapper mergeWith) {
         super.doMerge(mergeWith);
+        TextFieldMapper mw = (TextFieldMapper) mergeWith;
+        if (this.prefixFieldMapper != null && mw.prefixFieldMapper != null) {
+            this.prefixFieldMapper = (PrefixFieldMapper) this.prefixFieldMapper.merge(mw.prefixFieldMapper);
+        } else if (this.prefixFieldMapper != null || mw.prefixFieldMapper != null) {
+            throw new IllegalArgumentException("mapper [" + name() + "] has different index_prefix settings, current ["
+                + this.prefixFieldMapper + "], merged [" + mw.prefixFieldMapper + "]");
+        }
     }
 
     @Override
@@ -371,5 +567,8 @@ public class TextFieldMapper extends FieldMapper {
                 builder.endObject();
             }
         }
+        if (fieldType().prefixFieldType != null) {
+            fieldType().prefixFieldType.doXContent(builder);
+        }
     }
 }
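
As a rough sketch of what the `PrefixWrappedAnalyzer` above contributes (an editorial illustration, not part of the diff): appending an `edge_ngram` token filter to an analysis chain expands each token into its prefixes. The `_analyze` request below approximates this with a transient `standard` + `lowercase` chain rather than the parent field's actual analyzer, so the exact terms may differ from what the hidden subfield would index:

[source,js]
--------------------------------
POST _analyze
{
  "tokenizer": "standard",
  "filter": [
    "lowercase",
    { "type": "edge_ngram", "min_gram": 1, "max_gram": 10 }
  ],
  "text": "going"
}
--------------------------------

This returns the tokens `g`, `go`, `goi`, `goin`, and `going`; terms of that shape are what the `._index_prefix` field stores, which is why a prefix such as `goin` can be answered with a plain term query instead of a multi-term rewrite.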

+ 197 - 0
server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java

@@ -25,7 +25,13 @@ import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.IndexableFieldType;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -39,6 +45,7 @@ import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -52,8 +59,10 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class TextFieldMapperTests extends ESSingleNodeTestCase {
 
@@ -584,4 +593,192 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
         );
         assertThat(e.getMessage(), containsString("name cannot be empty string"));
     }
+
+    public void testIndexPrefixMapping() throws IOException {
+
+        QueryShardContext queryShardContext = indexService.newQueryShardContext(
+            randomInt(20), null, () -> {
+                throw new UnsupportedOperationException();
+            }, null);
+
+        {
+            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 1)
+                .field("max_chars", 10)
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+
+            DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+            assertEquals(mapping, mapper.mappingSource().toString());
+
+            assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=1:10"));
+
+            Query q = mapper.mappers().getMapper("field").fieldType().prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q);
+            q = mapper.mappers().getMapper("field").fieldType().prefixQuery("internationalisatio",
+                CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q);
+
+            ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("field", "Some English text that is going to be very useful")
+                    .endObject()
+                    .bytes(),
+                XContentType.JSON));
+
+            IndexableField[] fields = doc.rootDoc().getFields("field._index_prefix");
+            assertEquals(1, fields.length);
+        }
+
+        {
+            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix").endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+            CompressedXContent json = new CompressedXContent(mapping);
+            DocumentMapper mapper = parser.parse("type", json);
+
+            Query q1 = mapper.mappers().getMapper("field").fieldType().prefixQuery("g",
+                CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertThat(q1, instanceOf(PrefixQuery.class));
+            Query q2 = mapper.mappers().getMapper("field").fieldType().prefixQuery("go",
+                CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertThat(q2, instanceOf(ConstantScoreQuery.class));
+            Query q5 = mapper.mappers().getMapper("field").fieldType().prefixQuery("going",
+                CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertThat(q5, instanceOf(ConstantScoreQuery.class));
+            Query q6 = mapper.mappers().getMapper("field").fieldType().prefixQuery("goings",
+                CONSTANT_SCORE_REWRITE, queryShardContext);
+            assertThat(q6, instanceOf(PrefixQuery.class));
+
+            indexService.mapperService().merge("type", json, MergeReason.MAPPING_UPDATE);
+
+            String badUpdate = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 1)
+                .field("max_chars", 10)
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
+                indexService.mapperService()
+                    .merge("type", new CompressedXContent(badUpdate), MergeReason.MAPPING_UPDATE);
+            });
+            assertThat(e.getMessage(), containsString("mapper [field._index_prefix] has different min_chars values"));
+        }
+
+        {
+            String illegalMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 1)
+                .field("max_chars", 10)
+                .endObject()
+                .startObject("fields")
+                .startObject("_index_prefix").field("type", "text").endObject()
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
+                indexService.mapperService()
+                    .merge("type", new CompressedXContent(illegalMapping), MergeReason.MAPPING_UPDATE);
+            });
+            assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice in [type]"));
+
+        }
+
+        {
+            String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 11)
+                .field("max_chars", 10)
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(badConfigMapping))
+            );
+            assertThat(e.getMessage(), containsString("min_chars [11] must be less than max_chars [10]"));
+        }
+
+        {
+            String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 0)
+                .field("max_chars", 10)
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(badConfigMapping))
+            );
+            assertThat(e.getMessage(), containsString("min_chars [0] must be greater than zero"));
+        }
+
+        {
+            String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .startObject("index_prefix")
+                .field("min_chars", 1)
+                .field("max_chars", 25)
+                .endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(badConfigMapping))
+            );
+            assertThat(e.getMessage(), containsString("max_chars [25] must be less than 20"));
+        }
+
+        {
+            String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("analyzer", "english")
+                .field("index_prefix", (String) null)
+                .endObject().endObject()
+                .endObject().endObject().string();
+            MapperParsingException e = expectThrows(MapperParsingException.class,
+                () -> parser.parse("type", new CompressedXContent(badConfigMapping))
+            );
+            assertThat(e.getMessage(), containsString("[index_prefix] must not have a [null] value"));
+        }
+
+        {
+            String badConfigMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", "text")
+                .field("index", "false")
+                .startObject("index_prefix").endObject()
+                .endObject().endObject()
+                .endObject().endObject().string();
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(badConfigMapping))
+            );
+            assertThat(e.getMessage(), containsString("Cannot set index_prefix on unindexed field [field]"));
+        }
+    }
 }