- /*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
- package org.elasticsearch.index.mapper;
- import org.apache.lucene.analysis.MockSynonymAnalyzer;
- import org.apache.lucene.analysis.TokenStream;
- import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
- import org.apache.lucene.document.FieldType;
- import org.apache.lucene.index.DocValuesType;
- import org.apache.lucene.index.IndexOptions;
- import org.apache.lucene.index.IndexableField;
- import org.apache.lucene.index.IndexableFieldType;
- import org.apache.lucene.index.LeafReader;
- import org.apache.lucene.index.PostingsEnum;
- import org.apache.lucene.index.Term;
- import org.apache.lucene.index.TermsEnum;
- import org.apache.lucene.search.ConstantScoreQuery;
- import org.apache.lucene.search.MultiPhraseQuery;
- import org.apache.lucene.search.PhraseQuery;
- import org.apache.lucene.search.PrefixQuery;
- import org.apache.lucene.search.Query;
- import org.apache.lucene.search.TermQuery;
- import org.apache.lucene.util.BytesRef;
- import org.elasticsearch.Version;
- import org.elasticsearch.action.index.IndexRequest;
- import org.elasticsearch.common.Strings;
- import org.elasticsearch.common.bytes.BytesReference;
- import org.elasticsearch.common.compress.CompressedXContent;
- import org.elasticsearch.common.lucene.uid.Versions;
- import org.elasticsearch.common.settings.Settings;
- import org.elasticsearch.common.xcontent.ToXContent;
- import org.elasticsearch.common.xcontent.XContentBuilder;
- import org.elasticsearch.common.xcontent.XContentFactory;
- import org.elasticsearch.common.xcontent.XContentType;
- import org.elasticsearch.index.IndexService;
- import org.elasticsearch.index.VersionType;
- import org.elasticsearch.index.engine.Engine;
- import org.elasticsearch.index.mapper.MapperService.MergeReason;
- import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
- import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
- import org.elasticsearch.index.query.QueryShardContext;
- import org.elasticsearch.index.search.MatchQuery;
- import org.elasticsearch.index.seqno.SequenceNumbers;
- import org.elasticsearch.index.shard.IndexShard;
- import org.elasticsearch.plugins.Plugin;
- import org.elasticsearch.test.ESSingleNodeTestCase;
- import org.elasticsearch.test.InternalSettingsPlugin;
- import org.junit.Before;
- import java.io.IOException;
- import java.util.Arrays;
- import java.util.Collection;
- import java.util.Collections;
- import java.util.HashMap;
- import java.util.Map;
- import static org.apache.lucene.search.MultiTermQuery.CONSTANT_SCORE_REWRITE;
- import static org.hamcrest.Matchers.containsString;
- import static org.hamcrest.Matchers.equalTo;
- import static org.hamcrest.Matchers.instanceOf;
- import static org.hamcrest.core.Is.is;
- public class TextFieldMapperTests extends ESSingleNodeTestCase {
- IndexService indexService;
- DocumentMapperParser parser;
- @Before
- public void setup() {
- Settings settings = Settings.builder()
- // Stop filter remains in server as it is part of lucene-core
- .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
- .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
- .build();
- indexService = createIndex("test", settings);
- parser = indexService.mapperService().documentMapperParser();
- }
- @Override
- protected Collection<Class<? extends Plugin>> getPlugins() {
- return pluginList(InternalSettingsPlugin.class);
- }
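- // A bare "text" field should default to tokenized, norms enabled, positions indexed,
- // not stored, no term vectors and no doc values.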
- public void testDefaults() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "text").endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "1234")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(1, fields.length);
- assertEquals("1234", fields[0].stringValue());
- IndexableFieldType fieldType = fields[0].fieldType();
- assertThat(fieldType.omitNorms(), equalTo(false));
- assertTrue(fieldType.tokenized());
- assertFalse(fieldType.stored());
- assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
- assertThat(fieldType.storeTermVectors(), equalTo(false));
- assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
- assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
- assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
- assertEquals(DocValuesType.NONE, fieldType.docValuesType());
- }
- public void testEnableStore() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "text").field("store", true).endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "1234")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(1, fields.length);
- assertTrue(fields[0].fieldType().stored());
- }
- public void testDisableIndex() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "text").field("index", false).endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "1234")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(0, fields.length);
- }
- public void testDisableNorms() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("norms", false)
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "1234")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(1, fields.length);
- assertTrue(fields[0].fieldType().omitNorms());
- }
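- // Each supported index_options value should map to the corresponding Lucene IndexOptions constant.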
- public void testIndexOptions() throws IOException {
- Map<String, IndexOptions> supportedOptions = new HashMap<>();
- supportedOptions.put("docs", IndexOptions.DOCS);
- supportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS);
- supportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
- supportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
- XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties");
- for (String option : supportedOptions.keySet()) {
- mappingBuilder.startObject(option).field("type", "text").field("index_options", option).endObject();
- }
- String mapping = Strings.toString(mappingBuilder.endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- XContentBuilder jsonDoc = XContentFactory.jsonBuilder().startObject();
- for (String option : supportedOptions.keySet()) {
- jsonDoc.field(option, "1234");
- }
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()),
- XContentType.JSON));
- for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
- String field = entry.getKey();
- IndexOptions options = entry.getValue();
- IndexableField[] fields = doc.rootDoc().getFields(field);
- assertEquals(1, fields.length);
- assertEquals(options, fields[0].fieldType().indexOptions());
- }
- }
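- // Indexing a two-value array should leave the default position increment gap between the
- // last position of "a" and the first position of "b".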
- public void testDefaultPositionIncrementGap() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field").field("type", "text").endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = indexService.mapperService().merge("type",
- new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
- assertEquals(mapping, mapper.mappingSource().toString());
- SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .array("field", new String[] {"a", "b"})
- .endObject()),
- XContentType.JSON);
- ParsedDocument doc = mapper.parse(sourceToParse);
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(2, fields.length);
- assertEquals("a", fields[0].stringValue());
- assertEquals("b", fields[1].stringValue());
- IndexShard shard = indexService.getShard(0);
- shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
- sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
- shard.refresh("test");
- try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
- LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
- TermsEnum terms = leaf.terms("field").iterator();
- assertTrue(terms.seekExact(new BytesRef("b")));
- PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
- assertEquals(0, postings.nextDoc());
- assertEquals(TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + 1, postings.nextPosition());
- }
- }
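- // Same as above, but with an explicit, randomly chosen position_increment_gap.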
- public void testPositionIncrementGap() throws IOException {
- final int positionIncrementGap = randomIntBetween(1, 1000);
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("position_increment_gap", positionIncrementGap)
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = indexService.mapperService().merge("type",
- new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
- assertEquals(mapping, mapper.mappingSource().toString());
- SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .array("field", new String[]{"a", "b"})
- .endObject()),
- XContentType.JSON);
- ParsedDocument doc = mapper.parse(sourceToParse);
- IndexableField[] fields = doc.rootDoc().getFields("field");
- assertEquals(2, fields.length);
- assertEquals("a", fields[0].stringValue());
- assertEquals("b", fields[1].stringValue());
- IndexShard shard = indexService.getShard(0);
- shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL,
- sourceToParse, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false);
- shard.refresh("test");
- try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
- LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
- TermsEnum terms = leaf.terms("field").iterator();
- assertTrue(terms.seekExact(new BytesRef("b")));
- PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
- assertEquals(0, postings.nextDoc());
- assertEquals(positionIncrementGap + 1, postings.nextPosition());
- }
- }
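- // Round-trips mappings that set analyzer/search_analyzer and checks that include_defaults
- // exposes the analyzer, search_analyzer and search_quote_analyzer settings.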
- public void testSearchAnalyzerSerialization() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .field("search_analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- // special case: default index analyzer
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "default")
- .field("search_analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- // special case: default search analyzer
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "keyword")
- .field("search_analyzer", "default")
- .endObject()
- .endObject().endObject().endObject());
- mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- mapper = parser.parse("type", new CompressedXContent(mapping));
- XContentBuilder builder = XContentFactory.jsonBuilder();
- builder.startObject();
- mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")));
- builder.endObject();
- String mappingString = Strings.toString(builder);
- assertTrue(mappingString.contains("analyzer"));
- assertTrue(mappingString.contains("search_analyzer"));
- assertTrue(mappingString.contains("search_quote_analyzer"));
- }
- public void testSearchQuoteAnalyzerSerialization() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .field("search_analyzer", "standard")
- .field("search_quote_analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- // special case: default index/search analyzer
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "default")
- .field("search_analyzer", "default")
- .field("search_quote_analyzer", "keyword")
- .endObject()
- .endObject().endObject().endObject());
- mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- }
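- // Each term_vector option should enable exactly the expected combination of
- // vectors, offsets, positions and payloads on the indexed field.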
- public void testTermVectors() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field1")
- .field("type", "text")
- .field("term_vector", "no")
- .endObject()
- .startObject("field2")
- .field("type", "text")
- .field("term_vector", "yes")
- .endObject()
- .startObject("field3")
- .field("type", "text")
- .field("term_vector", "with_offsets")
- .endObject()
- .startObject("field4")
- .field("type", "text")
- .field("term_vector", "with_positions")
- .endObject()
- .startObject("field5")
- .field("type", "text")
- .field("term_vector", "with_positions_offsets")
- .endObject()
- .startObject("field6")
- .field("type", "text")
- .field("term_vector", "with_positions_offsets_payloads")
- .endObject()
- .endObject()
- .endObject().endObject());
- DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping));
- ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field1", "1234")
- .field("field2", "1234")
- .field("field3", "1234")
- .field("field4", "1234")
- .field("field5", "1234")
- .field("field6", "1234")
- .endObject()),
- XContentType.JSON));
- assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false));
- assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false));
- assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false));
- assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false));
- assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true));
- assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false));
- assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false));
- assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false));
- assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true));
- assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true));
- assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false));
- assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false));
- assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true));
- assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false));
- assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true));
- assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false));
- assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true));
- assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true));
- assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true));
- assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false));
- assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true));
- assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true));
- assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
- assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
- }
- public void testEagerGlobalOrdinals() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("eager_global_ordinals", true)
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
- assertTrue(fieldMapper.fieldType().eagerGlobalOrdinals());
- }
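- // Fielddata is disabled on text fields by default and can only be enabled on indexed fields.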
- public void testFielddata() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper disabledMapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, disabledMapper.mappingSource().toString());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
- FieldMapper fieldMapper = (FieldMapper) disabledMapper.mappers().getMapper("field");
- fieldMapper.fieldType().fielddataBuilder("test");
- });
- assertThat(e.getMessage(), containsString("Fielddata is disabled"));
- mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("fielddata", true)
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper enabledMapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, enabledMapper.mappingSource().toString());
- FieldMapper enabledFieldMapper = (FieldMapper) enabledMapper.mappers().getMapper("field");
- enabledFieldMapper.fieldType().fielddataBuilder("test"); // no exception this time
- String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index", false)
- .field("fielddata", true)
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(illegalMapping)));
- assertThat(ex.getMessage(), containsString("Cannot enable fielddata on a [text] field that is not indexed"));
- }
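- // fielddata_frequency_filter settings (min, min_segment_size) should be propagated to the
- // field type, with the max frequency defaulting to Integer.MAX_VALUE.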
- public void testFrequencyFilter() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("fielddata", true)
- .startObject("fielddata_frequency_filter")
- .field("min", 2d)
- .field("min_segment_size", 1000)
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- TextFieldMapper fieldMapper = (TextFieldMapper) mapper.mappers().getMapper("field");
- TextFieldType fieldType = fieldMapper.fieldType();
- assertThat(fieldType.fielddataMinFrequency(), equalTo(2d));
- assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE));
- assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000));
- }
- public void testNullConfigValuesFail() throws MapperParsingException, IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", (String) null)
- .endObject()
- .endObject()
- .endObject().endObject());
- Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("[analyzer] must not have a [null] value", e.getMessage());
- }
- public void testNotIndexedFieldPositionIncrement() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index", false)
- .field("position_increment_gap", 10)
- .endObject().endObject().endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage());
- }
- public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException {
- for (String indexOptions : Arrays.asList("docs", "freqs")) {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index_options", indexOptions)
- .field("position_increment_gap", 10)
- .endObject().endObject().endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(mapping)));
- assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage());
- }
- }
- public void testEmptyName() throws IOException {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
- .startObject("type")
- .startObject("properties")
- .startObject("")
- .field("type", "text")
- .endObject()
- .endObject()
- .endObject().endObject());
- // Empty name not allowed in index created after 5.0
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(mapping))
- );
- assertThat(e.getMessage(), containsString("name cannot be empty string"));
- }
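- // The hidden field._index_prefix subfield should derive its index options and term vector
- // settings from the parent field, with behaviour depending on the index creation version.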
- public void testIndexPrefixIndexTypes() throws IOException {
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .field("index_options", "offsets")
- .endObject().endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
- FieldType ft = prefix.fieldType;
- assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, ft.indexOptions());
- }
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .field("index_options", "freqs")
- .endObject().endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
- FieldType ft = prefix.fieldType;
- assertEquals(IndexOptions.DOCS, ft.indexOptions());
- assertFalse(ft.storeTermVectors());
- }
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .field("index_options", "positions")
- .endObject().endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
- FieldType ft = prefix.fieldType;
- if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
- assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
- } else {
- assertEquals(IndexOptions.DOCS, ft.indexOptions());
- }
- assertFalse(ft.storeTermVectors());
- }
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .field("term_vector", "with_positions_offsets")
- .endObject().endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
- FieldType ft = prefix.fieldType;
- if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
- assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
- } else {
- assertEquals(IndexOptions.DOCS, ft.indexOptions());
- }
- assertTrue(ft.storeTermVectorOffsets());
- }
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .field("term_vector", "with_positions")
- .endObject().endObject().endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- FieldMapper prefix = (FieldMapper) mapper.mappers().getMapper("field._index_prefix");
- FieldType ft = prefix.fieldType;
- if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) {
- assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions());
- } else {
- assertEquals(IndexOptions.DOCS, ft.indexOptions());
- }
- assertFalse(ft.storeTermVectorOffsets());
- }
- }
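- // With index_phrases enabled, exact phrase queries should be rewritten onto the field._index_phrase
- // shingle subfield, while sloppy phrases, single terms and phrases with gaps left by removed
- // stopwords should fall back to the main field. Also checks the index_phrases validation errors.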
- public void testFastPhraseMapping() throws IOException {
- QueryShardContext queryShardContext = indexService.newQueryShardContext(
- randomInt(20), null, () -> {
- throw new UnsupportedOperationException();
- }, null);
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties")
- .startObject("field")
- .field("type", "text")
- .field("analyzer", "my_stop_analyzer")
- .field("index_phrases", true)
- .endObject()
- .startObject("synfield")
- .field("type", "text")
- .field("analyzer", "standard") // will be replaced with MockSynonymAnalyzer
- .field("index_phrases", true)
- .endObject()
- .endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
- Query q = new MatchPhraseQueryBuilder("field", "two words").toQuery(queryShardContext);
- assertThat(q, is(new PhraseQuery("field._index_phrase", "two words")));
- Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext);
- assertThat(q2, is(new PhraseQuery("field._index_phrase", "three words", "words here")));
- Query q3 = new MatchPhraseQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext);
- assertThat(q3, is(new PhraseQuery(1, "field", "two", "words")));
- Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext);
- assertThat(q4, is(new TermQuery(new Term("field", "singleton"))));
- Query q5 = new MatchPhraseQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext);
- assertThat(q5,
- is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build()));
- MatchQuery matchQuery = new MatchQuery(queryShardContext);
- matchQuery.setAnalyzer(new MockSynonymAnalyzer());
- Query q6 = matchQuery.parse(MatchQuery.Type.PHRASE, "synfield", "motor dogs");
- assertThat(q6, is(new MultiPhraseQuery.Builder()
- .add(new Term[]{
- new Term("synfield._index_phrase", "motor dogs"),
- new Term("synfield._index_phrase", "motor dog")})
- .build()));
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "Some English text that is going to be very useful")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field._index_phrase");
- assertEquals(1, fields.length);
- try (TokenStream ts = fields[0].tokenStream(queryShardContext.getMapperService().indexAnalyzer(), null)) {
- CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
- ts.reset();
- assertTrue(ts.incrementToken());
- assertEquals("Some English", termAtt.toString());
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index", "false")
- .field("index_phrases", true)
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("Cannot set index_phrases on unindexed field [field]"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index_options", "freqs")
- .field("index_phrases", true)
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("Cannot set index_phrases on field [field] if positions are not enabled"));
- }
- }
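- // index_prefixes should create a field._index_prefix subfield that answers prefix queries for
- // prefixes within the configured min_chars/max_chars range; out-of-range prefixes fall back to a
- // PrefixQuery on the main field. Also checks validation of illegal index_prefixes configurations.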
- public void testIndexPrefixMapping() throws IOException {
- QueryShardContext queryShardContext = indexService.newQueryShardContext(
- randomInt(20), null, () -> {
- throw new UnsupportedOperationException();
- }, null);
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes")
- .field("min_chars", 1)
- .field("max_chars", 10)
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
- assertEquals(mapping, mapper.mappingSource().toString());
- assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=1:10"));
- FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
- MappedFieldType fieldType = fieldMapper.fieldType;
- Query q = fieldType.prefixQuery("goin", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertEquals(new ConstantScoreQuery(new TermQuery(new Term("field._index_prefix", "goin"))), q);
- q = fieldType.prefixQuery("internationalisatio", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertEquals(new PrefixQuery(new Term("field", "internationalisatio")), q);
- ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
- .bytes(XContentFactory.jsonBuilder()
- .startObject()
- .field("field", "Some English text that is going to be very useful")
- .endObject()),
- XContentType.JSON));
- IndexableField[] fields = doc.rootDoc().getFields("field._index_prefix");
- assertEquals(1, fields.length);
- }
- {
- String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes").endObject()
- .endObject().endObject()
- .endObject().endObject());
- CompressedXContent json = new CompressedXContent(mapping);
- DocumentMapper mapper = parser.parse("type", json);
- FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field");
- MappedFieldType fieldType = fieldMapper.fieldType;
- Query q1 = fieldType.prefixQuery("g", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertThat(q1, instanceOf(PrefixQuery.class));
- Query q2 = fieldType.prefixQuery("go", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertThat(q2, instanceOf(ConstantScoreQuery.class));
- Query q5 = fieldType.prefixQuery("going", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertThat(q5, instanceOf(ConstantScoreQuery.class));
- Query q6 = fieldType.prefixQuery("goings", CONSTANT_SCORE_REWRITE, queryShardContext);
- assertThat(q6, instanceOf(PrefixQuery.class));
- }
- {
- String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes")
- .field("min_chars", 1)
- .field("max_chars", 10)
- .endObject()
- .startObject("fields")
- .startObject("_index_prefix").field("type", "text").endObject()
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
- indexService.mapperService()
- .merge("type", new CompressedXContent(illegalMapping), MergeReason.MAPPING_UPDATE);
- });
- assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined twice in [type]"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes")
- .field("min_chars", 11)
- .field("max_chars", 10)
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("min_chars [11] must be less than max_chars [10]"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes")
- .field("min_chars", 0)
- .field("max_chars", 10)
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("min_chars [0] must be greater than zero"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .startObject("index_prefixes")
- .field("min_chars", 1)
- .field("max_chars", 25)
- .endObject()
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("max_chars [25] must be less than 20"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("analyzer", "standard")
- .field("index_prefixes", (String) null)
- .endObject().endObject()
- .endObject().endObject());
- MapperParsingException e = expectThrows(MapperParsingException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("[index_prefixes] must not have a [null] value"));
- }
- {
- String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
- .startObject("properties").startObject("field")
- .field("type", "text")
- .field("index", "false")
- .startObject("index_prefixes").endObject()
- .endObject().endObject()
- .endObject().endObject());
- IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
- () -> parser.parse("type", new CompressedXContent(badConfigMapping))
- );
- assertThat(e.getMessage(), containsString("Cannot set index_prefixes on unindexed field [field]"));
- }
- }
- }