@@ -30,16 +30,24 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.loader.SettingsLoader;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.DocumentMapperParser;
 import org.elasticsearch.index.mapper.FieldMapper.Loading;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
-import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.isArray;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
 import static org.elasticsearch.index.mapper.FieldMapper.DOC_VALUES_FORMAT;
 
 /**
@@ -57,14 +65,17 @@ public class TypeParsers {
             List<AbstractFieldMapper.Builder> fields = null;
             String firstType = null;
 
-            for (Map.Entry<String, Object> entry : node.entrySet()) {
+            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
+                Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());
                 Object fieldNode = entry.getValue();
                 if (fieldName.equals("path")) {
                     pathType = parsePathType(name, fieldNode.toString());
+                    iterator.remove();
                 } else if (fieldName.equals("fields")) {
                     Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
-                    for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
+                    for (Iterator<Map.Entry<String, Object>> fieldsIterator = fieldsNode.entrySet().iterator(); fieldsIterator.hasNext();) {
+                        Map.Entry<String, Object> entry1 = fieldsIterator.next();
                         String propName = entry1.getKey();
                         Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();
 
@@ -85,13 +96,18 @@ public class TypeParsers {
                         }
                         if (propName.equals(name)) {
                             mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
+                            fieldsIterator.remove();
                         } else {
                             if (fields == null) {
                                 fields = new ArrayList<>(2);
                             }
                             fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
+                            fieldsIterator.remove();
                         }
                     }
+                    fieldsNode.remove("type");
+                    DocumentMapperParser.checkNoRemainingFields(fieldName, fieldsNode, parserContext.indexVersionCreated());
+                    iterator.remove();
                 }
             }
 
@@ -140,64 +156,88 @@ public class TypeParsers {
 
     public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
         parseField(builder, name, numberNode, parserContext);
-        for (Map.Entry<String, Object> entry : numberNode.entrySet()) {
+        for (Iterator<Map.Entry<String, Object>> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) {
+            Map.Entry<String, Object> entry = iterator.next();
             String propName = Strings.toUnderscoreCase(entry.getKey());
             Object propNode = entry.getValue();
             if (propName.equals("precision_step")) {
                 builder.precisionStep(nodeIntegerValue(propNode));
+                iterator.remove();
             } else if (propName.equals("ignore_malformed")) {
                 builder.ignoreMalformed(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("coerce")) {
                 builder.coerce(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("omit_norms")) {
                 builder.omitNorms(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("similarity")) {
                 builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
-            } else {
-                parseMultiField(builder, name, parserContext, propName, propNode);
+                iterator.remove();
+            } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
+                iterator.remove();
             }
         }
     }
 
     public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
-        for (Map.Entry<String, Object> entry : fieldNode.entrySet()) {
+        for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
+            Map.Entry<String, Object> entry = iterator.next();
             final String propName = Strings.toUnderscoreCase(entry.getKey());
             final Object propNode = entry.getValue();
             if (propName.equals("index_name")) {
                 builder.indexName(propNode.toString());
+                iterator.remove();
             } else if (propName.equals("store")) {
                 builder.store(parseStore(name, propNode.toString()));
+                iterator.remove();
             } else if (propName.equals("index")) {
                 parseIndex(name, propNode.toString(), builder);
+                iterator.remove();
             } else if (propName.equals("tokenized")) {
                 builder.tokenized(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals(DOC_VALUES)) {
                 builder.docValues(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("term_vector")) {
                 parseTermVector(name, propNode.toString(), builder);
+                iterator.remove();
             } else if (propName.equals("boost")) {
                 builder.boost(nodeFloatValue(propNode));
+                iterator.remove();
             } else if (propName.equals("store_term_vectors")) {
                 builder.storeTermVectors(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("store_term_vector_offsets")) {
                 builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("store_term_vector_positions")) {
                 builder.storeTermVectorPositions(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("store_term_vector_payloads")) {
                 builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("omit_norms")) {
                 builder.omitNorms(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("norms")) {
                 final Map<String, Object> properties = nodeMapValue(propNode, "norms");
-                for (Map.Entry<String, Object> entry2 : properties.entrySet()) {
+                for (Iterator<Entry<String, Object>> propsIterator = properties.entrySet().iterator(); propsIterator.hasNext();) {
+                    Entry<String, Object> entry2 = propsIterator.next();
                     final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
                     final Object propNode2 = entry2.getValue();
                     if (propName2.equals("enabled")) {
                         builder.omitNorms(!nodeBooleanValue(propNode2));
+                        propsIterator.remove();
                     } else if (propName2.equals(Loading.KEY)) {
                         builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
+                        propsIterator.remove();
                     }
                 }
+                DocumentMapperParser.checkNoRemainingFields(propName, properties, parserContext.indexVersionCreated());
+                iterator.remove();
             } else if (propName.equals("omit_term_freq_and_positions")) {
                 final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                 if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) {
@@ -205,8 +245,10 @@ public class TypeParsers {
                 }
                 // deprecated option for BW compat
                 builder.indexOptions(op);
+                iterator.remove();
             } else if (propName.equals("index_options")) {
                 builder.indexOptions(nodeIndexOptionValue(propNode));
+                iterator.remove();
             } else if (propName.equals("analyzer")) {
                 NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                 if (analyzer == null) {
@@ -214,40 +256,50 @@ public class TypeParsers {
                 }
                 builder.indexAnalyzer(analyzer);
                 builder.searchAnalyzer(analyzer);
+                iterator.remove();
             } else if (propName.equals("index_analyzer")) {
                 NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                 if (analyzer == null) {
                     throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                 }
                 builder.indexAnalyzer(analyzer);
+                iterator.remove();
             } else if (propName.equals("search_analyzer")) {
                 NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                 if (analyzer == null) {
                     throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                 }
                 builder.searchAnalyzer(analyzer);
+                iterator.remove();
             } else if (propName.equals("include_in_all")) {
                 builder.includeInAll(nodeBooleanValue(propNode));
+                iterator.remove();
             } else if (propName.equals("postings_format")) {
                 String postingFormatName = propNode.toString();
                 builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
+                iterator.remove();
             } else if (propName.equals(DOC_VALUES_FORMAT)) {
                 String docValuesFormatName = propNode.toString();
                 builder.docValuesFormat(parserContext.docValuesFormatService().get(docValuesFormatName));
+                iterator.remove();
             } else if (propName.equals("similarity")) {
                 builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
+                iterator.remove();
             } else if (propName.equals("fielddata")) {
                 final Settings settings = ImmutableSettings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
                 builder.fieldDataSettings(settings);
+                iterator.remove();
             } else if (propName.equals("copy_to")) {
                 parseCopyFields(propNode, builder);
+                iterator.remove();
             }
         }
     }
 
-    public static void parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
+    public static boolean parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
         if (propName.equals("path")) {
             builder.multiFieldPathType(parsePathType(name, propNode.toString()));
+            return true;
         } else if (propName.equals("fields")) {
 
             final Map<String, Object> multiFieldsPropNodes;
@@ -282,8 +334,12 @@ public class TypeParsers {
                     throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + multiFieldName + "]");
                 }
                 builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
+                multiFieldNodes.remove("type");
+                DocumentMapperParser.checkNoRemainingFields(propName, multiFieldNodes, parserContext.indexVersionCreated());
             }
+            return true;
         }
+        return false;
     }
 
     private static IndexOptions nodeIndexOptionValue(final Object propNode) {