Browse Source

Avoid stack overflow while parsing mapping (#95705)

Parsing a hugely nested mapping can lead to stack overflow errors.
This PR introduces a protection against this by tracking the depth level
while parsing an object in the mapping, and throwing an error if the max depth
level is exceeded. To do this, we use a new mappingParserContext object
to parse every new mapping, and not reuse the same mappingParserContext.

Currently we do have a protection against super deep objects, introduced
in #90285. But this protection is happening during dynamic mapping
update, when the mapping is already parsed. To avoid stack overflow
during parsing, we need this change as well.

Closes #52098
Mayya Sharipova 2 years ago
parent
commit
5bc7d264ed

+ 6 - 0
docs/changelog/95705.yaml

@@ -0,0 +1,6 @@
+pr: 95705
+summary: Avoid stack overflow while parsing mapping
+area: Mapping
+type: bug
+issues:
+ - 52098

+ 7 - 7
server/src/main/java/org/elasticsearch/index/mapper/MapperService.java

@@ -121,8 +121,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     private final DocumentParser documentParser;
     private final Version indexVersionCreated;
     private final MapperRegistry mapperRegistry;
-    private final MappingParserContext mappingParserContext;
-
+    private final Supplier<MappingParserContext> mappingParserContextSupplier;
     private volatile DocumentMapper mapper;
 
     public MapperService(
@@ -164,7 +163,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         this.indexVersionCreated = indexSettings.getIndexVersionCreated();
         this.indexAnalyzers = indexAnalyzers;
         this.mapperRegistry = mapperRegistry;
-        this.mappingParserContext = new MappingParserContext(
+        this.mappingParserContextSupplier = () -> new MappingParserContext(
             similarityService::getSimilarity,
             type -> mapperRegistry.getMapperParser(type, indexVersionCreated),
             mapperRegistry.getRuntimeFieldParsers()::get,
@@ -176,12 +175,12 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             indexSettings,
             idFieldMapper
         );
-        this.documentParser = new DocumentParser(parserConfiguration, this.mappingParserContext);
+        this.documentParser = new DocumentParser(parserConfiguration, this.mappingParserContextSupplier.get());
         Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
             indexSettings.getIndexVersionCreated()
         );
         this.mappingParser = new MappingParser(
-            mappingParserContext,
+            mappingParserContextSupplier,
             metadataMapperParsers,
             this::getMetadataMappers,
             this::resolveDocumentType
@@ -197,7 +196,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     }
 
     public MappingParserContext parserContext() {
-        return this.mappingParserContext;
+        return mappingParserContextSupplier.get();
     }
 
     /**
@@ -209,6 +208,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
     }
 
     Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> getMetadataMappers() {
+        final MappingParserContext mappingParserContext = parserContext();
         final DocumentMapper existingMapper = mapper;
         final Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
             indexSettings.getIndexVersionCreated()
@@ -216,7 +216,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
         if (existingMapper == null) {
             for (MetadataFieldMapper.TypeParser parser : metadataMapperParsers.values()) {
-                MetadataFieldMapper metadataFieldMapper = parser.getDefault(parserContext());
+                MetadataFieldMapper metadataFieldMapper = parser.getDefault(mappingParserContext);
                 // A MetadataFieldMapper may choose to not be added to the metadata mappers
                 // of an index (eg TimeSeriesIdFieldMapper is only added to time series indices)
                 // In this case its TypeParser will return null instead of the MetadataFieldMapper

+ 7 - 6
server/src/main/java/org/elasticsearch/index/mapper/MappingParser.java

@@ -25,18 +25,18 @@ import java.util.function.Supplier;
  * Parser for {@link Mapping} provided in {@link CompressedXContent} format
  */
 public final class MappingParser {
-    private final MappingParserContext mappingParserContext;
+    private final Supplier<MappingParserContext> mappingParserContextSupplier;
     private final Supplier<Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper>> metadataMappersSupplier;
     private final Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers;
     private final Function<String, String> documentTypeResolver;
 
     MappingParser(
-        MappingParserContext mappingParserContext,
+        Supplier<MappingParserContext> mappingParserContextSupplier,
         Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers,
         Supplier<Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper>> metadataMappersSupplier,
         Function<String, String> documentTypeResolver
     ) {
-        this.mappingParserContext = mappingParserContext;
+        this.mappingParserContextSupplier = mappingParserContextSupplier;
         this.metadataMapperParsers = metadataMapperParsers;
         this.metadataMappersSupplier = metadataMappersSupplier;
         this.documentTypeResolver = documentTypeResolver;
@@ -97,8 +97,9 @@ public final class MappingParser {
     }
 
     private Mapping parse(String type, Map<String, Object> mapping) throws MapperParsingException {
+        final MappingParserContext mappingParserContext = mappingParserContextSupplier.get();
 
-        RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mapping, this.mappingParserContext);
+        RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mapping, mappingParserContext);
 
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get();
         Map<String, Object> meta = null;
@@ -118,7 +119,7 @@ public final class MappingParser {
                 }
                 @SuppressWarnings("unchecked")
                 Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
-                MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, this.mappingParserContext)
+                MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, mappingParserContext)
                     .build(MapperBuilderContext.forMetadata());
                 metadataMappers.put(metadataFieldMapper.getClass(), metadataFieldMapper);
                 assert fieldNodeMap.isEmpty();
@@ -147,7 +148,7 @@ public final class MappingParser {
              */
             meta = Collections.unmodifiableMap(new HashMap<>(removed));
         }
-        if (this.mappingParserContext.indexVersionCreated().isLegacyIndexVersion() == false) {
+        if (mappingParserContext.indexVersionCreated().isLegacyIndexVersion() == false) {
             // legacy indices are allowed to have extra definitions that we ignore (we will drop them on import)
             checkNoRemainingFields(mapping, "Root mapping definition has unsupported parameters: ");
         }

+ 14 - 0
server/src/main/java/org/elasticsearch/index/mapper/MappingParserContext.java

@@ -37,6 +37,8 @@ public class MappingParserContext {
     private final IndexAnalyzers indexAnalyzers;
     private final IndexSettings indexSettings;
     private final IdFieldMapper idFieldMapper;
+    private final long mappingObjectDepthLimit;
+    private long mappingObjectDepth = 0;
 
     public MappingParserContext(
         Function<String, SimilarityProvider> similarityLookupService,
@@ -60,6 +62,7 @@ public class MappingParserContext {
         this.indexAnalyzers = indexAnalyzers;
         this.indexSettings = indexSettings;
         this.idFieldMapper = idFieldMapper;
+        this.mappingObjectDepthLimit = indexSettings.getMappingDepthLimit();
     }
 
     public IndexAnalyzers getIndexAnalyzers() {
@@ -129,6 +132,17 @@ public class MappingParserContext {
         return scriptCompiler;
     }
 
+    void incrementMappingObjectDepth() throws MapperParsingException {
+        mappingObjectDepth++;
+        if (mappingObjectDepth > mappingObjectDepthLimit) {
+            throw new MapperParsingException("Limit of mapping depth [" + mappingObjectDepthLimit + "] has been exceeded");
+        }
+    }
+
+    void decrementMappingObjectDepth() throws MapperParsingException {
+        mappingObjectDepth--;
+    }
+
     public MappingParserContext createMultiFieldContext() {
         return new MultiFieldParserContext(this);
     }

+ 2 - 1
server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java

@@ -182,7 +182,6 @@ public class ObjectMapper extends Mapper implements Cloneable {
     }
 
     public static class TypeParser implements Mapper.TypeParser {
-
         @Override
         public boolean supportsVersion(Version indexCreatedVersion) {
             return true;
@@ -191,6 +190,7 @@ public class ObjectMapper extends Mapper implements Cloneable {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, MappingParserContext parserContext)
             throws MapperParsingException {
+            parserContext.incrementMappingObjectDepth(); // throws MapperParsingException if depth limit is exceeded
             Explicit<Boolean> subobjects = parseSubobjects(node);
             ObjectMapper.Builder builder = new Builder(name, subobjects);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
@@ -201,6 +201,7 @@ public class ObjectMapper extends Mapper implements Cloneable {
                     iterator.remove();
                 }
             }
+            parserContext.decrementMappingObjectDepth();
             return builder;
         }
 

+ 104 - 0
server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java

@@ -21,6 +21,8 @@ import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -28,10 +30,12 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
+import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
 import static org.elasticsearch.test.ListMatcher.matchesList;
 import static org.elasticsearch.test.MapMatcher.assertMap;
 import static org.hamcrest.Matchers.containsString;
@@ -345,4 +349,104 @@ public class DocumentMapperTests extends MapperServiceTestCase {
         })));
         assertThat(e.getMessage(), containsString("Limit of total dimension fields [" + max + "] has been exceeded"));
     }
+
+    public void testDeeplyNestedMapping() throws Exception {
+        final int maxDepth = INDEX_MAPPING_DEPTH_LIMIT_SETTING.get(Settings.EMPTY).intValue();
+        {
+            // test that the depth limit is enforced for object field
+            XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
+            for (int i = 0; i < maxDepth + 5; i++) {
+                builder.startObject("obj" + i);
+                builder.startObject("properties");
+            }
+            builder.startObject("foo").field("type", "keyword").endObject();
+            for (int i = 0; i < maxDepth + 5; i++) {
+                builder.endObject();
+                builder.endObject();
+            }
+            builder.endObject().endObject().endObject();
+
+            MapperParsingException exc = expectThrows(
+                MapperParsingException.class,
+                () -> createMapperService(Settings.builder().put(getIndexSettings()).build(), builder)
+            );
+            assertThat(exc.getMessage(), containsString("Limit of mapping depth [" + maxDepth + "] has been exceeded"));
+        }
+
+        {
+            // test that the limit is per individual field, so several object fields don't trip the limit
+            XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
+            for (int i = 0; i < maxDepth - 3; i++) {
+                builder.startObject("obj" + i);
+                builder.startObject("properties");
+            }
+
+            for (int i = 0; i < 2; i++) {
+                builder.startObject("sub_obj1" + i);
+                builder.startObject("properties");
+            }
+            builder.startObject("foo").field("type", "keyword").endObject();
+            for (int i = 0; i < 2; i++) {
+                builder.endObject();
+                builder.endObject();
+            }
+
+            for (int i = 0; i < 2; i++) {
+                builder.startObject("sub_obj2" + i);
+                builder.startObject("properties");
+            }
+            builder.startObject("foo2").field("type", "keyword").endObject();
+            for (int i = 0; i < 2; i++) {
+                builder.endObject();
+                builder.endObject();
+            }
+
+            for (int i = 0; i < maxDepth - 3; i++) {
+                builder.endObject();
+                builder.endObject();
+            }
+            builder.endObject().endObject().endObject();
+
+            createMapperService(Settings.builder().put(getIndexSettings()).build(), builder);
+        }
+        {
+            // test that parsing correct objects in parallel using the same MapperService don't trip the limit
+            final int numThreads = randomIntBetween(2, 5);
+            final XContentBuilder[] builders = new XContentBuilder[numThreads];
+
+            for (int i = 0; i < numThreads; i++) {
+                builders[i] = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
+                for (int j = 0; j < maxDepth - 1; j++) {
+                    builders[i].startObject("obj" + i + "_" + j);
+                    builders[i].startObject("properties");
+                }
+                builders[i].startObject("foo").field("type", "keyword").endObject();
+                for (int j = 0; j < maxDepth - 1; j++) {
+                    builders[i].endObject();
+                    builders[i].endObject();
+                }
+                builders[i].endObject().endObject().endObject();
+            }
+
+            final MapperService mapperService = createMapperService(Version.CURRENT, Settings.EMPTY, () -> false);
+            final CountDownLatch latch = new CountDownLatch(1);
+            final Thread[] threads = new Thread[numThreads];
+            for (int i = 0; i < threads.length; i++) {
+                final int threadId = i;
+                threads[threadId] = new Thread(() -> {
+                    try {
+                        latch.await();
+                        mapperService.parseMapping("_doc", new CompressedXContent(Strings.toString(builders[threadId])));
+                    } catch (Exception e) {
+                        throw new AssertionError(e);
+                    }
+                });
+                threads[threadId].start();
+            }
+            latch.countDown();
+            for (Thread thread : threads) {
+                thread.join();
+            }
+        }
+    }
 }

+ 5 - 3
server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java

@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.function.Supplier;
 
 public class MappingParserTests extends MapperServiceTestCase {
 
@@ -40,7 +41,7 @@ public class MappingParserTests extends MapperServiceTestCase {
         IndexAnalyzers indexAnalyzers = createIndexAnalyzers();
         SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, Collections.emptyMap());
         MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
-        MappingParserContext mappingParserContext = new MappingParserContext(
+        Supplier<MappingParserContext> mappingParserContextSupplier = () -> new MappingParserContext(
             similarityService::getSimilarity,
             type -> mapperRegistry.getMapperParser(type, indexSettings.getIndexVersionCreated()),
             mapperRegistry.getRuntimeFieldParsers()::get,
@@ -54,17 +55,18 @@ public class MappingParserTests extends MapperServiceTestCase {
             indexSettings,
             indexSettings.getMode().idFieldMapperWithoutFieldData()
         );
+
         Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
             indexSettings.getIndexVersionCreated()
         );
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
-        metadataMapperParsers.values().stream().map(parser -> parser.getDefault(mappingParserContext)).forEach(m -> {
+        metadataMapperParsers.values().stream().map(parser -> parser.getDefault(mappingParserContextSupplier.get())).forEach(m -> {
             if (m != null) {
                 metadataMappers.put(m.getClass(), m);
             }
         });
         return new MappingParser(
-            mappingParserContext,
+            mappingParserContextSupplier,
             metadataMapperParsers,
             () -> metadataMappers,
             type -> MapperService.SINGLE_MAPPING_NAME

+ 13 - 0
server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java

@@ -10,8 +10,11 @@ package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -71,6 +74,16 @@ public class TypeParsersTests extends ESTestCase {
         MapperService mapperService = mock(MapperService.class);
         IndexAnalyzers indexAnalyzers = IndexAnalyzers.of(defaultAnalyzers());
         when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);
+
+        Settings settings = Settings.builder()
+            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
+            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
+            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
+            .build();
+        IndexMetadata metadata = IndexMetadata.builder("test").settings(settings).build();
+        IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY);
+        when(mapperService.getIndexSettings()).thenReturn(indexSettings);
+
         Version olderVersion = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
         MappingParserContext olderContext = new MappingParserContext(
             null,