Skip to content
6 changes: 6 additions & 0 deletions docs/changelog/95705.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
pr: 95705
summary: Avoid stack overflow while parsing mapping
area: Mapping
type: bug
issues:
- 52098
Original file line number Diff line number Diff line change
Expand Up @@ -121,8 +121,7 @@ public enum MergeReason {
private final DocumentParser documentParser;
private final Version indexVersionCreated;
private final MapperRegistry mapperRegistry;
private final MappingParserContext mappingParserContext;

private final Supplier<MappingParserContext> mappingParserContextSupplier;
private volatile DocumentMapper mapper;

public MapperService(
Expand Down Expand Up @@ -164,7 +163,7 @@ public MapperService(
this.indexVersionCreated = indexSettings.getIndexVersionCreated();
this.indexAnalyzers = indexAnalyzers;
this.mapperRegistry = mapperRegistry;
this.mappingParserContext = new MappingParserContext(
this.mappingParserContextSupplier = () -> new MappingParserContext(
similarityService::getSimilarity,
type -> mapperRegistry.getMapperParser(type, indexVersionCreated),
mapperRegistry.getRuntimeFieldParsers()::get,
Expand All @@ -176,12 +175,12 @@ public MapperService(
indexSettings,
idFieldMapper
);
this.documentParser = new DocumentParser(parserConfiguration, this.mappingParserContext);
this.documentParser = new DocumentParser(parserConfiguration, this.mappingParserContextSupplier.get());
Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
indexSettings.getIndexVersionCreated()
);
this.mappingParser = new MappingParser(
mappingParserContext,
mappingParserContextSupplier,
metadataMapperParsers,
this::getMetadataMappers,
this::resolveDocumentType
Expand All @@ -197,7 +196,7 @@ public IndexAnalyzers getIndexAnalyzers() {
}

/**
 * Returns a fresh {@link MappingParserContext} for parsing a mapping.
 * <p>
 * A new instance is created on every call (via the supplier) rather than a
 * shared singleton, because the context carries mutable per-parse state —
 * the object-depth counter used to enforce the mapping depth limit — which
 * must not leak between concurrent or successive mapping parses.
 */
public MappingParserContext parserContext() {
    return mappingParserContextSupplier.get();
}

/**
Expand All @@ -209,14 +208,15 @@ public DocumentParser documentParser() {
}

Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> getMetadataMappers() {
final MappingParserContext mappingParserContext = parserContext();
final DocumentMapper existingMapper = mapper;
final Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
indexSettings.getIndexVersionCreated()
);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
if (existingMapper == null) {
for (MetadataFieldMapper.TypeParser parser : metadataMapperParsers.values()) {
MetadataFieldMapper metadataFieldMapper = parser.getDefault(parserContext());
MetadataFieldMapper metadataFieldMapper = parser.getDefault(mappingParserContext);
// A MetadataFieldMapper may choose to not be added to the metadata mappers
// of an index (eg TimeSeriesIdFieldMapper is only added to time series indices)
// In this case its TypeParser will return null instead of the MetadataFieldMapper
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,18 +25,18 @@
* Parser for {@link Mapping} provided in {@link CompressedXContent} format
*/
public final class MappingParser {
private final MappingParserContext mappingParserContext;
private final Supplier<MappingParserContext> mappingParserContextSupplier;
private final Supplier<Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper>> metadataMappersSupplier;
private final Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers;
private final Function<String, String> documentTypeResolver;

MappingParser(
MappingParserContext mappingParserContext,
Supplier<MappingParserContext> mappingParserContextSupplier,
Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers,
Supplier<Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper>> metadataMappersSupplier,
Function<String, String> documentTypeResolver
) {
this.mappingParserContext = mappingParserContext;
this.mappingParserContextSupplier = mappingParserContextSupplier;
this.metadataMapperParsers = metadataMapperParsers;
this.metadataMappersSupplier = metadataMappersSupplier;
this.documentTypeResolver = documentTypeResolver;
Expand Down Expand Up @@ -97,8 +97,9 @@ Mapping parse(@Nullable String type, CompressedXContent source) throws MapperPar
}

private Mapping parse(String type, Map<String, Object> mapping) throws MapperParsingException {
final MappingParserContext mappingParserContext = mappingParserContextSupplier.get();

RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mapping, this.mappingParserContext);
RootObjectMapper.Builder rootObjectMapper = RootObjectMapper.parse(type, mapping, mappingParserContext);

Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersSupplier.get();
Map<String, Object> meta = null;
Expand All @@ -118,7 +119,7 @@ private Mapping parse(String type, Map<String, Object> mapping) throws MapperPar
}
@SuppressWarnings("unchecked")
Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, this.mappingParserContext)
MetadataFieldMapper metadataFieldMapper = typeParser.parse(fieldName, fieldNodeMap, mappingParserContext)
.build(MapperBuilderContext.forMetadata());
metadataMappers.put(metadataFieldMapper.getClass(), metadataFieldMapper);
assert fieldNodeMap.isEmpty();
Expand Down Expand Up @@ -147,7 +148,7 @@ private Mapping parse(String type, Map<String, Object> mapping) throws MapperPar
*/
meta = Collections.unmodifiableMap(new HashMap<>(removed));
}
if (this.mappingParserContext.indexVersionCreated().isLegacyIndexVersion() == false) {
if (mappingParserContext.indexVersionCreated().isLegacyIndexVersion() == false) {
// legacy indices are allowed to have extra definitions that we ignore (we will drop them on import)
checkNoRemainingFields(mapping, "Root mapping definition has unsupported parameters: ");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ public class MappingParserContext {
private final IndexAnalyzers indexAnalyzers;
private final IndexSettings indexSettings;
private final IdFieldMapper idFieldMapper;
private final long mappingObjectDepthLimit;
private long mappingObjectDepth = 0;

public MappingParserContext(
Function<String, SimilarityProvider> similarityLookupService,
Expand All @@ -60,6 +62,7 @@ public MappingParserContext(
this.indexAnalyzers = indexAnalyzers;
this.indexSettings = indexSettings;
this.idFieldMapper = idFieldMapper;
this.mappingObjectDepthLimit = indexSettings.getMappingDepthLimit();
}

public IndexAnalyzers getIndexAnalyzers() {
Expand Down Expand Up @@ -129,6 +132,17 @@ public ScriptCompiler scriptCompiler() {
return scriptCompiler;
}

/**
 * Records that the parser has descended into one more nested object mapper
 * and enforces the configured mapping depth limit.
 *
 * @throws MapperParsingException if the nesting depth now exceeds the limit
 */
void incrementMappingObjectDepth() throws MapperParsingException {
    if (++mappingObjectDepth > mappingObjectDepthLimit) {
        throw new MapperParsingException("Limit of mapping depth [" + mappingObjectDepthLimit + "] has been exceeded");
    }
}

/**
 * Records that the parser has finished one nested object mapper, undoing a
 * prior {@link #incrementMappingObjectDepth()} call.
 */
void decrementMappingObjectDepth() throws MapperParsingException {
    mappingObjectDepth -= 1;
}

/**
 * Returns a parser context to use when parsing the sub-fields of a multi-field,
 * wrapping this context.
 */
public MappingParserContext createMultiFieldContext() {
    final MappingParserContext multiFieldContext = new MultiFieldParserContext(this);
    return multiFieldContext;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,6 @@ public ObjectMapper build(MapperBuilderContext context) {
}

public static class TypeParser implements Mapper.TypeParser {

@Override
public boolean supportsVersion(Version indexCreatedVersion) {
return true;
Expand All @@ -191,6 +190,7 @@ public boolean supportsVersion(Version indexCreatedVersion) {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, MappingParserContext parserContext)
throws MapperParsingException {
parserContext.incrementMappingObjectDepth(); // throws MapperParsingException if depth limit is exceeded
Explicit<Boolean> subobjects = parseSubobjects(node);
ObjectMapper.Builder builder = new Builder(name, subobjects);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Expand All @@ -201,6 +201,7 @@ public Mapper.Builder parse(String name, Map<String, Object> node, MappingParser
iterator.remove();
}
}
parserContext.decrementMappingObjectDepth();
return builder;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,17 +21,21 @@
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
import static org.elasticsearch.test.ListMatcher.matchesList;
import static org.elasticsearch.test.MapMatcher.assertMap;
import static org.hamcrest.Matchers.containsString;
Expand Down Expand Up @@ -345,4 +349,104 @@ public void testTooManyDimensionFields() {
})));
assertThat(e.getMessage(), containsString("Limit of total dimension fields [" + max + "] has been exceeded"));
}

public void testDeeplyNestedMapping() throws Exception {
    // Default value of the index.mapping.depth.limit setting: the maximum
    // allowed nesting depth of object fields in a mapping.
    final int maxDepth = INDEX_MAPPING_DEPTH_LIMIT_SETTING.get(Settings.EMPTY).intValue();
    {
        // test that the depth limit is enforced for object field:
        // build a mapping with (maxDepth + 5) nested object fields, which must be rejected.
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
        for (int i = 0; i < maxDepth + 5; i++) {
            builder.startObject("obj" + i);
            builder.startObject("properties");
        }
        builder.startObject("foo").field("type", "keyword").endObject();
        for (int i = 0; i < maxDepth + 5; i++) {
            builder.endObject();
            builder.endObject();
        }
        builder.endObject().endObject().endObject();

        MapperParsingException exc = expectThrows(
            MapperParsingException.class,
            () -> createMapperService(Settings.builder().put(getIndexSettings()).build(), builder)
        );
        assertThat(exc.getMessage(), containsString("Limit of mapping depth [" + maxDepth + "] has been exceeded"));
    }

    {
        // test that the limit is per individual field, so several object fields don't trip the limit:
        // two sibling subtrees of depth 2 under a shared prefix of depth (maxDepth - 3). Each path is
        // within the limit on its own; only a counter wrongly shared across siblings would overflow.
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
        for (int i = 0; i < maxDepth - 3; i++) {
            builder.startObject("obj" + i);
            builder.startObject("properties");
        }

        // first sibling subtree, ending in a keyword leaf
        for (int i = 0; i < 2; i++) {
            builder.startObject("sub_obj1" + i);
            builder.startObject("properties");
        }
        builder.startObject("foo").field("type", "keyword").endObject();
        for (int i = 0; i < 2; i++) {
            builder.endObject();
            builder.endObject();
        }

        // second sibling subtree at the same depth as the first
        for (int i = 0; i < 2; i++) {
            builder.startObject("sub_obj2" + i);
            builder.startObject("properties");
        }
        builder.startObject("foo2").field("type", "keyword").endObject();
        for (int i = 0; i < 2; i++) {
            builder.endObject();
            builder.endObject();
        }

        for (int i = 0; i < maxDepth - 3; i++) {
            builder.endObject();
            builder.endObject();
        }
        builder.endObject().endObject().endObject();

        // must parse successfully (no exception expected)
        createMapperService(Settings.builder().put(getIndexSettings()).build(), builder);
    }
    {
        // test that parsing correct objects in parallel using the same MapperService don't trip the limit:
        // each thread parses its own deep-but-valid mapping ((maxDepth - 1) nested objects plus a leaf);
        // depth state leaking between threads would make one of them exceed the limit.
        final int numThreads = randomIntBetween(2, 5);
        final XContentBuilder[] builders = new XContentBuilder[numThreads];

        for (int i = 0; i < numThreads; i++) {
            builders[i] = XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("properties");
            for (int j = 0; j < maxDepth - 1; j++) {
                builders[i].startObject("obj" + i + "_" + j);
                builders[i].startObject("properties");
            }
            builders[i].startObject("foo").field("type", "keyword").endObject();
            for (int j = 0; j < maxDepth - 1; j++) {
                builders[i].endObject();
                builders[i].endObject();
            }
            builders[i].endObject().endObject().endObject();
        }

        final MapperService mapperService = createMapperService(Version.CURRENT, Settings.EMPTY, () -> false);
        // single latch released once so all threads start parsing at (roughly) the same time
        final CountDownLatch latch = new CountDownLatch(1);
        final Thread[] threads = new Thread[numThreads];
        for (int i = 0; i < threads.length; i++) {
            final int threadId = i;
            threads[threadId] = new Thread(() -> {
                try {
                    latch.await();
                    // throws MapperParsingException (surfaced as AssertionError) if the limit trips
                    mapperService.parseMapping("_doc", new CompressedXContent(Strings.toString(builders[threadId])));
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
            });
            threads[threadId].start();
        }
        latch.countDown();
        // wait for all parses to finish before letting the test end
        for (Thread thread : threads) {
            thread.join();
        }
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Supplier;

public class MappingParserTests extends MapperServiceTestCase {

Expand All @@ -40,7 +41,7 @@ private static MappingParser createMappingParser(Settings settings, Version vers
IndexAnalyzers indexAnalyzers = createIndexAnalyzers();
SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, Collections.emptyMap());
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
MappingParserContext mappingParserContext = new MappingParserContext(
Supplier<MappingParserContext> mappingParserContextSupplier = () -> new MappingParserContext(
similarityService::getSimilarity,
type -> mapperRegistry.getMapperParser(type, indexSettings.getIndexVersionCreated()),
mapperRegistry.getRuntimeFieldParsers()::get,
Expand All @@ -54,17 +55,18 @@ private static MappingParser createMappingParser(Settings settings, Version vers
indexSettings,
indexSettings.getMode().idFieldMapperWithoutFieldData()
);

Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
indexSettings.getIndexVersionCreated()
);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
metadataMapperParsers.values().stream().map(parser -> parser.getDefault(mappingParserContext)).forEach(m -> {
metadataMapperParsers.values().stream().map(parser -> parser.getDefault(mappingParserContextSupplier.get())).forEach(m -> {
if (m != null) {
metadataMappers.put(m.getClass(), m);
}
});
return new MappingParser(
mappingParserContext,
mappingParserContextSupplier,
metadataMapperParsers,
() -> metadataMappers,
type -> MapperService.SINGLE_MAPPING_NAME
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,11 @@

import org.elasticsearch.TransportVersion;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
Expand Down Expand Up @@ -71,6 +74,16 @@ public void testMultiFieldWithinMultiField() throws IOException {
MapperService mapperService = mock(MapperService.class);
IndexAnalyzers indexAnalyzers = IndexAnalyzers.of(defaultAnalyzers());
when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);

Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.build();
IndexMetadata metadata = IndexMetadata.builder("test").settings(settings).build();
IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);

Version olderVersion = VersionUtils.randomPreviousCompatibleVersion(random(), Version.V_8_0_0);
MappingParserContext olderContext = new MappingParserContext(
null,
Expand Down