
ESQL: Ignore multivalued key columns in lookup index on JOIN (#120726) (#121037)

Fixes https://github.com/elastic/elasticsearch/issues/118780

Second part of https://github.com/elastic/elasticsearch/pull/120519

In the first PR, we stopped matching multivalued join keys when they come from the query side.
Now, we also skip matching when the key column in the lookup index itself contains multiple values.
Iván Cea Fontenla, 8 months ago
parent
commit 62894021e1
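
For illustration, here is a csv-spec-style sketch of the new behavior. The LOOKUP JOIN and KEEP lines and the languages_lookup_non_unique_key index name are assumptions for this sketch rather than lines copied from the commit; the expected rows mirror the updated mvJoinKeyFromRowExpanded spec further down, where language codes 6, 7 and 8 have a multivalued key column in the lookup index and therefore no longer match:

// Illustrative sketch only: the LOOKUP JOIN and KEEP lines and the lookup index name are assumed.
ROW language_code = [4, 5, 6, 7, 8]
| MV_EXPAND language_code
| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
| KEEP language_code, language_name, country
;

language_code:integer | language_name:keyword | country:text
4                     | Quenya                | null
5                     | null                  | Atlantis
6                     | null                  | null
7                     | null                  | null
8                     | null                  | null
;

Under the hood, QueryList now wraps each generated query with a SingleValueMatchQuery filter when onlySingleValues() is requested, so lookup documents whose key field holds more than one value are filtered out (see the QueryList.java and SingleValueMatchQuery.java changes below).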

+ 1 - 0
x-pack/plugin/esql/compute/src/main/java/module-info.java

@@ -35,4 +35,5 @@ module org.elasticsearch.compute {
     exports org.elasticsearch.compute.operator.mvdedupe;
     exports org.elasticsearch.compute.aggregation.table;
     exports org.elasticsearch.compute.data.sort;
+    exports org.elasticsearch.compute.querydsl.query;
 }

+ 64 - 31
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/lookup/QueryList.java

@@ -9,6 +9,9 @@ package org.elasticsearch.compute.operator.lookup;
 
 import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.geo.GeoEncodingUtils;
+import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.geo.ShapeRelation;
@@ -20,6 +23,8 @@ import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.FloatBlock;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.operator.Warnings;
+import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.geometry.Geometry;
 import org.elasticsearch.geometry.Point;
@@ -30,6 +35,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.RangeFieldMapper;
 import org.elasticsearch.index.query.SearchExecutionContext;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.function.IntFunction;
@@ -38,10 +44,14 @@ import java.util.function.IntFunction;
  * Generates a list of Lucene queries based on the input block.
  */
 public abstract class QueryList {
+    protected final SearchExecutionContext searchExecutionContext;
+    protected final MappedFieldType field;
     protected final Block block;
     protected final boolean onlySingleValues;
 
-    protected QueryList(Block block, boolean onlySingleValues) {
+    protected QueryList(MappedFieldType field, SearchExecutionContext searchExecutionContext, Block block, boolean onlySingleValues) {
+        this.searchExecutionContext = searchExecutionContext;
+        this.field = field;
         this.block = block;
         this.onlySingleValues = onlySingleValues;
     }
@@ -59,11 +69,52 @@ public abstract class QueryList {
      */
     public abstract QueryList onlySingleValues();
 
+    final Query getQuery(int position) {
+        final int valueCount = block.getValueCount(position);
+        if (onlySingleValues && valueCount != 1) {
+            return null;
+        }
+        final int firstValueIndex = block.getFirstValueIndex(position);
+
+        Query query = doGetQuery(position, firstValueIndex, valueCount);
+
+        if (onlySingleValues) {
+            query = wrapSingleValueQuery(query);
+        }
+
+        return query;
+    }
+
     /**
      * Returns the query at the given position.
      */
     @Nullable
-    abstract Query getQuery(int position);
+    abstract Query doGetQuery(int position, int firstValueIndex, int valueCount);
+
+    private Query wrapSingleValueQuery(Query query) {
+        SingleValueMatchQuery singleValueQuery = new SingleValueMatchQuery(
+            searchExecutionContext.getForField(field, MappedFieldType.FielddataOperation.SEARCH),
+            // Not emitting warnings for multivalued fields not matching
+            Warnings.NOOP_WARNINGS
+        );
+
+        Query rewrite = singleValueQuery;
+        try {
+            rewrite = singleValueQuery.rewrite(searchExecutionContext.searcher());
+            if (rewrite instanceof MatchAllDocsQuery) {
+                // nothing to filter
+                return query;
+            }
+        } catch (IOException e) {
+            // ignore
+            // TODO: Should we do something with the exception?
+        }
+
+        BooleanQuery.Builder builder = new BooleanQuery.Builder();
+        builder.add(query, BooleanClause.Occur.FILTER);
+        builder.add(rewrite, BooleanClause.Occur.FILTER);
+        return builder.build();
+    }
 
     /**
      * Returns a list of term queries for the given field and the input block
@@ -146,8 +197,6 @@ public abstract class QueryList {
     }
 
     private static class TermQueryList extends QueryList {
-        private final MappedFieldType field;
-        private final SearchExecutionContext searchExecutionContext;
         private final IntFunction<Object> blockValueReader;
 
         private TermQueryList(
@@ -157,9 +206,7 @@ public abstract class QueryList {
             boolean onlySingleValues,
             IntFunction<Object> blockValueReader
         ) {
-            super(block, onlySingleValues);
-            this.field = field;
-            this.searchExecutionContext = searchExecutionContext;
+            super(field, searchExecutionContext, block, onlySingleValues);
             this.blockValueReader = blockValueReader;
         }
 
@@ -169,19 +216,14 @@ public abstract class QueryList {
         }
 
         @Override
-        Query getQuery(int position) {
-            final int count = block.getValueCount(position);
-            if (onlySingleValues && count != 1) {
-                return null;
-            }
-            final int first = block.getFirstValueIndex(position);
-            return switch (count) {
+        Query doGetQuery(int position, int firstValueIndex, int valueCount) {
+            return switch (valueCount) {
                 case 0 -> null;
-                case 1 -> field.termQuery(blockValueReader.apply(first), searchExecutionContext);
+                case 1 -> field.termQuery(blockValueReader.apply(firstValueIndex), searchExecutionContext);
                 default -> {
-                    final List<Object> terms = new ArrayList<>(count);
-                    for (int i = 0; i < count; i++) {
-                        final Object value = blockValueReader.apply(first + i);
+                    final List<Object> terms = new ArrayList<>(valueCount);
+                    for (int i = 0; i < valueCount; i++) {
+                        final Object value = blockValueReader.apply(firstValueIndex + i);
                         terms.add(value);
                     }
                     yield field.termsQuery(terms, searchExecutionContext);
@@ -192,8 +234,6 @@ public abstract class QueryList {
 
     private static class GeoShapeQueryList extends QueryList {
         private final BytesRef scratch = new BytesRef();
-        private final MappedFieldType field;
-        private final SearchExecutionContext searchExecutionContext;
         private final IntFunction<Geometry> blockValueReader;
         private final IntFunction<Query> shapeQuery;
 
@@ -203,10 +243,8 @@ public abstract class QueryList {
             Block block,
             boolean onlySingleValues
         ) {
-            super(block, onlySingleValues);
+            super(field, searchExecutionContext, block, onlySingleValues);
 
-            this.field = field;
-            this.searchExecutionContext = searchExecutionContext;
             this.blockValueReader = blockToGeometry(block);
             this.shapeQuery = shapeQuery();
         }
@@ -217,15 +255,10 @@ public abstract class QueryList {
         }
 
         @Override
-        Query getQuery(int position) {
-            final int count = block.getValueCount(position);
-            if (onlySingleValues && count != 1) {
-                return null;
-            }
-            final int first = block.getFirstValueIndex(position);
-            return switch (count) {
+        Query doGetQuery(int position, int firstValueIndex, int valueCount) {
+            return switch (valueCount) {
                 case 0 -> null;
-                case 1 -> shapeQuery.apply(first);
+                case 1 -> shapeQuery.apply(firstValueIndex);
                 // TODO: support multiple values
                 default -> throw new IllegalArgumentException("can't read multiple Geometry values from a single position");
             };

+ 3 - 3
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java → x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/querydsl/query/SingleValueMatchQuery.java

@@ -5,7 +5,7 @@
  * 2.0.
  */
 
-package org.elasticsearch.xpack.esql.querydsl.query;
+package org.elasticsearch.compute.querydsl.query;
 
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.LeafReaderContext;
@@ -39,7 +39,7 @@ import java.util.Objects;
 /**
  * Finds all fields with a single-value. If a field has a multi-value, it emits a {@link Warnings}.
  */
-final class SingleValueMatchQuery extends Query {
+public final class SingleValueMatchQuery extends Query {
 
     /**
      * Choose a big enough value so this approximation never drives the iteration.
@@ -52,7 +52,7 @@ final class SingleValueMatchQuery extends Query {
     private final IndexFieldData<?> fieldData;
     private final Warnings warnings;
 
-    SingleValueMatchQuery(IndexFieldData<?> fieldData, Warnings warnings) {
+    public SingleValueMatchQuery(IndexFieldData<?> fieldData, Warnings warnings) {
         this.fieldData = fieldData;
         this.warnings = warnings;
     }

+ 79 - 77
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/EnrichQuerySourceOperatorTests.java

@@ -9,7 +9,7 @@ package org.elasticsearch.compute.operator.lookup;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.KeywordField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -35,9 +35,12 @@ import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.Warnings;
 import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.index.fielddata.FieldDataContext;
+import org.elasticsearch.index.fielddata.IndexFieldDataCache;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.After;
 import org.junit.Before;
@@ -54,6 +57,7 @@ import java.util.stream.IntStream;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 public class EnrichQuerySourceOperatorTests extends ESTestCase {
 
@@ -67,8 +71,7 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
     }
 
     @After
-    public void allBreakersEmpty() throws Exception {
-        MockBigArrays.ensureAllArraysAreReleased();
+    public void allBreakersEmpty() {
         assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
     }
 
@@ -76,23 +79,11 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
         try (
             var directoryData = makeDirectoryWith(
                 List.of(List.of("a2"), List.of("a1", "c1", "b2"), List.of("a2"), List.of("a3"), List.of("b2", "b1", "a1"))
-            )
+            );
+            var inputTerms = makeTermsBlock(List.of(List.of("b2"), List.of("c1", "a2"), List.of("z2"), List.of(), List.of("a3"), List.of()))
         ) {
-            final BytesRefBlock inputTerms;
-            try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) {
-                termBuilder.appendBytesRef(new BytesRef("b2"))
-                    .beginPositionEntry()
-                    .appendBytesRef(new BytesRef("c1"))
-                    .appendBytesRef(new BytesRef("a2"))
-                    .endPositionEntry()
-                    .appendBytesRef(new BytesRef("z2"))
-                    .appendNull()
-                    .appendBytesRef(new BytesRef("a3"))
-                    .appendNull();
-                inputTerms = termBuilder.build();
-            }
             MappedFieldType uidField = new KeywordFieldMapper.KeywordFieldType("uid");
-            QueryList queryList = QueryList.rawTermQueryList(uidField, mock(SearchExecutionContext.class), inputTerms);
+            QueryList queryList = QueryList.rawTermQueryList(uidField, directoryData.searchExecutionContext, inputTerms);
             assertThat(queryList.getPositionCount(), equalTo(6));
             assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2")))));
             assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2")))));
@@ -106,7 +97,7 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
             // 1 -> [c1, a2] -> [1, 0, 2]
             // 2 -> [z2] -> []
             // 3 -> [] -> []
-            // 4 -> [a1] -> [3]
+            // 4 -> [a3] -> [3]
             // 5 -> [] -> []
             var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich");
             EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(
@@ -136,38 +127,34 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
             assertThat(BlockUtils.toJavaObject(positions, 5), equalTo(4));
             page.releaseBlocks();
             assertTrue(queryOperator.isFinished());
-            IOUtils.close(inputTerms);
         }
     }
 
     public void testRandomMatchQueries() throws Exception {
+        // Build lookup index values
         int numTerms = randomIntBetween(10, 1000);
-        List<List<String>> termsList = IntStream.range(0, numTerms).mapToObj(i -> List.of("term-" + i)).toList();
-        Map<String, Integer> terms = IntStream.range(0, numTerms).boxed().collect(Collectors.toMap(i -> "term-" + i, i -> i));
+        List<List<String>> directoryTermsList = IntStream.range(0, numTerms).mapToObj(i -> List.of("term-" + i)).toList();
+        Map<String, Integer> directoryTerms = IntStream.range(0, numTerms).boxed().collect(Collectors.toMap(i -> "term-" + i, i -> i));
 
-        try (var directoryData = makeDirectoryWith(termsList)) {
-            Map<Integer, Set<Integer>> expectedPositions = new HashMap<>();
-            int numPositions = randomIntBetween(1, 1000);
-            final BytesRefBlock inputTerms;
-            try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(numPositions)) {
-                for (int i = 0; i < numPositions; i++) {
-                    if (randomBoolean()) {
-                        String term = randomFrom(terms.keySet());
-                        builder.appendBytesRef(new BytesRef(term));
-                        Integer position = terms.get(term);
-                        expectedPositions.put(i, Set.of(position));
-                    } else {
-                        if (randomBoolean()) {
-                            builder.appendNull();
-                        } else {
-                            String term = "other-" + randomIntBetween(1, 100);
-                            builder.appendBytesRef(new BytesRef(term));
-                        }
-                    }
-                }
-                inputTerms = builder.build();
+        // Build input terms
+        Map<Integer, Set<Integer>> expectedPositions = new HashMap<>();
+        int numPositions = randomIntBetween(1, 1000);
+        List<List<String>> inputTermsList = IntStream.range(0, numPositions).<List<String>>mapToObj(i -> {
+            if (randomBoolean()) {
+                String term = randomFrom(directoryTerms.keySet());
+                Integer position = directoryTerms.get(term);
+                expectedPositions.put(i, Set.of(position));
+                return List.of(term);
+            } else if (randomBoolean()) {
+                return List.of();
+            } else {
+                String term = "other-" + randomIntBetween(1, 100);
+                return List.of(term);
             }
-            var queryList = QueryList.rawTermQueryList(directoryData.field, mock(SearchExecutionContext.class), inputTerms);
+        }).toList();
+
+        try (var directoryData = makeDirectoryWith(directoryTermsList); var inputTerms = makeTermsBlock(inputTermsList)) {
+            var queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms);
             int maxPageSize = between(1, 256);
             var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test enrich");
             EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(
@@ -193,7 +180,6 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
                 }
             }
             assertThat(actualPositions, equalTo(expectedPositions));
-            IOUtils.close(inputTerms);
         }
     }
 
@@ -201,35 +187,20 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
         try (
             var directoryData = makeDirectoryWith(
                 List.of(List.of("a2"), List.of("a1", "c1", "b2"), List.of("a2"), List.of("a3"), List.of("b2", "b1", "a1"))
+            );
+            var inputTerms = makeTermsBlock(
+                List.of(List.of("b2"), List.of("c1", "a2"), List.of("z2"), List.of(), List.of("a3"), List.of("a3", "a2", "z2", "xx"))
             )
         ) {
-            final BytesRefBlock inputTerms;
-            try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) {
-                termBuilder.appendBytesRef(new BytesRef("b2"))
-                    .beginPositionEntry()
-                    .appendBytesRef(new BytesRef("c1"))
-                    .appendBytesRef(new BytesRef("a2"))
-                    .endPositionEntry()
-                    .appendBytesRef(new BytesRef("z2"))
-                    .appendNull()
-                    .appendBytesRef(new BytesRef("a3"))
-                    .beginPositionEntry()
-                    .appendBytesRef(new BytesRef("a3"))
-                    .appendBytesRef(new BytesRef("a2"))
-                    .appendBytesRef(new BytesRef("z2"))
-                    .appendBytesRef(new BytesRef("xx"))
-                    .endPositionEntry();
-                inputTerms = termBuilder.build();
-            }
-            QueryList queryList = QueryList.rawTermQueryList(directoryData.field, mock(SearchExecutionContext.class), inputTerms)
+            QueryList queryList = QueryList.rawTermQueryList(directoryData.field, directoryData.searchExecutionContext, inputTerms)
                 .onlySingleValues();
             // pos -> terms -> docs
             // -----------------------------
-            // 0 -> [b2] -> [1, 4]
+            // 0 -> [b2] -> []
             // 1 -> [c1, a2] -> []
             // 2 -> [z2] -> []
             // 3 -> [] -> []
-            // 4 -> [a1] -> [3]
+            // 4 -> [a3] -> [3]
             // 5 -> [a3, a2, z2, xx] -> []
             var warnings = Warnings.createWarnings(DriverContext.WarningsMode.IGNORE, 0, 0, "test lookup");
             EnrichQuerySourceOperator queryOperator = new EnrichQuerySourceOperator(
@@ -241,19 +212,14 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
             );
             Page page = queryOperator.getOutput();
             assertNotNull(page);
-            assertThat(page.getPositionCount(), equalTo(3));
+            assertThat(page.getPositionCount(), equalTo(1));
             IntVector docs = getDocVector(page, 0);
-            assertThat(docs.getInt(0), equalTo(1));
-            assertThat(docs.getInt(1), equalTo(4));
-            assertThat(docs.getInt(2), equalTo(3));
+            assertThat(docs.getInt(0), equalTo(3));
 
             Block positions = page.getBlock(1);
-            assertThat(BlockUtils.toJavaObject(positions, 0), equalTo(0));
-            assertThat(BlockUtils.toJavaObject(positions, 1), equalTo(0));
-            assertThat(BlockUtils.toJavaObject(positions, 2), equalTo(4));
+            assertThat(BlockUtils.toJavaObject(positions, 0), equalTo(4));
             page.releaseBlocks();
             assertTrue(queryOperator.isFinished());
-            IOUtils.close(inputTerms);
         }
     }
 
@@ -262,7 +228,12 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
         return doc.asVector().docs();
     }
 
-    private record DirectoryData(DirectoryReader reader, MockDirectoryWrapper dir, MappedFieldType field) implements AutoCloseable {
+    private record DirectoryData(
+        DirectoryReader reader,
+        MockDirectoryWrapper dir,
+        SearchExecutionContext searchExecutionContext,
+        MappedFieldType field
+    ) implements AutoCloseable {
         @Override
         public void close() throws IOException {
             IOUtils.close(reader, dir);
@@ -277,14 +248,45 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase {
             for (var termList : terms) {
                 Document doc = new Document();
                 for (String term : termList) {
-                    doc.add(new StringField("uid", term, Field.Store.NO));
+                    doc.add(new KeywordField("uid", term, Field.Store.NO));
                 }
                 writer.addDocument(doc);
             }
             writer.forceMerge(1);
             writer.commit();
 
-            return new DirectoryData(DirectoryReader.open(writer), dir, new KeywordFieldMapper.KeywordFieldType("uid"));
+            var directoryReader = DirectoryReader.open(writer);
+            var indexSearcher = newSearcher(directoryReader);
+            var searchExecutionContext = mock(SearchExecutionContext.class);
+            var field = new KeywordFieldMapper.KeywordFieldType("uid");
+            var fieldDataContext = FieldDataContext.noRuntimeFields("test");
+            var indexFieldData = field.fielddataBuilder(fieldDataContext)
+                .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService());
+
+            // Required for "onlySingleValues" mode to work
+            when(searchExecutionContext.searcher()).thenReturn(indexSearcher);
+            when(searchExecutionContext.getForField(field, MappedFieldType.FielddataOperation.SEARCH)).thenReturn(indexFieldData);
+
+            return new DirectoryData(directoryReader, dir, searchExecutionContext, field);
+        }
+    }
+
+    private Block makeTermsBlock(List<List<String>> terms) {
+        try (BytesRefBlock.Builder termBuilder = blockFactory.newBytesRefBlockBuilder(6)) {
+            for (var termList : terms) {
+                if (termList.isEmpty()) {
+                    termBuilder.appendNull();
+                } else if (termList.size() == 1) {
+                    termBuilder.appendBytesRef(new BytesRef(termList.get(0)));
+                } else {
+                    termBuilder.beginPositionEntry();
+                    for (String term : termList) {
+                        termBuilder.appendBytesRef(new BytesRef(term));
+                    }
+                    termBuilder.endPositionEntry();
+                }
+            }
+            return termBuilder.build();
         }
     }
 }

+ 7 - 7
x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec

@@ -301,6 +301,7 @@ emp_no:integer | language_code:integer | language_name:keyword
 
 mvJoinKeyOnTheLookupIndex
 required_capability: join_lookup_v12
+required_capability: join_lookup_skip_mv_on_lookup_key
 
 FROM employees
 | WHERE 10003 < emp_no AND emp_no < 10008
@@ -313,9 +314,8 @@ FROM employees
 emp_no:integer | language_code:integer | language_name:keyword
 10004          | 4                     | Quenya
 10005          | 5                     | null
-10006          | 6                     | Mv-Lang
-10007          | 7                     | Mv-Lang
-10007          | 7                     | Mv-Lang2
+10006          | 6                     | null
+10007          | 7                     | null
 ;
 
 mvJoinKeyOnFrom
@@ -354,6 +354,7 @@ language_code:integer | language_name:keyword | country:text
 
 mvJoinKeyFromRowExpanded
 required_capability: join_lookup_v12
+required_capability: join_lookup_skip_mv_on_lookup_key
 
 ROW language_code = [4, 5, 6, 7, 8]
 | MV_EXPAND language_code
@@ -365,10 +366,9 @@ ROW language_code = [4, 5, 6, 7, 8]
 language_code:integer | language_name:keyword | country:text
 4                     | Quenya                | null
 5                     | null                  | Atlantis
-6                     | Mv-Lang               | Mv-Land
-7                     | Mv-Lang               | Mv-Land
-7                     | Mv-Lang2              | Mv-Land2
-8                     | Mv-Lang2              | Mv-Land2
+6                     | null                  | null
+7                     | null                  | null
+8                     | null                  | null
 ;
 
 ###############################################

+ 5 - 0
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java

@@ -589,6 +589,11 @@ public class EsqlCapabilities {
          */
         JOIN_LOOKUP_SKIP_MV(JOIN_LOOKUP_V12.isEnabled()),
 
+        /**
+         * LOOKUP JOIN without MV matching on lookup index key (https://github.com/elastic/elasticsearch/issues/118780)
+         */
+        JOIN_LOOKUP_SKIP_MV_ON_LOOKUP_KEY(JOIN_LOOKUP_V12.isEnabled()),
+
         /**
          * Fix for https://github.com/elastic/elasticsearch/issues/117054
          */

+ 1 - 0
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQuery.java

@@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.Warnings;
+import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.MatchNoneQueryBuilder;

+ 1 - 0
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMathQueryTests.java

@@ -22,6 +22,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.compute.operator.DriverContext;
 import org.elasticsearch.compute.operator.Warnings;
+import org.elasticsearch.compute.querydsl.query.SingleValueMatchQuery;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MapperServiceTestCase;