
Backport scoring support in ES|QL to 8.x branch (#117747)

* ESQL - enabling scoring with METADATA _score (#113120)
* ESQL - enabling scoring with METADATA _score
Co-authored-by: ChrisHegarty <chegar999@gmail.com>

* Update docs/changelog/117747.yaml

* minor fix
Tommaso Teofili, 10 months ago
commit 8fcf28016b
32 changed files with 1932 additions and 445 deletions
  1. docs/changelog/113120.yaml (+5 -0)
  2. docs/changelog/117747.yaml (+5 -0)
  3. muted-tests.yml (+362 -351)
  4. server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java (+12 -3)
  5. x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java (+4 -1)
  6. x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java (+4 -1)
  7. x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java (+74 -22)
  8. x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java (+120 -21)
  9. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java (+2 -1)
  10. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java (+26 -7)
  11. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java (+25 -6)
  12. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java (+151 -0)
  13. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java (+42 -8)
  14. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java (+6 -3)
  15. x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java (+6 -3)
  16. x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec (+285 -0)
  17. x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java (+4 -3)
  18. x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java (+2 -1)
  19. x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java (+299 -0)
  20. x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java (+51 -0)
  21. x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java (+96 -0)
  22. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java (+6 -1)
  23. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java (+9 -0)
  24. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java (+5 -0)
  25. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java (+16 -2)
  26. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java (+11 -3)
  27. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java (+3 -1)
  28. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java (+14 -0)
  29. x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java (+10 -4)
  30. x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java (+25 -0)
  31. x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java (+62 -0)
  32. x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java (+190 -3)
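
For illustration, a minimal sketch of the kind of ES|QL query this backport enables, presumably along the lines of what scoring.csv-spec and the new MatchFunctionIT/QueryStringIT tests exercise (the index and field names below are hypothetical, not taken from the change):

    FROM books METADATA _score
    | WHERE match(title, "lord of the rings")
    | SORT _score DESC
    | KEEP title, _score
    | LIMIT 10

When _score is not requested via METADATA, the source operators keep the previous behaviour and run with ScoreMode.COMPLETE_NO_SCORES, as the LuceneSourceOperator factory change below shows.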

+ 5 - 0
docs/changelog/113120.yaml

@@ -0,0 +1,5 @@
+pr: 113120
+summary: ESQL - enabling scoring with METADATA `_score`
+area: ES|QL
+type: enhancement
+issues: []

+ 5 - 0
docs/changelog/117747.yaml

@@ -0,0 +1,5 @@
+pr: 117747
+summary: Backport scoring support in ES|QL to 8.x branch
+area: ES|QL
+type: enhancement
+issues: []

+ 362 - 351
muted-tests.yml

@@ -1,355 +1,355 @@
 tests:
-- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
-  method: test {yaml=reference/esql/esql-async-query-api/line_17}
-  issue: https://github.com/elastic/elasticsearch/issues/109260
-- class: "org.elasticsearch.client.RestClientSingleHostIntegTests"
-  issue: "https://github.com/elastic/elasticsearch/issues/102717"
-  method: "testRequestResetAndAbort"
-- class: org.elasticsearch.index.store.FsDirectoryFactoryTests
-  method: testStoreDirectory
-  issue: https://github.com/elastic/elasticsearch/issues/110210
-- class: org.elasticsearch.index.store.FsDirectoryFactoryTests
-  method: testPreload
-  issue: https://github.com/elastic/elasticsearch/issues/110211
-- class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT
-  method: testMetadataMigratedAfterUpgrade
-  issue: https://github.com/elastic/elasticsearch/issues/110232
-- class: org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests
-  method: testPopulationOfCacheWhenLoadingPrivilegesForAllApplications
-  issue: https://github.com/elastic/elasticsearch/issues/110789
-- class: org.elasticsearch.xpack.searchablesnapshots.cache.common.CacheFileTests
-  method: testCacheFileCreatedAsSparseFile
-  issue: https://github.com/elastic/elasticsearch/issues/110801
-- class: org.elasticsearch.nativeaccess.VectorSystemPropertyTests
-  method: testSystemPropertyDisabled
-  issue: https://github.com/elastic/elasticsearch/issues/110949
-- class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT
-  method: testAuthenticateWithImplicitFlow
-  issue: https://github.com/elastic/elasticsearch/issues/111191
-- class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT
-  method: testAuthenticateWithCodeFlowAndClientPost
-  issue: https://github.com/elastic/elasticsearch/issues/111396
-- class: org.elasticsearch.xpack.restart.FullClusterRestartIT
-  method: testSingleDoc {cluster=UPGRADED}
-  issue: https://github.com/elastic/elasticsearch/issues/111434
-- class: org.elasticsearch.xpack.restart.FullClusterRestartIT
-  method: testDataStreams {cluster=UPGRADED}
-  issue: https://github.com/elastic/elasticsearch/issues/111448
-- class: org.elasticsearch.search.SearchServiceTests
-  issue: https://github.com/elastic/elasticsearch/issues/111529
-- class: org.elasticsearch.upgrades.FullClusterRestartIT
-  method: testSnapshotRestore {cluster=UPGRADED}
-  issue: https://github.com/elastic/elasticsearch/issues/111798
-- class: org.elasticsearch.xpack.inference.InferenceRestIT
-  method: test {p0=inference/80_random_rerank_retriever/Random rerank retriever predictably shuffles results}
-  issue: https://github.com/elastic/elasticsearch/issues/111999
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testDeleteJobAfterMissingIndex
-  issue: https://github.com/elastic/elasticsearch/issues/112088
-- class: org.elasticsearch.smoketest.WatcherYamlRestIT
-  method: test {p0=watcher/usage/10_basic/Test watcher usage stats output}
-  issue: https://github.com/elastic/elasticsearch/issues/112189
-- class: org.elasticsearch.xpack.test.rest.XPackRestIT
-  method: test {p0=ml/inference_processor/Test create processor with missing mandatory fields}
-  issue: https://github.com/elastic/elasticsearch/issues/112191
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testDeleteJobAsync
-  issue: https://github.com/elastic/elasticsearch/issues/112212
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testMultiIndexDelete
-  issue: https://github.com/elastic/elasticsearch/issues/112381
-- class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
-  method: "testAggregateIntermediate {TestCase=<geo_point> #2}"
-  issue: https://github.com/elastic/elasticsearch/issues/112461
-- class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
-  method: testAggregateIntermediate {TestCase=<geo_point>}
-  issue: https://github.com/elastic/elasticsearch/issues/112463
-- class: org.elasticsearch.xpack.inference.external.http.RequestBasedTaskRunnerTests
-  method: testLoopOneAtATime
-  issue: https://github.com/elastic/elasticsearch/issues/112471
-- class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT
-  issue: https://github.com/elastic/elasticsearch/issues/111497
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testPutJob_GivenFarequoteConfig
-  issue: https://github.com/elastic/elasticsearch/issues/112382
-- class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests
-  method: test20SecurityNotAutoConfiguredOnReInstallation
-  issue: https://github.com/elastic/elasticsearch/issues/112635
-- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
-  method: test {case-functions.testSelectInsertWithLcaseAndLengthWithOrderBy}
-  issue: https://github.com/elastic/elasticsearch/issues/112642
-- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
-  method: test {case-functions.testUcaseInline1}
-  issue: https://github.com/elastic/elasticsearch/issues/112641
-- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
-  method: test {case-functions.testUpperCasingTheSecondLetterFromTheRightFromFirstName}
-  issue: https://github.com/elastic/elasticsearch/issues/112640
-- class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
-  method: test {case-functions.testUcaseInline3}
-  issue: https://github.com/elastic/elasticsearch/issues/112643
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testDelete_multipleRequest
-  issue: https://github.com/elastic/elasticsearch/issues/112701
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testCreateJobInSharedIndexUpdatesMapping
-  issue: https://github.com/elastic/elasticsearch/issues/112729
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testGetJob_GivenNoSuchJob
-  issue: https://github.com/elastic/elasticsearch/issues/112730
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testDeleteJobAfterMissingAliases
-  issue: https://github.com/elastic/elasticsearch/issues/112823
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testCreateJob_WithClashingFieldMappingsFails
-  issue: https://github.com/elastic/elasticsearch/issues/113046
-- class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
-  method: test {case-functions.testUcaseInline1}
-  issue: https://github.com/elastic/elasticsearch/issues/112641
-- class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
-  method: test {case-functions.testUcaseInline3}
-  issue: https://github.com/elastic/elasticsearch/issues/112643
-- class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
-  method: test {case-functions.testUpperCasingTheSecondLetterFromTheRightFromFirstName}
-  issue: https://github.com/elastic/elasticsearch/issues/112640
-- class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
-  method: test {case-functions.testSelectInsertWithLcaseAndLengthWithOrderBy}
-  issue: https://github.com/elastic/elasticsearch/issues/112642
-- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
-  method: testResponse
-  issue: https://github.com/elastic/elasticsearch/issues/113148
-- class: org.elasticsearch.packaging.test.WindowsServiceTests
-  method: test30StartStop
-  issue: https://github.com/elastic/elasticsearch/issues/113160
-- class: org.elasticsearch.packaging.test.WindowsServiceTests
-  method: test33JavaChanged
-  issue: https://github.com/elastic/elasticsearch/issues/113177
-- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
-  method: testErrorMidStream
-  issue: https://github.com/elastic/elasticsearch/issues/113179
-- class: org.elasticsearch.smoketest.MlWithSecurityIT
-  method: test {yaml=ml/sparse_vector_search/Test sparse_vector search with query vector and pruning config}
-  issue: https://github.com/elastic/elasticsearch/issues/108997
-- class: org.elasticsearch.packaging.test.WindowsServiceTests
-  method: test80JavaOptsInEnvVar
-  issue: https://github.com/elastic/elasticsearch/issues/113219
-- class: org.elasticsearch.packaging.test.WindowsServiceTests
-  method: test81JavaOptsInJvmOptions
-  issue: https://github.com/elastic/elasticsearch/issues/113313
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item}
-  issue: https://github.com/elastic/elasticsearch/issues/113325
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testDeleteJob_TimingStatsDocumentIsDeleted
-  issue: https://github.com/elastic/elasticsearch/issues/113370
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated}
-  issue: https://github.com/elastic/elasticsearch/pull/113286
-- class: org.elasticsearch.index.mapper.extras.TokenCountFieldMapperTests
-  method: testBlockLoaderFromRowStrideReaderWithSyntheticSource
-  issue: https://github.com/elastic/elasticsearch/issues/113427
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testOutOfOrderData
-  issue: https://github.com/elastic/elasticsearch/issues/113477
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testCreateJobsWithIndexNameOption
-  issue: https://github.com/elastic/elasticsearch/issues/113528
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}
-  issue: https://github.com/elastic/elasticsearch/issues/113537
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testCantCreateJobWithSameID
-  issue: https://github.com/elastic/elasticsearch/issues/113581
-- class: org.elasticsearch.integration.KibanaUserRoleIntegTests
-  method: testFieldMappings
-  issue: https://github.com/elastic/elasticsearch/issues/113592
-- class: org.elasticsearch.integration.KibanaUserRoleIntegTests
-  method: testSearchAndMSearch
-  issue: https://github.com/elastic/elasticsearch/issues/113593
-- class: org.elasticsearch.xpack.transform.integration.TransformIT
-  method: testStopWaitForCheckpoint
-  issue: https://github.com/elastic/elasticsearch/issues/106113
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=search/540_ignore_above_synthetic_source/ignore_above mapping level setting on arrays}
-  issue: https://github.com/elastic/elasticsearch/issues/113648
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testGetJobs_GivenMultipleJobs
-  issue: https://github.com/elastic/elasticsearch/issues/113654
-- class: org.elasticsearch.xpack.ml.integration.MlJobIT
-  method: testGetJobs_GivenSingleJob
-  issue: https://github.com/elastic/elasticsearch/issues/113655
-- class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests
-  issue: https://github.com/elastic/elasticsearch/issues/113661
-- class: org.elasticsearch.search.retriever.RankDocsRetrieverBuilderTests
-  method: testRewrite
-  issue: https://github.com/elastic/elasticsearch/issues/114467
-- class: org.elasticsearch.gradle.internal.PublishPluginFuncTest
-  issue: https://github.com/elastic/elasticsearch/issues/114492
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=indices.split/40_routing_partition_size/nested}
-  issue: https://github.com/elastic/elasticsearch/issues/113842
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=indices.split/40_routing_partition_size/more than 1}
-  issue: https://github.com/elastic/elasticsearch/issues/113841
-- class: org.elasticsearch.kibana.KibanaThreadPoolIT
-  method: testBlockedThreadPoolsRejectUserRequests
-  issue: https://github.com/elastic/elasticsearch/issues/113939
-- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
-  method: testPutE5WithTrainedModelAndInference
-  issue: https://github.com/elastic/elasticsearch/issues/114023
-- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
-  method: testPutE5Small_withPlatformAgnosticVariant
-  issue: https://github.com/elastic/elasticsearch/issues/113983
-- class: org.elasticsearch.datastreams.LazyRolloverDuringDisruptionIT
-  method: testRolloverIsExecutedOnce
-  issue: https://github.com/elastic/elasticsearch/issues/112634
-- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT
-  method: test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity}
-  issue: https://github.com/elastic/elasticsearch/issues/114757
-- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT
-  method: testTracingCrossCluster
-  issue: https://github.com/elastic/elasticsearch/issues/112731
-- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
-  method: testPutE5Small_withPlatformSpecificVariant
-  issue: https://github.com/elastic/elasticsearch/issues/113950
-- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
-  method: test {yaml=reference/rest-api/usage/line_38}
-  issue: https://github.com/elastic/elasticsearch/issues/113694
-- class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
-  method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
-  issue: https://github.com/elastic/elasticsearch/issues/115475
-- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
-  method: testProcessFileChanges
-  issue: https://github.com/elastic/elasticsearch/issues/115280
-- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
-  method: testInferDeploysDefaultE5
-  issue: https://github.com/elastic/elasticsearch/issues/115361
-- class: org.elasticsearch.xpack.inference.InferenceCrudIT
-  method: testSupportedStream
-  issue: https://github.com/elastic/elasticsearch/issues/113430
-- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
-  method: testGeoShapeGeoHash
-  issue: https://github.com/elastic/elasticsearch/issues/115664
-- class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT
-  issue: https://github.com/elastic/elasticsearch/issues/116126
-- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/111319
-- class: org.elasticsearch.upgrades.FullClusterRestartIT
-  method: testSnapshotRestore {cluster=OLD}
-  issue: https://github.com/elastic/elasticsearch/issues/111777
-- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
-  method: testLookbackWithIndicesOptions
-  issue: https://github.com/elastic/elasticsearch/issues/116127
-- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT
-  method: testSnapshotRestore {cluster=UPGRADED}
-  issue: https://github.com/elastic/elasticsearch/issues/111799
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=search/380_sort_segments_on_timestamp/Test that index segments are NOT sorted on timestamp field when @timestamp field is dynamically added}
-  issue: https://github.com/elastic/elasticsearch/issues/116221
-- class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT
-  method: test {yaml=ingest/310_reroute_processor/Test remove then add reroute processor with and without lazy rollover}
-  issue: https://github.com/elastic/elasticsearch/issues/116158
-- class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT
-  method: test {yaml=ingest/310_reroute_processor/Test data stream with lazy rollover obtains pipeline from template}
-  issue: https://github.com/elastic/elasticsearch/issues/116157
-- class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT
-  method: testEnterpriseDownloaderTask
-  issue: https://github.com/elastic/elasticsearch/issues/115163
-- class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT
-  method: testDeprecatedSettingsReturnWarnings
-  issue: https://github.com/elastic/elasticsearch/issues/108628
-- class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT
-  method: test {yaml=/10_apm/Test template reinstallation}
-  issue: https://github.com/elastic/elasticsearch/issues/116445
-- class: org.elasticsearch.action.admin.HotThreadsIT
-  method: testHotThreadsDontFail
-  issue: https://github.com/elastic/elasticsearch/issues/115754
-- class: org.elasticsearch.action.search.PointInTimeIT
-  method: testPITTiebreak
-  issue: https://github.com/elastic/elasticsearch/issues/115810
-- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
-  method: test {p0=esql/61_enrich_ip/IP strings}
-  issue: https://github.com/elastic/elasticsearch/issues/116529
-- class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsCanMatchOnCoordinatorIntegTests
-  method: testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange
-  issue: https://github.com/elastic/elasticsearch/issues/116523
-- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
-  method: testInferDeploysDefaultElser
-  issue: https://github.com/elastic/elasticsearch/issues/114913
-- class: org.elasticsearch.threadpool.SimpleThreadPoolIT
-  method: testThreadPoolMetrics
-  issue: https://github.com/elastic/elasticsearch/issues/108320
-- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
-  method: test {p0=esql/60_enrich/Enrich on keyword with fields alias}
-  issue: https://github.com/elastic/elasticsearch/issues/116592
-- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
-  method: test {p0=esql/60_enrich/Enrich on keyword with fields}
-  issue: https://github.com/elastic/elasticsearch/issues/116593
-- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
-  method: testGeoShapeGeoTile
-  issue: https://github.com/elastic/elasticsearch/issues/115717
-- class: org.elasticsearch.search.StressSearchServiceReaperIT
-  method: testStressReaper
-  issue: https://github.com/elastic/elasticsearch/issues/115816
-- class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
-  method: testGeoShapeGeoHex
-  issue: https://github.com/elastic/elasticsearch/issues/115705
-- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues SYNC}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues ASYNC}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues SYNC}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues ASYNC}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
-  method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues}
-  issue: https://github.com/elastic/elasticsearch/issues/116945
-- class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT
-  method: testRandomDirectoryIOExceptions
-  issue: https://github.com/elastic/elasticsearch/issues/114824
-- class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
-  method: testRerank {upgradedNodes=1}
-  issue: https://github.com/elastic/elasticsearch/issues/116973
-- class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
-  method: testCohereEmbeddings {upgradedNodes=1}
-  issue: https://github.com/elastic/elasticsearch/issues/116974
-- class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
-  method: testCohereEmbeddings {upgradedNodes=2}
-  issue: https://github.com/elastic/elasticsearch/issues/116975
-- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
-  method: testRetryPointInTime
-  issue: https://github.com/elastic/elasticsearch/issues/117116
-- class: org.elasticsearch.oldrepos.OldRepositoryAccessIT
-  method: testOldRepoAccess
-  issue: https://github.com/elastic/elasticsearch/issues/115631
-- class: org.elasticsearch.xpack.inference.InferenceRestIT
-  method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint}
-  issue: https://github.com/elastic/elasticsearch/issues/117027
-- class: org.elasticsearch.xpack.inference.InferenceRestIT
-  method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint}
-  issue: https://github.com/elastic/elasticsearch/issues/117349
-- class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT
-  method: testSearchWithRandomDisconnects
-  issue: https://github.com/elastic/elasticsearch/issues/116175
-- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests
-  method: testMinimumVersionBetweenNewAndOldVersion
-  issue: https://github.com/elastic/elasticsearch/issues/117485
-- class: org.elasticsearch.discovery.ClusterDisruptionIT
-  method: testAckedIndexing
-  issue: https://github.com/elastic/elasticsearch/issues/117024
-- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
-  method: testMultipleInferencesTriggeringDownloadAndDeploy
-  issue: https://github.com/elastic/elasticsearch/issues/117208
-- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
-  method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
-  issue: https://github.com/elastic/elasticsearch/issues/116777
+  - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
+    method: test {yaml=reference/esql/esql-async-query-api/line_17}
+    issue: https://github.com/elastic/elasticsearch/issues/109260
+  - class: "org.elasticsearch.client.RestClientSingleHostIntegTests"
+    issue: "https://github.com/elastic/elasticsearch/issues/102717"
+    method: "testRequestResetAndAbort"
+  - class: org.elasticsearch.index.store.FsDirectoryFactoryTests
+    method: testStoreDirectory
+    issue: https://github.com/elastic/elasticsearch/issues/110210
+  - class: org.elasticsearch.index.store.FsDirectoryFactoryTests
+    method: testPreload
+    issue: https://github.com/elastic/elasticsearch/issues/110211
+  - class: org.elasticsearch.upgrades.SecurityIndexRolesMetadataMigrationIT
+    method: testMetadataMigratedAfterUpgrade
+    issue: https://github.com/elastic/elasticsearch/issues/110232
+  - class: org.elasticsearch.xpack.security.authz.store.NativePrivilegeStoreCacheTests
+    method: testPopulationOfCacheWhenLoadingPrivilegesForAllApplications
+    issue: https://github.com/elastic/elasticsearch/issues/110789
+  - class: org.elasticsearch.xpack.searchablesnapshots.cache.common.CacheFileTests
+    method: testCacheFileCreatedAsSparseFile
+    issue: https://github.com/elastic/elasticsearch/issues/110801
+  - class: org.elasticsearch.nativeaccess.VectorSystemPropertyTests
+    method: testSystemPropertyDisabled
+    issue: https://github.com/elastic/elasticsearch/issues/110949
+  - class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT
+    method: testAuthenticateWithImplicitFlow
+    issue: https://github.com/elastic/elasticsearch/issues/111191
+  - class: org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectAuthIT
+    method: testAuthenticateWithCodeFlowAndClientPost
+    issue: https://github.com/elastic/elasticsearch/issues/111396
+  - class: org.elasticsearch.xpack.restart.FullClusterRestartIT
+    method: testSingleDoc {cluster=UPGRADED}
+    issue: https://github.com/elastic/elasticsearch/issues/111434
+  - class: org.elasticsearch.xpack.restart.FullClusterRestartIT
+    method: testDataStreams {cluster=UPGRADED}
+    issue: https://github.com/elastic/elasticsearch/issues/111448
+  - class: org.elasticsearch.search.SearchServiceTests
+    issue: https://github.com/elastic/elasticsearch/issues/111529
+  - class: org.elasticsearch.upgrades.FullClusterRestartIT
+    method: testSnapshotRestore {cluster=UPGRADED}
+    issue: https://github.com/elastic/elasticsearch/issues/111798
+  - class: org.elasticsearch.xpack.inference.InferenceRestIT
+    method: test {p0=inference/80_random_rerank_retriever/Random rerank retriever predictably shuffles results}
+    issue: https://github.com/elastic/elasticsearch/issues/111999
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testDeleteJobAfterMissingIndex
+    issue: https://github.com/elastic/elasticsearch/issues/112088
+  - class: org.elasticsearch.smoketest.WatcherYamlRestIT
+    method: test {p0=watcher/usage/10_basic/Test watcher usage stats output}
+    issue: https://github.com/elastic/elasticsearch/issues/112189
+  - class: org.elasticsearch.xpack.test.rest.XPackRestIT
+    method: test {p0=ml/inference_processor/Test create processor with missing mandatory fields}
+    issue: https://github.com/elastic/elasticsearch/issues/112191
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testDeleteJobAsync
+    issue: https://github.com/elastic/elasticsearch/issues/112212
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testMultiIndexDelete
+    issue: https://github.com/elastic/elasticsearch/issues/112381
+  - class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
+    method: "testAggregateIntermediate {TestCase=<geo_point> #2}"
+    issue: https://github.com/elastic/elasticsearch/issues/112461
+  - class: org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroidTests
+    method: testAggregateIntermediate {TestCase=<geo_point>}
+    issue: https://github.com/elastic/elasticsearch/issues/112463
+  - class: org.elasticsearch.xpack.inference.external.http.RequestBasedTaskRunnerTests
+    method: testLoopOneAtATime
+    issue: https://github.com/elastic/elasticsearch/issues/112471
+  - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT
+    issue: https://github.com/elastic/elasticsearch/issues/111497
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testPutJob_GivenFarequoteConfig
+    issue: https://github.com/elastic/elasticsearch/issues/112382
+  - class: org.elasticsearch.packaging.test.PackagesSecurityAutoConfigurationTests
+    method: test20SecurityNotAutoConfiguredOnReInstallation
+    issue: https://github.com/elastic/elasticsearch/issues/112635
+  - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
+    method: test {case-functions.testSelectInsertWithLcaseAndLengthWithOrderBy}
+    issue: https://github.com/elastic/elasticsearch/issues/112642
+  - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
+    method: test {case-functions.testUcaseInline1}
+    issue: https://github.com/elastic/elasticsearch/issues/112641
+  - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
+    method: test {case-functions.testUpperCasingTheSecondLetterFromTheRightFromFirstName}
+    issue: https://github.com/elastic/elasticsearch/issues/112640
+  - class: org.elasticsearch.xpack.sql.qa.single_node.JdbcSqlSpecIT
+    method: test {case-functions.testUcaseInline3}
+    issue: https://github.com/elastic/elasticsearch/issues/112643
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testDelete_multipleRequest
+    issue: https://github.com/elastic/elasticsearch/issues/112701
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testCreateJobInSharedIndexUpdatesMapping
+    issue: https://github.com/elastic/elasticsearch/issues/112729
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testGetJob_GivenNoSuchJob
+    issue: https://github.com/elastic/elasticsearch/issues/112730
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testDeleteJobAfterMissingAliases
+    issue: https://github.com/elastic/elasticsearch/issues/112823
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testCreateJob_WithClashingFieldMappingsFails
+    issue: https://github.com/elastic/elasticsearch/issues/113046
+  - class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
+    method: test {case-functions.testUcaseInline1}
+    issue: https://github.com/elastic/elasticsearch/issues/112641
+  - class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
+    method: test {case-functions.testUcaseInline3}
+    issue: https://github.com/elastic/elasticsearch/issues/112643
+  - class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
+    method: test {case-functions.testUpperCasingTheSecondLetterFromTheRightFromFirstName}
+    issue: https://github.com/elastic/elasticsearch/issues/112640
+  - class: org.elasticsearch.xpack.sql.qa.security.JdbcSqlSpecIT
+    method: test {case-functions.testSelectInsertWithLcaseAndLengthWithOrderBy}
+    issue: https://github.com/elastic/elasticsearch/issues/112642
+  - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
+    method: testResponse
+    issue: https://github.com/elastic/elasticsearch/issues/113148
+  - class: org.elasticsearch.packaging.test.WindowsServiceTests
+    method: test30StartStop
+    issue: https://github.com/elastic/elasticsearch/issues/113160
+  - class: org.elasticsearch.packaging.test.WindowsServiceTests
+    method: test33JavaChanged
+    issue: https://github.com/elastic/elasticsearch/issues/113177
+  - class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
+    method: testErrorMidStream
+    issue: https://github.com/elastic/elasticsearch/issues/113179
+  - class: org.elasticsearch.smoketest.MlWithSecurityIT
+    method: test {yaml=ml/sparse_vector_search/Test sparse_vector search with query vector and pruning config}
+    issue: https://github.com/elastic/elasticsearch/issues/108997
+  - class: org.elasticsearch.packaging.test.WindowsServiceTests
+    method: test80JavaOptsInEnvVar
+    issue: https://github.com/elastic/elasticsearch/issues/113219
+  - class: org.elasticsearch.packaging.test.WindowsServiceTests
+    method: test81JavaOptsInJvmOptions
+    issue: https://github.com/elastic/elasticsearch/issues/113313
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item}
+    issue: https://github.com/elastic/elasticsearch/issues/113325
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testDeleteJob_TimingStatsDocumentIsDeleted
+    issue: https://github.com/elastic/elasticsearch/issues/113370
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=search/500_date_range/from, to, include_lower, include_upper deprecated}
+    issue: https://github.com/elastic/elasticsearch/pull/113286
+  - class: org.elasticsearch.index.mapper.extras.TokenCountFieldMapperTests
+    method: testBlockLoaderFromRowStrideReaderWithSyntheticSource
+    issue: https://github.com/elastic/elasticsearch/issues/113427
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testOutOfOrderData
+    issue: https://github.com/elastic/elasticsearch/issues/113477
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testCreateJobsWithIndexNameOption
+    issue: https://github.com/elastic/elasticsearch/issues/113528
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}
+    issue: https://github.com/elastic/elasticsearch/issues/113537
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testCantCreateJobWithSameID
+    issue: https://github.com/elastic/elasticsearch/issues/113581
+  - class: org.elasticsearch.integration.KibanaUserRoleIntegTests
+    method: testFieldMappings
+    issue: https://github.com/elastic/elasticsearch/issues/113592
+  - class: org.elasticsearch.integration.KibanaUserRoleIntegTests
+    method: testSearchAndMSearch
+    issue: https://github.com/elastic/elasticsearch/issues/113593
+  - class: org.elasticsearch.xpack.transform.integration.TransformIT
+    method: testStopWaitForCheckpoint
+    issue: https://github.com/elastic/elasticsearch/issues/106113
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=search/540_ignore_above_synthetic_source/ignore_above mapping level setting on arrays}
+    issue: https://github.com/elastic/elasticsearch/issues/113648
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testGetJobs_GivenMultipleJobs
+    issue: https://github.com/elastic/elasticsearch/issues/113654
+  - class: org.elasticsearch.xpack.ml.integration.MlJobIT
+    method: testGetJobs_GivenSingleJob
+    issue: https://github.com/elastic/elasticsearch/issues/113655
+  - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDateNanosTests
+    issue: https://github.com/elastic/elasticsearch/issues/113661
+  - class: org.elasticsearch.search.retriever.RankDocsRetrieverBuilderTests
+    method: testRewrite
+    issue: https://github.com/elastic/elasticsearch/issues/114467
+  - class: org.elasticsearch.gradle.internal.PublishPluginFuncTest
+    issue: https://github.com/elastic/elasticsearch/issues/114492
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=indices.split/40_routing_partition_size/nested}
+    issue: https://github.com/elastic/elasticsearch/issues/113842
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=indices.split/40_routing_partition_size/more than 1}
+    issue: https://github.com/elastic/elasticsearch/issues/113841
+  - class: org.elasticsearch.kibana.KibanaThreadPoolIT
+    method: testBlockedThreadPoolsRejectUserRequests
+    issue: https://github.com/elastic/elasticsearch/issues/113939
+  - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+    method: testPutE5WithTrainedModelAndInference
+    issue: https://github.com/elastic/elasticsearch/issues/114023
+  - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+    method: testPutE5Small_withPlatformAgnosticVariant
+    issue: https://github.com/elastic/elasticsearch/issues/113983
+  - class: org.elasticsearch.datastreams.LazyRolloverDuringDisruptionIT
+    method: testRolloverIsExecutedOnce
+    issue: https://github.com/elastic/elasticsearch/issues/112634
+  - class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT
+    method: test {yaml=rrf/800_rrf_with_text_similarity_reranker_retriever/explain using rrf retriever and text-similarity}
+    issue: https://github.com/elastic/elasticsearch/issues/114757
+  - class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT
+    method: testTracingCrossCluster
+    issue: https://github.com/elastic/elasticsearch/issues/112731
+  - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
+    method: testPutE5Small_withPlatformSpecificVariant
+    issue: https://github.com/elastic/elasticsearch/issues/113950
+  - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
+    method: test {yaml=reference/rest-api/usage/line_38}
+    issue: https://github.com/elastic/elasticsearch/issues/113694
+  - class: org.elasticsearch.test.rest.yaml.RcsCcsCommonYamlTestSuiteIT
+    method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
+    issue: https://github.com/elastic/elasticsearch/issues/115475
+  - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
+    method: testProcessFileChanges
+    issue: https://github.com/elastic/elasticsearch/issues/115280
+  - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
+    method: testInferDeploysDefaultE5
+    issue: https://github.com/elastic/elasticsearch/issues/115361
+  - class: org.elasticsearch.xpack.inference.InferenceCrudIT
+    method: testSupportedStream
+    issue: https://github.com/elastic/elasticsearch/issues/113430
+  - class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
+    method: testGeoShapeGeoHash
+    issue: https://github.com/elastic/elasticsearch/issues/115664
+  - class: org.elasticsearch.indices.mapping.UpdateMappingIntegrationIT
+    issue: https://github.com/elastic/elasticsearch/issues/116126
+  - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
+    issue: https://github.com/elastic/elasticsearch/issues/111319
+  - class: org.elasticsearch.upgrades.FullClusterRestartIT
+    method: testSnapshotRestore {cluster=OLD}
+    issue: https://github.com/elastic/elasticsearch/issues/111777
+  - class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
+    method: testLookbackWithIndicesOptions
+    issue: https://github.com/elastic/elasticsearch/issues/116127
+  - class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT
+    method: testSnapshotRestore {cluster=UPGRADED}
+    issue: https://github.com/elastic/elasticsearch/issues/111799
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=search/380_sort_segments_on_timestamp/Test that index segments are NOT sorted on timestamp field when @timestamp field is dynamically added}
+    issue: https://github.com/elastic/elasticsearch/issues/116221
+  - class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT
+    method: test {yaml=ingest/310_reroute_processor/Test remove then add reroute processor with and without lazy rollover}
+    issue: https://github.com/elastic/elasticsearch/issues/116158
+  - class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT
+    method: test {yaml=ingest/310_reroute_processor/Test data stream with lazy rollover obtains pipeline from template}
+    issue: https://github.com/elastic/elasticsearch/issues/116157
+  - class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT
+    method: testEnterpriseDownloaderTask
+    issue: https://github.com/elastic/elasticsearch/issues/115163
+  - class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT
+    method: testDeprecatedSettingsReturnWarnings
+    issue: https://github.com/elastic/elasticsearch/issues/108628
+  - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT
+    method: test {yaml=/10_apm/Test template reinstallation}
+    issue: https://github.com/elastic/elasticsearch/issues/116445
+  - class: org.elasticsearch.action.admin.HotThreadsIT
+    method: testHotThreadsDontFail
+    issue: https://github.com/elastic/elasticsearch/issues/115754
+  - class: org.elasticsearch.action.search.PointInTimeIT
+    method: testPITTiebreak
+    issue: https://github.com/elastic/elasticsearch/issues/115810
+  - class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
+    method: test {p0=esql/61_enrich_ip/IP strings}
+    issue: https://github.com/elastic/elasticsearch/issues/116529
+  - class: org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshotsCanMatchOnCoordinatorIntegTests
+    method: testSearchableSnapshotShardsAreSkippedBySearchRequestWithoutQueryingAnyNodeWhenTheyAreOutsideOfTheQueryRange
+    issue: https://github.com/elastic/elasticsearch/issues/116523
+  - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
+    method: testInferDeploysDefaultElser
+    issue: https://github.com/elastic/elasticsearch/issues/114913
+  - class: org.elasticsearch.threadpool.SimpleThreadPoolIT
+    method: testThreadPoolMetrics
+    issue: https://github.com/elastic/elasticsearch/issues/108320
+  - class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
+    method: test {p0=esql/60_enrich/Enrich on keyword with fields alias}
+    issue: https://github.com/elastic/elasticsearch/issues/116592
+  - class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
+    method: test {p0=esql/60_enrich/Enrich on keyword with fields}
+    issue: https://github.com/elastic/elasticsearch/issues/116593
+  - class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
+    method: testGeoShapeGeoTile
+    issue: https://github.com/elastic/elasticsearch/issues/115717
+  - class: org.elasticsearch.search.StressSearchServiceReaperIT
+    method: testStressReaper
+    issue: https://github.com/elastic/elasticsearch/issues/115816
+  - class: org.elasticsearch.xpack.spatial.search.GeoGridAggAndQueryConsistencyIT
+    method: testGeoShapeGeoHex
+    issue: https://github.com/elastic/elasticsearch/issues/115705
+  - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues SYNC}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues ASYNC}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNoDocValues}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues SYNC}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues ASYNC}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
+    method: test {spatial.CentroidFromAirportsAfterIntersectsCompoundPredicateNotIndexedNorDocValues}
+    issue: https://github.com/elastic/elasticsearch/issues/116945
+  - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT
+    method: testRandomDirectoryIOExceptions
+    issue: https://github.com/elastic/elasticsearch/issues/114824
+  - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
+    method: testRerank {upgradedNodes=1}
+    issue: https://github.com/elastic/elasticsearch/issues/116973
+  - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
+    method: testCohereEmbeddings {upgradedNodes=1}
+    issue: https://github.com/elastic/elasticsearch/issues/116974
+  - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
+    method: testCohereEmbeddings {upgradedNodes=2}
+    issue: https://github.com/elastic/elasticsearch/issues/116975
+  - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
+    method: testRetryPointInTime
+    issue: https://github.com/elastic/elasticsearch/issues/117116
+  - class: org.elasticsearch.oldrepos.OldRepositoryAccessIT
+    method: testOldRepoAccess
+    issue: https://github.com/elastic/elasticsearch/issues/115631
+  - class: org.elasticsearch.xpack.inference.InferenceRestIT
+    method: test {p0=inference/40_semantic_text_query/Query a field that uses the default ELSER 2 endpoint}
+    issue: https://github.com/elastic/elasticsearch/issues/117027
+  - class: org.elasticsearch.xpack.inference.InferenceRestIT
+    method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint}
+    issue: https://github.com/elastic/elasticsearch/issues/117349
+  - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT
+    method: testSearchWithRandomDisconnects
+    issue: https://github.com/elastic/elasticsearch/issues/116175
+  - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests
+    method: testMinimumVersionBetweenNewAndOldVersion
+    issue: https://github.com/elastic/elasticsearch/issues/117485
+  - class: org.elasticsearch.discovery.ClusterDisruptionIT
+    method: testAckedIndexing
+    issue: https://github.com/elastic/elasticsearch/issues/117024
+  - class: org.elasticsearch.xpack.inference.DefaultEndPointsIT
+    method: testMultipleInferencesTriggeringDownloadAndDeploy
+    issue: https://github.com/elastic/elasticsearch/issues/117208
+  - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
+    method: test {p0=synonyms/90_synonyms_reloading_for_synset/Reload analyzers for specific synonym set}
+    issue: https://github.com/elastic/elasticsearch/issues/116777
 
 # Examples:
 #
@@ -389,3 +389,14 @@ tests:
 #  - class: "org.elasticsearch.xpack.esql.**"
 #    method: "test {union_types.MultiIndexIpStringStatsInline *}"
 #    issue: "https://github.com/elastic/elasticsearch/..."
+  - class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests
+    method: testStopWorksInMiddleOfProcessing
+    issue: https://github.com/elastic/elasticsearch/issues/117591
+  - class: org.elasticsearch.repositories.s3.RepositoryS3ClientYamlTestSuiteIT
+    issue: https://github.com/elastic/elasticsearch/issues/117596
+  - class: "org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT"
+    method: "test {scoring.*}"
+    issue: https://github.com/elastic/elasticsearch/issues/117641
+  - class: "org.elasticsearch.xpack.esql.qa.single_node.EsqlSpecIT"
+    method: "test {scoring.*}"
+    issue: https://github.com/elastic/elasticsearch/issues/117641

+ 12 - 3
server/src/main/java/org/elasticsearch/search/sort/SortBuilder.java

@@ -155,6 +155,11 @@ public abstract class SortBuilder<T extends SortBuilder<T>>
     }
 
     public static Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sortBuilders, SearchExecutionContext context) throws IOException {
+        return buildSort(sortBuilders, context, true);
+    }
+
+    public static Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sortBuilders, SearchExecutionContext context, boolean optimize)
+        throws IOException {
         List<SortField> sortFields = new ArrayList<>(sortBuilders.size());
         List<DocValueFormat> sortFormats = new ArrayList<>(sortBuilders.size());
         for (SortBuilder<?> builder : sortBuilders) {
@@ -169,9 +174,13 @@ public abstract class SortBuilder<T extends SortBuilder<T>>
             if (sortFields.size() > 1) {
                 sort = true;
             } else {
-                SortField sortField = sortFields.get(0);
-                if (sortField.getType() == SortField.Type.SCORE && sortField.getReverse() == false) {
-                    sort = false;
+                if (optimize) {
+                    SortField sortField = sortFields.get(0);
+                    if (sortField.getType() == SortField.Type.SCORE && sortField.getReverse() == false) {
+                        sort = false;
+                    } else {
+                        sort = true;
+                    }
                 } else {
                     sort = true;
                 }

+ 4 - 1
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java

@@ -31,6 +31,7 @@ import static org.elasticsearch.core.Tuple.tuple;
 public class MetadataAttribute extends TypedAttribute {
     public static final String TIMESTAMP_FIELD = "@timestamp";
     public static final String TSID_FIELD = "_tsid";
+    public static final String SCORE = "_score";
 
     static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
         Attribute.class,
@@ -50,7 +51,9 @@ public class MetadataAttribute extends TypedAttribute {
         SourceFieldMapper.NAME,
         tuple(DataType.SOURCE, false),
         IndexModeFieldMapper.NAME,
-        tuple(DataType.KEYWORD, true)
+        tuple(DataType.KEYWORD, true),
+        SCORE,
+        tuple(DataType.DOUBLE, false)
     );
 
     private final boolean searchable;
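
Since MetadataAttribute now registers _score as a non-searchable DataType.DOUBLE attribute, the column should behave like any other double-valued column once requested; a small hedged sketch (again with hypothetical index and field names):

    FROM books METADATA _score
    | WHERE match(title, "rings")
    | EVAL boosted = _score * 2.0
    | SORT boosted DESC
    | KEEP title, _score, boosted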

+ 4 - 1
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java

@@ -79,6 +79,7 @@ public abstract class LuceneOperator extends SourceOperator {
         protected final DataPartitioning dataPartitioning;
         protected final int taskConcurrency;
         protected final int limit;
+        protected final ScoreMode scoreMode;
         protected final LuceneSliceQueue sliceQueue;
 
         /**
@@ -95,6 +96,7 @@ public abstract class LuceneOperator extends SourceOperator {
             ScoreMode scoreMode
         ) {
             this.limit = limit;
+            this.scoreMode = scoreMode;
             this.dataPartitioning = dataPartitioning;
             var weightFunction = weightFunction(queryFunction, scoreMode);
             this.sliceQueue = LuceneSliceQueue.create(contexts, weightFunction, dataPartitioning, taskConcurrency);
@@ -438,7 +440,8 @@ public abstract class LuceneOperator extends SourceOperator {
             final var query = queryFunction.apply(ctx);
             final var searcher = ctx.searcher();
             try {
-                return searcher.createWeight(searcher.rewrite(new ConstantScoreQuery(query)), scoreMode, 1);
+                Query actualQuery = scoreMode.needsScores() ? query : new ConstantScoreQuery(query);
+                return searcher.createWeight(searcher.rewrite(actualQuery), scoreMode, 1);
             } catch (IOException e) {
                 throw new UncheckedIOException(e);
             }

+ 74 - 22
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java

@@ -13,7 +13,9 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorable;
 import org.apache.lucene.search.ScoreMode;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.DocBlock;
 import org.elasticsearch.compute.data.DocVector;
+import org.elasticsearch.compute.data.DoubleVector;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
@@ -25,6 +27,9 @@ import java.io.IOException;
 import java.util.List;
 import java.util.function.Function;
 
+import static org.apache.lucene.search.ScoreMode.COMPLETE;
+import static org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES;
+
 /**
  * Source operator that incrementally runs Lucene searches
  */
@@ -34,6 +39,7 @@ public class LuceneSourceOperator extends LuceneOperator {
     private int remainingDocs;
 
     private IntVector.Builder docsBuilder;
+    private DoubleVector.Builder scoreBuilder;
     private final LeafCollector leafCollector;
     private final int minPageSize;
 
@@ -47,15 +53,16 @@ public class LuceneSourceOperator extends LuceneOperator {
             DataPartitioning dataPartitioning,
             int taskConcurrency,
             int maxPageSize,
-            int limit
+            int limit,
+            boolean scoring
         ) {
-            super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, ScoreMode.COMPLETE_NO_SCORES);
+            super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, scoring ? COMPLETE : COMPLETE_NO_SCORES);
             this.maxPageSize = maxPageSize;
         }
 
         @Override
         public SourceOperator get(DriverContext driverContext) {
-            return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit);
+            return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit, scoreMode);
         }
 
         public int maxPageSize() {
@@ -70,32 +77,65 @@ public class LuceneSourceOperator extends LuceneOperator {
                 + maxPageSize
                 + ", limit = "
                 + limit
+                + ", scoreMode = "
+                + scoreMode
                 + "]";
         }
     }
 
-    public LuceneSourceOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue, int limit) {
+    @SuppressWarnings("this-escape")
+    public LuceneSourceOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue, int limit, ScoreMode scoreMode) {
         super(blockFactory, maxPageSize, sliceQueue);
         this.minPageSize = Math.max(1, maxPageSize / 2);
         this.remainingDocs = limit;
-        this.docsBuilder = blockFactory.newIntVectorBuilder(Math.min(limit, maxPageSize));
-        this.leafCollector = new LeafCollector() {
-            @Override
-            public void setScorer(Scorable scorer) {
-
+        int estimatedSize = Math.min(limit, maxPageSize);
+        boolean success = false;
+        try {
+            this.docsBuilder = blockFactory.newIntVectorBuilder(estimatedSize);
+            if (scoreMode.needsScores()) {
+                scoreBuilder = blockFactory.newDoubleVectorBuilder(estimatedSize);
+                this.leafCollector = new ScoringCollector();
+            } else {
+                scoreBuilder = null;
+                this.leafCollector = new LimitingCollector();
             }
+            success = true;
+        } finally {
+            if (success == false) {
+                close();
+            }
+        }
+    }
 
-            @Override
-            public void collect(int doc) {
-                if (remainingDocs > 0) {
-                    --remainingDocs;
-                    docsBuilder.appendInt(doc);
-                    currentPagePos++;
-                } else {
-                    throw new CollectionTerminatedException();
-                }
+    class LimitingCollector implements LeafCollector {
+        @Override
+        public void setScorer(Scorable scorer) {}
+
+        @Override
+        public void collect(int doc) throws IOException {
+            if (remainingDocs > 0) {
+                --remainingDocs;
+                docsBuilder.appendInt(doc);
+                currentPagePos++;
+            } else {
+                throw new CollectionTerminatedException();
             }
-        };
+        }
+    }
+
+    final class ScoringCollector extends LuceneSourceOperator.LimitingCollector {
+        private Scorable scorable;
+
+        @Override
+        public void setScorer(Scorable scorer) {
+            this.scorable = scorer;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+            super.collect(doc);
+            scoreBuilder.appendDouble(scorable.score());
+        }
     }
 
     @Override
@@ -139,15 +179,27 @@ public class LuceneSourceOperator extends LuceneOperator {
                 IntBlock shard = null;
                 IntBlock leaf = null;
                 IntVector docs = null;
+                DoubleVector scores = null;
+                DocBlock docBlock = null;
                 try {
                     shard = blockFactory.newConstantIntBlockWith(scorer.shardContext().index(), currentPagePos);
                     leaf = blockFactory.newConstantIntBlockWith(scorer.leafReaderContext().ord, currentPagePos);
                     docs = docsBuilder.build();
                     docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize));
-                    page = new Page(currentPagePos, new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock());
+                    docBlock = new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock();
+                    shard = null;
+                    leaf = null;
+                    docs = null;
+                    if (scoreBuilder == null) {
+                        page = new Page(currentPagePos, docBlock);
+                    } else {
+                        scores = scoreBuilder.build();
+                        scoreBuilder = blockFactory.newDoubleVectorBuilder(Math.min(remainingDocs, maxPageSize));
+                        page = new Page(currentPagePos, docBlock, scores.asBlock());
+                    }
                 } finally {
                     if (page == null) {
-                        Releasables.closeExpectNoException(shard, leaf, docs);
+                        Releasables.closeExpectNoException(shard, leaf, docs, docBlock, scores);
                     }
                 }
                 currentPagePos = 0;
@@ -160,7 +212,7 @@ public class LuceneSourceOperator extends LuceneOperator {
 
     @Override
     public void close() {
-        docsBuilder.close();
+        Releasables.close(docsBuilder, scoreBuilder);
     }
 
     @Override
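With scoring enabled, LuceneSourceOperator now emits pages whose block 0 is the DocBlock and block 1 is a DoubleBlock of scores; blocks added by downstream readers start after that. A hypothetical consumer-side helper, mirroring the initialBlockIndex helper the tests further down use:

    import org.elasticsearch.compute.data.DocBlock;
    import org.elasticsearch.compute.data.DoubleBlock;
    import org.elasticsearch.compute.data.Page;

    final class PageLayout {
        private PageLayout() {}

        // Index of the first block that is neither the doc block nor the optional score block.
        static int firstFieldBlock(Page page, boolean scoring) {
            assert page.getBlock(0) instanceof DocBlock : "doc block is always emitted first";
            if (scoring) {
                assert page.getBlock(1) instanceof DoubleBlock : "scores follow the doc block";
                return 2;
            }
            return 1;
        }
    }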

+ 120 - 21
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java

@@ -10,14 +10,22 @@ package org.elasticsearch.compute.lucene;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.CollectionTerminatedException;
+import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.search.TopFieldCollector;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.search.TopFieldCollectorManager;
+import org.apache.lucene.search.TopScoreDocCollectorManager;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.DocBlock;
 import org.elasticsearch.compute.data.DocVector;
+import org.elasticsearch.compute.data.DoubleBlock;
+import org.elasticsearch.compute.data.DoubleVector;
 import org.elasticsearch.compute.data.IntBlock;
 import org.elasticsearch.compute.data.IntVector;
 import org.elasticsearch.compute.data.Page;
@@ -28,17 +36,21 @@ import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.search.sort.SortBuilder;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static org.apache.lucene.search.ScoreMode.COMPLETE;
+import static org.apache.lucene.search.ScoreMode.TOP_DOCS;
+
 /**
  * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN)
  */
 public final class LuceneTopNSourceOperator extends LuceneOperator {
-    public static final class Factory extends LuceneOperator.Factory {
+    public static class Factory extends LuceneOperator.Factory {
         private final int maxPageSize;
         private final List<SortBuilder<?>> sorts;
 
@@ -49,16 +61,17 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
             int taskConcurrency,
             int maxPageSize,
             int limit,
-            List<SortBuilder<?>> sorts
+            List<SortBuilder<?>> sorts,
+            boolean scoring
         ) {
-            super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, ScoreMode.TOP_DOCS);
+            super(contexts, queryFunction, dataPartitioning, taskConcurrency, limit, scoring ? COMPLETE : TOP_DOCS);
             this.maxPageSize = maxPageSize;
             this.sorts = sorts;
         }
 
         @Override
         public SourceOperator get(DriverContext driverContext) {
-            return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue);
+            return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue, scoreMode);
         }
 
         public int maxPageSize() {
@@ -74,6 +87,8 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
                 + maxPageSize
                 + ", limit = "
                 + limit
+                + ", scoreMode = "
+                + scoreMode
                 + ", sorts = ["
                 + notPrettySorts
                 + "]]";
@@ -92,17 +107,20 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
     private PerShardCollector perShardCollector;
     private final List<SortBuilder<?>> sorts;
     private final int limit;
+    private final ScoreMode scoreMode;
 
     public LuceneTopNSourceOperator(
         BlockFactory blockFactory,
         int maxPageSize,
         List<SortBuilder<?>> sorts,
         int limit,
-        LuceneSliceQueue sliceQueue
+        LuceneSliceQueue sliceQueue,
+        ScoreMode scoreMode
     ) {
         super(blockFactory, maxPageSize, sliceQueue);
         this.sorts = sorts;
         this.limit = limit;
+        this.scoreMode = scoreMode;
     }
 
     @Override
@@ -144,7 +162,7 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
         try {
             if (perShardCollector == null || perShardCollector.shardContext.index() != scorer.shardContext().index()) {
                 // TODO: share the bottom between shardCollectors
-                perShardCollector = new PerShardCollector(scorer.shardContext(), sorts, limit);
+                perShardCollector = newPerShardCollector(scorer.shardContext(), sorts, limit);
             }
             var leafCollector = perShardCollector.getLeafCollector(scorer.leafReaderContext());
             scorer.scoreNextRange(leafCollector, scorer.leafReaderContext().reader().getLiveDocs(), maxPageSize);
@@ -170,7 +188,7 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
             assert isEmitting() == false : "offset=" + offset + " score_docs=" + Arrays.toString(scoreDocs);
             offset = 0;
             if (perShardCollector != null) {
-                scoreDocs = perShardCollector.topFieldCollector.topDocs().scoreDocs;
+                scoreDocs = perShardCollector.collector.topDocs().scoreDocs;
             } else {
                 scoreDocs = new ScoreDoc[0];
             }
@@ -182,10 +200,13 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
         IntBlock shard = null;
         IntVector segments = null;
         IntVector docs = null;
+        DocBlock docBlock = null;
+        DoubleBlock scores = null;
         Page page = null;
         try (
             IntVector.Builder currentSegmentBuilder = blockFactory.newIntVectorFixedBuilder(size);
-            IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size)
+            IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size);
+            DoubleVector.Builder currentScoresBuilder = scoreVectorOrNull(size);
         ) {
             int start = offset;
             offset += size;
@@ -195,52 +216,130 @@ public final class LuceneTopNSourceOperator extends LuceneOperator {
                 int segment = ReaderUtil.subIndex(doc, leafContexts);
                 currentSegmentBuilder.appendInt(segment);
                 currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment
+                if (currentScoresBuilder != null) {
+                    float score = getScore(scoreDocs[i]);
+                    currentScoresBuilder.appendDouble(score);
+                }
             }
 
             shard = blockFactory.newConstantIntBlockWith(perShardCollector.shardContext.index(), size);
             segments = currentSegmentBuilder.build();
             docs = currentDocsBuilder.build();
-            page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock());
+            docBlock = new DocVector(shard.asVector(), segments, docs, null).asBlock();
+            shard = null;
+            segments = null;
+            docs = null;
+            if (currentScoresBuilder == null) {
+                page = new Page(size, docBlock);
+            } else {
+                scores = currentScoresBuilder.build().asBlock();
+                page = new Page(size, docBlock, scores);
+            }
         } finally {
             if (page == null) {
-                Releasables.closeExpectNoException(shard, segments, docs);
+                Releasables.closeExpectNoException(shard, segments, docs, docBlock, scores);
             }
         }
         pagesEmitted++;
         return page;
     }
 
+    private float getScore(ScoreDoc scoreDoc) {
+        if (scoreDoc instanceof FieldDoc fieldDoc) {
+            if (Float.isNaN(fieldDoc.score)) {
+                if (sorts != null) {
+                    return (Float) fieldDoc.fields[sorts.size() + 1];
+                } else {
+                    return (Float) fieldDoc.fields[0];
+                }
+            } else {
+                return fieldDoc.score;
+            }
+        } else {
+            return scoreDoc.score;
+        }
+    }
+
+    private DoubleVector.Builder scoreVectorOrNull(int size) {
+        if (scoreMode.needsScores()) {
+            return blockFactory.newDoubleVectorFixedBuilder(size);
+        } else {
+            return null;
+        }
+    }
+
     @Override
     protected void describe(StringBuilder sb) {
         sb.append(", limit = ").append(limit);
+        sb.append(", scoreMode = ").append(scoreMode);
         String notPrettySorts = sorts.stream().map(Strings::toString).collect(Collectors.joining(","));
         sb.append(", sorts = [").append(notPrettySorts).append("]");
     }
 
-    static final class PerShardCollector {
+    PerShardCollector newPerShardCollector(ShardContext shardContext, List<SortBuilder<?>> sorts, int limit) throws IOException {
+        Optional<SortAndFormats> sortAndFormats = shardContext.buildSort(sorts);
+        if (sortAndFormats.isEmpty()) {
+            throw new IllegalStateException("sorts must not be disabled in TopN");
+        }
+        if (scoreMode.needsScores() == false) {
+            return new NonScoringPerShardCollector(shardContext, sortAndFormats.get().sort, limit);
+        } else {
+            SortField[] sortFields = sortAndFormats.get().sort.getSort();
+            if (sortFields != null && sortFields.length == 1 && sortFields[0].needsScores() && sortFields[0].getReverse() == false) {
+                // SORT _score DESC
+                return new ScoringPerShardCollector(
+                    shardContext,
+                    new TopScoreDocCollectorManager(limit, null, limit, false).newCollector()
+                );
+            } else {
+                // SORT ..., _score, ...
+                var sort = new Sort();
+                if (sortFields != null) {
+                    var l = new ArrayList<>(Arrays.asList(sortFields));
+                    l.add(SortField.FIELD_DOC);
+                    l.add(SortField.FIELD_SCORE);
+                    sort = new Sort(l.toArray(SortField[]::new));
+                }
+                return new ScoringPerShardCollector(
+                    shardContext,
+                    new TopFieldCollectorManager(sort, limit, null, limit, false).newCollector()
+                );
+            }
+        }
+    }
+
+    abstract static class PerShardCollector {
         private final ShardContext shardContext;
-        private final TopFieldCollector topFieldCollector;
+        private final TopDocsCollector<?> collector;
         private int leafIndex;
         private LeafCollector leafCollector;
         private Thread currentThread;
 
-        PerShardCollector(ShardContext shardContext, List<SortBuilder<?>> sorts, int limit) throws IOException {
+        PerShardCollector(ShardContext shardContext, TopDocsCollector<?> collector) {
             this.shardContext = shardContext;
-            Optional<SortAndFormats> sortAndFormats = shardContext.buildSort(sorts);
-            if (sortAndFormats.isEmpty()) {
-                throw new IllegalStateException("sorts must not be disabled in TopN");
-            }
-            // We don't use CollectorManager here as we don't retrieve the total hits and sort by score.
-            this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0);
+            this.collector = collector;
         }
 
         LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException {
             if (currentThread != Thread.currentThread() || leafIndex != leafReaderContext.ord) {
-                leafCollector = topFieldCollector.getLeafCollector(leafReaderContext);
+                leafCollector = collector.getLeafCollector(leafReaderContext);
                 leafIndex = leafReaderContext.ord;
                 currentThread = Thread.currentThread();
             }
             return leafCollector;
         }
     }
+
+    static final class NonScoringPerShardCollector extends PerShardCollector {
+        NonScoringPerShardCollector(ShardContext shardContext, Sort sort, int limit) {
+            // We don't use CollectorManager here as we don't retrieve the total hits and sort by score.
+            super(shardContext, new TopFieldCollectorManager(sort, limit, null, 0, false).newCollector());
+        }
+    }
+
+    static final class ScoringPerShardCollector extends PerShardCollector {
+        ScoringPerShardCollector(ShardContext shardContext, TopDocsCollector<?> topDocsCollector) {
+            super(shardContext, topDocsCollector);
+        }
+    }
 }
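For TopN, the collector choice above depends on the sort: a pure score sort uses a TopScoreDocCollectorManager, while any other sort gets FIELD_DOC and FIELD_SCORE appended so the score can be recovered from FieldDoc.fields when FieldDoc.score itself is NaN. A small sketch of that bookkeeping (illustrative, not the committed code):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.lucene.search.FieldDoc;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    final class ScoreTrackingSort {
        private ScoreTrackingSort() {}

        // Append FIELD_DOC and FIELD_SCORE so the score is captured as the last sort value.
        static Sort withScoreTracking(SortField[] userSorts) {
            List<SortField> fields = new ArrayList<>(Arrays.asList(userSorts));
            fields.add(SortField.FIELD_DOC);
            fields.add(SortField.FIELD_SCORE);
            return new Sort(fields.toArray(SortField[]::new));
        }

        // The score lands at index userSortCount + 1 (after the appended doc id) when the FieldDoc itself carries no score.
        static float scoreOf(FieldDoc doc, int userSortCount) {
            return Float.isNaN(doc.score) ? (Float) doc.fields[userSortCount + 1] : doc.score;
        }
    }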

+ 2 - 1
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java

@@ -394,7 +394,8 @@ public class OperatorTests extends MapperServiceTestCase {
             randomFrom(DataPartitioning.values()),
             randomIntBetween(1, 10),
             randomPageSize(),
-            limit
+            limit,
+            false // no scoring
         );
     }
 }

+ 26 - 7
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java

@@ -27,6 +27,8 @@ import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
 import org.elasticsearch.compute.data.BytesRefBlock;
 import org.elasticsearch.compute.data.BytesRefVector;
+import org.elasticsearch.compute.data.DocBlock;
+import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.Page;
 import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.DenseCollector;
@@ -120,8 +122,9 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase {
     private void assertTermQuery(String term, List<Page> results) {
         int matchCount = 0;
         for (Page page : results) {
-            BytesRefVector terms = page.<BytesRefBlock>getBlock(1).asVector();
-            BooleanVector matches = page.<BooleanBlock>getBlock(2).asVector();
+            int initialBlockIndex = initialBlockIndex(page);
+            BytesRefVector terms = page.<BytesRefBlock>getBlock(initialBlockIndex).asVector();
+            BooleanVector matches = page.<BooleanBlock>getBlock(initialBlockIndex + 1).asVector();
             for (int i = 0; i < page.getPositionCount(); i++) {
                 BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef());
                 assertThat(matches.getBoolean(i), equalTo(termAtPosition.utf8ToString().equals(term)));
@@ -155,8 +158,9 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase {
         List<Page> results = runQuery(values, new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_REWRITE, FIELD, matchingBytes), shuffleDocs);
         int matchCount = 0;
         for (Page page : results) {
-            BytesRefVector terms = page.<BytesRefBlock>getBlock(1).asVector();
-            BooleanVector matches = page.<BooleanBlock>getBlock(2).asVector();
+            int initialBlockIndex = initialBlockIndex(page);
+            BytesRefVector terms = page.<BytesRefBlock>getBlock(initialBlockIndex).asVector();
+            BooleanVector matches = page.<BooleanBlock>getBlock(initialBlockIndex + 1).asVector();
             for (int i = 0; i < page.getPositionCount(); i++) {
                 BytesRef termAtPosition = terms.getBytesRef(i, new BytesRef());
                 assertThat(matches.getBoolean(i), equalTo(matching.contains(termAtPosition.utf8ToString())));
@@ -207,7 +211,7 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase {
             List<Page> results = new ArrayList<>();
             Driver driver = new Driver(
                 driverContext,
-                luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT).get(driverContext),
+                luceneOperatorFactory(reader, new MatchAllDocsQuery(), LuceneOperator.NO_LIMIT, scoring).get(driverContext),
                 operators,
                 new TestResultPageSinkOperator(results::add),
                 () -> {}
@@ -248,7 +252,21 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase {
         return new DriverContext(blockFactory.bigArrays(), blockFactory);
     }
 
-    static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit) {
+    // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage.
+    private final boolean scoring = randomBoolean();
+
+    // Returns the initial block index, ignoring the score block if scoring is enabled
+    private int initialBlockIndex(Page page) {
+        assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0";
+        if (scoring) {
+            assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1";
+            return 2;
+        } else {
+            return 1;
+        }
+    }
+
+    static LuceneOperator.Factory luceneOperatorFactory(IndexReader reader, Query query, int limit, boolean scoring) {
         final ShardContext searchContext = new LuceneSourceOperatorTests.MockShardContext(reader, 0);
         return new LuceneSourceOperator.Factory(
             List.of(searchContext),
@@ -256,7 +274,8 @@ public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase {
             randomFrom(DataPartitioning.values()),
             randomIntBetween(1, 10),
             randomPageSize(),
-            limit
+            limit,
+            scoring
         );
     }
 }

+ 25 - 6
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java

@@ -17,6 +17,8 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.compute.data.DocBlock;
+import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -63,10 +65,10 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
 
     @Override
     protected LuceneSourceOperator.Factory simple() {
-        return simple(randomFrom(DataPartitioning.values()), between(1, 10_000), 100);
+        return simple(randomFrom(DataPartitioning.values()), between(1, 10_000), 100, scoring);
     }
 
-    private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, int numDocs, int limit) {
+    private LuceneSourceOperator.Factory simple(DataPartitioning dataPartitioning, int numDocs, int limit, boolean scoring) {
         int commitEvery = Math.max(1, numDocs / 10);
         try (
             RandomIndexWriter writer = new RandomIndexWriter(
@@ -91,7 +93,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
         ShardContext ctx = new MockShardContext(reader, 0);
         Function<ShardContext, Query> queryFunction = c -> new MatchAllDocsQuery();
         int maxPageSize = between(10, Math.max(10, numDocs));
-        return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit);
+        return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit, scoring);
     }
 
     @Override
@@ -101,7 +103,10 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
 
     @Override
     protected Matcher<String> expectedDescriptionOfSimple() {
-        return matchesRegex("LuceneSourceOperator\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100]");
+        return matchesRegex(
+            "LuceneSourceOperator"
+                + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = (COMPLETE|COMPLETE_NO_SCORES)]"
+        );
     }
 
     // TODO tests for the other data partitioning configurations
@@ -149,7 +154,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
     }
 
     private void testSimple(DriverContext ctx, int size, int limit) {
-        LuceneSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit);
+        LuceneSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit, scoring);
         Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG);
 
         List<Page> results = new ArrayList<>();
@@ -164,7 +169,7 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
         }
 
         for (Page page : results) {
-            LongBlock sBlock = page.getBlock(1);
+            LongBlock sBlock = page.getBlock(initialBlockIndex(page));
             for (int p = 0; p < page.getPositionCount(); p++) {
                 assertThat(sBlock.getLong(sBlock.getFirstValueIndex(p)), both(greaterThanOrEqualTo(0L)).and(lessThan((long) size)));
             }
@@ -174,6 +179,20 @@ public class LuceneSourceOperatorTests extends AnyOperatorTestCase {
         assertThat(results, hasSize(both(greaterThanOrEqualTo(minPages)).and(lessThanOrEqualTo(maxPages))));
     }
 
+    // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage.
+    private final boolean scoring = randomBoolean();
+
+    // Returns the initial block index, ignoring the score block if scoring is enabled
+    private int initialBlockIndex(Page page) {
+        assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0";
+        if (scoring) {
+            assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1";
+            return 2;
+        } else {
+            return 1;
+        }
+    }
+
     /**
      * Creates a mock search context with the given index reader.
      * The returned mock search context can be used to test with {@link LuceneOperator}.

+ 151 - 0
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java

@@ -0,0 +1,151 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.compute.lucene;
+
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.elasticsearch.compute.data.DoubleBlock;
+import org.elasticsearch.compute.data.ElementType;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.operator.Driver;
+import org.elasticsearch.compute.operator.DriverContext;
+import org.elasticsearch.compute.operator.Operator;
+import org.elasticsearch.compute.operator.OperatorTestCase;
+import org.elasticsearch.compute.operator.TestResultPageSinkOperator;
+import org.elasticsearch.core.IOUtils;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.sort.FieldSortBuilder;
+import org.elasticsearch.search.sort.SortAndFormats;
+import org.elasticsearch.search.sort.SortBuilder;
+import org.hamcrest.Matcher;
+import org.junit.After;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Function;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.matchesRegex;
+
+public class LuceneTopNSourceOperatorScoringTests extends LuceneTopNSourceOperatorTests {
+    private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG);
+    private Directory directory = newDirectory();
+    private IndexReader reader;
+
+    @After
+    private void closeIndex() throws IOException {
+        IOUtils.close(reader, directory);
+    }
+
+    @Override
+    protected LuceneTopNSourceOperator.Factory simple() {
+        return simple(DataPartitioning.SHARD, 10_000, 100);
+    }
+
+    private LuceneTopNSourceOperator.Factory simple(DataPartitioning dataPartitioning, int size, int limit) {
+        int commitEvery = Math.max(1, size / 10);
+        try (
+            RandomIndexWriter writer = new RandomIndexWriter(
+                random(),
+                directory,
+                newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE)
+            )
+        ) {
+            for (int d = 0; d < size; d++) {
+                List<IndexableField> doc = new ArrayList<>();
+                doc.add(new SortedNumericDocValuesField("s", d));
+                writer.addDocument(doc);
+                if (d % commitEvery == 0) {
+                    writer.commit();
+                }
+            }
+            reader = writer.getReader();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        ShardContext ctx = new LuceneSourceOperatorTests.MockShardContext(reader, 0) {
+            @Override
+            public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) {
+                SortField field = new SortedNumericSortField("s", SortField.Type.LONG, false, SortedNumericSelector.Type.MIN);
+                return Optional.of(new SortAndFormats(new Sort(field), new DocValueFormat[] { null }));
+            }
+        };
+        Function<ShardContext, Query> queryFunction = c -> new MatchAllDocsQuery();
+        int taskConcurrency = 0;
+        int maxPageSize = between(10, Math.max(10, size));
+        List<SortBuilder<?>> sorts = List.of(new FieldSortBuilder("s"));
+        return new LuceneTopNSourceOperator.Factory(
+            List.of(ctx),
+            queryFunction,
+            dataPartitioning,
+            taskConcurrency,
+            maxPageSize,
+            limit,
+            sorts,
+            true // scoring
+        );
+    }
+
+    @Override
+    protected Matcher<String> expectedToStringOfSimple() {
+        return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]");
+    }
+
+    @Override
+    protected Matcher<String> expectedDescriptionOfSimple() {
+        return matchesRegex(
+            "LuceneTopNSourceOperator"
+                + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = COMPLETE, sorts = \\[\\{.+}]]"
+        );
+    }
+
+    @Override
+    protected void testSimple(DriverContext ctx, int size, int limit) {
+        LuceneTopNSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit);
+        Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG);
+
+        List<Page> results = new ArrayList<>();
+        OperatorTestCase.runDriver(
+            new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {})
+        );
+        OperatorTestCase.assertDriverContext(ctx);
+
+        long expectedS = 0;
+        int maxPageSize = factory.maxPageSize();
+        for (Page page : results) {
+            if (limit - expectedS < maxPageSize) {
+                assertThat(page.getPositionCount(), equalTo((int) (limit - expectedS)));
+            } else {
+                assertThat(page.getPositionCount(), equalTo(maxPageSize));
+            }
+            DoubleBlock sBlock = page.getBlock(1);
+            for (int p = 0; p < page.getPositionCount(); p++) {
+                assertThat(sBlock.getDouble(sBlock.getFirstValueIndex(p)), equalTo(1.0d));
+                expectedS++;
+            }
+        }
+        int pages = (int) Math.ceil((float) Math.min(size, limit) / maxPageSize);
+        assertThat(results, hasSize(pages));
+    }
+}
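The assertion of 1.0 for every value in the score block above relies on the test querying with MatchAllDocsQuery, which scores each hit as 1.0. A standalone check of that assumption (plain Lucene, not part of the test suite):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.store.ByteBuffersDirectory;

    public class MatchAllScoreDemo {
        public static void main(String[] args) throws Exception {
            try (var dir = new ByteBuffersDirectory(); var writer = new IndexWriter(dir, new IndexWriterConfig())) {
                writer.addDocument(new Document());
                writer.addDocument(new Document());
                writer.commit();
                try (var reader = DirectoryReader.open(dir)) {
                    for (ScoreDoc hit : new IndexSearcher(reader).search(new MatchAllDocsQuery(), 10).scoreDocs) {
                        System.out.println("doc " + hit.doc + " score " + hit.score); // prints 1.0 for every doc
                    }
                }
            }
        }
    }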

+ 42 - 8
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java

@@ -20,6 +20,8 @@ import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
+import org.elasticsearch.compute.data.DocBlock;
+import org.elasticsearch.compute.data.DoubleBlock;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.LongBlock;
 import org.elasticsearch.compute.data.Page;
@@ -56,7 +58,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
     private IndexReader reader;
 
     @After
-    public void closeIndex() throws IOException {
+    private void closeIndex() throws IOException {
         IOUtils.close(reader, directory);
     }
 
@@ -105,19 +107,25 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
             taskConcurrency,
             maxPageSize,
             limit,
-            sorts
+            sorts,
+            scoring
         );
     }
 
     @Override
     protected Matcher<String> expectedToStringOfSimple() {
-        return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, sorts = \\[\\{.+}]]");
+        var s = scoring ? "COMPLETE" : "TOP_DOCS";
+        return matchesRegex("LuceneTopNSourceOperator\\[maxPageSize = \\d+, limit = 100, scoreMode = " + s + ", sorts = \\[\\{.+}]]");
     }
 
     @Override
     protected Matcher<String> expectedDescriptionOfSimple() {
+        var s = scoring ? "COMPLETE" : "TOP_DOCS";
         return matchesRegex(
-            "LuceneTopNSourceOperator\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, sorts = \\[\\{.+}]]"
+            "LuceneTopNSourceOperator"
+                + "\\[dataPartitioning = (DOC|SHARD|SEGMENT), maxPageSize = \\d+, limit = 100, scoreMode = "
+                + s
+                + ", sorts = \\[\\{.+}]]"
         );
     }
 
@@ -137,12 +145,24 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
         }
     }
 
-    private void testShardDataPartitioning(DriverContext context) {
+    void testShardDataPartitioning(DriverContext context) {
         int size = between(1_000, 20_000);
         int limit = between(10, size);
         testSimple(context, size, limit);
     }
 
+    public void testWithCranky() {
+        try {
+            int size = between(1_000, 20_000);
+            int limit = between(10, size);
+            testSimple(crankyDriverContext(), size, limit);
+            logger.info("cranky didn't break");
+        } catch (CircuitBreakingException e) {
+            logger.info("broken", e);
+            assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
+        }
+    }
+
     public void testEmpty() {
         testEmpty(driverContext());
     }
@@ -157,11 +177,11 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
         }
     }
 
-    private void testEmpty(DriverContext context) {
+    void testEmpty(DriverContext context) {
         testSimple(context, 0, between(10, 10_000));
     }
 
-    private void testSimple(DriverContext ctx, int size, int limit) {
+    protected void testSimple(DriverContext ctx, int size, int limit) {
         LuceneTopNSourceOperator.Factory factory = simple(DataPartitioning.SHARD, size, limit);
         Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD, ElementType.LONG);
 
@@ -178,7 +198,7 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
             } else {
                 assertThat(page.getPositionCount(), equalTo(factory.maxPageSize()));
             }
-            LongBlock sBlock = page.getBlock(1);
+            LongBlock sBlock = page.getBlock(initialBlockIndex(page));
             for (int p = 0; p < page.getPositionCount(); p++) {
                 assertThat(sBlock.getLong(sBlock.getFirstValueIndex(p)), equalTo(expectedS++));
             }
@@ -186,4 +206,18 @@ public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase {
         int pages = (int) Math.ceil((float) Math.min(size, limit) / factory.maxPageSize());
         assertThat(results, hasSize(pages));
     }
+
+    // Scores are not interesting to this test, but enabled conditionally and effectively ignored just for coverage.
+    private final boolean scoring = randomBoolean();
+
+    // Returns the initial block index, ignoring the score block if scoring is enabled
+    private int initialBlockIndex(Page page) {
+        assert page.getBlock(0) instanceof DocBlock : "expected doc block at index 0";
+        if (scoring) {
+            assert page.getBlock(1) instanceof DoubleBlock : "expected double block at index 1";
+            return 2;
+        } else {
+            return 1;
+        }
+    }
 }

+ 6 - 3
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java

@@ -265,7 +265,8 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase {
             DataPartitioning.SHARD,
             1,// randomIntBetween(1, 10),
             pageSize,
-            LuceneOperator.NO_LIMIT
+            LuceneOperator.NO_LIMIT,
+            false // no scoring
         );
         return luceneFactory.get(context);
     }
@@ -1292,7 +1293,8 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase {
             randomFrom(DataPartitioning.values()),
             randomIntBetween(1, 10),
             randomPageSize(),
-            LuceneOperator.NO_LIMIT
+            LuceneOperator.NO_LIMIT,
+            false // no scoring
         );
         var vsShardContext = new ValuesSourceReaderOperator.ShardContext(reader(indexKey), () -> SourceLoader.FROM_STORED_SOURCE);
         try (
@@ -1450,7 +1452,8 @@ public class ValueSourceReaderTypeConversionTests extends AnyOperatorTestCase {
                 DataPartitioning.SHARD,
                 randomIntBetween(1, 10),
                 1000,
-                LuceneOperator.NO_LIMIT
+                LuceneOperator.NO_LIMIT,
+                false // no scoring
             );
             // TODO add index2
             MappedFieldType ft = mapperService(indexKey).fieldType("key");

+ 6 - 3
x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java

@@ -170,7 +170,8 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase {
             DataPartitioning.SHARD,
             randomIntBetween(1, 10),
             pageSize,
-            LuceneOperator.NO_LIMIT
+            LuceneOperator.NO_LIMIT,
+            false // no scoring
         );
         return luceneFactory.get(context);
     }
@@ -1301,7 +1302,8 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase {
             randomFrom(DataPartitioning.values()),
             randomIntBetween(1, 10),
             randomPageSize(),
-            LuceneOperator.NO_LIMIT
+            LuceneOperator.NO_LIMIT,
+            false // no scoring
         );
         try (
             Driver driver = new Driver(
@@ -1524,7 +1526,8 @@ public class ValuesSourceReaderOperatorTests extends OperatorTestCase {
                 DataPartitioning.SHARD,
                 randomIntBetween(1, 10),
                 1000,
-                LuceneOperator.NO_LIMIT
+                LuceneOperator.NO_LIMIT,
+                false // no scoring
             );
             MappedFieldType ft = mapperService.fieldType("key");
             var readerFactory = new ValuesSourceReaderOperator.Factory(

+ 285 - 0
x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec

@@ -0,0 +1,285 @@
+###############################################
+# Tests for scoring support
+#
+
+singleQstrBoostScoringSorted
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:Lord Rings^2")
+| eval c_score = ceil(_score)  
+| keep book_no, title, c_score 
+| sort c_score desc, book_no asc
+| LIMIT 2;
+
+book_no:keyword | title:text                                                                                  | c_score:double
+2675            | The Lord of the Rings - Boxed Set                                                           | 6.0
+4023            | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | 6.0
+;
+
+singleMatchWithKeywordFieldScoring
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score 
+| where author.keyword:"William Faulkner" 
+| keep book_no, author, _score 
+| sort book_no;
+
+book_no:keyword | author:text | _score:double
+2713            | William Faulkner | 2.3142893314361572
+2883            | William Faulkner | 2.3142893314361572
+4724            | William Faulkner | 2.3142893314361572
+4977            | William Faulkner | 2.3142893314361572
+5119            | William Faulkner | 2.3142893314361572
+5404            | William Faulkner | 2.3142893314361572
+5578            | William Faulkner | 2.3142893314361572
+8077            | William Faulkner | 2.3142893314361572
+9896            | William Faulkner | 2.3142893314361572
+;
+
+qstrWithFieldAndScoringSortedEval
+required_capability: qstr_function
+required_capability: metadata_score
+
+from books metadata _score
+| where qstr("title:rings")
+| sort _score desc
+| eval _score::long
+| keep book_no, title, _score
+| limit 3;
+
+book_no:keyword | title:text                                                                 | _score:double 
+2675            | The Lord of the Rings - Boxed Set                                          | 2.7583377361297607
+7140            | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) | 1.9239964485168457
+2714            | Return of the King Being the Third Part of The Lord of the Rings           | 1.9239964485168457
+;
+
+qstrWithFieldAndScoringSorted
+required_capability: qstr_function
+required_capability: metadata_score
+
+from books metadata _score
+| where qstr("title:rings")
+| sort _score desc, book_no desc
+| keep book_no, title, _score
+| limit 3;
+
+book_no:keyword | title:text                                                                 | _score:double 
+2675            | The Lord of the Rings - Boxed Set                                          | 2.7583377361297607
+7140            | The Lord of the Rings Poster Collection: Six Paintings by Alan Lee (No. 1) | 1.9239964485168457
+2714            | Return of the King Being the Third Part of The Lord of the Rings           | 1.9239964485168457
+;
+
+singleQstrScoringManipulated
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:William Faulkner") 
+| eval add_score = ceil(_score) + 1 
+| keep book_no, author, add_score 
+| sort book_no 
+| LIMIT 2;
+
+book_no:keyword | author:text                                        | add_score:double
+2378            | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 2.0
+2713            | William Faulkner                                   | 7.0
+;
+
+testMultiValuedFieldWithConjunctionWithScore
+required_capability: match_function
+required_capability: metadata_score
+
+from employees metadata _score
+| where match(job_positions, "Data Scientist") and match(job_positions, "Support Engineer")
+| keep emp_no, first_name, last_name, job_positions, _score;
+
+emp_no:integer | first_name:keyword | last_name:keyword | job_positions:keyword | _score:double
+10043          | Yishay             | Tzvieli           | [Data Scientist, Python Developer, Support Engineer] | 5.233309745788574
+;
+
+testMatchAndQueryStringFunctionsWithScore
+required_capability: match_function
+required_capability: metadata_score
+
+from employees metadata _score
+| where match(job_positions, "Data Scientist") and qstr("job_positions: (Support Engineer) and gender: F")
+| keep emp_no, first_name, last_name, job_positions, _score;
+ignoreOrder:true
+
+emp_no:integer | first_name:keyword | last_name:keyword | job_positions:keyword | _score:double
+10041          | Uri                 | Lenart           | [Data Scientist, Head Human Resources, Internship, Senior Team Lead] | 3.509873867034912
+10043          | Yishay              | Tzvieli          | [Data Scientist, Python Developer, Support Engineer] | 5.233309745788574
+;
+
+multipleWhereWithMatchScoringNoSort
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"short stories"
+| where author:"Ursula K. Le Guin"
+| keep book_no, title, author, _score;
+
+ignoreOrder:true
+book_no:keyword | title:text                                | author:text        | _score:double
+8480            | The wind's twelve quarters: Short stories | Ursula K. Le Guin  | 14.489097595214844
+;
+
+multipleWhereWithMatchScoring
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"short stories"
+| where author:"Ursula K. Le Guin"
+| keep book_no, title, author, _score
+| sort book_no;
+
+book_no:keyword | title:text                                | author:text        | _score:double
+8480            | The wind's twelve quarters: Short stories | Ursula K. Le Guin  | 14.489097595214844
+;
+
+combinedMatchWithFunctionsScoring
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"Tolkien" AND author:"Tolkien" AND year > 2000
+| where mv_count(author) == 1
+| keep book_no, title, author, year, _score
+| sort book_no;
+
+book_no:keyword | title:text               | author:text    | year:integer | _score:double
+5335            | Letters of J R R Tolkien | J.R.R. Tolkien | 2014         | 5.448054313659668
+;
+
+singleQstrScoring
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:William Faulkner") 
+| keep book_no, author, _score 
+| sort book_no 
+| LIMIT 2;
+
+book_no:keyword | author:text                                        | _score:double
+2378            | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 0.9976131916046143
+2713            | William Faulkner                                   | 5.9556169509887695
+;
+
+singleQstrScoringGrok
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:Lord Rings") 
+| GROK title "%{WORD:title} %{WORD}" 
+| sort _score desc 
+| keep book_no, title, _score 
+| LIMIT 3;
+
+book_no:keyword | title:keyword | _score:double
+8875            | The | 2.9505908489227295
+4023            | A   | 2.8327860832214355
+2675            | The | 2.7583377361297607
+;
+
+combinedMatchWithScoringEvalNoSort
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"Tolkien" AND author:"Tolkien" AND year > 2000
+| where mv_count(author) == 1
+| eval c_score = ceil(_score)
+| keep book_no, title, author, year, c_score;
+
+ignoreOrder:true
+book_no:keyword | title:text               | author:text    | year:integer | c_score:double
+5335            | Letters of J R R Tolkien | J.R.R. Tolkien | 2014         | 6
+;
+
+singleQstrScoringRename
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:Lord Rings") 
+| rename _score as rank 
+| sort rank desc 
+| keep book_no, rank 
+| LIMIT 3;
+
+book_no:keyword | rank:double
+8875            | 2.9505908489227295
+4023            | 2.8327860832214355
+2675            | 2.7583377361297607
+;
+
+singleMatchWithTextFieldScoring
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score 
+| where author:"William Faulkner" 
+| sort book_no 
+| keep book_no, author, _score 
+| limit 5;
+
+book_no:keyword | author:text                                        | _score:double
+2378            | [Carol Faulkner, Holly Byers Ochoa, Lucretia Mott] | 0.9976131916046143
+2713            | William Faulkner                                   | 4.272439002990723
+2847            | Colleen Faulkner                                   | 1.7401835918426514
+2883            | William Faulkner                                   | 4.272439002990723
+3293            | Danny Faulkner                                     | 1.7401835918426514
+;
+
+combinedMatchWithFunctionsScoringNoSort
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"Tolkien" AND author:"Tolkien" AND year > 2000
+| where mv_count(author) == 1
+| keep book_no, title, author, year, _score;
+
+ignoreOrder:true
+book_no:keyword | title:text               | author:text    | year:integer | _score:double
+5335            | Letters of J R R Tolkien | J.R.R. Tolkien | 2014         | 5.448054313659668
+;
+
+combinedMatchWithScoringEval
+required_capability: metadata_score
+required_capability: match_operator_colon
+
+from books metadata _score
+| where title:"Tolkien" AND author:"Tolkien" AND year > 2000
+| where mv_count(author) == 1
+| eval c_score = ceil(_score)
+| keep book_no, title, author, year, c_score
+| sort book_no;
+
+book_no:keyword | title:text               | author:text    | year:integer | c_score:double
+5335            | Letters of J R R Tolkien | J.R.R. Tolkien | 2014         | 6
+;
+
+singleQstrScoringEval
+required_capability: metadata_score
+required_capability: qstr_function
+
+from books metadata _score 
+| where qstr("author:Lord Rings") 
+| eval c_score = ceil(_score) 
+| keep book_no, c_score 
+| sort book_no desc 
+| LIMIT 3;
+
+book_no:keyword | c_score:double
+8875            | 3.0
+7350            | 2.0
+7140            | 3.0
+;
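The specs above can also be tried against a local cluster through the ES|QL _query endpoint. A rough sketch using the low-level REST client, assuming a node on localhost:9200 with the books test fixture loaded:

    import org.apache.http.HttpHost;
    import org.apache.http.util.EntityUtils;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public class ScoreQueryDemo {
        public static void main(String[] args) throws Exception {
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                Request request = new Request("POST", "/_query");
                // Same shape as the spec queries above: expose _score via METADATA and sort on it.
                request.setJsonEntity("""
                    {"query": "FROM books METADATA _score | WHERE match(title, \\"rings\\") | KEEP title, _score | SORT _score DESC | LIMIT 3"}
                    """);
                Response response = client.performRequest(request);
                System.out.println(EntityUtils.toString(response.getEntity()));
            }
        }
    }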

+ 4 - 3
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java

@@ -84,7 +84,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
         assumeTrue("requires query pragmas", canUseQueryPragmas());
         nodeLevelReduction = randomBoolean();
         READ_DESCRIPTION = """
-            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647]
+            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES]
             \\_ValuesSourceReaderOperator[fields = [pause_me]]
             \\_AggregationOperator[mode = INITIAL, aggs = sum of longs]
             \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize()));
@@ -436,6 +436,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
 
     public void testTaskContentsForTopNQuery() throws Exception {
         READ_DESCRIPTION = ("\\_LuceneTopNSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 1000, "
+            + "scoreMode = TOP_DOCS, "
             + "sorts = [{\"pause_me\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"long\"}}]]\n"
             + "\\_ValuesSourceReaderOperator[fields = [pause_me]]\n"
             + "\\_ProjectOperator[projection = [1]]\n"
@@ -470,7 +471,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
     public void testTaskContentsForLimitQuery() throws Exception {
         String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs()));
         READ_DESCRIPTION = """
-            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit()]
+            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = limit(), scoreMode = COMPLETE_NO_SCORES]
             \\_ValuesSourceReaderOperator[fields = [pause_me]]
             \\_ProjectOperator[projection = [1]]
             \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())).replace("limit()", limit);
@@ -498,7 +499,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase {
 
     public void testTaskContentsForGroupingStatsQuery() throws Exception {
         READ_DESCRIPTION = """
-            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647]
+            \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647, scoreMode = COMPLETE_NO_SCORES]
             \\_ValuesSourceReaderOperator[fields = [foo]]
             \\_OrdinalsGroupingOperator(aggs = max of longs)
             \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize()));

+ 2 - 1
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/LookupFromIndexIT.java

@@ -148,7 +148,8 @@ public class LookupFromIndexIT extends AbstractEsqlIntegTestCase {
                 DataPartitioning.SEGMENT,
                 1,
                 10000,
-                DocIdSetIterator.NO_MORE_DOCS
+                DocIdSetIterator.NO_MORE_DOCS,
+                false // no scoring
             );
             ValuesSourceReaderOperator.Factory reader = new ValuesSourceReaderOperator.Factory(
                 List.of(

+ 299 - 0
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchFunctionIT.java

@@ -0,0 +1,299 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.plugin;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
+import org.elasticsearch.xpack.esql.action.EsqlQueryRequest;
+import org.elasticsearch.xpack.esql.action.EsqlQueryResponse;
+import org.junit.Before;
+
+import java.util.List;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.CoreMatchers.containsString;
+
+//@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "debug")
+public class MatchFunctionIT extends AbstractEsqlIntegTestCase {
+
+    @Before
+    public void setupIndex() {
+        createAndPopulateIndex();
+    }
+
+    @Override
+    protected EsqlQueryResponse run(EsqlQueryRequest request) {
+        assumeTrue("match function capability not available", EsqlCapabilities.Cap.MATCH_FUNCTION.isEnabled());
+        return super.run(request);
+    }
+
+    public void testSimpleWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE match(content, "fox")
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id"));
+            assertColumnTypes(resp.columns(), List.of("integer"));
+            assertValues(resp.values(), List.of(List.of(1), List.of(6)));
+        }
+    }
+
+    public void testCombinedWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE match(content, "fox") AND id > 5
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id"));
+            assertColumnTypes(resp.columns(), List.of("integer"));
+            assertValues(resp.values(), List.of(List.of(6)));
+        }
+    }
+
+    public void testMultipleMatch() {
+        var query = """
+            FROM test
+            | WHERE match(content, "fox") AND match(content, "brown")
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id"));
+            assertColumnTypes(resp.columns(), List.of("integer"));
+            assertValues(resp.values(), List.of(List.of(1), List.of(6)));
+        }
+    }
+
+    public void testMultipleWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE match(content, "fox") AND match(content, "brown")
+            | EVAL summary = CONCAT("document with id: ", to_str(id), " and content: ", content)
+            | SORT summary
+            | LIMIT 4
+            | WHERE match(content, "brown fox")
+            | KEEP id
+            """;
+
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("[MATCH] function cannot be used after LIMIT"));
+    }
+
+    public void testNotWhereMatch() {
+        var query = """
+            FROM test
+            | WHERE NOT match(content, "brown fox")
+            | KEEP id
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id"));
+            assertColumnTypes(resp.columns(), List.of("integer"));
+            assertValues(resp.values(), List.of(List.of(5)));
+        }
+    }
+
+    public void testWhereMatchWithScoring() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE match(content, "fox")
+            | KEEP id, _score
+            | SORT id ASC
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
+    public void testWhereMatchWithScoringDifferentSort() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE match(content, "fox")
+            | KEEP id, _score
+            | SORT id DESC
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(resp.values(), List.of(List.of(6, 0.9114001989364624), List.of(1, 1.156558871269226)));
+        }
+    }
+
+    public void testWhereMatchWithScoringSortScore() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE match(content, "fox")
+            | KEEP id, _score
+            | SORT _score DESC
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
+    public void testWhereMatchWithScoringNoSort() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE content:"fox"
+            | KEEP id, _score
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValuesInAnyOrder(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
+    public void testNonExistingColumn() {
+        var query = """
+            FROM test
+            | WHERE something:"fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("Unknown column [something]"));
+    }
+
+    public void testWhereMatchEvalColumn() {
+        var query = """
+            FROM test
+            | EVAL upper_content = to_upper(content)
+            | WHERE upper_content:"FOX"
+            | KEEP id
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString("[:] operator cannot operate on [upper_content], which is not a field from an index mapping")
+        );
+    }
+
+    public void testWhereMatchOverWrittenColumn() {
+        var query = """
+            FROM test
+            | DROP content
+            | EVAL content = CONCAT("document with ID ", to_str(id))
+            | WHERE content:"document"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString("[:] operator cannot operate on [content], which is not a field from an index mapping")
+        );
+    }
+
+    public void testWhereMatchAfterStats() {
+        var query = """
+            FROM test
+            | STATS count(*)
+            | WHERE content:"fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("Unknown column [content]"));
+    }
+
+    public void testWhereMatchWithFunctions() {
+        var query = """
+            FROM test
+            | WHERE content:"fox" OR to_upper(content) == "FOX"
+            """;
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString(
+                "Invalid condition [content:\"fox\" OR to_upper(content) == \"FOX\"]. "
+                    + "[:] operator can't be used as part of an or condition"
+            )
+        );
+    }
+
+    public void testWhereMatchWithRow() {
+        var query = """
+            ROW content = "a brown fox"
+            | WHERE content:"fox"
+            """;
+
+        var error = expectThrows(ElasticsearchException.class, () -> run(query));
+        assertThat(
+            error.getMessage(),
+            containsString("[:] operator cannot operate on [\"a brown fox\"], which is not a field from an index mapping")
+        );
+    }
+
+    public void testMatchWithinEval() {
+        var query = """
+            FROM test
+            | EVAL matches_query = content:"fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("[:] operator is only supported in WHERE commands"));
+    }
+
+    public void testMatchWithNonTextField() {
+        var query = """
+            FROM test
+            | WHERE id:"fox"
+            """;
+
+        var error = expectThrows(VerificationException.class, () -> run(query));
+        assertThat(error.getMessage(), containsString("first argument of [id:\"fox\"] must be [string], found value [id] type [integer]"));
+    }
+
+    private void createAndPopulateIndex() {
+        var indexName = "test";
+        var client = client().admin().indices();
+        var createRequest = client.prepareCreate(indexName)
+            .setSettings(Settings.builder().put("index.number_of_shards", 1))
+            .setMapping("id", "type=integer", "content", "type=text");
+        assertAcked(createRequest);
+        client().prepareBulk()
+            .add(new IndexRequest(indexName).id("1").source("id", 1, "content", "This is a brown fox"))
+            .add(new IndexRequest(indexName).id("2").source("id", 2, "content", "This is a brown dog"))
+            .add(new IndexRequest(indexName).id("3").source("id", 3, "content", "This dog is really brown"))
+            .add(new IndexRequest(indexName).id("4").source("id", 4, "content", "The dog is brown but this document is very very long"))
+            .add(new IndexRequest(indexName).id("5").source("id", 5, "content", "There is also a white cat"))
+            .add(new IndexRequest(indexName).id("6").source("id", 6, "content", "The quick brown fox jumps over the lazy dog"))
+            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+            .get();
+        ensureYellow(indexName);
+    }
+}
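
Taken together, these tests pin down the end-to-end behaviour: match() keeps working as a pure filter, and requesting METADATA _score adds a double-valued _score column that can be kept and sorted on. A representative query shape, written the way the tests embed it (illustrative only; score values depend on the indexed data):

    var query = """
        FROM test METADATA _score
        | WHERE match(content, "fox")
        | KEEP id, _score
        | SORT _score DESC
        """;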

+ 51 - 0
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java

@@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.junit.annotations.TestLogging;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.junit.Before;
 
 import java.util.List;
@@ -105,6 +106,56 @@ public class MatchOperatorIT extends AbstractEsqlIntegTestCase {
         }
     }
 
+    public void testWhereMatchWithScoring() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE content:"fox"
+            | KEEP id, _score
+            | SORT id ASC
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
+    public void testWhereMatchWithScoringDifferentSort() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE content:"fox"
+            | KEEP id, _score
+            | SORT id
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
+    public void testWhereMatchWithScoringNoSort() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE content:"fox"
+            | KEEP id, _score
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValuesInAnyOrder(resp.values(), List.of(List.of(1, 1.156558871269226), List.of(6, 0.9114001989364624)));
+        }
+    }
+
     public void testNonExistingColumn() {
         var query = """
             FROM test

+ 96 - 0
x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/QueryStringIT.java

@@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.junit.Before;
 
 import java.util.List;
@@ -137,4 +138,99 @@ public class QueryStringIT extends AbstractEsqlIntegTestCase {
             .get();
         ensureYellow(indexName);
     }
+
+    public void testWhereQstrWithScoring() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE qstr("content: fox")
+            | KEEP id, _score
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValuesInAnyOrder(
+                resp.values(),
+                List.of(
+                    List.of(2, 0.3028995096683502),
+                    List.of(3, 0.3028995096683502),
+                    List.of(4, 0.2547692656517029),
+                    List.of(5, 0.28161853551864624)
+                )
+            );
+
+        }
+    }
+
+    public void testWhereQstrWithScoringSorted() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE qstr("content:fox fox")
+            | KEEP id, _score
+            | SORT _score DESC
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValues(
+                resp.values(),
+                List.of(
+                    List.of(3, 1.5605685710906982),
+                    List.of(2, 0.6057990193367004),
+                    List.of(5, 0.5632370710372925),
+                    List.of(4, 0.5095385313034058)
+                )
+            );
+
+        }
+    }
+
+    public void testWhereQstrWithScoringNoSort() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE qstr("content: fox")
+            | KEEP id, _score
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValuesInAnyOrder(
+                resp.values(),
+                List.of(
+                    List.of(2, 0.3028995096683502),
+                    List.of(3, 0.3028995096683502),
+                    List.of(4, 0.2547692656517029),
+                    List.of(5, 0.28161853551864624)
+                )
+            );
+        }
+    }
+
+    public void testWhereQstrWithNonPushableAndScoring() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var query = """
+            FROM test
+            METADATA _score
+            | WHERE qstr("content: fox")
+              AND abs(id) > 0
+            | EVAL c_score = ceil(_score)
+            | KEEP id, c_score
+            | SORT id DESC
+            | LIMIT 2
+            """;
+
+        try (var resp = run(query)) {
+            assertColumnNames(resp.columns(), List.of("id", "c_score"));
+            assertColumnTypes(resp.columns(), List.of("integer", "double"));
+            assertValuesInAnyOrder(resp.values(), List.of(List.of(5, 1.0), List.of(4, 1.0)));
+        }
+    }
 }

+ 6 - 1
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java

@@ -524,7 +524,12 @@ public class EsqlCapabilities {
         /**
          * Fix for https://github.com/elastic/elasticsearch/issues/114714, again
          */
-        FIX_STATS_BY_FOLDABLE_EXPRESSION_2;
+        FIX_STATS_BY_FOLDABLE_EXPRESSION_2,
+
+        /**
+         * Support the "METADATA _score" directive to enable the _score column.
+         */
+        METADATA_SCORE(Build.current().isSnapshot());
 
         private final boolean enabled;
 

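METADATA_SCORE follows the capability convention visible in the surrounding context: constants declared without an argument are always enabled, while gated ones pass a flag, here Build.current().isSnapshot(), so the new syntax is only available in snapshot builds. A minimal, self-contained sketch of that pattern (the enum below is illustrative, not the real EsqlCapabilities.Cap):

    enum Capability {
        ALWAYS_ON,            // no-arg constant: enabled in every build
        SNAPSHOT_ONLY(false); // gated constant: the real enum passes Build.current().isSnapshot() here

        private final boolean enabled;

        Capability() {
            this(true);
        }

        Capability(boolean enabled) {
            this.enabled = enabled;
        }

        boolean isEnabled() {
            return enabled;
        }
    }
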
+ 9 - 0
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java

@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.Expressions;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.NameId;
 import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
 import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
@@ -221,6 +222,7 @@ public class Verifier {
             checkFullTextQueryFunctions(p, failures);
         });
         checkRemoteEnrich(plan, failures);
+        checkMetadataScoreNameReserved(plan, failures);
 
         if (failures.isEmpty()) {
             checkLicense(plan, licenseState, failures);
@@ -234,6 +236,13 @@ public class Verifier {
         return failures;
     }
 
+    private static void checkMetadataScoreNameReserved(LogicalPlan p, Set<Failure> failures) {
+        // _score can only be set as metadata attribute
+        if (p.inputSet().stream().anyMatch(a -> MetadataAttribute.SCORE.equals(a.name()) && (a instanceof MetadataAttribute) == false)) {
+            failures.add(fail(p, "`" + MetadataAttribute.SCORE + "` is a reserved METADATA attribute"));
+        }
+    }
+
     private void checkSort(LogicalPlan p, Set<Failure> failures) {
         if (p instanceof OrderBy ob) {
             ob.order().forEach(o -> {
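
The new check scans each node's inputSet and fails the query whenever an attribute named _score is anything other than the metadata attribute, so user expressions cannot shadow the reserved column. The VerifierTests added further down exercise exactly this case; for illustration (query and error text taken from those tests):

    var rejected = """
        from foo | eval _score = 10
        """; // fails verification with: 1:12: `_score` is a reserved METADATA attribute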

+ 5 - 0
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java

@@ -9,6 +9,7 @@ package org.elasticsearch.xpack.esql.optimizer.rules.physical.local;
 
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.stats.SearchStats;
 
@@ -59,6 +60,10 @@ public interface LucenePushdownPredicates {
         return false;
     }
 
+    default boolean isPushableMetadataAttribute(Expression exp) {
+        return exp instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE);
+    }
+
     /**
      * The default implementation of this has no access to SearchStats, so it can only make decisions based on the FieldAttribute itself.
      * In particular, it assumes TEXT fields have no exact subfields (underlying keyword field),

+ 16 - 2
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java

@@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.AttributeMap;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.NameId;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
 import org.elasticsearch.xpack.esql.expression.Order;
@@ -57,6 +58,7 @@ import java.util.List;
  * </ol>
  */
 public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimizerRule<TopNExec, LocalPhysicalOptimizerContext> {
+
     @Override
     protected PhysicalPlan rule(TopNExec topNExec, LocalPhysicalOptimizerContext ctx) {
         Pushable pushable = evaluatePushable(topNExec, LucenePushdownPredicates.from(ctx.searchStats()));
@@ -155,6 +157,8 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi
                             order.nullsPosition()
                         )
                     );
+                } else if (lucenePushdownPredicates.isPushableMetadataAttribute(order.child())) {
+                    pushableSorts.add(new EsQueryExec.ScoreSort(order.direction()));
                 } else if (order.child() instanceof ReferenceAttribute referenceAttribute) {
                     Attribute resolvedAttribute = aliasReplacedBy.resolve(referenceAttribute, referenceAttribute);
                     if (distances.containsKey(resolvedAttribute.id())) {
@@ -193,13 +197,23 @@ public class PushTopNToSource extends PhysicalOptimizerRules.ParameterizedOptimi
 
     private static boolean canPushDownOrders(List<Order> orders, LucenePushdownPredicates lucenePushdownPredicates) {
         // allow only exact FieldAttributes (no expressions) for sorting
-        return orders.stream().allMatch(o -> lucenePushdownPredicates.isPushableFieldAttribute(o.child()));
+        return orders.stream()
+            .allMatch(
+                o -> lucenePushdownPredicates.isPushableFieldAttribute(o.child())
+                    || lucenePushdownPredicates.isPushableMetadataAttribute(o.child())
+            );
     }
 
     private static List<EsQueryExec.Sort> buildFieldSorts(List<Order> orders) {
         List<EsQueryExec.Sort> sorts = new ArrayList<>(orders.size());
         for (Order o : orders) {
-            sorts.add(new EsQueryExec.FieldSort(((FieldAttribute) o.child()).exactAttribute(), o.direction(), o.nullsPosition()));
+            if (o.child() instanceof FieldAttribute fa) {
+                sorts.add(new EsQueryExec.FieldSort(fa.exactAttribute(), o.direction(), o.nullsPosition()));
+            } else if (o.child() instanceof MetadataAttribute ma && MetadataAttribute.SCORE.equals(ma.name())) {
+                sorts.add(new EsQueryExec.ScoreSort(o.direction()));
+            } else {
+                assert false : "unexpected ordering on expression type " + o.child().getClass();
+            }
         }
         return sorts;
     }
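
With isPushableMetadataAttribute wired into both canPushDownOrders and buildFieldSorts, a TopN that mixes pushable field sorts with a _score sort can now be pushed to the source in full. The PushTopNToSourceTests added below cover query shapes along these lines (illustrative, embedded the way those tests document them):

    var query = """
        FROM index METADATA _score
        | SORT field, integer, double, _score
        | LIMIT 10
        """;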

+ 11 - 3
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java

@@ -16,6 +16,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec;
 import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec;
 import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import static org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules.TransformDirection.UP;
@@ -29,6 +30,8 @@ public class ReplaceSourceAttributes extends PhysicalOptimizerRules.OptimizerRul
     @Override
     protected PhysicalPlan rule(EsSourceExec plan) {
         var docId = new FieldAttribute(plan.source(), EsQueryExec.DOC_ID_FIELD.getName(), EsQueryExec.DOC_ID_FIELD);
+        final List<Attribute> attributes = new ArrayList<>();
+        attributes.add(docId);
         if (plan.indexMode() == IndexMode.TIME_SERIES) {
             Attribute tsid = null, timestamp = null;
             for (Attribute attr : plan.output()) {
@@ -42,9 +45,14 @@ public class ReplaceSourceAttributes extends PhysicalOptimizerRules.OptimizerRul
             if (tsid == null || timestamp == null) {
                 throw new IllegalStateException("_tsid or @timestamp are missing from the time-series source");
             }
-            return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), List.of(docId, tsid, timestamp), plan.query());
-        } else {
-            return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), List.of(docId), plan.query());
+            attributes.add(tsid);
+            attributes.add(timestamp);
         }
+        plan.output().forEach(attr -> {
+            if (attr instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)) {
+                attributes.add(ma);
+            }
+        });
+        return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), attributes, plan.query());
     }
 }

+ 3 - 1
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java

@@ -16,6 +16,7 @@ import org.elasticsearch.dissect.DissectException;
 import org.elasticsearch.dissect.DissectParser;
 import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.common.Failure;
 import org.elasticsearch.xpack.esql.core.expression.Alias;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
@@ -276,7 +277,8 @@ public class LogicalPlanBuilder extends ExpressionBuilder {
             for (var c : metadataOptionContext.UNQUOTED_SOURCE()) {
                 String id = c.getText();
                 Source src = source(c);
-                if (MetadataAttribute.isSupported(id) == false) {
+                if (MetadataAttribute.isSupported(id) == false // TODO: drop check below once METADATA_SCORE is no longer snapshot-only
+                    || (EsqlCapabilities.Cap.METADATA_SCORE.isEnabled() == false && MetadataAttribute.SCORE.equals(id))) {
                     throw new ParsingException(src, "unsupported metadata field [" + id + "]");
                 }
                 Attribute a = metadataMap.put(id, MetadataAttribute.create(src, id));
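
Because METADATA_SCORE is snapshot-gated, the parser still rejects the directive on builds where the capability is disabled, and the TODO marks the clause to drop once it becomes generally available. On such a build, a query like the following would keep failing with the existing "unsupported metadata field [_score]" ParsingException (illustrative):

    var query = """
        FROM test METADATA _score
        | KEEP _score
        """;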

+ 14 - 0
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java

@@ -15,6 +15,7 @@ import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
+import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
@@ -94,6 +95,19 @@ public class EsQueryExec extends LeafExec implements EstimatesRowSize {
         }
     }
 
+    public record ScoreSort(Order.OrderDirection direction) implements Sort {
+        @Override
+        public SortBuilder<?> sortBuilder() {
+            return new ScoreSortBuilder();
+        }
+
+        @Override
+        public FieldAttribute field() {
+            // TODO: refactor this: not all Sorts are backed by FieldAttributes
+            return null;
+        }
+    }
+
     public EsQueryExec(Source source, EsIndex index, IndexMode indexMode, List<Attribute> attributes, QueryBuilder query) {
         this(source, index, indexMode, attributes, query, null, null, null);
     }
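
ScoreSort delegates to the stock ScoreSortBuilder, and field() returning null is what the TODO above calls out. For orientation, a small hedged sketch of using that builder directly through the standard sort API (the wrapper class below is illustrative, not part of this change):

    import org.elasticsearch.search.sort.ScoreSortBuilder;
    import org.elasticsearch.search.sort.SortBuilders;
    import org.elasticsearch.search.sort.SortOrder;

    final class ScoreSortExample {
        private ScoreSortExample() {}

        // SortBuilders.scoreSort() is equivalent to new ScoreSortBuilder();
        // score sorts default to descending order, and order() overrides it.
        static ScoreSortBuilder ascendingScoreSort() {
            return SortBuilders.scoreSort().order(SortOrder.ASC);
        }
    }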

+ 10 - 4
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java

@@ -51,6 +51,7 @@ import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
+import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField;
 import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction;
@@ -165,7 +166,10 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi
         assert esQueryExec.estimatedRowSize() != null : "estimated row size not initialized";
         int rowEstimatedSize = esQueryExec.estimatedRowSize();
         int limit = esQueryExec.limit() != null ? (Integer) esQueryExec.limit().fold() : NO_LIMIT;
-        if (sorts != null && sorts.isEmpty() == false) {
+        boolean scoring = esQueryExec.attrs()
+            .stream()
+            .anyMatch(a -> a instanceof MetadataAttribute && a.name().equals(MetadataAttribute.SCORE));
+        if ((sorts != null && sorts.isEmpty() == false)) {
             List<SortBuilder<?>> sortBuilders = new ArrayList<>(sorts.size());
             for (Sort sort : sorts) {
                 sortBuilders.add(sort.sortBuilder());
@@ -177,7 +181,8 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi
                 context.queryPragmas().taskConcurrency(),
                 context.pageSize(rowEstimatedSize),
                 limit,
-                sortBuilders
+                sortBuilders,
+                scoring
             );
         } else {
             if (esQueryExec.indexMode() == IndexMode.TIME_SERIES) {
@@ -195,7 +200,8 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi
                     context.queryPragmas().dataPartitioning(),
                     context.queryPragmas().taskConcurrency(),
                     context.pageSize(rowEstimatedSize),
-                    limit
+                    limit,
+                    scoring
                 );
             }
         }
@@ -273,7 +279,7 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi
 
         @Override
         public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sorts) throws IOException {
-            return SortBuilder.buildSort(sorts, ctx);
+            return SortBuilder.buildSort(sorts, ctx, false);
         }
 
         @Override

+ 25 - 0
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java

@@ -12,6 +12,7 @@ import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.esql.VerificationException;
 import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
+import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.type.DataType;
 import org.elasticsearch.xpack.esql.core.type.EsField;
 import org.elasticsearch.xpack.esql.core.type.InvalidMappedField;
@@ -21,6 +22,7 @@ import org.elasticsearch.xpack.esql.index.IndexResolution;
 import org.elasticsearch.xpack.esql.parser.EsqlParser;
 import org.elasticsearch.xpack.esql.parser.QueryParam;
 import org.elasticsearch.xpack.esql.parser.QueryParams;
+import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan;
 
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
@@ -1754,6 +1756,29 @@ public class VerifierTests extends ESTestCase {
         );
     }
 
+    public void testNonMetadataScore() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        assertEquals("1:12: `_score` is a reserved METADATA attribute", error("from foo | eval _score = 10"));
+
+        assertEquals(
+            "1:48: `_score` is a reserved METADATA attribute",
+            error("from foo metadata _score | where qstr(\"bar\") | eval _score = _score + 1")
+        );
+    }
+
+    public void testScoreRenaming() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        assertEquals("1:33: `_score` is a reserved METADATA attribute", error("from foo METADATA _id, _score | rename _id as _score"));
+
+        assertTrue(passes("from foo metadata _score | rename _score as foo").stream().anyMatch(a -> a.name().equals("foo")));
+    }
+
+    private List<Attribute> passes(String query) {
+        LogicalPlan logicalPlan = defaultAnalyzer.analyze(parser.createStatement(query));
+        assertTrue(logicalPlan.resolved());
+        return logicalPlan.output();
+    }
+
     public void testIntervalAsString() {
         // DateTrunc
         for (String interval : List.of("1 minu", "1 dy", "1.5 minutes", "0.5 days", "minutes 1", "day 5")) {

+ 62 - 0
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java

@@ -36,6 +36,7 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils;
 import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats;
 import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats.Config;
 import org.elasticsearch.xpack.esql.VerificationException;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
 import org.elasticsearch.xpack.esql.analysis.Analyzer;
 import org.elasticsearch.xpack.esql.analysis.AnalyzerContext;
 import org.elasticsearch.xpack.esql.analysis.EnrichResolution;
@@ -63,6 +64,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Count;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialAggregateFunction;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.SpatialCentroid;
 import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum;
+import org.elasticsearch.xpack.esql.expression.function.fulltext.Match;
 import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round;
 import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialContains;
 import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint;
@@ -6484,6 +6486,66 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
         );
     }
 
+    public void testScore() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var plan = physicalPlan("""
+            from test metadata _score
+            | where match(first_name, "john")
+            | keep _score
+            """);
+
+        ProjectExec outerProject = as(plan, ProjectExec.class);
+        LimitExec limitExec = as(outerProject.child(), LimitExec.class);
+        ExchangeExec exchange = as(limitExec.child(), ExchangeExec.class);
+        FragmentExec frag = as(exchange.child(), FragmentExec.class);
+
+        LogicalPlan opt = logicalOptimizer.optimize(frag.fragment());
+        Limit limit = as(opt, Limit.class);
+        Filter filter = as(limit.child(), Filter.class);
+
+        Match match = as(filter.condition(), Match.class);
+        assertTrue(match.field() instanceof FieldAttribute);
+        assertEquals("first_name", ((FieldAttribute) match.field()).field().getName());
+
+        EsRelation esRelation = as(filter.child(), EsRelation.class);
+        assertTrue(esRelation.optimized());
+        assertTrue(esRelation.resolved());
+        assertTrue(esRelation.output().stream().anyMatch(a -> a.name().equals(MetadataAttribute.SCORE) && a instanceof MetadataAttribute));
+    }
+
+    public void testScoreTopN() {
+        assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled());
+        var plan = physicalPlan("""
+            from test metadata _score
+            | where match(first_name, "john")
+            | keep _score
+            | sort _score desc
+            """);
+
+        ProjectExec projectExec = as(plan, ProjectExec.class);
+        TopNExec topNExec = as(projectExec.child(), TopNExec.class);
+        ExchangeExec exchange = as(topNExec.child(), ExchangeExec.class);
+        FragmentExec frag = as(exchange.child(), FragmentExec.class);
+
+        LogicalPlan opt = logicalOptimizer.optimize(frag.fragment());
+        TopN topN = as(opt, TopN.class);
+        List<Order> order = topN.order();
+        Order scoreOrder = order.get(0);
+        assertEquals(Order.OrderDirection.DESC, scoreOrder.direction());
+        Expression child = scoreOrder.child();
+        assertTrue(child instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE));
+        Filter filter = as(topN.child(), Filter.class);
+
+        Match match = as(filter.condition(), Match.class);
+        assertTrue(match.field() instanceof FieldAttribute);
+        assertEquals("first_name", ((FieldAttribute) match.field()).field().getName());
+
+        EsRelation esRelation = as(filter.child(), EsRelation.class);
+        assertTrue(esRelation.optimized());
+        assertTrue(esRelation.resolved());
+        assertTrue(esRelation.output().stream().anyMatch(a -> a.name().equals(MetadataAttribute.SCORE) && a instanceof MetadataAttribute));
+    }
+
     @SuppressWarnings("SameParameterValue")
     private static void assertFilterCondition(
         Filter filter,

+ 190 - 3
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java

@@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute;
 import org.elasticsearch.xpack.esql.core.expression.Expression;
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.expression.Literal;
+import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.Nullability;
 import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -64,6 +65,13 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleScoreSortField() {
+        // FROM index METADATA _score | SORT _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false).scoreSort().limit(10);
+        assertPushdownSort(query);
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortMultipleFields() {
         // FROM index | SORT field, integer, double | LIMIT 10
         var query = from("index").sort("field").sort("integer").sort("double").limit(10);
@@ -71,6 +79,13 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortMultipleFieldsAndScore() {
+        // FROM index METADATA _score | SORT field, integer, double, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false).sort("field").sort("integer").sort("double").scoreSort().limit(10);
+        assertPushdownSort(query);
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortFieldAndEvalLiteral() {
         // FROM index | EVAL x = 1 | SORT field | LIMIT 10
         var query = from("index").eval("x", e -> e.i(1)).sort("field").limit(10);
@@ -78,6 +93,13 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortFieldScoreAndEvalLiteral() {
+        // FROM index METADATA _score | EVAL x = 1 | SORT field, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false).eval("x", e -> e.i(1)).sort("field").scoreSort().limit(10);
+        assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortFieldWithAlias() {
         // FROM index | EVAL x = field | SORT field | LIMIT 10
         var query = from("index").eval("x", b -> b.field("field")).sort("field").limit(10);
@@ -98,6 +120,21 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortMultipleFieldsWithAliasesAndScore() {
+        // FROM index METADATA _score | EVAL x = field, y = integer, z = double | SORT field, integer, double, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("x", b -> b.field("field"))
+            .eval("y", b -> b.field("integer"))
+            .eval("z", b -> b.field("double"))
+            .sort("field")
+            .sort("integer")
+            .sort("double")
+            .scoreSort()
+            .limit(10);
+        assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortFieldAsAlias() {
         // FROM index | EVAL x = field | SORT x | LIMIT 10
         var query = from("index").eval("x", b -> b.field("field")).sort("x").limit(10);
@@ -105,6 +142,13 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortFieldAsAliasAndScore() {
+        // FROM index METADATA _score | EVAL x = field | SORT x, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false).eval("x", b -> b.field("field")).sort("x").scoreSort().limit(10);
+        assertPushdownSort(query, Map.of("x", "field"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortFieldAndEvalSumLiterals() {
         // FROM index | EVAL sum = 1 + 2 | SORT field | LIMIT 10
         var query = from("index").eval("sum", b -> b.add(b.i(1), b.i(2))).sort("field").limit(10);
@@ -112,6 +156,17 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortFieldAndEvalSumLiteralsAndScore() {
+        // FROM index METADATA _score | EVAL sum = 1 + 2 | SORT field, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("sum", b -> b.add(b.i(1), b.i(2)))
+            .sort("field")
+            .scoreSort()
+            .limit(10);
+        assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortFieldAndEvalSumLiteralAndField() {
         // FROM index | EVAL sum = 1 + integer | SORT integer | LIMIT 10
         var query = from("index").eval("sum", b -> b.add(b.i(1), b.field("integer"))).sort("integer").limit(10);
@@ -119,6 +174,17 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSimpleSortFieldAndEvalSumLiteralAndFieldAndScore() {
+        // FROM index METADATA _score | EVAL sum = 1 + integer | SORT integer, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("sum", b -> b.add(b.i(1), b.field("integer")))
+            .sort("integer")
+            .scoreSort()
+            .limit(10);
+        assertPushdownSort(query, List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSimpleSortEvalSumLiteralAndField() {
         // FROM index | EVAL sum = 1 + integer | SORT sum | LIMIT 10
         var query = from("index").eval("sum", b -> b.add(b.i(1), b.field("integer"))).sort("sum").limit(10);
@@ -144,6 +210,14 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoPointFieldAndScore() {
+        // FROM index METADATA _score | SORT location, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false).sort("location", Order.OrderDirection.ASC).scoreSort().limit(10);
+        // NOTE: while geo_point is not sortable, this is checked during logical planning and the physical planner does not know or care
+        assertPushdownSort(query);
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunction() {
         // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)) | SORT distance | LIMIT 10
         var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)"))
@@ -154,6 +228,18 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionAndScore() {
+        // FROM index METADATA _score | EVAL distance = ST_DISTANCE(location, POINT(1 2)) | SORT distance, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("distance", b -> b.distance("location", "POINT(1 2)"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .scoreSort()
+            .limit(10);
+        // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance'
+        assertPushdownSort(query, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunctionInverted() {
         // FROM index | EVAL distance = ST_DISTANCE(POINT(1 2), location) | SORT distance | LIMIT 10
         var query = from("index").eval("distance", b -> b.distance("POINT(1 2)", "location"))
@@ -164,6 +250,18 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionInvertedAndScore() {
+        // FROM index METADATA _score | EVAL distance = ST_DISTANCE(POINT(1 2), location) | SORT distance, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("distance", b -> b.distance("POINT(1 2)", "location"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .scoreSort()
+            .limit(10);
+        // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance'
+        assertPushdownSort(query, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunctionLiterals() {
         // FROM index | EVAL distance = ST_DISTANCE(POINT(2 1), POINT(1 2)) | SORT distance | LIMIT 10
         var query = from("index").eval("distance", b -> b.distance("POINT(2 1)", "POINT(1 2)"))
@@ -174,6 +272,18 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionLiteralsAndScore() {
+        // FROM index METADATA _score | EVAL distance = ST_DISTANCE(POINT(2 1), POINT(1 2)) | SORT distance, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("distance", b -> b.distance("POINT(2 1)", "POINT(1 2)"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .scoreSort()
+            .limit(10);
+        // The distance here is computed from literals only, so the sort cannot be pushed down to the source
+        assertNoPushdownSort(query, "sort on foldable distance function");
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunctionAndFieldsWithAliases() {
         // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, field, integer | LIMIT 10
         var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)"))
@@ -187,6 +297,21 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionAndFieldsWithAliasesAndScore() {
+        // FROM index METADATA _score
+        // | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, field, integer, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("distance", b -> b.distance("location", "POINT(1 2)"))
+            .eval("x", b -> b.field("field"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .sort("field", Order.OrderDirection.DESC)
+            .sort("integer", Order.OrderDirection.DESC)
+            .scoreSort()
+            .limit(10);
+        // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance'
+        assertPushdownSort(query, query.orders, Map.of("distance", "location"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunctionAndFieldsAndAliases() {
         // FROM index | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, x, integer | LIMIT 10
         var query = from("index").eval("distance", b -> b.distance("location", "POINT(1 2)"))
@@ -200,6 +325,21 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionAndFieldsAndAliasesAndScore() {
+        // FROM index METADATA _score
+        // | EVAL distance = ST_DISTANCE(location, POINT(1 2)), x = field | SORT distance, x, integer, _score | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("distance", b -> b.distance("location", "POINT(1 2)"))
+            .eval("x", b -> b.field("field"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .sort("x", Order.OrderDirection.DESC)
+            .sort("integer", Order.OrderDirection.DESC)
+            .scoreSort()
+            .limit(10);
+        // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance'
+        assertPushdownSort(query, query.orders, Map.of("distance", "location", "x", "field"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     public void testSortGeoDistanceFunctionAndFieldsAndManyAliases() {
         // FROM index
         // | EVAL loc = location, loc2 = loc, loc3 = loc2, distance = ST_DISTANCE(loc3, POINT(1 2)), x = field
@@ -219,6 +359,27 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
     }
 
+    public void testSortGeoDistanceFunctionAndFieldsAndManyAliasesAndScore() {
+        // FROM index METADATA _score
+        // | EVAL loc = location, loc2 = loc, loc3 = loc2, distance = ST_DISTANCE(loc3, POINT(1 2)), x = field
+        // | SORT distance, x, integer, _score
+        // | LIMIT 10
+        var query = from("index").metadata("_score", DOUBLE, false)
+            .eval("loc", b -> b.field("location"))
+            .eval("loc2", b -> b.ref("loc"))
+            .eval("loc3", b -> b.ref("loc2"))
+            .eval("distance", b -> b.distance("loc3", "POINT(1 2)"))
+            .eval("x", b -> b.field("field"))
+            .sort("distance", Order.OrderDirection.ASC)
+            .sort("x", Order.OrderDirection.DESC)
+            .sort("integer", Order.OrderDirection.DESC)
+            .scoreSort()
+            .limit(10);
+        // The pushed-down sort will use the underlying field 'location', not the sorted reference field 'distance'
+        assertPushdownSort(query, Map.of("distance", "location", "x", "field"), List.of(EvalExec.class, EsQueryExec.class));
+        assertNoPushdownSort(query.asTimeSeries(), "for time series index mode");
+    }
+
     private static void assertPushdownSort(TestPhysicalPlanBuilder builder) {
         assertPushdownSort(builder, null, List.of(EsQueryExec.class));
     }
@@ -289,9 +450,12 @@ public class PushTopNToSourceTests extends ESTestCase {
         assertThat("Expect sorts count to match", sorts.size(), is(expectedSorts.size()));
         for (int i = 0; i < expectedSorts.size(); i++) {
             String name = ((Attribute) expectedSorts.get(i).child()).name();
-            String fieldName = sorts.get(i).field().fieldName();
-            assertThat("Expect sort[" + i + "] name to match", fieldName, is(sortName(name, fieldMap)));
-            assertThat("Expect sort[" + i + "] direction to match", sorts.get(i).direction(), is(expectedSorts.get(i).direction()));
+            EsQueryExec.Sort sort = sorts.get(i);
+            if (sort.field() != null) {
+                String fieldName = sort.field().fieldName();
+                assertThat("Expect sort[" + i + "] name to match", fieldName, is(sortName(name, fieldMap)));
+            }
+            assertThat("Expect sort[" + i + "] direction to match", sort.direction(), is(expectedSorts.get(i).direction()));
         }
     }
 
@@ -317,6 +481,7 @@ public class PushTopNToSourceTests extends ESTestCase {
         private final String index;
         private final LinkedHashMap<String, FieldAttribute> fields;
         private final LinkedHashMap<String, ReferenceAttribute> refs;
+        private final LinkedHashMap<String, MetadataAttribute> metadata;
         private IndexMode indexMode;
         private final List<Alias> aliases = new ArrayList<>();
         private final List<Order> orders = new ArrayList<>();
@@ -327,6 +492,7 @@ public class PushTopNToSourceTests extends ESTestCase {
             this.indexMode = indexMode;
             this.fields = new LinkedHashMap<>();
             this.refs = new LinkedHashMap<>();
+            this.metadata = new LinkedHashMap<>();
             addSortableFieldAttributes(this.fields);
         }
 
@@ -346,6 +512,11 @@ public class PushTopNToSourceTests extends ESTestCase {
             return new TestPhysicalPlanBuilder(index, IndexMode.STANDARD);
         }
 
+        TestPhysicalPlanBuilder metadata(String metadataAttribute, DataType dataType, boolean searchable) {
+            metadata.put(metadataAttribute, new MetadataAttribute(Source.EMPTY, metadataAttribute, dataType, searchable));
+            return this;
+        }
+
         public TestPhysicalPlanBuilder eval(Alias... aliases) {
             if (orders.isEmpty() == false) {
                 throw new IllegalArgumentException("Eval must be before sort");
@@ -376,6 +547,22 @@ public class PushTopNToSourceTests extends ESTestCase {
             return sort(field, Order.OrderDirection.ASC);
         }
 
+        public TestPhysicalPlanBuilder scoreSort(Order.OrderDirection direction) {
+            orders.add(
+                new Order(
+                    Source.EMPTY,
+                    MetadataAttribute.create(Source.EMPTY, MetadataAttribute.SCORE),
+                    direction,
+                    Order.NullsPosition.LAST
+                )
+            );
+            return this;
+        }
+
+        public TestPhysicalPlanBuilder scoreSort() {
+            return scoreSort(Order.OrderDirection.DESC);
+        }
+
         public TestPhysicalPlanBuilder sort(String field, Order.OrderDirection direction) {
             Attribute attr = refs.get(field);
             if (attr == null) {