1
0
Эх сурвалжийг харах

Merge pull request ESQL-1079 from elastic/main

🤖 ESQL: Merge upstream
elasticsearchmachine 2 жил өмнө
parent
commit
7df7247d79
58 өөрчлөгдсөн 1504 нэмэгдсэн , 251 устгасан
  1. 2 4
      build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java
  2. 23 13
      build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java
  3. 4 0
      build-tools-internal/build.gradle
  4. 5 0
      build-tools-internal/settings.gradle
  5. 7 7
      build-tools-internal/src/main/groovy/elasticsearch.ide.gradle
  6. 6 2
      build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle
  7. 7 1
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
  8. 37 5
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
  9. 42 0
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java
  10. 125 0
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java
  11. 82 0
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/ArchivedOracleJdkToolchainResolver.java
  12. 28 0
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java
  13. 99 0
      build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java
  14. 4 5
      build-tools-internal/src/main/resources/templates/release-highlights.asciidoc
  15. 107 0
      build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy
  16. 83 0
      build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy
  17. 62 0
      build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/ArchivedOracleJdkToolchainResolverSpec.groovy
  18. 51 0
      build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy
  19. 3 2
      build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc
  20. 3 2
      build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.noHighlights.generateFile.asciidoc
  21. 4 0
      build-tools/settings.gradle
  22. 7 0
      build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java
  23. 0 2
      build.gradle
  24. 5 0
      docs/changelog/95621.yaml
  25. 5 0
      docs/changelog/95694.yaml
  26. 1 1
      docs/reference/cluster/tasks.asciidoc
  27. 5 0
      docs/reference/mapping/types/histogram.asciidoc
  28. 11 5
      docs/reference/modules/discovery/fault-detection.asciidoc
  29. 5 6
      docs/reference/rest-api/common-parms.asciidoc
  30. 1 0
      docs/reference/troubleshooting.asciidoc
  31. 0 2
      qa/mixed-cluster/build.gradle
  32. 12 12
      rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml
  33. 12 18
      server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java
  34. 14 23
      server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java
  35. 32 0
      server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java
  36. 46 94
      server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
  37. 3 3
      server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java
  38. 81 0
      server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java
  39. 2 2
      server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java
  40. 0 5
      server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
  41. 25 4
      settings.gradle
  42. 1 1
      test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java
  43. 1 1
      test/test-clusters/build.gradle
  44. 1 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java
  45. 55 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/StorageInternalUser.java
  46. 2 1
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java
  47. 2 0
      x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java
  48. 6 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java
  49. 82 0
      x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/StorageInternalUserTests.java
  50. 18 0
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsIngestPipelineRegistry.java
  51. 15 8
      x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/utils/ingest/PipelineRegistry.java
  52. 59 5
      x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsIngestPipelineRegistryTests.java
  53. 1 1
      x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
  54. 131 0
      x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml
  55. 34 0
      x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml
  56. 5 2
      x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java
  57. 27 11
      x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java
  58. 13 3
      x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java

+ 2 - 4
build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/GitInfoPlugin.java

@@ -9,6 +9,7 @@
 package org.elasticsearch.gradle.internal.conventions;
 
 import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
+import org.elasticsearch.gradle.internal.conventions.util.Util;
 import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.model.ObjectFactory;
@@ -35,10 +36,7 @@ class GitInfoPlugin implements Plugin<Project> {
 
     @Override
     public void apply(Project project) {
-        File rootDir = (project.getGradle().getParent() == null) ?
-                project.getRootDir() :
-                project.getGradle().getParent().getRootProject().getRootDir();
-
+        File rootDir = Util.locateElasticsearchWorkspace(project.getGradle());
         gitInfo = objectFactory.property(GitInfo.class).value(factory.provider(() ->
             GitInfo.gitInfo(rootDir)
         ));

+ 23 - 13
build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/util/Util.java

@@ -21,6 +21,7 @@ import org.gradle.api.tasks.util.PatternFilterable;
 
 import javax.annotation.Nullable;
 import java.io.File;
+import java.util.Collection;
 import java.util.Optional;
 import java.util.function.Supplier;
 
@@ -105,7 +106,6 @@ public class Util {
         return project.getExtensions().getByType(JavaPluginExtension.class) == null;
     }
 
-
     public static Object toStringable(Supplier<String> getter) {
         return new Object() {
             @Override
@@ -119,21 +119,31 @@ public class Util {
         return project.getExtensions().getByType(JavaPluginExtension.class).getSourceSets();
     }
 
-    public static File locateElasticsearchWorkspace(Gradle project) {
-        if (project.getParent() == null) {
-            // See if any of these included builds is the Elasticsearch project
-            for (IncludedBuild includedBuild : project.getIncludedBuilds()) {
-                File versionProperties = new File(includedBuild.getProjectDir(), "build-tools-internal/version.properties");
-                if (versionProperties.exists()) {
-                    return includedBuild.getProjectDir();
+        public static File locateElasticsearchWorkspace(Gradle gradle) {
+            if(gradle.getRootProject().getName().startsWith("build-tools")) {
+                File buildToolsParent = gradle.getRootProject().getRootDir().getParentFile();
+                if(versionFileExists(buildToolsParent)) {
+                    return buildToolsParent;
                 }
+                return buildToolsParent;
             }
+            if (gradle.getParent() == null) {
+                // See if any of these included builds is the Elasticsearch gradle
+                for (IncludedBuild includedBuild : gradle.getIncludedBuilds()) {
+                    if (versionFileExists(includedBuild.getProjectDir())) {
+                        return includedBuild.getProjectDir();
+                    }
+                }
 
-            // Otherwise assume this project is the root elasticsearch workspace
-            return project.getRootProject().getRootDir();
-        } else {
-            // We're an included build, so keep looking
-            return locateElasticsearchWorkspace(project.getParent());
+                // Otherwise assume this gradle is the root elasticsearch workspace
+                return gradle.getRootProject().getRootDir();
+            } else {
+                // We're an included build, so keep looking
+                return locateElasticsearchWorkspace(gradle.getParent());
+            }
         }
+
+    private static boolean versionFileExists(File rootDir) {
+        return new File(rootDir, "build-tools-internal/version.properties").exists();
     }
 }

+ 4 - 0
build-tools-internal/build.gradle

@@ -107,6 +107,10 @@ gradlePlugin {
       id = 'elasticsearch.internal-test-rerun'
       implementationClass = 'org.elasticsearch.gradle.internal.test.rerun.TestRerunPlugin'
     }
+    javaToolChainPlugin {
+      id = 'elasticsearch.java-toolchain'
+      implementationClass = 'org.elasticsearch.gradle.internal.toolchain.JavaToolChainResolverPlugin'
+    }
     javaDoc {
       id = 'elasticsearch.java-doc'
       implementationClass = 'org.elasticsearch.gradle.internal.ElasticsearchJavadocPlugin'

+ 5 - 0
build-tools-internal/settings.gradle

@@ -1,3 +1,8 @@
+pluginManagement {
+    includeBuild "../build-conventions"
+    includeBuild "../build-tools"
+}
+
 dependencyResolutionManagement {
     versionCatalogs {
         buildLibs {

+ 7 - 7
build-tools-internal/src/main/groovy/elasticsearch.ide.gradle

@@ -247,20 +247,20 @@ Node parseXml(Object path) {
   return xml
 }
 
-Pair<File, IncludedBuild> locateElasticsearchWorkspace(Gradle project) {
-  if (project.parent == null) {
-    // See if any of these included builds is the Elasticsearch project
-    for (IncludedBuild includedBuild : project.includedBuilds) {
+Pair<File, IncludedBuild> locateElasticsearchWorkspace(Gradle gradle) {
+  if (gradle.parent == null) {
+    // See if any of these included builds is the Elasticsearch gradle
+    for (IncludedBuild includedBuild : gradle.includedBuilds) {
       File versionProperties = new File(includedBuild.projectDir, 'build-tools-internal/version.properties')
       if (versionProperties.exists()) {
         return Pair.of(includedBuild.projectDir, includedBuild)
       }
     }
 
-    // Otherwise assume this project is the root elasticsearch workspace
-    return Pair.of(project.getRootProject().getRootDir(), null)
+    // Otherwise assume this gradle is the root elasticsearch workspace
+    return Pair.of(gradle.getRootProject().getRootDir(), null)
   } else {
     // We're an included build, so keep looking
-    return locateElasticsearchWorkspace(project.parent)
+    return locateElasticsearchWorkspace(gradle.parent)
   }
 }

+ 6 - 2
build-tools-internal/src/main/groovy/elasticsearch.runtime-jdk-provision.gradle

@@ -35,8 +35,12 @@ configure(allprojects) {
             test.executable = "${BuildParams.runtimeJavaHome}/bin/java" +
                     (OS.current() == OS.WINDOWS ? '.exe' : '')
         } else {
-            test.dependsOn(project.jdks.provisioned_runtime)
-            test.executable = rootProject.jdks.provisioned_runtime.getBinJavaPath()
+            test.javaLauncher = javaToolchains.launcherFor {
+                languageVersion = JavaLanguageVersion.of(VersionProperties.bundledJdkMajorVersion)
+                vendor = VersionProperties.bundledJdkVendor == "openjdk" ?
+                        JvmVendorSpec.ORACLE :
+                        JvmVendorSpec.matching(VersionProperties.bundledJdkVendor)
+            }
         }
     }
     project.plugins.withId("elasticsearch.testclusters") { testClustersPlugin ->

+ 7 - 1
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java

@@ -22,6 +22,13 @@ import org.gradle.api.attributes.Attribute;
 
 import java.util.Arrays;
 
+/**
+ * @deprecated We wanna get rid from this and custom jdk downloads via this plugin and
+ * make leverage the gradle toolchain resolver capabilities.
+ *
+ * @See @org.elasticsearch.gradle.internal.toolchain.JavaToolChainResolverPlugin
+ * */
+@Deprecated
 public class JdkDownloadPlugin implements Plugin<Project> {
 
     public static final String VENDOR_ADOPTIUM = "adoptium";
@@ -161,7 +168,6 @@ public class JdkDownloadPlugin implements Plugin<Project> {
     private static String dependencyNotation(Jdk jdk) {
         String platformDep = isJdkOnMacOsPlatform(jdk) ? (jdk.getVendor().equals(VENDOR_ADOPTIUM) ? "mac" : "macos") : jdk.getPlatform();
         String extension = jdk.getPlatform().equals("windows") ? "zip" : "tar.gz";
-
         return groupName(jdk) + ":" + platformDep + ":" + jdk.getBaseVersion() + ":" + jdk.getArchitecture() + "@" + extension;
     }
 

+ 37 - 5
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java

@@ -20,6 +20,9 @@ import org.gradle.api.Plugin;
 import org.gradle.api.Project;
 import org.gradle.api.logging.Logger;
 import org.gradle.api.logging.Logging;
+import org.gradle.api.model.ObjectFactory;
+import org.gradle.api.plugins.JvmToolchainsPlugin;
+import org.gradle.api.provider.Property;
 import org.gradle.api.provider.Provider;
 import org.gradle.api.provider.ProviderFactory;
 import org.gradle.internal.jvm.Jvm;
@@ -27,11 +30,14 @@ import org.gradle.internal.jvm.inspection.JvmInstallationMetadata;
 import org.gradle.internal.jvm.inspection.JvmMetadataDetector;
 import org.gradle.internal.jvm.inspection.JvmVendor;
 import org.gradle.jvm.toolchain.JavaLanguageVersion;
+import org.gradle.jvm.toolchain.JavaLauncher;
+import org.gradle.jvm.toolchain.JavaToolchainService;
 import org.gradle.jvm.toolchain.JavaToolchainSpec;
 import org.gradle.jvm.toolchain.JvmVendorSpec;
 import org.gradle.jvm.toolchain.internal.InstallationLocation;
 import org.gradle.jvm.toolchain.internal.JavaInstallationRegistry;
 import org.gradle.util.GradleVersion;
+import org.jetbrains.annotations.NotNull;
 
 import java.io.BufferedReader;
 import java.io.File;
@@ -56,19 +62,24 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
     private static final Logger LOGGER = Logging.getLogger(GlobalBuildInfoPlugin.class);
     private static final String DEFAULT_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/elasticsearch/Version.java";
 
+    private ObjectFactory objectFactory;
     private final JavaInstallationRegistry javaInstallationRegistry;
     private final JvmMetadataDetector metadataDetector;
     private final ProviderFactory providers;
+    private JavaToolchainService toolChainService;
 
     @Inject
     public GlobalBuildInfoPlugin(
+        ObjectFactory objectFactory,
         JavaInstallationRegistry javaInstallationRegistry,
         JvmMetadataDetector metadataDetector,
         ProviderFactory providers
     ) {
+        this.objectFactory = objectFactory;
         this.javaInstallationRegistry = javaInstallationRegistry;
         this.metadataDetector = new ErrorTraceMetadataDetector(metadataDetector);
         this.providers = providers;
+
     }
 
     @Override
@@ -76,6 +87,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
         if (project != project.getRootProject()) {
             throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project.");
         }
+        project.getPlugins().apply(JvmToolchainsPlugin.class);
+        toolChainService = project.getExtensions().getByType(JavaToolchainService.class);
         GradleVersion minimumGradleVersion = GradleVersion.version(getResourceContents("/minimumGradleVersion"));
         if (GradleVersion.current().compareTo(minimumGradleVersion) < 0) {
             throw new GradleException("Gradle " + minimumGradleVersion.getVersion() + "+ is required");
@@ -114,6 +127,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
             params.setTestSeed(getTestSeed());
             params.setIsCi(System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null);
             params.setDefaultParallel(ParallelDetector.findDefaultParallel(project));
+            // TODO: Test if cc issues are coming from here
+            params.setDefaultParallel(8);
             params.setInFipsJvm(Util.getBooleanProperty("tests.fips.enabled", false));
             params.setIsSnapshotBuild(Util.getBooleanProperty("build.snapshot", true));
             AtomicReference<BwcVersions> cache = new AtomicReference<>();
@@ -219,7 +234,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
 
     private boolean isSameFile(File javaHome, InstallationLocation installationLocation) {
         try {
-            return Files.isSameFile(installationLocation.getLocation().toPath(), javaHome.toPath());
+            return Files.isSameFile(javaHome.toPath(), installationLocation.getLocation().toPath());
         } catch (IOException ioException) {
             throw new UncheckedIOException(ioException);
         }
@@ -294,9 +309,14 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
     }
 
     private String findJavaHome(String version) {
-        Provider<String> javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv");
         String javaHomeEnvVar = getJavaHomeEnvVarName(version);
+        String env = System.getenv(javaHomeEnvVar);
+        return env != null ? resolveJavaHomeFromEnvVariable(javaHomeEnvVar) : resolveJavaHomeFromToolChainService(version);
+    }
 
+    @NotNull
+    private String resolveJavaHomeFromEnvVariable(String javaHomeEnvVar) {
+        Provider<String> javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv");
         // Provide a useful error if we're looking for a Java home version that we haven't told Gradle about yet
         Arrays.stream(javaHomeNames.get().split(","))
             .filter(s -> s.equals(javaHomeEnvVar))
@@ -309,7 +329,6 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
                         + "updated in gradle.properties file."
                 )
             );
-
         String versionedJavaHome = System.getenv(javaHomeEnvVar);
         if (versionedJavaHome == null) {
             final String exceptionMessage = String.format(
@@ -320,12 +339,25 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
                     + "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details.",
                 javaHomeEnvVar
             );
-
             throw new GradleException(exceptionMessage);
         }
         return versionedJavaHome;
     }
 
+    @NotNull
+    private String resolveJavaHomeFromToolChainService(String version) {
+        Property<JavaLanguageVersion> value = objectFactory.property(JavaLanguageVersion.class).value(JavaLanguageVersion.of(version));
+        Provider<JavaLauncher> javaLauncherProvider = toolChainService.launcherFor(
+            javaToolchainSpec -> javaToolchainSpec.getLanguageVersion().value(value)
+        );
+
+        try {
+            return javaLauncherProvider.get().getMetadata().getInstallationPath().getAsFile().getCanonicalPath();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
     private static String getJavaHomeEnvVarName(String version) {
         return "JAVA" + version + "_HOME";
     }
@@ -369,7 +401,7 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
         private final JvmVendorSpec expectedVendorSpec;
         private final JavaLanguageVersion expectedJavaLanguageVersion;
 
-        public MetadataBasedToolChainMatcher(JvmInstallationMetadata metadata) {
+        MetadataBasedToolChainMatcher(JvmInstallationMetadata metadata) {
             expectedVendorSpec = JvmVendorSpec.matching(metadata.getVendor().getRawVendor());
             expectedJavaLanguageVersion = JavaLanguageVersion.of(metadata.getLanguageVersion().getMajorVersion());
         }

+ 42 - 0
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AbstractCustomJavaToolchainResolver.java

@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain;
+
+import org.gradle.jvm.toolchain.JavaToolchainResolver;
+import org.gradle.jvm.toolchain.JvmVendorSpec;
+import org.gradle.platform.Architecture;
+import org.gradle.platform.OperatingSystem;
+
+abstract class AbstractCustomJavaToolchainResolver implements JavaToolchainResolver {
+
+    static String toOsString(OperatingSystem operatingSystem) {
+        return toOsString(operatingSystem, null);
+    }
+
+    static String toOsString(OperatingSystem operatingSystem, JvmVendorSpec v) {
+        return switch (operatingSystem) {
+            case MAC_OS -> (v == null || v.equals(JvmVendorSpec.ADOPTIUM) == false) ? "macos" : "mac";
+            case LINUX -> "linux";
+            case WINDOWS -> "windows";
+            default -> throw new UnsupportedOperationException("Operating system " + operatingSystem);
+        };
+    }
+
+    static String toArchString(Architecture architecture) {
+        return switch (architecture) {
+            case X86_64 -> "x64";
+            case AARCH64 -> "aarch64";
+            case X86 -> "x86";
+        };
+    }
+
+    protected static boolean anyVendorOr(JvmVendorSpec givenVendor, JvmVendorSpec expectedVendor) {
+        return givenVendor.matches("any") || givenVendor.equals(expectedVendor);
+    }
+}

+ 125 - 0
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java

@@ -0,0 +1,125 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.commons.compress.utils.Lists;
+import org.gradle.jvm.toolchain.JavaLanguageVersion;
+import org.gradle.jvm.toolchain.JavaToolchainDownload;
+import org.gradle.jvm.toolchain.JavaToolchainRequest;
+import org.gradle.jvm.toolchain.JvmVendorSpec;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+import java.util.Comparator;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static org.gradle.jvm.toolchain.JavaToolchainDownload.fromUri;
+
+public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToolchainResolver {
+
+    // package protected for better testing
+    final Map<AdoptiumVersionRequest, Optional<AdoptiumVersionInfo>> CACHED_SEMVERS = new ConcurrentHashMap<>();
+
+    @Override
+    public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
+        if (requestIsSupported(request) == false) {
+            return Optional.empty();
+        }
+        AdoptiumVersionRequest versionRequestKey = toVersionRequest(request);
+        Optional<AdoptiumVersionInfo> versionInfo = CACHED_SEMVERS.computeIfAbsent(
+            versionRequestKey,
+            (r) -> resolveAvailableVersion(versionRequestKey)
+        );
+
+        return versionInfo.map(v -> fromUri(resolveDownloadURI(versionRequestKey, v)));
+    }
+
+    private AdoptiumVersionRequest toVersionRequest(JavaToolchainRequest request) {
+        String platform = toOsString(request.getBuildPlatform().getOperatingSystem(), JvmVendorSpec.ADOPTIUM);
+        String arch = toArchString(request.getBuildPlatform().getArchitecture());
+        JavaLanguageVersion javaLanguageVersion = request.getJavaToolchainSpec().getLanguageVersion().get();
+        return new AdoptiumVersionRequest(platform, arch, javaLanguageVersion);
+    }
+
+    private Optional<AdoptiumVersionInfo> resolveAvailableVersion(AdoptiumVersionRequest requestKey) {
+        ObjectMapper mapper = new ObjectMapper();
+        try {
+            int languageVersion = requestKey.languageVersion.asInt();
+            URL source = new URL(
+                "https://api.adoptium.net/v3/info/release_versions?architecture="
+                    + requestKey.arch
+                    + "&image_type=jdk&os="
+                    + requestKey.platform
+                    + "&project=jdk&release_type=ga"
+                    + "&version=["
+                    + languageVersion
+                    + ","
+                    + (languageVersion + 1)
+                    + ")"
+            );
+            JsonNode jsonNode = mapper.readTree(source);
+            JsonNode versionsNode = jsonNode.get("versions");
+            return Optional.of(
+                Lists.newArrayList(versionsNode.iterator())
+                    .stream()
+                    .map(node -> toVersionInfo(node))
+                    .sorted(Comparator.comparing(AdoptiumVersionInfo::semver).reversed())
+                    .findFirst()
+                    .get()
+            );
+        } catch (FileNotFoundException e) {
+            // request combo not supported (e.g. aarch64 + windows
+            return Optional.empty();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private AdoptiumVersionInfo toVersionInfo(JsonNode node) {
+        return new AdoptiumVersionInfo(
+            node.get("build").asInt(),
+            node.get("major").asInt(),
+            node.get("minor").asInt(),
+            node.get("openjdk_version").asText(),
+            node.get("security").asInt(),
+            node.get("semver").asText()
+        );
+    }
+
+    private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) {
+        return URI.create(
+            "https://api.adoptium.net/v3/binary/version/jdk-"
+                + versionInfo.openjdkVersion
+                + "/"
+                + request.platform
+                + "/"
+                + request.arch
+                + "/jdk/hotspot/normal/eclipse?project=jdk"
+        );
+    }
+
+    /**
+     * Check if request can be full-filled by this resolver:
+     * 1. vendor must be "any" or adoptium
+     */
+    private boolean requestIsSupported(JavaToolchainRequest request) {
+        return anyVendorOr(request.getJavaToolchainSpec().getVendor().get(), JvmVendorSpec.ADOPTIUM);
+    }
+
+    record AdoptiumVersionInfo(int build, int major, int minor, String openjdkVersion, int security, String semver) {}
+
+    record AdoptiumVersionRequest(String platform, String arch, JavaLanguageVersion languageVersion) {}
+}

+ 82 - 0
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/ArchivedOracleJdkToolchainResolver.java

@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain;
+
+import org.apache.groovy.util.Maps;
+import org.elasticsearch.gradle.VersionProperties;
+import org.gradle.jvm.toolchain.JavaLanguageVersion;
+import org.gradle.jvm.toolchain.JavaToolchainDownload;
+import org.gradle.jvm.toolchain.JavaToolchainRequest;
+import org.gradle.jvm.toolchain.JavaToolchainSpec;
+import org.gradle.jvm.toolchain.JvmVendorSpec;
+import org.gradle.platform.Architecture;
+import org.gradle.platform.BuildPlatform;
+import org.gradle.platform.OperatingSystem;
+
+import java.net.URI;
+import java.util.Map;
+import java.util.Optional;
+
+public abstract class ArchivedOracleJdkToolchainResolver extends AbstractCustomJavaToolchainResolver {
+
+    private static final Map<Integer, String> ARCHIVED_BASE_VERSIONS = Maps.of(19, "19.0.2", 18, "18.0.2.1", 17, "17.0.7");
+
+    @Override
+    public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
+        if (requestIsSupported(request) == false) {
+            return Optional.empty();
+        }
+        Integer majorVersion = request.getJavaToolchainSpec().getLanguageVersion().get().asInt();
+        String baseVersion = ARCHIVED_BASE_VERSIONS.get(majorVersion);
+        if (baseVersion == null) {
+            return Optional.empty();
+        }
+
+        OperatingSystem operatingSystem = request.getBuildPlatform().getOperatingSystem();
+        String extension = operatingSystem.equals(OperatingSystem.WINDOWS) ? "zip" : "tar.gz";
+        String arch = toArchString(request.getBuildPlatform().getArchitecture());
+        String os = toOsString(operatingSystem);
+        return Optional.of(
+            () -> URI.create(
+                "https://download.oracle.com/java/"
+                    + majorVersion
+                    + "/archive/jdk-"
+                    + baseVersion
+                    + "_"
+                    + os
+                    + "-"
+                    + arch
+                    + "_bin."
+                    + extension
+            )
+        );
+    }
+
+    /**
+     * Check if request can be full-filled by this resolver:
+     * 1. language version not matching bundled jdk version
+     * 2. vendor must be any or oracle
+     * 3. Aarch64 windows images are not supported
+     */
+    private boolean requestIsSupported(JavaToolchainRequest request) {
+        JavaToolchainSpec javaToolchainSpec = request.getJavaToolchainSpec();
+        JavaLanguageVersion bundledJdkMajorVersion = JavaLanguageVersion.of(VersionProperties.getBundledJdkMajorVersion());
+        if (javaToolchainSpec.getLanguageVersion().get().equals(bundledJdkMajorVersion)) {
+            return false;
+        }
+        if (anyVendorOr(javaToolchainSpec.getVendor().get(), JvmVendorSpec.ORACLE) == false) {
+            return false;
+        }
+        BuildPlatform buildPlatform = request.getBuildPlatform();
+        Architecture architecture = buildPlatform.getArchitecture();
+        OperatingSystem operatingSystem = buildPlatform.getOperatingSystem();
+        return Architecture.AARCH64 != architecture || OperatingSystem.WINDOWS != operatingSystem;
+    }
+
+}

+ 28 - 0
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/JavaToolChainResolverPlugin.java

@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.initialization.Settings;
+import org.gradle.jvm.toolchain.JavaToolchainResolverRegistry;
+
+import javax.inject.Inject;
+
+public abstract class JavaToolChainResolverPlugin implements Plugin<Settings> {
+    @Inject
+    protected abstract JavaToolchainResolverRegistry getToolchainResolverRegistry();
+
+    public void apply(Settings settings) {
+        settings.getPlugins().apply("jvm-toolchain-management");
+        JavaToolchainResolverRegistry registry = getToolchainResolverRegistry();
+        registry.register(OracleOpenJdkToolchainResolver.class);
+        registry.register(AdoptiumJdkToolchainResolver.class);
+        registry.register(ArchivedOracleJdkToolchainResolver.class);
+    }
+}

+ 99 - 0
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolver.java

@@ -0,0 +1,99 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain;
+
+import org.elasticsearch.gradle.VersionProperties;
+import org.gradle.jvm.toolchain.JavaLanguageVersion;
+import org.gradle.jvm.toolchain.JavaToolchainDownload;
+import org.gradle.jvm.toolchain.JavaToolchainRequest;
+import org.gradle.jvm.toolchain.JavaToolchainSpec;
+import org.gradle.jvm.toolchain.JvmVendorSpec;
+import org.gradle.platform.Architecture;
+import org.gradle.platform.BuildPlatform;
+import org.gradle.platform.OperatingSystem;
+
+import java.net.URI;
+import java.util.Optional;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public abstract class OracleOpenJdkToolchainResolver extends AbstractCustomJavaToolchainResolver {
+
+    private static final Pattern VERSION_PATTERN = Pattern.compile(
+        "(\\d+)(\\.\\d+\\.\\d+(?:\\.\\d+)?)?\\+(\\d+(?:\\.\\d+)?)(@([a-f0-9]{32}))?"
+    );
+
+    // for testing reasons we keep that a package private field
+    String bundledJdkVersion = VersionProperties.getBundledJdkVersion();
+    JavaLanguageVersion bundledJdkMajorVersion = JavaLanguageVersion.of(VersionProperties.getBundledJdkMajorVersion());
+
+    /**
+     * We need some place to map JavaLanguageVersion to build, minor version etc.
+     * */
+    @Override
+    public Optional<JavaToolchainDownload> resolve(JavaToolchainRequest request) {
+        if (requestIsSupported(request) == false) {
+            return Optional.empty();
+        }
+        Matcher jdkVersionMatcher = VERSION_PATTERN.matcher(bundledJdkVersion);
+        if (jdkVersionMatcher.matches() == false) {
+            throw new IllegalStateException("Unable to parse bundled JDK version " + bundledJdkVersion);
+        }
+        String baseVersion = jdkVersionMatcher.group(1) + (jdkVersionMatcher.group(2) != null ? (jdkVersionMatcher.group(2)) : "");
+        String build = jdkVersionMatcher.group(3);
+        String hash = jdkVersionMatcher.group(5);
+
+        OperatingSystem operatingSystem = request.getBuildPlatform().getOperatingSystem();
+        String extension = operatingSystem.equals(OperatingSystem.WINDOWS) ? "zip" : "tar.gz";
+        String arch = toArchString(request.getBuildPlatform().getArchitecture());
+        String os = toOsString(operatingSystem);
+        return Optional.of(
+            () -> URI.create(
+                "https://download.oracle.com/java/GA/jdk"
+                    + baseVersion
+                    + "/"
+                    + hash
+                    + "/"
+                    + build
+                    + "/GPL/openjdk-"
+                    + baseVersion
+                    + "_"
+                    + os
+                    + "-"
+                    + arch
+                    + "_bin."
+                    + extension
+            )
+        );
+    }
+
+    /**
+     * Check if request can be fulfilled by this resolver:
+     * 1. BundledJdkVendor should match openjdk
+     * 2. language version should match bundled jdk version
+     * 3. vendor must be any or oracle
+     * 4. Aarch64 windows images are not supported
+     */
+    private boolean requestIsSupported(JavaToolchainRequest request) {
+        if (VersionProperties.getBundledJdkVendor().toLowerCase().equals("openjdk") == false) {
+            return false;
+        }
+        JavaToolchainSpec javaToolchainSpec = request.getJavaToolchainSpec();
+        if (javaToolchainSpec.getLanguageVersion().get().equals(bundledJdkMajorVersion) == false) {
+            return false;
+        }
+        if (anyVendorOr(javaToolchainSpec.getVendor().get(), JvmVendorSpec.ORACLE) == false) {
+            return false;
+        }
+        BuildPlatform buildPlatform = request.getBuildPlatform();
+        Architecture architecture = buildPlatform.getArchitecture();
+        OperatingSystem operatingSystem = buildPlatform.getOperatingSystem();
+        return Architecture.AARCH64 != architecture || OperatingSystem.WINDOWS != operatingSystem;
+    }
+}

+ 4 - 5
build-tools-internal/src/main/resources/templates/release-highlights.asciidoc

@@ -4,19 +4,18 @@
 coming::[{minor-version}]
 
 Here are the highlights of what's new and improved in {es} {minor-version}!
-ifeval::[\\{release-state}\\"!=\\"unreleased\\"]
+ifeval::["{release-state}"!="unreleased"]
 For detailed information about this release, see the <<es-release-notes>> and
 <<breaking-changes>>.
-endif::[]
 <% if (priorVersions.size() > 0) { %>
 // Add previous release to the list
 Other versions:
 
 <%
 print priorVersions.join("\n| ")
-print "\n"
-}
-
+print "\n" }%>
+endif::[]
+<%
 /* The `notable-highlights` tag needs to exist, whether or not we actually have any notable highlights. */
 if (notableHighlights.isEmpty()) { %>
 // The notable-highlights tag marks entries that

+ 107 - 0
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AbstractToolchainResolverSpec.groovy

@@ -0,0 +1,107 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain
+
+import org.gradle.api.provider.Property
+import org.gradle.jvm.toolchain.JavaLanguageVersion
+import org.gradle.jvm.toolchain.JavaToolchainDownload
+import org.gradle.jvm.toolchain.JavaToolchainRequest
+import org.gradle.jvm.toolchain.JavaToolchainResolver
+import org.gradle.jvm.toolchain.JavaToolchainSpec
+import org.gradle.jvm.toolchain.JvmVendorSpec
+import org.gradle.platform.Architecture
+import org.gradle.platform.BuildPlatform
+import org.gradle.platform.OperatingSystem
+import spock.lang.Specification
+
+import static org.gradle.platform.Architecture.X86_64
+import static org.gradle.platform.OperatingSystem.MAC_OS
+
+abstract class AbstractToolchainResolverSpec extends Specification {
+
+    def "resolves #os #arch #vendor jdk #langVersion"() {
+        given:
+        def resolver = resolverImplementation()
+
+        when:
+        Optional<JavaToolchainDownload> download = resolver.resolve(request(JavaLanguageVersion.of(langVersion), vendor, platform(os, arch)))
+
+        then:
+        download.get().uri == URI.create(expectedUrl)
+        where:
+
+        [langVersion, vendor, os, arch, expectedUrl] << supportedRequests()
+    }
+
+
+    def "does not resolve #os #arch #vendor jdk #langVersion"() {
+        given:
+        def resolver = resolverImplementation()
+
+        when:
+        Optional<JavaToolchainDownload> download = resolver.resolve(request(JavaLanguageVersion.of(langVersion), vendor, platform(os, arch)))
+
+        then:
+        download.isEmpty()
+        where:
+        [langVersion, vendor, os, arch] << unsupportedRequests()
+    }
+
+    abstract JavaToolchainResolver resolverImplementation();
+
+    abstract supportedRequests();
+
+    abstract unsupportedRequests();
+
+    JavaToolchainRequest request(JavaLanguageVersion languageVersion = null,
+                                 JvmVendorSpec vendorSpec = anyVendor(),
+                                 BuildPlatform platform = platform()) {
+
+        JavaToolchainSpec toolchainSpec = Mock()
+        Property<JavaLanguageVersion> languageVersionProperty = Mock()
+        _ * toolchainSpec.getLanguageVersion() >> languageVersionProperty
+        _ * languageVersionProperty.get() >> languageVersion
+
+        Property<JvmVendorSpec> vendorSpecProperty = Mock()
+        _ * vendorSpecProperty.get() >> vendorSpec
+        _ * toolchainSpec.getVendor() >> vendorSpecProperty
+
+        JavaToolchainRequest request = Mock()
+
+        _ * request.getJavaToolchainSpec() >> toolchainSpec
+        _ * request.getBuildPlatform() >> platform
+        return request
+    }
+
+    JvmVendorSpec anyVendor() {
+        return new AnyJvmVendorSpec();
+    }
+
+    BuildPlatform platform(OperatingSystem os = MAC_OS, Architecture arch = X86_64) {
+        return new TestBuildPlatform(operatingSystem: os, architecture: arch)
+    }
+
+
+    static class TestBuildPlatform implements BuildPlatform {
+        OperatingSystem operatingSystem
+        Architecture architecture
+    }
+
+    static class AnyJvmVendorSpec extends JvmVendorSpec {
+        @Override
+        boolean matches(String vendor) {
+            return vendor == "any"
+        }
+
+        @Override
+        String toString() {
+            return "any"
+        }
+    }
+}

+ 83 - 0
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy

@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain
+
+import org.gradle.api.services.BuildServiceParameters
+import org.gradle.jvm.toolchain.JavaLanguageVersion
+import org.gradle.jvm.toolchain.JavaToolchainResolver
+import org.gradle.platform.OperatingSystem
+
+import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toArchString
+import static org.elasticsearch.gradle.internal.toolchain.AbstractCustomJavaToolchainResolver.toOsString
+import static org.gradle.jvm.toolchain.JvmVendorSpec.ADOPTIUM
+import static org.gradle.platform.Architecture.AARCH64
+import static org.gradle.platform.Architecture.X86_64
+import static org.gradle.platform.OperatingSystem.LINUX
+import static org.gradle.platform.OperatingSystem.MAC_OS
+import static org.gradle.platform.OperatingSystem.WINDOWS
+
+class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
+
+    @Override
+    JavaToolchainResolver resolverImplementation() {
+        def resolver = new AdoptiumJdkToolchainResolver() {
+            @Override
+            BuildServiceParameters.None getParameters() {
+                return null
+            }
+        }
+        supportedRequests().each {
+            def languageVersion = JavaLanguageVersion.of(it[0])
+            def request = new AdoptiumJdkToolchainResolver.AdoptiumVersionRequest(
+                    toOsString(it[2], it[1]),
+                    toArchString(it[3]),
+                    languageVersion);
+            resolver.CACHED_SEMVERS.put(request, Optional.of(new AdoptiumJdkToolchainResolver.AdoptiumVersionInfo(languageVersion.asInt(),
+                    1,
+                    1,
+                    "" + languageVersion.asInt() + ".1.1.1+37",
+                    0, "" + languageVersion.asInt() + ".1.1.1"
+            )))
+
+        }
+        return resolver
+    }
+
+    @Override
+    def supportedRequests() {
+        return [
+                [19, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [19, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [19, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [19, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [19, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"],
+
+                [18, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [18, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [18, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [18, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [18, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [17, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [17, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [17, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [17, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"],
+                [17, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"]
+        ]
+    }
+
+    @Override
+    def unsupportedRequests() {
+        [
+                [19, ADOPTIUM, WINDOWS, AARCH64],
+                [18, ADOPTIUM, WINDOWS, AARCH64],
+                [17, ADOPTIUM, WINDOWS, AARCH64]
+        ]
+    }
+
+}

+ 62 - 0
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/ArchivedOracleJdkToolchainResolverSpec.groovy

@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain
+
+import org.gradle.api.services.BuildServiceParameters
+import org.gradle.jvm.toolchain.JavaToolchainResolver;
+
+import static org.gradle.jvm.toolchain.JvmVendorSpec.ORACLE
+import static org.gradle.platform.Architecture.AARCH64
+import static org.gradle.platform.Architecture.X86_64
+import static org.gradle.platform.OperatingSystem.LINUX
+import static org.gradle.platform.OperatingSystem.MAC_OS
+import static org.gradle.platform.OperatingSystem.WINDOWS;
+
+class ArchivedOracleJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
+
+    @Override
+    def supportedRequests() {
+        return [
+                [19, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/19/archive/jdk-19.0.2_macos-x64_bin.tar.gz"],
+                [19, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/19/archive/jdk-19.0.2_macos-aarch64_bin.tar.gz"],
+                [19, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/19/archive/jdk-19.0.2_linux-x64_bin.tar.gz"],
+                [19, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/19/archive/jdk-19.0.2_linux-aarch64_bin.tar.gz"],
+                [19, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/19/archive/jdk-19.0.2_windows-x64_bin.zip"],
+
+                [18, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/18/archive/jdk-18.0.2.1_macos-x64_bin.tar.gz"],
+                [18, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/18/archive/jdk-18.0.2.1_macos-aarch64_bin.tar.gz"],
+                [18, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/18/archive/jdk-18.0.2.1_linux-x64_bin.tar.gz"],
+                [18, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/18/archive/jdk-18.0.2.1_linux-aarch64_bin.tar.gz"],
+                [18, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/18/archive/jdk-18.0.2.1_windows-x64_bin.zip"],
+
+                [17, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/17/archive/jdk-17.0.7_macos-x64_bin.tar.gz"],
+                [17, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/17/archive/jdk-17.0.7_macos-aarch64_bin.tar.gz"],
+                [17, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/17/archive/jdk-17.0.7_linux-x64_bin.tar.gz"],
+                [17, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/17/archive/jdk-17.0.7_linux-aarch64_bin.tar.gz"],
+                [17, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/17/archive/jdk-17.0.7_windows-x64_bin.zip"]
+        ]
+    }
+
+    def unsupportedRequests() {
+        [
+                [19, ORACLE, WINDOWS, AARCH64],
+                [18, ORACLE, WINDOWS, AARCH64],
+                [17, ORACLE, WINDOWS, AARCH64]
+        ]
+    }
+
+    JavaToolchainResolver resolverImplementation() {
+        new ArchivedOracleJdkToolchainResolver() {
+            @Override
+            BuildServiceParameters.None getParameters() {
+                return null
+            }
+        }
+    }
+}

+ 51 - 0
build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/OracleOpenJdkToolchainResolverSpec.groovy

@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.toolchain
+
+
+import org.gradle.api.services.BuildServiceParameters
+import org.gradle.jvm.toolchain.JavaLanguageVersion
+import static org.gradle.jvm.toolchain.JvmVendorSpec.ORACLE
+import static org.gradle.platform.Architecture.*
+import static org.gradle.platform.OperatingSystem.*
+
+class OracleOpenJdkToolchainResolverSpec extends AbstractToolchainResolverSpec {
+
+    OracleOpenJdkToolchainResolver resolverImplementation() {
+        var toolChain = new OracleOpenJdkToolchainResolver() {
+            @Override
+            BuildServiceParameters.None getParameters() {
+                return null
+            }
+        }
+        toolChain.bundledJdkVersion = "20+36@bdc68b4b9cbc4ebcb30745c85038d91d"
+        toolChain.bundledJdkMajorVersion = JavaLanguageVersion.of(20)
+        toolChain
+    }
+
+    def supportedRequests() {
+        [[20, ORACLE, MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_macos-x64_bin.tar.gz"],
+         [20, ORACLE, MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_macos-aarch64_bin.tar.gz"],
+         [20, ORACLE, LINUX, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-x64_bin.tar.gz"],
+         [20, ORACLE, LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-aarch64_bin.tar.gz"],
+         [20, ORACLE, WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_windows-x64_bin.zip"],
+         [20, anyVendor(), MAC_OS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_macos-x64_bin.tar.gz"],
+         [20, anyVendor(), MAC_OS, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_macos-aarch64_bin.tar.gz"],
+         [20, anyVendor(), LINUX, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-x64_bin.tar.gz"],
+         [20, anyVendor(), LINUX, AARCH64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_linux-aarch64_bin.tar.gz"],
+         [20, anyVendor(), WINDOWS, X86_64, "https://download.oracle.com/java/GA/jdk20/bdc68b4b9cbc4ebcb30745c85038d91d/36/GPL/openjdk-20_windows-x64_bin.zip"]]
+    }
+
+    def unsupportedRequests() {
+        [
+                [20, ORACLE, WINDOWS, AARCH64]
+        ]
+    }
+
+}

+ 3 - 2
build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.generateFile.asciidoc

@@ -4,10 +4,9 @@
 coming::[{minor-version}]
 
 Here are the highlights of what's new and improved in {es} {minor-version}!
-ifeval::[\{release-state}\"!=\"unreleased\"]
+ifeval::["{release-state}"!="unreleased"]
 For detailed information about this release, see the <<es-release-notes>> and
 <<breaking-changes>>.
-endif::[]
 
 // Add previous release to the list
 Other versions:
@@ -17,6 +16,8 @@ Other versions:
 | {ref-bare}/8.1/release-highlights.html[8.1]
 | {ref-bare}/8.0/release-highlights.html[8.0]
 
+endif::[]
+
 // tag::notable-highlights[]
 
 [discrete]

+ 3 - 2
build-tools-internal/src/test/resources/org/elasticsearch/gradle/internal/release/ReleaseHighlightsGeneratorTest.noHighlights.generateFile.asciidoc

@@ -4,10 +4,9 @@
 coming::[{minor-version}]
 
 Here are the highlights of what's new and improved in {es} {minor-version}!
-ifeval::[\{release-state}\"!=\"unreleased\"]
+ifeval::["{release-state}"!="unreleased"]
 For detailed information about this release, see the <<es-release-notes>> and
 <<breaking-changes>>.
-endif::[]
 
 // Add previous release to the list
 Other versions:
@@ -17,6 +16,8 @@ Other versions:
 | {ref-bare}/8.1/release-highlights.html[8.1]
 | {ref-bare}/8.0/release-highlights.html[8.0]
 
+endif::[]
+
 // The notable-highlights tag marks entries that
 // should be featured in the Stack Installation and Upgrade Guide:
 // tag::notable-highlights[]

+ 4 - 0
build-tools/settings.gradle

@@ -5,6 +5,10 @@
  * in compliance with, at your election, the Elastic License 2.0 or the Server
  * Side Public License, v 1.
  */
+pluginManagement {
+    includeBuild "../build-conventions"
+}
+
 include 'reaper'
 
 dependencyResolutionManagement {

+ 7 - 0
build-tools/src/main/java/org/elasticsearch/gradle/VersionProperties.java

@@ -30,6 +30,10 @@ public class VersionProperties {
         return lucene;
     }
 
+    public static String getBundledJdkMajorVersion() {
+        return bundledJdkMajorVersion;
+    }
+
     public static String getBundledJdkVersion() {
         return bundledJdkVersion;
     }
@@ -45,7 +49,9 @@ public class VersionProperties {
     private static final String elasticsearch;
     private static final String lucene;
     private static final String bundledJdkVersion;
+    private static final String bundledJdkMajorVersion;
     private static final String bundledJdkVendor;
+
     private static final Map<String, String> versions = new HashMap<String, String>();
 
     static {
@@ -54,6 +60,7 @@ public class VersionProperties {
         lucene = props.getProperty("lucene");
         bundledJdkVendor = props.getProperty("bundled_jdk_vendor");
         bundledJdkVersion = props.getProperty("bundled_jdk");
+        bundledJdkMajorVersion = bundledJdkVersion.split("[.+]")[0];
 
         for (String property : props.stringPropertyNames()) {
             versions.put(property, props.getProperty(property));

+ 0 - 2
build.gradle

@@ -19,8 +19,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.util.GradleUtils
 import org.gradle.plugins.ide.eclipse.model.AccessRule
 import org.gradle.plugins.ide.eclipse.model.ProjectDependency
-import org.gradle.util.internal.DistributionLocator
-import org.gradle.util.GradleVersion
 
 import java.nio.file.Files
 

+ 5 - 0
docs/changelog/95621.yaml

@@ -0,0 +1,5 @@
+pr: 95621
+summary: Check if an analytics event data stream exists before installing pipeline
+area: Application
+type: bug
+issues: []

+ 5 - 0
docs/changelog/95694.yaml

@@ -0,0 +1,5 @@
+pr: 95694
+summary: Add "_storage" internal user
+area: Security
+type: enhancement
+issues: []

+ 1 - 1
docs/reference/cluster/tasks.asciidoc

@@ -44,7 +44,7 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=detailed]
 
 include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=group-by]
 
-include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id-query-parm]
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=nodes]
 
 include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=parent-task-id]
 

+ 5 - 0
docs/reference/mapping/types/histogram.asciidoc

@@ -82,6 +82,11 @@ of official GA features.
 default configuration. Synthetic `_source` cannot be used together with
 <<ignore-malformed,`ignore_malformed`>> or <<copy-to,`copy_to`>>.
 
+NOTE: To save space, zero-count buckets are not stored in the histogram doc values.
+As a result, when indexing a histogram field in an index with synthetic source enabled,
+indexing a histogram including zero-count buckets will result in missing buckets when
+fetching back the histogram.
+
 [[histogram-ex]]
 ==== Examples
 

+ 11 - 5
docs/reference/modules/discovery/fault-detection.asciidoc

@@ -364,15 +364,21 @@ other delays on such a connection.
 * Long waits for particular threads to be available can be identified by taking
 stack dumps (for example, using `jstack`) or a profiling trace (for example,
 using Java Flight Recorder) in the few seconds leading up to a node departure.
++
+By default the follower checks will time out after 30s, so if node departures
+are unpredictable then capture stack dumps every 15s to be sure that at least
+one stack dump was taken at the right time.
++
 The <<cluster-nodes-hot-threads>> API sometimes yields useful information, but
 bear in mind that this API also requires a number of `transport_worker` and
 `generic` threads across all the nodes in the cluster. The API may be affected
 by the very problem you're trying to diagnose. `jstack` is much more reliable
-since it doesn't require any JVM threads. The threads involved in the follower
-checks are `transport_worker` and `cluster_coordination` threads, for which
-there should never be a long wait. There may also be evidence of long waits for
-threads in the {es} logs. Refer to <<modules-network-threading-model>> for more
-information.
+since it doesn't require any JVM threads.
++
+The threads involved in the follower checks are `transport_worker` and
+`cluster_coordination` threads, for which there should never be a long wait.
+There may also be evidence of long waits for threads in the {es} logs. See
+<<modules-network-threading-model>> for more information.
 
 ===== Diagnosing `ShardLockObtainFailedException` failures
 

+ 5 - 6
docs/reference/rest-api/common-parms.asciidoc

@@ -714,12 +714,11 @@ tag::node-id[]
 returned information.
 end::node-id[]
 
-tag::node-id-query-parm[]
-`node_id`::
-(Optional, string)
-Comma-separated list of node IDs or names
-used to limit returned information.
-end::node-id-query-parm[]
+tag::nodes[]
+`nodes`::
+(Optional, string) Comma-separated list of node IDs or names used to limit
+returned information.
+end::nodes[]
 
 tag::offsets[]
 `<offsets>`::

+ 1 - 0
docs/reference/troubleshooting.asciidoc

@@ -49,6 +49,7 @@ fix problems that an {es} deployment might encounter.
 [discrete]
 [[troubleshooting-others]]
 === Others
+* <<cluster-fault-detection-troubleshooting,Troubleshooting an unstable cluster>>
 * <<discovery-troubleshooting,Troubleshooting discovery>>
 * <<monitoring-troubleshooting,Troubleshooting monitoring>>
 * <<transform-troubleshooting,Troubleshooting transforms>>

+ 0 - 2
qa/mixed-cluster/build.gradle

@@ -57,8 +57,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
         nonInputProperties.systemProperty('tests.clustername', baseName)
       }
       systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
-      systemProperty 'tests.rest.blacklist', ['tsdb/140_routing_path/missing routing path field',
-                                              'tsdb/140_routing_path/multi-value routing path field'].join(',')
       onlyIf("BWC tests disabled") { project.bwc_tests_enabled }
     }
 

+ 12 - 12
rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/140_routing_path.yml

@@ -1,8 +1,8 @@
 missing routing path field:
   - skip:
       features: close_to
-      version: " - 8.0.99"
-      reason: introduced in 8.1.0
+      version: " - 8.1.99"
+      reason: tsdb indexing changed in 8.2.0
 
   - do:
       indices.create:
@@ -67,18 +67,18 @@ missing routing path field:
                   avg:
                     field: voltage
 
-  - match: {hits.total.value: 8}
-  - length: {aggregations.tsids.buckets: 4}
+  - match: { hits.total.value: 8 }
+  - length: { aggregations.tsids.buckets: 4 }
 
-  - match: {aggregations.tsids.buckets.0.key.uid: "947e4ced-1786-4e53-9e0c-5c447e959507" }
-  - match: {aggregations.tsids.buckets.0.key.tag: null }
-  - match: {aggregations.tsids.buckets.0.doc_count: 2 }
-  - close_to: {aggregations.tsids.buckets.0.voltage.value: { value: 7.15, error: 0.01 }}
+  - match: { aggregations.tsids.buckets.0.key.uid: "947e4ced-1786-4e53-9e0c-5c447e959507" }
+  - match: { aggregations.tsids.buckets.0.key.tag: null }
+  - match: { aggregations.tsids.buckets.0.doc_count: 2 }
+  - close_to: { aggregations.tsids.buckets.0.voltage.value: { value: 7.15, error: 0.01 }}
 
   - match: { aggregations.tsids.buckets.1.key.uid: "df3145b3-0563-4d3b-a0f7-897eb2876ea9" }
   - match: { aggregations.tsids.buckets.1.key.tag: null }
-  - match: {aggregations.tsids.buckets.1.doc_count: 2 }
-  - close_to: {aggregations.tsids.buckets.1.voltage.value: { value: 6.69, error: 0.01 }}
+  - match: { aggregations.tsids.buckets.1.doc_count: 2 }
+  - close_to: { aggregations.tsids.buckets.1.voltage.value: { value: 6.69, error: 0.01 }}
 
   - match: { aggregations.tsids.buckets.2.key.uid: "947e4ced-1786-4e53-9e0c-5c447e959507" }
   - match: { aggregations.tsids.buckets.2.key.tag: "first" }
@@ -125,8 +125,8 @@ missing dimension on routing path field:
 multi-value routing path field:
   - skip:
       features: close_to
-      version: " - 8.0.99"
-      reason: introduced in 8.1.0
+      version: " - 8.1.99"
+      reason: tsdb indexing changed in 8.2.0
 
   - do:
       indices.create:

+ 12 - 18
server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java

@@ -9,7 +9,6 @@
 package org.elasticsearch.action.admin.cluster.node.tasks.get;
 
 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionListener;
@@ -162,23 +161,18 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
                     ),
                     () -> taskManager.unregisterRemovedTaskListener(removedTaskListener)
                 );
-                if (future.isDone()) {
-                    // The task has already finished, we can run the completion listener in the same thread
-                    waitedForCompletionListener.onResponse(null);
-                } else {
-                    future.addListener(
-                        new ContextPreservingActionListener<>(
-                            threadPool.getThreadContext().newRestorableContext(false),
-                            waitedForCompletionListener
-                        )
-                    );
-                    var failByTimeout = threadPool.schedule(
-                        () -> future.onFailure(new ElasticsearchTimeoutException("Timed out waiting for completion of task")),
-                        requireNonNullElse(request.getTimeout(), DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT),
-                        ThreadPool.Names.SAME
-                    );
-                    future.addListener(ActionListener.running(failByTimeout::cancel));
-                }
+
+                future.addListener(
+                    new ContextPreservingActionListener<>(
+                        threadPool.getThreadContext().newRestorableContext(false),
+                        waitedForCompletionListener
+                    )
+                );
+                future.addTimeout(
+                    requireNonNullElse(request.getTimeout(), DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT),
+                    threadPool,
+                    ThreadPool.Names.SAME
+                );
             } else {
                 TaskInfo info = runningTask.taskInfo(clusterService.localNode().getId(), true);
                 listener.onResponse(new GetTaskResponse(new TaskResult(false, info)));

+ 14 - 23
server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java

@@ -8,14 +8,12 @@
 
 package org.elasticsearch.action.admin.cluster.node.tasks.list;
 
-import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.action.support.ListenableActionFuture;
-import org.elasticsearch.action.support.ThreadedActionListener;
 import org.elasticsearch.action.support.tasks.TransportTasksAction;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
@@ -123,27 +121,20 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
             removalRefs.decRef();
             collectionComplete.set(true);
 
-            if (future.isDone()) {
-                // No tasks to wait, we can run nodeOperation in the management pool
-                allMatchedTasksRemovedListener.onResponse(null);
-            } else {
-                final var threadPool = clusterService.threadPool();
-                future.addListener(
-                    new ThreadedActionListener<>(
-                        threadPool.executor(ThreadPool.Names.MANAGEMENT),
-                        new ContextPreservingActionListener<>(
-                            threadPool.getThreadContext().newRestorableContext(false),
-                            allMatchedTasksRemovedListener
-                        )
-                    )
-                );
-                var cancellable = threadPool.schedule(
-                    () -> future.onFailure(new ElasticsearchTimeoutException("Timed out waiting for completion of tasks")),
-                    requireNonNullElse(request.getTimeout(), DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT),
-                    ThreadPool.Names.SAME
-                );
-                future.addListener(ActionListener.running(cancellable::cancel));
-            }
+            final var threadPool = clusterService.threadPool();
+            future.addListener(
+                new ContextPreservingActionListener<>(
+                    threadPool.getThreadContext().newRestorableContext(false),
+                    allMatchedTasksRemovedListener
+                ),
+                threadPool.executor(ThreadPool.Names.MANAGEMENT),
+                null
+            );
+            future.addTimeout(
+                requireNonNullElse(request.getTimeout(), DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT),
+                threadPool,
+                ThreadPool.Names.SAME
+            );
         } else {
             super.processTasks(request, operation, nodeOperation);
         }

+ 32 - 0
server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java

@@ -10,6 +10,7 @@ package org.elasticsearch.action.support;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
@@ -17,6 +18,8 @@ import org.elasticsearch.common.util.concurrent.ListenableFuture;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.util.concurrent.UncategorizedExecutionException;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executor;
@@ -290,4 +293,33 @@ public class SubscribableListener<T> implements ActionListener<T> {
             }
         }
     }
+
+    /**
+     * Adds a timeout to this listener, such that if the timeout elapses before the listener is completed then it will be completed with an
+     * {@link ElasticsearchTimeoutException}.
+     * <p>
+     * The process which is racing against this timeout should stop and clean up promptly when the timeout occurs to avoid unnecessary
+     * work. For instance, it could check that the race is not lost by calling {@link #isDone} whenever appropriate, or it could subscribe
+     * another listener which performs any necessary cleanup steps.
+     */
+    public void addTimeout(TimeValue timeout, ThreadPool threadPool, String timeoutExecutor) {
+        if (isDone()) {
+            return;
+        }
+        addListener(ActionListener.running(scheduleTimeout(timeout, threadPool, timeoutExecutor)));
+    }
+
+    private Runnable scheduleTimeout(TimeValue timeout, ThreadPool threadPool, String timeoutExecutor) {
+        try {
+            final var cancellable = threadPool.schedule(
+                () -> onFailure(new ElasticsearchTimeoutException(Strings.format("timed out after [%s/%dms]", timeout, timeout.millis()))),
+                timeout,
+                timeoutExecutor
+            );
+            return cancellable::cancel;
+        } catch (Exception e) {
+            onFailure(e);
+            return () -> {};
+        }
+    }
 }

+ 46 - 94
server/src/main/java/org/elasticsearch/index/shard/IndexShard.java

@@ -283,7 +283,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
     private final AtomicReference<Translog.Location> pendingRefreshLocation = new AtomicReference<>();
     private final RefreshPendingLocationListener refreshPendingLocationListener;
     private volatile boolean useRetentionLeasesInPeerRecovery;
-    private final boolean isDataStreamIndex; // if a shard is a part of data stream
     private final LongSupplier relativeTimeInNanosSupplier;
     private volatile long startedRelativeTimeInNanos;
     private volatile long indexingTimeBeforeShardStartedInNanos;
@@ -333,14 +332,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         this.mapperService = mapperService;
         this.indexCache = indexCache;
         this.internalIndexingStats = new InternalIndexingStats();
-        final List<IndexingOperationListener> listenersList = new ArrayList<>(listeners);
-        listenersList.add(internalIndexingStats);
-        this.indexingOperationListeners = new IndexingOperationListener.CompositeListener(listenersList, logger);
+        this.indexingOperationListeners = new IndexingOperationListener.CompositeListener(
+            CollectionUtils.appendToCopyNoNullElements(listeners, internalIndexingStats),
+            logger
+        );
         this.bulkOperationListener = new ShardBulkStats();
         this.globalCheckpointSyncer = globalCheckpointSyncer;
         this.retentionLeaseSyncer = Objects.requireNonNull(retentionLeaseSyncer);
         this.searchOperationListener = new SearchOperationListener.CompositeListener(
-            CollectionUtils.appendToCopy(searchOperationListener, searchStats),
+            CollectionUtils.appendToCopyNoNullElements(searchOperationListener, searchStats),
             logger
         );
         this.getService = new ShardGetService(indexSettings, this, mapperService);
@@ -384,12 +384,17 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         }
         indexShardOperationPermits = new IndexShardOperationPermits(shardId, threadPool);
         readerWrapper = indexReaderWrapper;
-        refreshListeners = buildRefreshListeners();
+        refreshListeners = new RefreshListeners(
+            indexSettings::getMaxRefreshListeners,
+            () -> refresh("too_many_listeners"),
+            logger,
+            threadPool.getThreadContext(),
+            externalRefreshMetric
+        );
         lastSearcherAccess.set(threadPool.relativeTimeInMillis());
         persistMetadata(path, indexSettings, shardRouting, null, logger);
         this.useRetentionLeasesInPeerRecovery = replicationTracker.hasAllPeerRecoveryRetentionLeases();
         this.refreshPendingLocationListener = new RefreshPendingLocationListener();
-        this.isDataStreamIndex = mapperService == null ? false : mapperService.mappingLookup().isDataStreamTimestampFieldEnabled();
         this.relativeTimeInNanosSupplier = relativeTimeInNanosSupplier;
         this.indexCommitListener = indexCommitListener;
     }
@@ -409,14 +414,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         return indexSortSupplier.get();
     }
 
-    /**
-     * Returns if this shard is a part of datastream
-     * @return {@code true} if this shard is a part of datastream, {@code false} otherwise
-     */
-    public boolean isDataStreamIndex() {
-        return isDataStreamIndex;
-    }
-
     public ShardGetService getService() {
         return this.getService;
     }
@@ -491,10 +488,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         return this.shardRouting;
     }
 
-    public QueryCachingPolicy getQueryCachingPolicy() {
-        return cachingPolicy;
-    }
-
     @Override
     public void updateShardState(
         final ShardRouting newRouting,
@@ -1174,8 +1167,23 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         assert opPrimaryTerm <= getOperationPrimaryTerm()
             : "op term [ " + opPrimaryTerm + " ] > shard term [" + getOperationPrimaryTerm() + "]";
         ensureWriteAllowed(origin);
-        final Engine.Delete delete = prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
-        return delete(engine, delete);
+        active.set(true);
+        Engine.Delete delete = indexingOperationListeners.preDelete(
+            shardId,
+            prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm)
+        );
+        final Engine.DeleteResult result;
+        try {
+            if (logger.isTraceEnabled()) {
+                logger.trace("delete [{}] (seq no [{}])", delete.uid().text(), delete.seqNo());
+            }
+            result = engine.delete(delete);
+        } catch (Exception e) {
+            indexingOperationListeners.postDelete(shardId, delete, e);
+            throw e;
+        }
+        indexingOperationListeners.postDelete(shardId, delete, result);
+        return result;
     }
 
     public static Engine.Delete prepareDelete(
@@ -1193,23 +1201,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm);
     }
 
-    private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException {
-        active.set(true);
-        final Engine.DeleteResult result;
-        delete = indexingOperationListeners.preDelete(shardId, delete);
-        try {
-            if (logger.isTraceEnabled()) {
-                logger.trace("delete [{}] (seq no [{}])", delete.uid().text(), delete.seqNo());
-            }
-            result = engine.delete(delete);
-        } catch (Exception e) {
-            indexingOperationListeners.postDelete(shardId, delete, e);
-            throw e;
-        }
-        indexingOperationListeners.postDelete(shardId, delete, result);
-        return result;
-    }
-
     public Engine.GetResult get(Engine.Get get) {
         readAllowed();
         MappingLookup mappingLookup = mapperService.mappingLookup();
@@ -1227,9 +1218,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
      */
     public Engine.RefreshResult refresh(String source) {
         verifyNotClosed();
-        if (logger.isTraceEnabled()) {
-            logger.trace("refresh with source [{}]", source);
-        }
+        logger.trace("refresh with source [{}]", source);
         return getEngine().refresh(source);
     }
 
@@ -1409,10 +1398,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
     }
 
     public void forceMerge(ForceMergeRequest forceMerge) throws IOException {
-        verifyActive();
-        if (logger.isTraceEnabled()) {
-            logger.trace("force merge with {}", forceMerge);
+        IndexShardState state = this.state; // one time volatile read
+        if (state != IndexShardState.STARTED) {
+            throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard is active");
         }
+        logger.trace("force merge with {}", forceMerge);
         Engine engine = getEngine();
         engine.forceMerge(forceMerge.flush(), forceMerge.maxNumSegments(), forceMerge.onlyExpungeDeletes(), forceMerge.forceMergeUUID());
     }
@@ -1620,7 +1610,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         }
 
         @Override
-        protected void doClose() throws IOException {
+        protected void doClose() {
             // don't close here - mimic the MultiReader#doClose = false behavior that FilterDirectoryReader doesn't have
         }
 
@@ -2185,13 +2175,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         }
     }
 
-    protected final void verifyActive() throws IllegalIndexShardStateException {
-        IndexShardState state = this.state; // one time volatile read
-        if (state != IndexShardState.STARTED) {
-            throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard is active");
-        }
-    }
-
     /**
      * Returns number of heap bytes used by the indexing buffer for this shard, or 0 if the shard is closed
      */
@@ -3051,11 +3034,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         // }
         assert recoveryState.getRecoverySource().equals(shardRouting.recoverySource());
         switch (recoveryState.getRecoverySource().getType()) {
-            case EMPTY_STORE:
-            case EXISTING_STORE:
-                executeRecovery("from store", recoveryState, recoveryListener, this::recoverFromStore);
-                break;
-            case PEER:
+            case EMPTY_STORE, EXISTING_STORE -> executeRecovery("from store", recoveryState, recoveryListener, this::recoverFromStore);
+            case PEER -> {
                 try {
                     markAsRecovering("from " + recoveryState.getSourceNode(), recoveryState);
                     recoveryTargetService.startRecovery(this, recoveryState.getSourceNode(), recoveryListener);
@@ -3063,8 +3043,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     failShard("corrupted preexisting index", e);
                     recoveryListener.onRecoveryFailure(new RecoveryFailedException(recoveryState, null, e), true);
                 }
-                break;
-            case SNAPSHOT:
+            }
+            case SNAPSHOT -> {
                 final String repo = ((SnapshotRecoverySource) recoveryState.getRecoverySource()).snapshot().getRepository();
                 executeRecovery(
                     "from snapshot",
@@ -3072,8 +3052,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     recoveryListener,
                     l -> restoreFromRepository(repositoriesService.repository(repo), l)
                 );
-                break;
-            case LOCAL_SHARDS:
+            }
+            case LOCAL_SHARDS -> {
                 final IndexMetadata indexMetadata = indexSettings().getIndexMetadata();
                 final Index resizeSourceIndex = indexMetadata.getResizeSourceIndex();
                 final List<IndexShard> startedShards = new ArrayList<>();
@@ -3096,7 +3076,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     numShards = -1;
                     requiredShards = Collections.emptySet();
                 }
-
                 if (numShards == startedShards.size()) {
                     assert requiredShards.isEmpty() == false;
                     executeRecovery(
@@ -3127,9 +3106,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                     }
                     throw e;
                 }
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown recovery source " + recoveryState.getRecoverySource());
+            }
+            default -> throw new IllegalArgumentException("Unknown recovery source " + recoveryState.getRecoverySource());
         }
     }
 
@@ -3704,36 +3682,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         }
     }
 
-    /**
-     * Build {@linkplain RefreshListeners} for this shard.
-     */
-    private RefreshListeners buildRefreshListeners() {
-        return new RefreshListeners(
-            indexSettings::getMaxRefreshListeners,
-            () -> refresh("too_many_listeners"),
-            logger,
-            threadPool.getThreadContext(),
-            externalRefreshMetric
-        );
-    }
-
     /**
      * Simple struct encapsulating a shard failure
      *
      * @see IndexShard#addShardFailureCallback(Consumer)
      */
-    public static final class ShardFailure {
-        public final ShardRouting routing;
-        public final String reason;
-        @Nullable
-        public final Exception cause;
-
-        public ShardFailure(ShardRouting routing, String reason, @Nullable Exception cause) {
-            this.routing = routing;
-            this.reason = reason;
-            this.cause = cause;
-        }
-    }
+    public record ShardFailure(ShardRouting routing, String reason, @Nullable Exception cause) {}
 
     EngineFactory getEngineFactory() {
         return engineFactory;
@@ -3765,9 +3719,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
                 setRefreshPending(engine);
                 return false;
             } else {
-                if (logger.isTraceEnabled()) {
-                    logger.trace("refresh with source [schedule]");
-                }
+                logger.trace("refresh with source [schedule]");
                 return getEngine().maybeRefresh("schedule").refreshed();
             }
         }
@@ -3919,7 +3871,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
         }
 
         @Override
-        public void beforeRefresh() throws IOException {
+        public void beforeRefresh() {
             if (Assertions.ENABLED) {
                 assert callingThread == null
                     : "beforeRefresh was called by " + callingThread.getName() + " without a corresponding call to afterRefresh";

+ 3 - 3
server/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java

@@ -929,14 +929,14 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple
     private class FailedShardHandler implements Consumer<IndexShard.ShardFailure> {
         @Override
         public void accept(final IndexShard.ShardFailure shardFailure) {
-            final ShardRouting shardRouting = shardFailure.routing;
+            final ShardRouting shardRouting = shardFailure.routing();
             threadPool.generic().execute(() -> {
                 synchronized (IndicesClusterStateService.this) {
                     failAndRemoveShard(
                         shardRouting,
                         true,
-                        "shard failure, reason [" + shardFailure.reason + "]",
-                        shardFailure.cause,
+                        "shard failure, reason [" + shardFailure.reason() + "]",
+                        shardFailure.cause(),
                         clusterService.state()
                     );
                 }

+ 81 - 0
server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java

@@ -9,10 +9,13 @@
 package org.elasticsearch.action.support;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 
@@ -26,6 +29,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.IntFunction;
 
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class SubscribableListenerTests extends ESTestCase {
 
@@ -292,4 +296,81 @@ public class SubscribableListenerTests extends ESTestCase {
 
         assertTrue(completion.get());
     }
+
+    public void testTimeoutBeforeCompletion() {
+        final var deterministicTaskQueue = new DeterministicTaskQueue();
+        final var threadPool = deterministicTaskQueue.getThreadPool();
+
+        final var headerName = "test-header-name";
+        final var headerValue = randomAlphaOfLength(10);
+
+        final var timedOut = new AtomicBoolean();
+        final var listener = new SubscribableListener<Void>();
+        listener.addListener(new ActionListener<>() {
+            @Override
+            public void onResponse(Void unused) {
+                fail("should not execute");
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                assertThat(e, instanceOf(ElasticsearchTimeoutException.class));
+                assertEquals("timed out after [30s/30000ms]", e.getMessage());
+                assertEquals(headerValue, threadPool.getThreadContext().getHeader(headerName));
+                assertTrue(timedOut.compareAndSet(false, true));
+            }
+        });
+        try (var ignored = threadPool.getThreadContext().stashContext()) {
+            threadPool.getThreadContext().putHeader(headerName, headerValue);
+            listener.addTimeout(TimeValue.timeValueSeconds(30), threadPool, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC));
+        }
+
+        if (randomBoolean()) {
+            deterministicTaskQueue.scheduleAt(
+                deterministicTaskQueue.getCurrentTimeMillis() + randomLongBetween(
+                    TimeValue.timeValueSeconds(30).millis() + 1,
+                    TimeValue.timeValueSeconds(60).millis()
+                ),
+                () -> listener.onResponse(null)
+            );
+        }
+
+        assertFalse(timedOut.get());
+        assertFalse(listener.isDone());
+        deterministicTaskQueue.runAllTasksInTimeOrder();
+        assertTrue(timedOut.get());
+        assertTrue(listener.isDone());
+    }
+
+    public void testCompletionBeforeTimeout() {
+        final var deterministicTaskQueue = new DeterministicTaskQueue();
+        final var threadPool = deterministicTaskQueue.getThreadPool();
+
+        final var complete = new AtomicBoolean();
+        final var listener = new SubscribableListener<Void>();
+        listener.addListener(new ActionListener<>() {
+            @Override
+            public void onResponse(Void unused) {
+                assertTrue(complete.compareAndSet(false, true));
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                fail("should not fail");
+            }
+        });
+        listener.addTimeout(TimeValue.timeValueSeconds(30), threadPool, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC));
+
+        deterministicTaskQueue.scheduleAt(
+            deterministicTaskQueue.getCurrentTimeMillis() + randomLongBetween(0, TimeValue.timeValueSeconds(30).millis() - 1),
+            () -> listener.onResponse(null)
+        );
+
+        assertFalse(complete.get());
+        assertFalse(listener.isDone());
+        deterministicTaskQueue.runAllTasksInTimeOrder();
+        assertTrue(complete.get());
+        assertTrue(listener.isDone());
+    }
+
 }

+ 2 - 2
server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java

@@ -586,7 +586,7 @@ public class DesiredBalanceComputerTests extends ESTestCase {
             usedDiskSpace.put(nodeId, 0L);
         }
 
-        var indices = scaledRandomIntBetween(1, 1000);
+        var indices = scaledRandomIntBetween(1, 500);
         var totalShards = 0;
 
         var shardSizes = new HashMap<String, Long>();
@@ -597,7 +597,7 @@ public class DesiredBalanceComputerTests extends ESTestCase {
         for (int i = 0; i < indices; i++) {
             var indexName = "index-" + i;
             var shards = randomIntBetween(1, 10);
-            var replicas = randomIntBetween(1, nodes - 1);
+            var replicas = scaledRandomIntBetween(1, nodes - 1);
             totalShards += shards * (replicas + 1);
             var inSyncIds = randomList(shards * (replicas + 1), shards * (replicas + 1), () -> UUIDs.randomBase64UUID(random()));
             var shardSize = randomLongBetween(10_000_000L, 10_000_000_000L);

+ 0 - 5
server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java

@@ -12,7 +12,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
@@ -74,8 +73,6 @@ public class DefaultSearchContextTests extends ESTestCase {
 
         ThreadPool threadPool = new TestThreadPool(this.getClass().getName());
         IndexShard indexShard = mock(IndexShard.class);
-        QueryCachingPolicy queryCachingPolicy = mock(QueryCachingPolicy.class);
-        when(indexShard.getQueryCachingPolicy()).thenReturn(queryCachingPolicy);
         when(indexShard.getThreadPool()).thenReturn(threadPool);
 
         int maxResultWindow = randomIntBetween(50, 100);
@@ -303,8 +300,6 @@ public class DefaultSearchContextTests extends ESTestCase {
 
         ThreadPool threadPool = new TestThreadPool(this.getClass().getName());
         IndexShard indexShard = mock(IndexShard.class);
-        QueryCachingPolicy queryCachingPolicy = mock(QueryCachingPolicy.class);
-        when(indexShard.getQueryCachingPolicy()).thenReturn(queryCachingPolicy);
         when(indexShard.getThreadPool()).thenReturn(threadPool);
 
         IndexService indexService = mock(IndexService.class);

+ 25 - 4
settings.gradle

@@ -1,18 +1,23 @@
+import org.elasticsearch.gradle.internal.toolchain.OracleOpenJdkToolchainResolver
+import org.elasticsearch.gradle.internal.toolchain.ArchivedOracleJdkToolchainResolver
+import org.elasticsearch.gradle.internal.toolchain.AdoptiumJdkToolchainResolver
+
 pluginManagement {
   repositories {
     mavenCentral()
     gradlePluginPortal()
   }
+
+  includeBuild "build-conventions"
+  includeBuild "build-tools"
+  includeBuild "build-tools-internal"
 }
 
 plugins {
   id "com.gradle.enterprise" version "3.12.6"
+  id 'elasticsearch.java-toolchain'
 }
 
-includeBuild "build-conventions"
-includeBuild "build-tools"
-includeBuild "build-tools-internal"
-
 rootProject.name = "elasticsearch"
 
 dependencyResolutionManagement {
@@ -23,6 +28,22 @@ dependencyResolutionManagement {
   }
 }
 
+toolchainManagement {
+  jvm {
+    javaRepositories {
+      repository('bundledOracleOpendJdk') {
+        resolverClass = OracleOpenJdkToolchainResolver
+      }
+      repository('adoptiumJdks') {
+        resolverClass = AdoptiumJdkToolchainResolver
+      }
+      repository('archivedOracleJdks') {
+        resolverClass = ArchivedOracleJdkToolchainResolver
+      }
+    }
+  }
+}
+
 List projects = [
   'rest-api-spec',
   'docs',

+ 1 - 1
test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java

@@ -115,7 +115,7 @@ public abstract class IndexShardTestCase extends ESTestCase {
 
     private static final Consumer<IndexShard.ShardFailure> DEFAULT_SHARD_FAILURE_HANDLER = failure -> {
         if (failOnShardFailures.get()) {
-            throw new AssertionError(failure.reason, failure.cause);
+            throw new AssertionError(failure.reason(), failure.cause());
         }
     };
 

+ 1 - 1
test/test-clusters/build.gradle

@@ -14,5 +14,5 @@ dependencies {
 }
 
 tasks.named("processResources").configure {
-  from(new File(Util.locateElasticsearchWorkspace(gradle), "build-tools-internal/version.properties"))
+  from(new File(Util.locateElasticsearchWorkspace(project.gradle), "build-tools-internal/version.properties"))
 }

+ 1 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java

@@ -51,6 +51,7 @@ public class InternalUsers {
             null /* CrossClusterAccessUser has a role descriptor, but it should never be resolved by this class */,
             CrossClusterAccessUser::is
         );
+        defineUser(StorageInternalUser.NAME, StorageInternalUser.INSTANCE, StorageInternalUser.ROLE_DESCRIPTOR, StorageInternalUser::is);
     }
 
     private static void defineUser(String name, User user, @Nullable RoleDescriptor roleDescriptor, Predicate<User> predicate) {

+ 55 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/StorageInternalUser.java

@@ -0,0 +1,55 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.core.security.user;
+
+import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
+import org.elasticsearch.xpack.core.security.support.MetadataUtils;
+
+/**
+ * "Storage" internal user - used when the indexing/storage subsystem needs to perform actions on specific indices
+ * (that may not be permitted by the authenticated user)
+ */
+public class StorageInternalUser extends User {
+
+    public static final String NAME = UsernamesField.STORAGE_USER_NAME;
+    public static final RoleDescriptor ROLE_DESCRIPTOR = new RoleDescriptor(
+        UsernamesField.STORAGE_ROLE_NAME,
+        new String[] {},
+        new RoleDescriptor.IndicesPrivileges[] {
+            RoleDescriptor.IndicesPrivileges.builder()
+                .indices("*")
+                .privileges(RefreshAction.NAME + "*")
+                .allowRestrictedIndices(true)
+                .build() },
+        new String[] {},
+        MetadataUtils.DEFAULT_RESERVED_METADATA
+    );
+    public static final StorageInternalUser INSTANCE = new StorageInternalUser();
+
+    private StorageInternalUser() {
+        super(NAME, Strings.EMPTY_ARRAY);
+        assert enabled();
+        assert roles() != null && roles().length == 0;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return INSTANCE == o;
+    }
+
+    @Override
+    public int hashCode() {
+        return System.identityHashCode(this);
+    }
+
+    public static boolean is(User user) {
+        return INSTANCE.equals(user);
+    }
+
+}

+ 2 - 1
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/User.java

@@ -154,7 +154,8 @@ public class User implements ToXContentObject {
             || XPackSecurityUser.is(user)
             || SecurityProfileUser.is(user)
             || AsyncSearchUser.is(user)
-            || CrossClusterAccessUser.is(user);
+            || CrossClusterAccessUser.is(user)
+            || StorageInternalUser.is(user);
     }
 
     /** Write the given {@link User} */

+ 2 - 0
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java

@@ -30,6 +30,8 @@ public final class UsernamesField {
     public static final String APM_ROLE = "apm_system";
     public static final String ASYNC_SEARCH_NAME = "_async_search";
     public static final String ASYNC_SEARCH_ROLE = "_async_search";
+    public static final String STORAGE_USER_NAME = "_storage";
+    public static final String STORAGE_ROLE_NAME = "_storage";
 
     public static final String REMOTE_MONITORING_NAME = "remote_monitoring_user";
     public static final String REMOTE_MONITORING_COLLECTION_ROLE = "remote_monitoring_collector";

+ 6 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/InternalUsersTests.java

@@ -58,6 +58,12 @@ public class InternalUsersTests extends ESTestCase {
         assertThat(e, throwableWithMessage("should never try to get the roles for internal user [_cross_cluster_access]"));
     }
 
+    public void testStorageUser() {
+        assertThat(InternalUsers.getUser("_storage"), is(StorageInternalUser.INSTANCE));
+        assertThat(InternalUsers.getInternalUserName(StorageInternalUser.INSTANCE), is("_storage"));
+        assertThat(InternalUsers.getRoleDescriptor(StorageInternalUser.INSTANCE), is(StorageInternalUser.ROLE_DESCRIPTOR));
+    }
+
     public void testRegularUser() {
         var username = randomAlphaOfLengthBetween(4, 12);
         expectThrows(IllegalStateException.class, () -> InternalUsers.getUser(username));

+ 82 - 0
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/user/StorageInternalUserTests.java

@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.security.user;
+
+import org.apache.lucene.util.automaton.Automaton;
+import org.apache.lucene.util.automaton.CharacterRunAutomaton;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
+import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
+import org.elasticsearch.action.admin.indices.refresh.TransportUnpromotableShardRefreshAction;
+import org.elasticsearch.action.bulk.BulkAction;
+import org.elasticsearch.action.get.GetAction;
+import org.elasticsearch.cluster.metadata.IndexAbstraction;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission;
+import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
+import org.elasticsearch.xpack.core.security.authz.permission.RemoteIndicesPermission;
+import org.elasticsearch.xpack.core.security.authz.permission.Role;
+import org.elasticsearch.xpack.core.security.authz.permission.RunAsPermission;
+import org.elasticsearch.xpack.core.security.authz.permission.SimpleRole;
+import org.elasticsearch.xpack.core.security.test.TestRestrictedIndices;
+
+import java.util.List;
+
+import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.is;
+
+public class StorageInternalUserTests extends ESTestCase {
+
+    public void testRoleDescriptor() {
+        final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY);
+        final SimpleRole role = Role.buildFromRoleDescriptor(
+            StorageInternalUser.ROLE_DESCRIPTOR,
+            fieldPermissionsCache,
+            TestRestrictedIndices.RESTRICTED_INDICES
+        );
+
+        assertThat(role.cluster().privileges(), hasSize(0));
+        assertThat(role.runAs(), is(RunAsPermission.NONE));
+        assertThat(role.application(), is(ApplicationPermission.NONE));
+        assertThat(role.remoteIndices(), is(RemoteIndicesPermission.NONE));
+
+        final List<String> sampleAllowedActions = List.of(RefreshAction.NAME, TransportUnpromotableShardRefreshAction.NAME);
+        checkIndexAccess(role, randomFrom(sampleAllowedActions), randomAlphaOfLengthBetween(4, 8), true);
+        checkIndexAccess(role, randomFrom(sampleAllowedActions), ".ds-" + randomAlphaOfLengthBetween(4, 8), true);
+        checkIndexAccess(role, randomFrom(sampleAllowedActions), INTERNAL_SECURITY_MAIN_INDEX_7, true);
+
+        final List<String> sampleDeniedActions = List.of(GetAction.NAME, BulkAction.NAME, PutMappingAction.NAME, DeleteIndexAction.NAME);
+        checkIndexAccess(role, randomFrom(sampleDeniedActions), randomAlphaOfLengthBetween(4, 8), false);
+        checkIndexAccess(role, randomFrom(sampleDeniedActions), ".ds-" + randomAlphaOfLengthBetween(4, 8), false);
+        checkIndexAccess(role, randomFrom(sampleDeniedActions), INTERNAL_SECURITY_MAIN_INDEX_7, false);
+    }
+
+    private static void checkIndexAccess(SimpleRole role, String action, String indexName, boolean expectedValue) {
+        assertThat("Role " + role + " should " + (expectedValue ? "grant " : "deny ") + action, role.indices().check(action), is(expectedValue));
+
+        final Automaton automaton = role.indices().allowedActionsMatcher(indexName);
+        assertThat(
+            "Role " + role + " should grant " + action + " access to " + indexName,
+            new CharacterRunAutomaton(automaton).run(action),
+            is(expectedValue)
+        );
+
+        final IndexMetadata metadata = IndexMetadata.builder(indexName).settings(indexSettings(Version.CURRENT, 1, 1)).build();
+        final IndexAbstraction.ConcreteIndex index = new IndexAbstraction.ConcreteIndex(metadata);
+        assertThat(
+            "Role " + role + " should grant " + action + " access to " + indexName,
+            role.allowedIndicesMatcher(action).test(index),
+            is(expectedValue)
+        );
+    }
+
+}

+ 18 - 0
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsIngestPipelineRegistry.java

@@ -7,6 +7,8 @@
 package org.elasticsearch.xpack.application.analytics;
 
 import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.cluster.ClusterChangedEvent;
+import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.application.utils.ingest.PipelineRegistry;
@@ -14,6 +16,7 @@ import org.elasticsearch.xpack.application.utils.ingest.PipelineTemplateConfigur
 
 import java.util.Collections;
 import java.util.List;
+import java.util.Set;
 
 import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.EVENT_DATA_STREAM_INDEX_PREFIX;
 import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.ROOT_RESOURCE_PATH;
@@ -50,4 +53,19 @@ public class AnalyticsIngestPipelineRegistry extends PipelineRegistry {
     protected List<PipelineTemplateConfiguration> getIngestPipelineConfigs() {
         return INGEST_PIPELINES;
     }
+
+    @Override
+    protected boolean isClusterReady(ClusterChangedEvent event) {
+        return super.isClusterReady(event) && (isIngestPipelineInstalled(event.state()) || hasAnalyticsEventDataStream(event.state()));
+    }
+
+    private boolean hasAnalyticsEventDataStream(ClusterState state) {
+        Set<String> dataStreamNames = state.metadata().dataStreams().keySet();
+
+        return dataStreamNames.stream().anyMatch(dataStreamName -> dataStreamName.startsWith(EVENT_DATA_STREAM_INDEX_PREFIX));
+    }
+
+    private boolean isIngestPipelineInstalled(ClusterState state) {
+        return ingestPipelineExists(state, EVENT_DATA_STREAM_INGEST_PIPELINE_NAME);
+    }
 }

+ 15 - 8
x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/utils/ingest/PipelineRegistry.java

@@ -62,25 +62,32 @@ public abstract class PipelineRegistry implements ClusterStateListener {
 
     @Override
     public void clusterChanged(ClusterChangedEvent event) {
+
+        if (isClusterReady(event)) {
+            addIngestPipelinesIfMissing(event.state());
+        }
+    }
+
+    protected abstract String getOrigin();
+
+    protected abstract List<PipelineTemplateConfiguration> getIngestPipelineConfigs();
+
+    protected boolean isClusterReady(ClusterChangedEvent event) {
         ClusterState state = event.state();
         if (state.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) {
             // wait until recovered from disk, so the cluster state view is consistent
-            return;
+            return false;
         }
 
         DiscoveryNode masterNode = event.state().getNodes().getMasterNode();
         if (masterNode == null || state.nodes().isLocalNodeElectedMaster() == false) {
             // no master node elected or current node is not master
-            return;
+            return false;
         }
 
-        addIngestPipelinesIfMissing(state);
+        return true;
     }
 
-    protected abstract String getOrigin();
-
-    protected abstract List<PipelineTemplateConfiguration> getIngestPipelineConfigs();
-
     private void addIngestPipelinesIfMissing(ClusterState state) {
         for (PipelineTemplateConfiguration pipelineTemplateConfig : getIngestPipelineConfigs()) {
             PipelineConfiguration newPipeline = pipelineTemplateConfig.load();
@@ -121,7 +128,7 @@ public abstract class PipelineRegistry implements ClusterStateListener {
         }
     }
 
-    private static boolean ingestPipelineExists(ClusterState state, String pipelineId) {
+    protected boolean ingestPipelineExists(ClusterState state, String pipelineId) {
         Optional<IngestMetadata> maybeMeta = Optional.ofNullable(state.metadata().custom(IngestMetadata.TYPE));
         return maybeMeta.isPresent() && maybeMeta.get().getPipelines().containsKey(pipelineId);
     }

+ 59 - 5
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/AnalyticsIngestPipelineRegistryTests.java

@@ -18,6 +18,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlocks;
+import org.elasticsearch.cluster.metadata.DataStream;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -25,8 +26,11 @@ import org.elasticsearch.cluster.node.TestDiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.TriFunction;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.ingest.IngestMetadata;
 import org.elasticsearch.ingest.PipelineConfiguration;
 import org.elasticsearch.test.ClusterServiceUtils;
@@ -123,6 +127,24 @@ public class AnalyticsIngestPipelineRegistryTests extends ESTestCase {
         assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelineConfigs().size())));
     }
 
+    public void testThatPipelinesAreNotInstalledWhenNoAnalyticsCollectionExist() {
+        DiscoveryNode node = TestDiscoveryNode.create("node");
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
+
+        ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), nodes, false);
+
+        client.setVerifier((action, request, listener) -> {
+            if (action instanceof PutPipelineAction) {
+                fail("no behavioral analytics collection exists, pipeline should not be installed");
+            } else {
+                fail("client called with unexpected request: " + request.toString());
+            }
+            return null;
+        });
+
+        registry.clusterChanged(event);
+    }
+
     public void testThatNewerPipelinesAreNotUpgraded() throws Exception {
         DiscoveryNode node = TestDiscoveryNode.create("node");
         DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build();
@@ -211,7 +233,15 @@ public class AnalyticsIngestPipelineRegistryTests extends ESTestCase {
     }
 
     private ClusterChangedEvent createClusterChangedEvent(Map<String, Integer> existingIngestPipelines, DiscoveryNodes nodes) {
-        ClusterState cs = createClusterState(existingIngestPipelines, nodes);
+        return createClusterChangedEvent(existingIngestPipelines, nodes, true);
+    }
+
+    private ClusterChangedEvent createClusterChangedEvent(
+        Map<String, Integer> existingIngestPipelines,
+        DiscoveryNodes nodes,
+        boolean withDataStreams
+    ) {
+        ClusterState cs = createClusterState(existingIngestPipelines, nodes, withDataStreams);
         ClusterChangedEvent realEvent = new ClusterChangedEvent(
             "created-from-test",
             cs,
@@ -223,21 +253,45 @@ public class AnalyticsIngestPipelineRegistryTests extends ESTestCase {
         return event;
     }
 
-    private ClusterState createClusterState(Map<String, Integer> existingIngestPipelines, DiscoveryNodes nodes) {
+    private ClusterState createClusterState(Map<String, Integer> existingIngestPipelines, DiscoveryNodes nodes, boolean withDataStreams) {
         Map<String, PipelineConfiguration> pipelines = new HashMap<>();
         for (Map.Entry<String, Integer> e : existingIngestPipelines.entrySet()) {
             pipelines.put(e.getKey(), createMockPipelineConfiguration(e.getKey(), e.getValue()));
         }
 
+        Metadata.Builder metadataBuilder = Metadata.builder()
+            .transientSettings(Settings.EMPTY)
+            .putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines));
+
+        if (withDataStreams) {
+            DataStream dataStream = createDataStream();
+            metadataBuilder.dataStreams(
+                MapBuilder.<String, DataStream>newMapBuilder().put(dataStream.getName(), dataStream).map(),
+                Collections.emptyMap()
+            );
+        }
+
         return ClusterState.builder(new ClusterName("test"))
-            .metadata(
-                Metadata.builder().transientSettings(Settings.EMPTY).putCustom(IngestMetadata.TYPE, new IngestMetadata(pipelines)).build()
-            )
+            .metadata(metadataBuilder)
             .blocks(new ClusterBlocks.Builder().build())
             .nodes(nodes)
             .build();
     }
 
+    private DataStream createDataStream() {
+        return new DataStream(
+            AnalyticsConstants.EVENT_DATA_STREAM_INDEX_PREFIX + randomIdentifier(),
+            randomList(1, 10, () -> new Index(randomIdentifier(), randomIdentifier())),
+            0,
+            Collections.emptyMap(),
+            false,
+            false,
+            false,
+            false,
+            IndexMode.STANDARD
+        );
+    }
+
     private PipelineConfiguration createMockPipelineConfiguration(String pipelineId, int version) {
         try (XContentBuilder configBuilder = JsonXContent.contentBuilder().startObject().field("version", version).endObject()) {
             BytesReference config = BytesReference.bytes(configBuilder);

+ 1 - 1
x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java

@@ -44,7 +44,7 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin
     public static final String UTILITY_THREAD_POOL_NAME = "ml_utility";
 
     private static final String MODEL_REPOSITORY_DOCUMENTATION_LINK = format(
-        "https://www.elastic.co/guide/en/machine-learning/%d.%d/ml-nlp-deploy-models.html#ml-nlp-deploy-model-air-gapped",
+        "https://www.elastic.co/guide/en/machine-learning/%d.%d/ml-nlp-elser.html#air-gapped-install",
         Version.CURRENT.major,
         Version.CURRENT.minor
     );

+ 131 - 0
x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml

@@ -219,6 +219,77 @@ setup:
         body:
           index.blocks.write: true
 
+  - do:
+      indices.create:
+        index: test-empty-missing
+        body:
+          settings:
+            number_of_shards: 1
+            number_of_replicas: 0
+            index:
+              mode: time_series
+              routing_path: [ metricset, k8s.pod.uid ]
+              time_series:
+                start_time: 2021-04-28T00:00:00Z
+                end_time: 2021-04-29T00:00:00Z
+          mappings:
+            properties:
+              "@timestamp":
+                type: date
+              metricset:
+                type: keyword
+                time_series_dimension: true
+              k8s:
+                properties:
+                  pod:
+                    properties:
+                      uid:
+                        type: keyword
+                        time_series_dimension: true
+                      name:
+                        type: keyword
+                      value:
+                        type: integer
+                        time_series_metric: gauge
+                      label:
+                        type: keyword
+
+  - do:
+      bulk:
+        refresh: true
+        index: test-empty-missing
+        body:
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:55:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "value": 10 }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "value": 20, "label": null, "unmapped": null }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:45:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "value": 30, "label": "abc", "unmapped": "abc"  }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:40:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "value": 40, "label": "xyz", "unmapped": "xyz" }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:55:20.467Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "value": 10 }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:50:20.467Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "value": 20, "label": null, "unmapped": null }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:45:20.467Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "value": 30, "label": "xyz", "unmapped": "xyz" }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:40:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "value": 40, "label": "abc", "unmapped": "abc" }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:55:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e9597ab", "value": 10 }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e9597ab", "value": 20, "label": null, "unmapped": null }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:45:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e9597ab", "value": 30, "label": null  }}}'
+          - '{"index": {}}'
+          - '{"@timestamp": "2021-04-28T18:40:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e9597ab", "value": 40, "unmapped": null }}}'
+
+  - do:
+      indices.put_settings:
+        index: test-empty-missing
+        body:
+          index.blocks.write: true
+
 ---
 "Downsample index":
   - skip:
@@ -1232,3 +1303,63 @@ setup:
   - match: { hits.hits.3._source.k8s.pod.value.sum: 42.0 }
   - match: { hits.hits.3._source.k8s.pod.agent.id: "second" }
   - match: { hits.hits.3._source.k8s.pod.agent.version: "2.1.7" }
+
+---
+"Downsample empty and missing labels":
+  - skip:
+      version: " - 8.6.99"
+      reason: "Downsampling GA-ed in 8.7.0"
+
+  - do:
+      indices.downsample:
+        index: test-empty-missing
+        target_index: rollup-test-empty-missing
+        body:  >
+          {
+            "fixed_interval": "1h"
+          }
+  - is_true: acknowledged
+
+  - do:
+      search:
+        index: rollup-test-empty-missing
+        body:
+          sort: [ "_tsid", "@timestamp" ]
+
+  - length: { hits.hits: 3 }
+
+  - match: { hits.hits.0._source._doc_count: 4 }
+  - match: { hits.hits.0._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e959507 }
+  - match: { hits.hits.0._source.metricset: pod }
+  - match: { hits.hits.0._source.@timestamp: "2021-04-28T18:00:00.000Z" }
+  - match: { hits.hits.0._source.k8s.pod.name: "cat" }
+  - match: { hits.hits.0._source.k8s.pod.value.min: 10.0 }
+  - match: { hits.hits.0._source.k8s.pod.value.max: 40.0 }
+  - match: { hits.hits.0._source.k8s.pod.value.sum: 100.0 }
+  - match: { hits.hits.0._source.k8s.pod.value.value_count: 4 }
+  - match: { hits.hits.0._source.k8s.pod.label: "abc" }
+  - match: { hits.hits.0._source.k8s.pod.unmapped: "abc" }
+
+  - match: { hits.hits.1._source._doc_count: 4 }
+  - match: { hits.hits.1._source.k8s.pod.uid: 947e4ced-1786-4e53-9e0c-5c447e9597ab }
+  - match: { hits.hits.1._source.metricset: pod }
+  - match: { hits.hits.1._source.@timestamp: "2021-04-28T18:00:00.000Z" }
+  - match: { hits.hits.1._source.k8s.pod.name: "cat" }
+  - match: { hits.hits.1._source.k8s.pod.value.min: 10.0 }
+  - match: { hits.hits.1._source.k8s.pod.value.max: 40.0 }
+  - match: { hits.hits.1._source.k8s.pod.value.sum: 100.0 }
+  - match: { hits.hits.1._source.k8s.pod.value.value_count: 4 }
+  - match: { hits.hits.1._source.k8s.pod.label: null }
+  - match: { hits.hits.1._source.k8s.pod.unmapped: null }
+
+  - match: { hits.hits.2._source._doc_count: 4 }
+  - match: { hits.hits.2._source.k8s.pod.uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 }
+  - match: { hits.hits.2._source.metricset: pod }
+  - match: { hits.hits.2._source.@timestamp: "2021-04-28T18:00:00.000Z" }
+  - match: { hits.hits.2._source.k8s.pod.name: "dog" }
+  - match: { hits.hits.2._source.k8s.pod.value.min: 10.0 }
+  - match: { hits.hits.2._source.k8s.pod.value.max: 40.0 }
+  - match: { hits.hits.2._source.k8s.pod.value.sum: 100.0 }
+  - match: { hits.hits.2._source.k8s.pod.value.value_count: 4 }
+  - match: { hits.hits.2._source.k8s.pod.label: "xyz" }
+  - match: { hits.hits.2._source.k8s.pod.unmapped: "xyz" }

+ 34 - 0
x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/analytics/histogram.yml

@@ -217,3 +217,37 @@ histogram with synthetic source:
         latency:
           values: [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]
           counts: [3, 2, 5, 10, 1, 8]
+
+---
+histogram with synthetic source and zero counts:
+  - skip:
+      version: " - 8.4.99"
+      reason: introduced in 8.5.0
+
+  - do:
+      indices.create:
+        index: histo_synthetic
+        body:
+          mappings:
+            _source:
+              mode: synthetic
+            properties:
+              latency:
+                type: histogram
+  - do:
+      bulk:
+        index: histo_synthetic
+        refresh: true
+        body:
+          - '{"index": {"_id": 1}}'
+          - '{"latency": {"values" : [0.1, 0.2, 0.3, 0.4, 0.5], "counts" : [0, 7, 0, 6, 0]}}'
+
+  - do:
+      get:
+        index: histo_synthetic
+        id: 1
+  - match:
+      _source:
+        latency:
+          values: [0.2, 0.4]
+          counts: [7, 6]

+ 5 - 2
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java

@@ -8,6 +8,7 @@
 package org.elasticsearch.upgrades;
 
 import org.apache.http.util.EntityUtils;
+import org.elasticsearch.Version;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.hamcrest.Matchers;
@@ -17,6 +18,8 @@ import java.nio.charset.StandardCharsets;
 public class GeoIpUpgradeIT extends AbstractUpgradeTestCase {
 
     public void testGeoIpDownloader() throws Exception {
+        assumeTrue("Disabled until PR #95621 is backported to branch " + Version.V_8_8_0, UPGRADE_FROM_VERSION.onOrBefore(Version.V_8_7_0));
+
         if (CLUSTER_TYPE == ClusterType.UPGRADED) {
             assertBusy(() -> {
                 Response response = client().performRequest(new Request("GET", "_cat/tasks"));
@@ -26,8 +29,8 @@ public class GeoIpUpgradeIT extends AbstractUpgradeTestCase {
             assertBusy(() -> {
                 Response response = client().performRequest(new Request("GET", "_ingest/geoip/stats"));
                 String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
-                // The geoip downloader should be executed since a geoip processors is present in behavioral analytics default pipeline:
-                assertThat(tasks, Matchers.containsString("failed_downloads\":1"));
+                // The geoip downloader doesn't actually do anything since there are no geoip processors:
+                assertThat(tasks, Matchers.containsString("failed_downloads\":0"));
                 assertThat(tasks, Matchers.containsString("successful_downloads\":0"));
             });
         }

+ 27 - 11
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java

@@ -26,9 +26,9 @@ import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.client.WarningsHandler.PERMISSIVE;
-import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.oneOf;
 
 public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
 
@@ -73,6 +73,7 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
             {
               "persistent": {
                 "logger.org.elasticsearch.xpack.ml.inference": "TRACE",
+                "logger.org.elasticsearch.xpack.ml.inference.assignments": "DEBUG",
                 "logger.org.elasticsearch.xpack.ml.process": "DEBUG",
                 "logger.org.elasticsearch.xpack.ml.action": "TRACE"
               }
@@ -96,7 +97,6 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
         client().performRequest(request);
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95360")
     public void testTrainedModelDeployment() throws Exception {
         assumeTrue("NLP model deployments added in 8.0", UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_0_0));
 
@@ -112,10 +112,21 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
                     request.addParameter("wait_for_status", "yellow");
                     request.addParameter("timeout", "70s");
                 }));
-                waitForDeploymentStarted(modelId);
-                // attempt inference on new and old nodes multiple times
-                for (int i = 0; i < 10; i++) {
-                    assertInfer(modelId);
+
+                // Workaround for an upgrade test failure where an ingest
+                // pipeline config cannot be parsed by older nodes:
+                // https://github.com/elastic/elasticsearch/issues/95766
+                //
+                // In version 8.3.1 ml stopped parsing the full ingest
+                // pipeline configuration so will avoid this problem.
+                // TODO remove this check once https://github.com/elastic/elasticsearch/issues/95766
+                // is resolved
+                if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_3_1)) {
+                    waitForDeploymentStarted(modelId);
+                    // attempt inference on new and old nodes multiple times
+                    for (int i = 0; i < 10; i++) {
+                        assertInfer(modelId);
+                    }
                 }
             }
             case UPGRADED -> {
@@ -133,7 +144,6 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
         }
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95501")
     public void testTrainedModelDeploymentStopOnMixedCluster() throws Exception {
         assumeTrue("NLP model deployments added in 8.0", UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_0_0));
 
@@ -157,7 +167,6 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
                     request.addParameter("timeout", "70s");
                 }));
                 assertThatTrainedModelAssignmentMetadataIsEmpty(modelId);
-
             }
             default -> throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]");
         }
@@ -263,11 +272,18 @@ public class MLModelDeploymentsUpgradeIT extends AbstractUpgradeTestCase {
             "_cluster/state?filter_path=metadata.trained_model_assignment." + modelId
         );
         Response getTrainedModelAssignmentMetadataResponse = client().performRequest(getTrainedModelAssignmentMetadataRequest);
-        assertThat(EntityUtils.toString(getTrainedModelAssignmentMetadataResponse.getEntity()), containsString("{}"));
-
+        String responseBody = EntityUtils.toString(getTrainedModelAssignmentMetadataResponse.getEntity());
+        assertThat(responseBody, oneOf("{}", "{\"metadata\":{\"trained_model_assignment\":{}}}"));
+
+        // trained_model_allocation was renamed to trained_model_assignment
+        // in v8.3. The renaming happens automatically and the old
+        // metadata should be removed once all nodes are upgraded.
+        // However, sometimes there aren't enough cluster state change
+        // events in the upgraded cluster test for this to happen
         getTrainedModelAssignmentMetadataRequest = new Request("GET", "_cluster/state?filter_path=metadata.trained_model_allocation");
         getTrainedModelAssignmentMetadataResponse = client().performRequest(getTrainedModelAssignmentMetadataRequest);
-        assertThat(EntityUtils.toString(getTrainedModelAssignmentMetadataResponse.getEntity()), equalTo("{}"));
+        responseBody = EntityUtils.toString(getTrainedModelAssignmentMetadataResponse.getEntity());
+        assertThat(responseBody, oneOf("{}", "{\"metadata\":{\"trained_model_allocation\":{}}}"));
     }
 
     private Response getTrainedModelStats(String modelId) throws IOException {

+ 13 - 3
x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java

@@ -56,7 +56,6 @@ public class MlTrainedModelsUpgradeIT extends AbstractUpgradeTestCase {
             .collect(Collectors.toSet());
     }
 
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95360")
     public void testTrainedModelInference() throws Exception {
         assumeTrue("We should only test if old cluster is after trained models we GA", UPGRADE_FROM_VERSION.after(Version.V_7_13_0));
         switch (CLUSTER_TYPE) {
@@ -75,8 +74,19 @@ public class MlTrainedModelsUpgradeIT extends AbstractUpgradeTestCase {
                     request.addParameter("timeout", "70s");
                 }));
                 List<String> modelIds = getTrainedModels();
-                // Test that stats are serializable and can be gathered
-                getTrainedModelStats();
+
+                // Workaround for an upgrade test failure where an ingest
+                // pipeline config cannot be parsed by older nodes:
+                // https://github.com/elastic/elasticsearch/issues/95766
+                //
+                // In version 8.3.1 ml stopped parsing the full ingest
+                // pipeline configuration, so it will avoid this problem.
+                // TODO remove this check once https://github.com/elastic/elasticsearch/issues/95766
+                // is resolved
+                if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_3_1)) {
+                    // Test that stats are serializable and can be gathered
+                    getTrainedModelStats();
+                }
                 // Verify that the pipelines still work and inference is possible
                 testInfer(modelIds);
             }