Browse source code

Not throwing FileNotFoundException if BlobContainer has been deleted (#75991)

Previously, if the underlying directory for an HdfsBlobContainer had been deleted (for example by calling HdfsBlobContainer.delete()), listBlobsByPrefix() threw a FileNotFoundException. This change makes listBlobsByPrefix() return an empty map instead, in line with the behavior of FsBlobContainer. It also adds HdfsSnapshotRepoTestKitIT, which runs the repository analyzer against the HDFS repository.
Closes #73708
Keith Massey 4 years ago
parent
commit
0ae9e77637
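
To make the new contract concrete, here is a minimal sketch of the caller-visible behavior, assuming an already-initialised HdfsBlobStore; the ListAfterDeleteExample class and listAfterDelete method are illustrative only and are not part of this commit.

import java.io.IOException;
import java.util.Map;

import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetadata;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;

// Illustration only: the BlobStore passed in is assumed to be an HdfsBlobStore
// constructed elsewhere (the test changes below show how the unit tests build
// one against a local FileContext).
class ListAfterDeleteExample {
    static void listAfterDelete(BlobStore blobStore) throws IOException {
        BlobContainer container = blobStore.blobContainer(BlobPath.EMPTY.add("path"));
        container.delete(); // removes the container's underlying HDFS directory

        // Before this commit the next call threw FileNotFoundException;
        // it now returns an empty map, matching FsBlobContainer's behavior.
        Map<String, BlobMetadata> blobs = container.listBlobsByPrefix("foo");
        assert blobs.isEmpty();
    }
}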

+ 7 - 2
plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java

@@ -226,8 +226,13 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public Map<String, BlobMetadata> listBlobsByPrefix(@Nullable final String prefix) throws IOException {
-        FileStatus[] files = store.execute(fileContext -> fileContext.util().listStatus(path,
-            path -> prefix == null || path.getName().startsWith(prefix)));
+        FileStatus[] files;
+        try {
+            files = store.execute(fileContext -> fileContext.util().listStatus(path,
+                path -> prefix == null || path.getName().startsWith(prefix)));
+        } catch (FileNotFoundException e) {
+            files = new FileStatus[0];
+        }
         Map<String, BlobMetadata> map = new LinkedHashMap<>();
         for (FileStatus file : files) {
             if (file.isFile()) {

+ 35 - 0
plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsBlobStoreContainerTests.java

@@ -159,6 +159,41 @@ public class HdfsBlobStoreContainerTests extends ESTestCase {
         assertTrue(container.blobExists("foo"));
     }
 
+    public void testListBlobsByPrefix() throws Exception {
+        FileContext fileContext = createTestContext();
+        HdfsBlobStore hdfsBlobStore = new HdfsBlobStore(fileContext, "dir", 1024, false);
+        FileContext.Util util = fileContext.util();
+        Path root = fileContext.makeQualified(new Path("dir"));
+        assertTrue(util.exists(root));
+        BlobPath blobPath = BlobPath.EMPTY.add("path");
+
+        hdfsBlobStore.blobContainer(blobPath);
+        Path hdfsPath = root;
+        for (String p : blobPath.parts()) {
+            hdfsPath = new Path(hdfsPath, p);
+        }
+        assertTrue(util.exists(hdfsPath));
+
+        BlobContainer container = hdfsBlobStore.blobContainer(blobPath);
+
+        byte[] data = randomBytes(randomIntBetween(10, scaledRandomIntBetween(1024, 1 << 16)));
+        writeBlob(container, "foo", new BytesArray(data), randomBoolean());
+        assertArrayEquals(readBlobFully(container, "foo", data.length), data);
+        assertTrue(container.blobExists("foo"));
+        writeBlob(container, "bar", new BytesArray(data), randomBoolean());
+        assertArrayEquals(readBlobFully(container, "bar", data.length), data);
+        assertTrue(container.blobExists("bar"));
+
+        assertEquals(2, container.listBlobsByPrefix(null).size());
+        assertEquals(1, container.listBlobsByPrefix("fo").size());
+        assertEquals(0, container.listBlobsByPrefix("noSuchFile").size());
+
+        container.delete();
+        assertEquals(0, container.listBlobsByPrefix(null).size());
+        assertEquals(0, container.listBlobsByPrefix("fo").size());
+        assertEquals(0, container.listBlobsByPrefix("noSuchFile").size());
+    }
+
     public static byte[] readBlobPartially(BlobContainer container, String name, int pos, int length) throws IOException {
         byte[] data = new byte[length];
         try (InputStream inputStream = container.readBlob(name, pos, length)) {

+ 183 - 0
x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle

@@ -0,0 +1,183 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+
+import org.apache.tools.ant.taskdefs.condition.Os
+import org.elasticsearch.gradle.internal.info.BuildParams
+import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import org.elasticsearch.gradle.internal.util.ports.ReservedPortRange
+
+import java.nio.file.Files
+import java.nio.file.Paths
+
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
+
+apply plugin: 'elasticsearch.test.fixtures'
+apply plugin: 'elasticsearch.standalone-rest-test'
+apply plugin: 'elasticsearch.rest-test'
+apply plugin: 'elasticsearch.rest-resources'
+apply plugin: 'elasticsearch.internal-available-ports'
+
+final Project hdfsFixtureProject = project(':test:fixtures:hdfs-fixture')
+final Project krbFixtureProject = project(':test:fixtures:krb5kdc-fixture')
+final Project hdfsRepoPluginProject = project(':plugins:repository-hdfs')
+
+dependencies {
+  testImplementation testArtifact(project(xpackModule('snapshot-repo-test-kit')))
+  testImplementation hdfsRepoPluginProject
+}
+
+restResources {
+  restApi {
+    include 'indices', 'search', 'bulk', 'snapshot', 'nodes', '_common', 'snapshot_repo_test_kit'
+  }
+}
+
+testFixtures.useFixture(krbFixtureProject.path, 'hdfs-snapshot-repo-tests')
+
+configurations {
+  hdfsFixture
+}
+
+dependencies {
+  hdfsFixture hdfsFixtureProject
+  // Set the keytab files in the classpath so that we can access them from test code without the security manager freaking out.
+  if (isEclipse == false) {
+    testRuntimeOnly files(krbFixtureProject.ext.krb5Keytabs("hdfs-snapshot-repo-tests", "hdfs_hdfs.build.elastic.co.keytab").parent){
+      builtBy ":test:fixtures:krb5kdc-fixture:preProcessFixture"
+    }
+  }
+}
+
+normalization {
+  runtimeClasspath {
+    // ignore generated keytab files for the purposes of build avoidance
+    ignore '*.keytab'
+    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
+    ignore 'ports'
+  }
+}
+
+String realm = "BUILD.ELASTIC.CO"
+String krb5conf = krbFixtureProject.ext.krb5Conf("hdfs")
+
+// Create HDFS File System Testing Fixtures
+for (String fixtureName : ['hdfsFixture', 'secureHdfsFixture']) {
+  project.tasks.register(fixtureName, org.elasticsearch.gradle.internal.test.AntFixture) {
+    dependsOn project.configurations.hdfsFixture, krbFixtureProject.tasks.postProcessFixture
+    executable = "${BuildParams.runtimeJavaHome}/bin/java"
+    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
+    maxWaitInSeconds 60
+    onlyIf { BuildParams.inFipsJvm == false }
+    waitCondition = { fixture, ant ->
+      // the hdfs.MiniHDFS fixture writes the ports file when
+      // it's ready, so we can just wait for the file to exist
+      return fixture.portsFile.exists()
+    }
+    final List<String> miniHDFSArgs = []
+
+    // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
+    if (name.equals('secureHdfsFixture')) {
+      onlyIf { BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 }
+      miniHDFSArgs.addAll(["--add-exports", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"])
+      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
+    }
+    // configure port dynamically
+    def portRange = project.getExtensions().getByType(ReservedPortRange)
+    miniHDFSArgs.add("-Dhdfs.config.port=${portRange.getOrAllocate(name)}")
+
+    // Common options
+    miniHDFSArgs.add('hdfs.MiniHDFS')
+    miniHDFSArgs.add(baseDir)
+
+    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
+    if (name.equals('secureHdfsFixture')) {
+      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
+      miniHDFSArgs.add(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab"))
+    }
+
+    args miniHDFSArgs.toArray()
+  }
+}
+
+// Disable the integration test when running in a FIPS JVM
+tasks.named("integTest").configure {
+  description = "Runs rest tests against an elasticsearch cluster with HDFS."
+  def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("hdfsFixture")
+  systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort"
+  nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/simple'
+  onlyIf { BuildParams.inFipsJvm == false }
+}
+
+tasks.register("integTestSecure", RestIntegTestTask) {
+  description = "Runs rest tests against an elasticsearch cluster with Secured HDFS."
+  def hdfsPort = project.getExtensions().getByType(ReservedPortRange).getOrAllocate("secureHdfsFixture")
+  nonInputProperties.systemProperty 'test.hdfs.uri', "hdfs://localhost:$hdfsPort"
+  nonInputProperties.systemProperty 'test.hdfs.path', '/user/elasticsearch/test/repository_test_kit/secure'
+  nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
+  nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
+  nonInputProperties.systemProperty(
+    "test.krb5.keytab.hdfs",
+    project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
+  )
+  onlyIf { BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_16 }
+}
+tasks.named("check").configure { dependsOn("integTestSecure") }
+
+testClusters.configureEach {
+  testDistribution = 'DEFAULT'
+  plugin(hdfsRepoPluginProject.path)
+  setting 'xpack.license.self_generated.type', 'trial'
+  setting 'xpack.security.enabled', 'false'
+}
+
+testClusters.matching { it.name == "integTestSecure" }.configureEach {
+  systemProperty "java.security.krb5.conf", krb5conf
+  extraConfigFile(
+    "repository-hdfs/krb5.keytab",
+    file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
+  )
+}
+
+// Determine HDFS Fixture compatibility for the current build environment.
+boolean fixtureSupported = false
+if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+  // hdfs fixture will not start without hadoop native libraries on windows
+  String nativePath = System.getenv("HADOOP_HOME")
+  if (nativePath != null) {
+    java.nio.file.Path path = Paths.get(nativePath)
+    if (Files.isDirectory(path) &&
+      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
+      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
+      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
+      fixtureSupported = true
+    } else {
+      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
+    }
+  }
+} else {
+  fixtureSupported = true
+}
+
+boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
+if (legalPath == false) {
+  fixtureSupported = false
+}
+
+if (fixtureSupported) {
+  tasks.named("integTest").configure { dependsOn "hdfsFixture" }
+  tasks.named("integTestSecure").configure { dependsOn "secureHdfsFixture" }
+} else {
+  tasks.named("integTest").configure { enabled = false }
+  tasks.named("integTestSecure").configure { enabled = false }
+  if (legalPath) {
+    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
+  } else {
+    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
+  }
+}
+

+ 38 - 0
x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/src/test/java/org/elasticsearch/repositories/blobstore/testkit/HdfsSnapshotRepoTestKitIT.java

@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.repositories.blobstore.testkit;
+
+import org.elasticsearch.common.settings.Settings;
+
+import static org.hamcrest.Matchers.blankOrNullString;
+import static org.hamcrest.Matchers.not;
+
+public class HdfsSnapshotRepoTestKitIT extends AbstractSnapshotRepoTestKitRestTestCase {
+
+    @Override
+    protected String repositoryType() {
+        return "hdfs";
+    }
+
+    @Override
+    protected Settings repositorySettings() {
+        final String uri = System.getProperty("test.hdfs.uri");
+        assertThat(uri, not(blankOrNullString()));
+
+        final String path = System.getProperty("test.hdfs.path");
+        assertThat(path, not(blankOrNullString()));
+
+        // Optional based on type of test
+        final String principal = System.getProperty("test.krb5.principal.es");
+
+        Settings.Builder repositorySettings = Settings.builder().put("client", "repository_test_kit").put("uri", uri).put("path", path);
+        if (principal != null) {
+            repositorySettings.put("security.principal", principal);
+        }
+        return repositorySettings.build();
+    }
+}