
Remove extra checks from HdfsBlobContainer (#31126)

This commit saves one network roundtrip when reading or deleting files from an HDFS repository.
Yannick Welsch, 7 years ago · commit 1dca00deb9

plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java (+14, -10)

@@ -31,6 +31,7 @@ import org.elasticsearch.common.blobstore.support.AbstractBlobContainer;
 import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
 import org.elasticsearch.repositories.hdfs.HdfsBlobStore.Operation;
 
+import java.io.FileNotFoundException;
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -69,11 +70,13 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public void deleteBlob(String blobName) throws IOException {
-        if (!blobExists(blobName)) {
-            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
+        try {
+            if (store.execute(fileContext -> fileContext.delete(new Path(path, blobName), true)) == false) {
+                throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
+            }
+        } catch (FileNotFoundException fnfe) {
+            throw new NoSuchFileException("[" + blobName + "] blob not found");
         }
-
-        store.execute(fileContext -> fileContext.delete(new Path(path, blobName), true));
     }
 
     @Override
@@ -86,16 +89,17 @@ final class HdfsBlobContainer extends AbstractBlobContainer {
 
     @Override
     public InputStream readBlob(String blobName) throws IOException {
-        if (!blobExists(blobName)) {
-            throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
-        }
         // FSDataInputStream does buffering internally
         // FSDataInputStream can open connections on read() or skip() so we wrap in
         // HDFSPrivilegedInputSteam which will ensure that underlying methods will
         // be called with the proper privileges.
-        return store.execute(fileContext ->
-            new HDFSPrivilegedInputSteam(fileContext.open(new Path(path, blobName), bufferSize), securityContext)
-        );
+        try {
+            return store.execute(fileContext ->
+                new HDFSPrivilegedInputSteam(fileContext.open(new Path(path, blobName), bufferSize), securityContext)
+            );
+        } catch (FileNotFoundException fnfe) {
+            throw new NoSuchFileException("[" + blobName + "] blob not found");
+        }
     }
 
     @Override

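The change eliminates a check-then-act round trip: instead of calling blobExists (one HDFS request) before the actual delete or open (a second request), the container now performs the operation directly and translates HDFS's FileNotFoundException into the NoSuchFileException that BlobContainer callers expect. Below is a minimal, self-contained sketch of the same pattern against the local filesystem; the ReadWithoutExistenceCheck class and its readBlob helper are illustrative and not part of the commit.

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;

public final class ReadWithoutExistenceCheck {

    // Open the file directly and translate the "not found" failure,
    // rather than issuing a separate existence check first.
    static InputStream readBlob(String name) throws IOException {
        try {
            return new FileInputStream(name); // single operation, no prior exists() call
        } catch (FileNotFoundException fnfe) {
            throw new NoSuchFileException("[" + name + "] blob not found");
        }
    }

    public static void main(String[] args) throws IOException {
        try (InputStream in = readBlob("does-not-exist.bin")) {
            System.out.println(in.read());
        } catch (NoSuchFileException e) {
            System.out.println("caught expected exception: " + e.getMessage());
        }
    }
}

The same act-and-translate shape appears in both deleteBlob and readBlob above; the delete case additionally maps a false return value (nothing was deleted) to NoSuchFileException.
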
test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java (+7, -0)

@@ -47,6 +47,13 @@ import static org.hamcrest.CoreMatchers.notNullValue;
  */
 public abstract class ESBlobStoreContainerTestCase extends ESTestCase {
 
+    public void testReadNonExistingPath() throws IOException {
+        try(BlobStore store = newBlobStore()) {
+            final BlobContainer container = store.blobContainer(new BlobPath());
+            expectThrows(NoSuchFileException.class, () -> container.readBlob("non-existing"));
+        }
+    }
+
     public void testWriteRead() throws IOException {
         try(BlobStore store = newBlobStore()) {
             final BlobContainer container = store.blobContainer(new BlobPath());