@@ -3,7 +3,7 @@ import shutil
 import json
 import logging
 from abc import ABC, abstractmethod
-from typing import BinaryIO, Tuple
+from typing import BinaryIO, Tuple, Dict

 import boto3
 from botocore.config import Config
@@ -44,7 +44,7 @@ class StorageProvider(ABC):
         pass

     @abstractmethod
-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         pass

     @abstractmethod
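
For callers, the widened interface reads like this. A minimal sketch; the provider choice, filename, and tag values are hypothetical:

```python
# Hypothetical caller: every StorageProvider implementation now takes a
# tags mapping alongside the file handle and filename.
provider: StorageProvider = S3StorageProvider()  # any concrete provider

with open("report.pdf", "rb") as f:
    contents, path = provider.upload_file(
        f,
        "report.pdf",
        tags={"owner": "data-team", "retention": "90d"},  # example values
    )
```
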
@@ -58,7 +58,7 @@ class StorageProvider(ABC):

 class LocalStorageProvider(StorageProvider):
     @staticmethod
-    def upload_file(file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         contents = file.read()
         if not contents:
             raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)
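
Note: the local provider now accepts `tags`, but the lines shown here never persist them. If local parity with S3 is wanted, one option (a suggestion only, not part of this diff) is a JSON sidecar written with the module's existing `json` import:

```python
# Suggestion only, not in this diff: persist tags next to the stored file
# so the local backend round-trips the same information as the cloud ones.
tags_path = f"{file_path}.tags.json"  # hypothetical sidecar path
with open(tags_path, "w") as f:
    json.dump(tags, f)
```
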
@@ -131,12 +131,18 @@ class S3StorageProvider(StorageProvider):
         self.bucket_name = S3_BUCKET_NAME
         self.key_prefix = S3_KEY_PREFIX if S3_KEY_PREFIX else ""

-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to S3 storage."""
-        _, file_path = LocalStorageProvider.upload_file(file, filename)
+        _, file_path = LocalStorageProvider.upload_file(file, filename, tags)
+        tagging = {"TagSet": [{"Key": k, "Value": v} for k, v in tags.items()]}
         try:
             s3_key = os.path.join(self.key_prefix, filename)
             self.s3_client.upload_file(file_path, self.bucket_name, s3_key)
+            self.s3_client.put_object_tagging(
+                Bucket=self.bucket_name,
+                Key=s3_key,
+                Tagging=tagging,
+            )
             return (
                 open(file_path, "rb").read(),
                 "s3://" + self.bucket_name + "/" + s3_key,
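
A possible simplification for a follow-up (a sketch, not what this diff does): boto3's `upload_file` accepts a URL-encoded `Tagging` string via `ExtraArgs`, which applies the tags in the upload request itself and avoids the window where the object exists untagged:

```python
from urllib.parse import urlencode

# Sketch: tag the object at upload time instead of a separate
# put_object_tagging call after the fact.
self.s3_client.upload_file(
    file_path,
    self.bucket_name,
    s3_key,
    ExtraArgs={"Tagging": urlencode(tags)},
)
```
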
@@ -207,9 +213,9 @@ class GCSStorageProvider(StorageProvider):
         self.gcs_client = storage.Client()
         self.bucket = self.gcs_client.bucket(GCS_BUCKET_NAME)

-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to GCS storage."""
-        contents, file_path = LocalStorageProvider.upload_file(file, filename)
+        contents, file_path = LocalStorageProvider.upload_file(file, filename, tags)
         try:
             blob = self.bucket.blob(filename)
             blob.upload_from_filename(file_path)
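
The GCS hunk forwards `tags` to the local provider but never attaches them to the blob. GCS has no direct counterpart to S3 object tags; the closest fit is custom blob metadata, e.g. (sketch):

```python
# Sketch: store the tags as custom metadata on the GCS blob; metadata set
# before the upload is persisted with it.
blob = self.bucket.blob(filename)
blob.metadata = tags
blob.upload_from_filename(file_path)
```
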
@@ -277,9 +283,9 @@ class AzureStorageProvider(StorageProvider):
             self.container_name
         )

-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to Azure Blob Storage."""
-        contents, file_path = LocalStorageProvider.upload_file(file, filename)
+        contents, file_path = LocalStorageProvider.upload_file(file, filename, tags)
         try:
             blob_client = self.container_client.get_blob_client(filename)
             blob_client.upload_blob(contents, overwrite=True)
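
Same gap in the Azure hunk: `tags` is forwarded but never applied. `azure-storage-blob` 12.4.0+ supports blob index tags directly on upload, so this could be covered with a keyword argument (sketch):

```python
# Sketch: set blob index tags at upload time
# (requires azure-storage-blob >= 12.4.0).
blob_client.upload_blob(contents, overwrite=True, tags=tags)
```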