diff --git a/medusa/storage/google_storage.py b/medusa/storage/google_storage.py index f6271677..35e7bb15 100644 --- a/medusa/storage/google_storage.py +++ b/medusa/storage/google_storage.py @@ -127,18 +127,15 @@ async def _upload_object(self, data: io.BytesIO, object_key: str, headers: t.Dic ) ) - storage_class = self.get_storage_class() - ex_header = {"storageClass": storage_class} if storage_class else {} resp = await self.gcs_storage.upload( bucket=self.bucket_name, object_name=object_key, file_data=data, force_resumable_upload=True, timeout=-1, - headers=ex_header, ) return AbstractBlob( - resp['name'], int(resp['size']), resp['md5Hash'], resp['timeCreated'], storage_class.upper() + resp['name'], int(resp['size']), resp['md5Hash'], resp['timeCreated'], None ) @retry(stop_max_attempt_number=MAX_UP_DOWN_LOAD_RETRIES, wait_fixed=5000) @@ -206,15 +203,12 @@ async def _upload_blob(self, src: str, dest: str) -> ManifestObject: ) ) - storage_class = self.get_storage_class() - ex_header = {"storageClass": storage_class} if storage_class else {} resp = await self.gcs_storage.copy( bucket=self.bucket_name, object_name=f'{src}'.replace(f'gs://{self.bucket_name}/', ''), destination_bucket=self.bucket_name, new_name=object_key, timeout=-1, - headers=ex_header, ) resp = resp['resource'] else: diff --git a/tests/storage/abstract_storage_test.py b/tests/storage/abstract_storage_test.py index 56624830..123760eb 100644 --- a/tests/storage/abstract_storage_test.py +++ b/tests/storage/abstract_storage_test.py @@ -12,9 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import io +import pathlib +import typing as t import unittest +from pathlib import Path +from medusa.storage import ManifestObject, AbstractBlob from medusa.storage.abstract_storage import AbstractStorage @@ -24,6 +28,50 @@ class AttributeDict(dict): __setattr__ = dict.__setitem__ +class TestAbstractStorage(AbstractStorage): + def connect(self): + pass + + def disconnect(self): + pass + + async def _list_blobs(self, prefix=None): + pass + + async def _upload_object(self, data: io.BytesIO, object_key: str, headers: t.Dict[str, str]) -> AbstractBlob: + pass + + async def _download_blob(self, src: str, dest: str): + pass + + async def _upload_blob(self, src: str, dest: str) -> ManifestObject: + pass + + async def _get_object(self, object_key: t.Union[Path, str]) -> AbstractBlob: + pass + + async def _read_blob_as_bytes(self, blob: AbstractBlob) -> bytes: + pass + + async def _delete_object(self, obj: AbstractBlob): + pass + + @staticmethod + def blob_matches_manifest(blob, object_in_manifest, enable_md5_checks=False): + pass + + @staticmethod + def file_matches_storage(src: pathlib.Path, cached_item: ManifestObject, threshold=None, enable_md5_checks=False): + pass + + @staticmethod + def compare_with_manifest(actual_size, size_in_manifest, actual_hash=None, hash_in_manifest=None, threshold=None): + pass + + def __init__(self, config): + super().__init__(config) + + class AbstractStorageTest(unittest.TestCase): def test_convert_human_friendly_size_to_bytes(self): @@ -44,3 +92,16 @@ def test_convert_human_friendly_size_to_bytes(self): self.assertEqual(2 * 1024 ** 3, AbstractStorage._human_size_to_bytes('2GB')) self.assertEqual(2 * 1024 ** 4, AbstractStorage._human_size_to_bytes('2TB')) self.assertEqual(2 * 1024 ** 5, AbstractStorage._human_size_to_bytes('2PB')) + + def test_get_storage_class(self): + config = AttributeDict({ + 'bucket_name': 'must_be_set', + 'storage_class': 'hot' + }) + storage = TestAbstractStorage(config) + self.assertEqual('HOT', 
storage.get_storage_class()) + + config.storage_class = None + self.assertEqual(None, storage.get_storage_class()) + config.storage_class = 'cool' + self.assertEqual('COOL', storage.get_storage_class())