diff --git a/.gitignore b/.gitignore
index ac38f83b..8963e0b8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -284,3 +284,7 @@ venv.bak/
 .mypy_cache/
 .dmypy.json
 dmypy.json
+
+
+# For testing purposes only.
+test_image.svg
diff --git a/pyproject.toml b/pyproject.toml
index 67599c99..dbd119ee 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,6 +43,8 @@ sphinx-toolbox = "^3.4.0"
 
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
+addopts = "--ignore=tests/resumable"
 
 [build-system]
 build-backend = "poetry.core.masonry.api"
diff --git a/storage3/_async/bucket.py b/storage3/_async/bucket.py
index 1dccdb90..9d5460fc 100644
--- a/storage3/_async/bucket.py
+++ b/storage3/_async/bucket.py
@@ -7,8 +7,9 @@ from ..types import CreateOrUpdateBucketOptions, RequestMethod
 from ..utils import AsyncClient, StorageException
 from .file_api import AsyncBucket
+from .resumable import AsyncResumableUpload
 
-__all__ = ["AsyncStorageBucketAPI"]
+__all__ = ("AsyncStorageBucketAPI",)
 
 
 class AsyncStorageBucketAPI:
@@ -16,6 +17,13 @@ class AsyncStorageBucketAPI:
 
     def __init__(self, session: AsyncClient) -> None:
         self._client = session
+        self._resumable = None
+
+    @property
+    def resumable(self):
+        if self._resumable is None:
+            self._resumable = AsyncResumableUpload(self._client)
+        return self._resumable
 
     async def _request(
         self,
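The `resumable` property above lazily constructs the resumable-upload helper on first access and then reuses the cached instance. A minimal sketch of how a caller reaches it, assuming placeholder credentials (the URL and key below are illustrative, not real values):

```python
import asyncio

from storage3 import AsyncStorageClient


async def main() -> None:
    # Placeholder URL/key for illustration only; substitute real project values.
    client = AsyncStorageClient(
        "https://example.supabase.co/storage/v1", {"apiKey": "<key>"}
    )
    # First access builds the AsyncResumableUpload; later reads reuse it.
    assert client.resumable is client.resumable


asyncio.run(main())
```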
diff --git a/storage3/_async/resumable.py b/storage3/_async/resumable.py
new file mode 100644
index 00000000..909746c2
--- /dev/null
+++ b/storage3/_async/resumable.py
@@ -0,0 +1,205 @@
+import os
+from datetime import datetime
+
+from ..types import FileInfo, UploadMetadata
+from ..utils import (
+    AsyncClient,
+    FileStore,
+    StorageException,
+    base64encode_metadata,
+    is_valid_arg,
+)
+
+__all__ = ("AsyncResumableUpload",)
+
+
+class AsyncResumableUpload:
+    def __init__(self, session: AsyncClient) -> None:
+        self._client = session
+        self.url = f"{self._client.base_url}upload/resumable"
+        self.expiration_time_format = "%a, %d %b %Y %X %Z"
+        self._filestore = FileStore()
+
+    def get_link(self, objectname: str) -> str:
+        """Get the upload link associated with objectname in the bucket
+
+        Parameters
+        ----------
+        objectname
+            This could be the local filename or objectname in the storage
+        """
+        if not is_valid_arg(objectname):
+            raise StorageException("Objectname cannot be empty")
+        return self._filestore.get_link(objectname)
+
+    async def create_unique_link(
+        self, bucketname=None, objectname=None, filename=None
+    ) -> None:
+        """Create a unique upload link for the given bucketname and objectname
+
+        Parameters
+        ----------
+        bucketname
+            Storage bucket
+        objectname
+            Filename in the bucket
+        filename
+            Local file
+        """
+        if not is_valid_arg(bucketname):
+            raise StorageException("Bucketname cannot be empty")
+
+        if not (is_valid_arg(objectname) or is_valid_arg(filename)):
+            raise StorageException("Must specify objectname or filename")
+
+        file = filename if filename else objectname
+
+        # file may still be blank (e.g. whitespace-only), so validate it again
+        if not is_valid_arg(file):
+            raise StorageException("Must specify objectname or filename")
+
+        upload_mode = None
+
+        info = FileInfo(
+            name=file, link="", length="", headers={"Tus-Resumable": "1.0.0"}
+        )
+
+        if not filename:
+            upload_mode = "Upload-Defer-Length"
+            info["headers"][upload_mode] = "1"
+        else:
+            upload_mode = "Upload-Length"
+            size = str(os.stat(filename).st_size)
+
+            if int(size) == 0:
+                raise StorageException(
+                    f"Cannot create a link for an empty file: {file}"
+                )
+
+            info["headers"][upload_mode] = size
+            info["length"] = size
+
+        obj_name = os.path.split(file)[1]
+        metadata = UploadMetadata(bucketName=bucketname, objectName=obj_name)
+
+        info["headers"]["Upload-Metadata"] = base64encode_metadata(metadata)
+        response = await self._client.post(self.url, headers=info["headers"])
+
+        if response.status_code != 201:
+            raise StorageException(response.content)
+
+        expiration_time = datetime.strptime(
+            response.headers["upload-expires"], self.expiration_time_format
+        )
+        info["expiration_time"] = expiration_time.timestamp()
+
+        info["link"] = response.headers["location"]
+        del info["headers"][upload_mode]
+        self._filestore.mark_file(info)
+
+    async def resumable_offset(self, link, headers) -> str:
+        """Get the current upload offset from the server
+
+        Parameters
+        ----------
+        link
+            Target url
+        headers
+            Metadata headers sent to the server
+        """
+        if not self._filestore.link_exists(link):
+            raise StorageException(f"There's no reference to that link: {link}")
+
+        response = await self._client.head(link, headers=headers)
+
+        if "upload-offset" not in response.headers:
+            raise StorageException("Error while fetching the next offset.")
+
+        return response.headers["upload-offset"]
+
+    async def terminate(self, file: str) -> None:
+        """Drop the link associated with a file
+
+        Parameters
+        ----------
+        file
+            file name used to look up its metadata info
+        """
+        if not is_valid_arg(file):
+            raise StorageException("File argument cannot be empty")
+
+        info = self._filestore.get_file_info(file)
+        response = await self._client.delete(info["link"], headers=info["headers"])
+
+        if response.status_code != 204:
+            raise StorageException(response.content)
+
+        self._filestore.remove_file(file)
+
+    async def upload(
+        self, filename, upload_defer=False, link=None, objectname=None, mb_size=1
+    ) -> None:
+        """Send the file's content in chunks to the target url
+
+        Parameters
+        ----------
+        filename
+            Local file
+        upload_defer
+            Requires link and objectname to be provided so the file info can be retrieved from the FileStore
+        link
+            Target url
+        objectname
+            Name of the file in the bucket
+        mb_size
+            Number of megabytes (MiB) to send in each chunk
+        """
+        if upload_defer:
+            if not (is_valid_arg(link) and is_valid_arg(objectname)):
+                raise StorageException(
+                    "Upload-Defer mode requires a link and objectname"
+                )
+
+        if not is_valid_arg(filename):
+            raise StorageException("Must specify a filename")
+
+        target_file = objectname if upload_defer else filename
+        chunk_size = 1048576 * int(max(1, mb_size))  # 1024 * 1024 * mb_size
+        self._filestore.update_file_headers(
+            target_file, "Content-Type", "application/offset+octet-stream"
+        )
+        storage_link = link if upload_defer else self.get_link(target_file)
+
+        if upload_defer:
+            size = str(os.stat(filename).st_size)
+
+            if int(size) == 0:
+                raise StorageException(f"Cannot upload an empty file: {filename}")
+
+            # In deferred mode the length was unknown at link-creation time,
+            # so it is sent with the first PATCH request.
+            self._filestore.update_file_headers(target_file, "Upload-Length", size)
+            self._filestore.update_file_headers(target_file, "Upload-Offset", "0")
+            headers = self._filestore.get_file_headers(target_file)
+            response = await self._client.patch(storage_link, headers=headers)
+            self._filestore.delete_file_headers(target_file, "Upload-Length")
+
+        while True:
+            headers = self._filestore.get_file_headers(target_file)
+            offset = await self.resumable_offset(storage_link, headers)
+            file = self._filestore.open_file(filename, offset=int(offset))
+            self._filestore.update_file_headers(target_file, "Upload-Offset", offset)
+
+            chunk = file.read(chunk_size)
+            # Close the handle right after reading so it is not leaked when
+            # the loop runs for more than one chunk.
+            self._filestore.close_file(file)
+            headers = self._filestore.get_file_headers(target_file)
+            response = await self._client.patch(
+                storage_link, headers=headers, content=chunk
+            )
+
+            if response.status_code not in {201, 204}:
+                raise StorageException(response.content)
+
+            if "tus-complete" in response.headers:
+                self._filestore.remove_file(target_file)
+                break
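Taken together, `create_unique_link`, `get_link`, and `upload` implement the happy path of a TUS-style resumable upload. A sketch of that flow, mirroring what the async tests later in this diff do (the credentials, bucket, and file name below are placeholders):

```python
import asyncio

from storage3 import AsyncStorageClient


async def main() -> None:
    # Placeholder credentials and bucket; substitute real values.
    client = AsyncStorageClient(
        "https://example.supabase.co/storage/v1",
        {"apiKey": "<key>", "Authorization": "Bearer <key>"},
    )
    # 1. POST to /upload/resumable registers the upload; Upload-Length is
    #    derived from the local file's size.
    await client.resumable.create_unique_link(bucketname="test", filename="report.pdf")
    # 2. The returned Location header is cached in the FileStore, keyed by filename.
    link = client.resumable.get_link("report.pdf")
    print("Uploading to", link)
    # 3. PATCH the content in 1 MiB chunks until the server reports tus-complete.
    await client.resumable.upload("report.pdf")


asyncio.run(main())
```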
diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py
index f8247eeb..8a941c6e 100644
--- a/storage3/_sync/bucket.py
+++ b/storage3/_sync/bucket.py
@@ -7,8 +7,9 @@ from ..types import CreateOrUpdateBucketOptions, RequestMethod
 from ..utils import StorageException, SyncClient
 from .file_api import SyncBucket
+from .resumable import ResumableUpload
 
-__all__ = ["SyncStorageBucketAPI"]
+__all__ = ("SyncStorageBucketAPI",)
 
 
 class SyncStorageBucketAPI:
@@ -16,6 +17,7 @@ class SyncStorageBucketAPI:
 
     def __init__(self, session: SyncClient) -> None:
         self._client = session
+        self._resumable = None
 
     def _request(
         self,
@@ -33,6 +35,13 @@ def _request(
 
         return response
 
+    @property
+    def resumable(self):
+        if self._resumable is None:
+            self._resumable = ResumableUpload(self._client)
+        return self._resumable
+
     def list_buckets(self) -> list[SyncBucket]:
         """Retrieves the details of all storage buckets within an existing product."""
         # if the request doesn't error, it is assured to return a list
diff --git a/storage3/_sync/resumable.py b/storage3/_sync/resumable.py
new file mode 100644
index 00000000..75228107
--- /dev/null
+++ b/storage3/_sync/resumable.py
@@ -0,0 +1,203 @@
+import os
+from datetime import datetime
+
+from ..types import FileInfo, UploadMetadata
+from ..utils import (
+    FileStore,
+    StorageException,
+    SyncClient,
+    base64encode_metadata,
+    is_valid_arg,
+)
+
+__all__ = ("ResumableUpload",)
+
+
+class ResumableUpload:
+    def __init__(self, session: SyncClient) -> None:
+        self._client = session
+        self.url = f"{self._client.base_url}upload/resumable"
+        self.expiration_time_format = "%a, %d %b %Y %X %Z"
+        self._filestore = FileStore()
+
+    def get_link(self, objectname: str) -> str:
+        """Get the upload link associated with objectname in the bucket
+
+        Parameters
+        ----------
+        objectname
+            This could be the local filename or objectname in the storage
+        """
+        if not is_valid_arg(objectname):
+            raise StorageException("Objectname cannot be empty")
+        return self._filestore.get_link(objectname)
+
+    def create_unique_link(
+        self, bucketname=None, objectname=None, filename=None
+    ) -> None:
+        """Create a unique upload link for the given bucketname and objectname
+
+        Parameters
+        ----------
+        bucketname
+            Storage bucket
+        objectname
+            Filename in the bucket
+        filename
+            Local file
+        """
+        if not is_valid_arg(bucketname):
+            raise StorageException("Bucketname cannot be empty")
+
+        if not (is_valid_arg(objectname) or is_valid_arg(filename)):
+            raise StorageException("Must specify objectname or filename")
+
+        file = filename if filename else objectname
+
+        # file may still be blank (e.g. whitespace-only), so validate it again
+        if not is_valid_arg(file):
+            raise StorageException("Must specify objectname or filename")
+
+        upload_mode = None
+
+        info = FileInfo(
+            name=file, link="", length="", headers={"Tus-Resumable": "1.0.0"}
+        )
+
+        if not filename:
+            upload_mode = "Upload-Defer-Length"
+            info["headers"][upload_mode] = "1"
+        else:
+            upload_mode = "Upload-Length"
+            size = str(os.stat(filename).st_size)
+
+            if int(size) == 0:
+                raise StorageException(
+                    f"Cannot create a link for an empty file: {file}"
+                )
+
+            info["headers"][upload_mode] = size
+            info["length"] = size
+
+        obj_name = os.path.split(file)[1]
+        metadata = UploadMetadata(bucketName=bucketname, objectName=obj_name)
+
+        info["headers"]["Upload-Metadata"] = base64encode_metadata(metadata)
+        response = self._client.post(self.url, headers=info["headers"])
+
+        if response.status_code != 201:
+            raise StorageException(response.content)
+
+        expiration_time = datetime.strptime(
+            response.headers["upload-expires"], self.expiration_time_format
+        )
+        info["expiration_time"] = expiration_time.timestamp()
+
+        info["link"] = response.headers["location"]
+        del info["headers"][upload_mode]
+        self._filestore.mark_file(info)
+
+    def resumable_offset(self, link: str, headers) -> str:
+        """Get the current upload offset from the server
+
+        Parameters
+        ----------
+        link
+            Target url
+        headers
+            Metadata headers sent to the server
+        """
+        if not self._filestore.link_exists(link):
+            raise StorageException(f"There's no reference to that link: {link}")
+
+        response = self._client.head(link, headers=headers)
+
+        if "upload-offset" not in response.headers:
+            raise StorageException("Error while fetching the next offset.")
+
+        return response.headers["upload-offset"]
+
+    def terminate(self, file: str) -> None:
+        """Drop the link associated with a file
+
+        Parameters
+        ----------
+        file
+            file name used to look up its metadata info
+        """
+        if not is_valid_arg(file):
+            raise StorageException("File argument cannot be empty")
+
+        info = self._filestore.get_file_info(file)
+        response = self._client.delete(info["link"], headers=info["headers"])
+
+        if response.status_code != 204:
+            raise StorageException(response.content)
+
+        self._filestore.remove_file(file)
+
+    def upload(
+        self, filename, upload_defer=False, link=None, objectname=None, mb_size=1
+    ) -> None:
+        """Send the file's content in chunks to the target url
+
+        Parameters
+        ----------
+        filename
+            Local file
+        upload_defer
+            Requires link and objectname to be provided so the file info can be retrieved from the FileStore
+        link
+            Target url
+        objectname
+            Name of the file in the bucket
+        mb_size
+            Number of megabytes (MiB) to send in each chunk
+        """
+        if upload_defer:
+            if not (is_valid_arg(link) and is_valid_arg(objectname)):
+                raise StorageException(
+                    "Upload-Defer mode requires a link and objectname"
+                )
+
+        if not is_valid_arg(filename):
+            raise StorageException("Must specify a filename")
+
+        target_file = objectname if upload_defer else filename
+        chunk_size = 1048576 * int(max(1, mb_size))  # 1024 * 1024 * mb_size
+        self._filestore.update_file_headers(
+            target_file, "Content-Type", "application/offset+octet-stream"
+        )
+        storage_link = link if upload_defer else self.get_link(target_file)
+
+        if upload_defer:
+            size = str(os.stat(filename).st_size)
+
+            if int(size) == 0:
+                raise StorageException(f"Cannot upload an empty file: {filename}")
+
+            # In deferred mode the length was unknown at link-creation time,
+            # so it is sent with the first PATCH request.
+            self._filestore.update_file_headers(target_file, "Upload-Length", size)
+            self._filestore.update_file_headers(target_file, "Upload-Offset", "0")
+            headers = self._filestore.get_file_headers(target_file)
+            response = self._client.patch(storage_link, headers=headers)
+            self._filestore.delete_file_headers(target_file, "Upload-Length")
+
+        while True:
+            headers = self._filestore.get_file_headers(target_file)
+            offset = self.resumable_offset(storage_link, headers)
+            file = self._filestore.open_file(filename, offset=int(offset))
+            self._filestore.update_file_headers(target_file, "Upload-Offset", offset)
+
+            chunk = file.read(chunk_size)
+            # Close the handle right after reading so it is not leaked when
+            # the loop runs for more than one chunk.
+            self._filestore.close_file(file)
+            headers = self._filestore.get_file_headers(target_file)
+            response = self._client.patch(storage_link, headers=headers, content=chunk)
+
+            if response.status_code not in {201, 204}:
+                raise StorageException(response.content)
+
+            if "tus-complete" in response.headers:
+                self._filestore.remove_file(target_file)
+                break
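Both implementations size their chunks as `1048576 * max(1, mb_size)` bytes. A small, self-contained illustration of what that arithmetic means for the number of PATCH requests (the helper function is ours, not part of the library):

```python
import math


def patches_needed(file_size: int, mb_size: int = 1) -> int:
    """Number of chunk PATCHes upload() will issue for a file of file_size bytes."""
    chunk_size = 1048576 * int(max(1, mb_size))  # same formula as upload()
    return max(1, math.ceil(file_size / chunk_size))


assert patches_needed(10 * 1048576) == 10  # 10 MiB in 1 MiB chunks
assert patches_needed(10 * 1048576, mb_size=4) == 3  # 4 MiB chunks: 4 + 4 + 2
```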
diff --git a/storage3/types.py b/storage3/types.py
index 5e86d3c6..a6e80eca 100644
--- a/storage3/types.py
+++ b/storage3/types.py
@@ -2,7 +2,7 @@
 from dataclasses import asdict, dataclass
 from datetime import datetime
-from typing import Literal, Optional, TypedDict, Union
+from typing import Dict, Literal, Optional, TypedDict, Union
 
 import dateutil.parser
 
@@ -82,6 +82,21 @@ class DownloadOptions(TypedDict, total=False):
 )
 
 
+class UploadMetadata(TypedDict):
+    bucketName: str
+    objectName: str
+
+
+class FileInfo(TypedDict, total=False):
+    name: str
+    link: str
+    length: str
+    headers: Dict[str, str]
+    expiration_time: float
+    fingerprint: str
+    mtime: float
+
+
 class UploadData(TypedDict, total=False):
     Id: str
     Key: str
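`FileInfo` is declared `total=False` because the resumable code builds it incrementally: `name`, `link`, `length`, and `headers` are set up front, while `expiration_time`, `fingerprint`, and `mtime` are only filled in after the link is created. At runtime these TypedDicts behave like ordinary dicts:

```python
from storage3.types import FileInfo, UploadMetadata

# Constructed the same way create_unique_link does internally; the remaining
# FileInfo keys (expiration_time, fingerprint, mtime) are added later.
metadata = UploadMetadata(bucketName="test", objectName="logo.svg")
info = FileInfo(name="logo.svg", link="", length="", headers={"Tus-Resumable": "1.0.0"})

assert metadata["bucketName"] == "test"
assert info["headers"]["Tus-Resumable"] == "1.0.0"
```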
diff --git a/storage3/utils.py b/storage3/utils.py
index 9689fc8e..d1a2cd48 100644
--- a/storage3/utils.py
+++ b/storage3/utils.py
@@ -1,6 +1,16 @@
+import json
+import os
+import tempfile
+from base64 import b64encode
+from datetime import datetime
+from hashlib import md5
+from typing import Dict
+
 from httpx import AsyncClient as AsyncClient  # noqa: F401
 from httpx import Client as BaseClient
 
+from .types import FileInfo, UploadMetadata
+
 
 class SyncClient(BaseClient):
     def aclose(self) -> None:
@@ -9,3 +19,198 @@ def aclose(self) -> None:
 
 class StorageException(Exception):
     """Error raised when an operation on the storage API fails."""
+
+
+class FileStore:
+    """This class serves as storage for the metadata of files sent in the resumable upload workflow"""
+
+    def __init__(self):
+        self.disk_storage = tempfile.NamedTemporaryFile(mode="w+t", delete=False)
+        self.reload_storage()
+
+    def fingerprint(self, file_info: FileInfo):
+        """Generate a fingerprint from (at most) the first 64 KiB of the file's content"""
+
+        block_size = 64 * 1024
+        min_size = min(block_size, int(file_info["length"]))
+
+        with open(file_info["name"], "rb") as f:
+            data = f.read(min_size)
+            file_info["fingerprint"] = md5(data).hexdigest()
+
+    def persist(self) -> None:
+        """Save the current state of the in-memory storage to disk"""
+        with open(self.disk_storage.name, "w") as f:
+            f.write(json.dumps(self.storage, indent=2))
+            f.flush()
+
+    def mark_file(self, file_info: FileInfo):
+        """Store file metadata in the in-memory storage"""
+
+        if len(file_info["length"]) != 0:
+            self.fingerprint(file_info)
+            file_info["mtime"] = os.stat(file_info["name"]).st_mtime
+
+        self.storage[file_info["name"]] = file_info
+        self.persist()
+
+    def reload_storage(self) -> None:
+        """Refresh the in-memory storage from the disk storage"""
+        self.storage = {}
+        size = os.stat(self.disk_storage.name).st_size
+        if size > 0:
+            with open(self.disk_storage.name) as f:
+                self.storage = json.load(f)
+
+    def file_exists(self, filename: str) -> bool:
+        """Verify that the file exists in the storage
+
+        Parameters
+        ----------
+        filename
+            This could be the local filename or objectname in the storage
+        """
+        self.reload_storage()
+        return filename in self.storage
+
+    def get_file_info(self, filename) -> FileInfo:
+        """Return the file info metadata associated with a filename in the storage
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        """
+        if not self.file_exists(filename):
+            raise StorageException(f"There is no entry for {filename} in FileStore")
+
+        return self.storage[filename]
+
+    def update_file_headers(self, filename, key, value) -> None:
+        """Update a header value in the file info metadata
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        key
+            name of the header to be modified
+        value
+            new value for the header
+        """
+        file = self.get_file_info(filename)
+        is_link_expired = file["expiration_time"] < datetime.now().timestamp()
+
+        if not is_link_expired:
+            file["headers"][key] = value
+            self.storage[filename] = file
+            self.persist()
+        else:
+            self.remove_file(filename)
+            raise StorageException("Upload link is expired")
+
+    def delete_file_headers(self, filename, key) -> None:
+        """Remove a header from the file info metadata
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        key
+            name of the header to be removed
+        """
+        file = self.get_file_info(filename)
+        if key in file["headers"]:
+            del file["headers"][key]
+            self.storage[filename] = file
+            self.persist()
+
+    def get_file_headers(self, filename) -> Dict[str, str]:
+        """Return the file's headers used during the upload workflow
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        """
+        return self.get_file_info(filename)["headers"]
+
+    def open_file(self, filename: str, offset: int):
+        """Open a file at the specified offset
+
+        Parameters
+        ----------
+        filename
+            local file
+        offset
+            position to which the file pointer is moved
+        """
+        file = open(filename, "rb")
+        file.seek(offset)
+        return file
+
+    def close_file(self, file) -> None:
+        """Close an open file object.
+
+        Parameters
+        ----------
+        file
+            the open file object returned by open_file
+        """
+        file.close()
+
+    def remove_file(self, filename: str) -> None:
+        """Remove the filename entry from the in-memory storage and commit the change to the disk storage
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        """
+
+        if not self.file_exists(filename):
+            raise StorageException(f"There is no entry for {filename} in FileStore")
+
+        del self.storage[filename]
+        self.persist()
+
+    def get_link(self, filename: str) -> str:
+        """Return the filename's link associated with its resumable endpoint
+
+        Parameters
+        ----------
+        filename
+            key under which the file's metadata is stored
+        """
+
+        if not self.file_exists(filename):
+            raise StorageException(f"There is no entry for {filename} in FileStore")
+
+        return self.storage[filename]["link"]
+
+    def link_exists(self, link: str) -> bool:
+        """Check if the link is already in the storage
+
+        Parameters
+        ----------
+        link:
+            link associated with a resumable endpoint
+        """
+        return any(self.get_link(obj) == link for obj in self.storage)
+
+
+def is_valid_arg(target: str) -> bool:
+    return isinstance(target, str) and len(target.strip()) != 0
+
+
+def base64encode_metadata(metadata: UploadMetadata) -> str:
+    """Generate the base64-encoded Upload-Metadata header value
+
+    Parameters
+    ----------
+    metadata
+        Bucket and object pair representing the resulting file in the storage
+    """
+    res = [f"{k} {b64encode(bytes(v, 'utf-8')).decode()}" for k, v in metadata.items()]
+    return ",".join(res)
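`base64encode_metadata` produces the TUS `Upload-Metadata` header: a comma-separated list of `key base64(value)` pairs. A quick illustration of its output for a hypothetical bucket/object pair:

```python
from storage3.types import UploadMetadata
from storage3.utils import base64encode_metadata

metadata = UploadMetadata(bucketName="test", objectName="logo.svg")
print(base64encode_metadata(metadata))
# Expected output: bucketName dGVzdA==,objectName bG9nby5zdmc=
```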
diff --git a/tests/resumable/README.md b/tests/resumable/README.md
new file mode 100644
index 00000000..a5ec2f3b
--- /dev/null
+++ b/tests/resumable/README.md
@@ -0,0 +1,73 @@
+# Tests for Resumable
+
+How to run the tests, step by step:
+
+
+### Create a new project
+
+- Create a new empty project from the [Dashboard](https://supabase.com/dashboard/projects).
+
+
+### Create a new bucket
+
+- Create a new empty bucket named "test".
+
+![](create_new_bucket.png)
+
+
+### Create policies
+
+- Create policies to allow read/write operations on the bucket.
+- Policies should grant read access to `storage.objects` and `storage.buckets`.
+- Policies should grant insert (upload) and delete access to resources.
+- These permissions are deliberately wide open; they are for testing purposes only (delete them after the tests).
+
+![](policies.png)
+
+
+![](policies_list.png)
+
+
+#### Policy templates
+
+- Example policy templates; they **must be edited before use** to fit your use case.
+
+At `storage.objects`:
+
+```sql
+CREATE POLICY "Enable all access for all users" ON "storage"."objects"
+AS PERMISSIVE FOR ALL
+TO public
+USING (true)
+WITH CHECK (true);
+```
+
+At `storage.buckets`:
+
+```sql
+CREATE POLICY "Enable all access for all users" ON "storage"."buckets"
+AS PERMISSIVE FOR ALL
+TO public
+USING (true)
+WITH CHECK (true);
+```
+
+
+### Set environment variables
+
+![](env_variables.png)
+
+```bash
+export SUPABASE_URL='Supabase_HTTPS_URL_here'
+export SUPABASE_KEY='Supabase_API_Key_here'
+export TEST_BUCKET='test'  # Bucket name
+```
+
+
+### Run tests
+
+```bash
+poetry run pytest tests/resumable -v -s
+```
+
+- You can delete the bucket and policies after running the tests.
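The suite reads its configuration from the environment, and a missing variable only surfaces later as a confusing URL like `None/storage/v1`. A small, optional pre-flight check (our own helper, not part of this repo) fails fast instead:

```python
import os

# Hypothetical pre-flight check mirroring the variables the test suite expects;
# run it before the pytest invocation above to catch a missing value early.
REQUIRED = ("SUPABASE_URL", "SUPABASE_KEY", "TEST_BUCKET")

for var in REQUIRED:
    if not os.getenv(var):
        raise SystemExit(f"Missing required environment variable: {var}")
print("Environment looks good; run: poetry run pytest tests/resumable -v -s")
```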
diff --git a/tests/resumable/conftest.py b/tests/resumable/conftest.py
new file mode 100644
index 00000000..2ce79a99
--- /dev/null
+++ b/tests/resumable/conftest.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+import asyncio
+import os
+from urllib.parse import urlparse
+
+import pytest
+import pytest_asyncio
+
+from storage3 import AsyncStorageClient, SyncStorageClient
+
+
+def is_https_url(url: str) -> bool:
+    """Simple helper that checks if a string argument is an HTTPS URL."""
+    return urlparse(url).scheme == "https"
+
+
+@pytest.fixture
+def file():
+    """Write a small SVG file to disk and return its (closed) file object.
+
+    Tests only use the .name attribute of the returned object.
+    """
+    file_name = "test_image.svg"
+    # Minimal placeholder SVG, for testing purposes only; any non-empty,
+    # valid SVG works here.
+    file_content = b"""<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16">
+<rect width="16" height="16" fill="#3ecf8e"/>
+</svg>""".strip()
+
+    with open(file_name, "wb") as f:
+        f.write(file_content)
+
+    return f
+
+
+@pytest_asyncio.fixture(scope="package")
+def event_loop() -> asyncio.AbstractEventLoop:
+    """Returns an event loop for the current thread"""
+    return asyncio.get_event_loop_policy().get_event_loop()
+
+
+@pytest.fixture
+def test_bucket() -> str:
+    """Get the bucket name from the environment."""
+    return os.getenv("TEST_BUCKET")
+
+
+@pytest.fixture(scope="module")
+def configure_client():
+    """Get the API URL and API key from the environment."""
+    url = f'{os.getenv("SUPABASE_URL")}/storage/v1'
+    key = os.getenv("SUPABASE_KEY")
+    return (url, key)
+
+
+@pytest.fixture(scope="module")
+def sync_client(configure_client) -> SyncStorageClient:
+    """Simple helper that returns a SyncStorageClient."""
+    url, key = configure_client
+    return SyncStorageClient(url, {"apiKey": key, "Authorization": f"Bearer {key}"})
+
+
+@pytest.fixture(scope="module")
+def async_client(configure_client) -> AsyncStorageClient:
+    """Simple helper that returns an AsyncStorageClient."""
+    url, key = configure_client
+    return AsyncStorageClient(url, {"apiKey": key, "Authorization": f"Bearer {key}"})
diff --git a/tests/resumable/create_new_bucket.png b/tests/resumable/create_new_bucket.png
new file mode 100644
index 00000000..f2febf27
Binary files /dev/null and b/tests/resumable/create_new_bucket.png differ
diff --git a/tests/resumable/env_variables.png b/tests/resumable/env_variables.png
new file mode 100644
index 00000000..ccc853e8
Binary files /dev/null and b/tests/resumable/env_variables.png differ
diff --git a/tests/resumable/policies.png b/tests/resumable/policies.png
new file mode 100644
index 00000000..b7c4416a
Binary files /dev/null and b/tests/resumable/policies.png differ
diff --git a/tests/resumable/policies_list.png b/tests/resumable/policies_list.png
new file mode 100644
index 00000000..b2e78454
Binary files /dev/null and b/tests/resumable/policies_list.png differ
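The point of a resumable upload is that an interrupted transfer can continue where it left off: each loop iteration in `upload()` asks the server for the current offset (`resumable_offset` issues a HEAD request) before sending the next chunk. A hedged sketch of what recovery could look like with the sync client, assuming placeholder credentials and an upload link that has not expired:

```python
from storage3 import SyncStorageClient

# Placeholder values for illustration only.
client = SyncStorageClient(
    "https://example.supabase.co/storage/v1",
    {"apiKey": "<key>", "Authorization": "Bearer <key>"},
)
client.resumable.create_unique_link(bucketname="test", filename="big.bin")

try:
    client.resumable.upload("big.bin")
except Exception:
    # The link and headers are still in the FileStore, and the next call
    # re-reads the server-side offset, so a retry resumes the transfer.
    client.resumable.upload("big.bin")
```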
diff --git a/tests/resumable/test_resumable.py b/tests/resumable/test_resumable.py
new file mode 100644
index 00000000..7e123e99
--- /dev/null
+++ b/tests/resumable/test_resumable.py
@@ -0,0 +1,170 @@
+import os
+
+import pytest
+from conftest import is_https_url
+
+from storage3.utils import StorageException
+
+
+def test_non_valid_resumable_options(sync_client):
+    client = sync_client
+
+    # A non-string link has no entry in the FileStore, so an exception is raised
+    with pytest.raises(StorageException):
+        client.resumable.resumable_offset(1, {})
+
+    # An unknown link has no entry in the FileStore either
+    with pytest.raises(StorageException):
+        client.resumable.resumable_offset("https://random_bucket_id_link", {})
+
+
+def test_non_valid_terminate_options(sync_client):
+    client = sync_client
+
+    # Raise an exception when the argument is not a string
+    with pytest.raises(StorageException):
+        client.resumable.terminate(1)
+
+    # Raise an exception when the argument is an empty string
+    with pytest.raises(StorageException):
+        client.resumable.terminate(" ")
+
+    # Raise an exception when there's no file info associated with the argument
+    with pytest.raises(StorageException):
+        client.resumable.terminate("random_🐍.log")
+
+
+def test_sync_client(sync_client, file, test_bucket):
+    client = sync_client
+
+    # Check the file was created during the configuration phase
+    assert file is not None
+
+    # Verify test_bucket is not an empty string
+    assert len(test_bucket.strip()) > 0
+
+    client.resumable.create_unique_link(bucketname=test_bucket, filename=file.name)
+    link = client.resumable.get_link(file.name)
+
+    # Verify the link was generated as expected
+    assert is_https_url(link)
+
+    # Check the file is not empty
+    assert os.stat(file.name).st_size > 0
+
+    # Verify the file was uploaded correctly
+    client.resumable.upload(file.name)
+    bucket = client.from_(test_bucket)
+
+    items = bucket.list()
+    is_file_loaded = any(item["name"] == file.name for item in items)
+    assert is_file_loaded, f"File not loaded:\n{items}"
+
+    # Terminating with an empty argument must raise
+    with pytest.raises(StorageException):
+        client.resumable.terminate("")
+
+    bucket.remove(file.name)
+
+
+def test_deferred_sync_client(sync_client, file, test_bucket):
+    client = sync_client
+
+    # Check the file was created during the configuration phase
+    assert file is not None
+
+    # Verify test_bucket is not an empty string
+    assert len(test_bucket.strip()) > 0
+
+    client.resumable.create_unique_link(bucketname=test_bucket, objectname=file.name)
+    link = client.resumable.get_link(file.name)
+
+    # Verify the link was generated as expected
+    assert is_https_url(link)
+
+    # Check the file is not empty
+    assert os.stat(file.name).st_size > 0
+
+    # Verify the file was uploaded correctly
+    client.resumable.upload(
+        file.name, mb_size=10, upload_defer=True, link=link, objectname=file.name
+    )
+    bucket = client.from_(test_bucket)
+
+    items = bucket.list()
+    is_file_loaded = any(item["name"] == file.name for item in items)
+    assert is_file_loaded, f"File not loaded:\n{items}"
+
+    bucket.remove(file.name)
+
+
+async def test_async_client(async_client, file, test_bucket):
+    client = async_client
+
+    # Check the file was created during the configuration phase
+    assert file is not None
+
+    # Verify test_bucket is not an empty string
+    assert len(test_bucket.strip()) > 0
+
+    await client.resumable.create_unique_link(
+        bucketname=test_bucket, filename=file.name
+    )
+    link = client.resumable.get_link(file.name)
+
+    # Verify the link was generated as expected
+    assert is_https_url(link)
+
+    # Check the file is not empty
+    assert os.stat(file.name).st_size > 0
+
+    # Verify the file was uploaded correctly
+    await client.resumable.upload(file.name)
+    bucket = client.from_(test_bucket)
+
+    items = await bucket.list()
+    is_file_loaded = any(item["name"] == file.name for item in items)
+    assert is_file_loaded, f"File not loaded:\n{items}"
+
+    await bucket.remove(file.name)
+
+
+async def test_deferred_async_client(async_client, file, test_bucket):
+    client = async_client
+
+    # Check the file was created during the configuration phase
+    assert file is not None
+
+    # Verify test_bucket is not an empty string
+    assert len(test_bucket.strip()) > 0
+
+    await client.resumable.create_unique_link(
+        bucketname=test_bucket, objectname=file.name
+    )
+    link = client.resumable.get_link(file.name)
+
+    # Verify the link was generated as expected
+    assert is_https_url(link)
+
+    # Check the file is not empty
+    assert os.stat(file.name).st_size > 0
+
+    # Verify the file was uploaded correctly
+    await client.resumable.upload(
+        file.name, mb_size=10, upload_defer=True, link=link, objectname=file.name
+    )
+    bucket = client.from_(test_bucket)
+
+    items = await bucket.list()
+    is_file_loaded = any(item["name"] == file.name for item in items)
+    assert is_file_loaded, f"File not loaded:\n{items}"
+
+    await bucket.remove(file.name)
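For completeness, the deferred-length path exercised by the `test_deferred_*` cases above: the link is created from an `objectname` alone (sending `Upload-Defer-Length: 1`), and `upload(..., upload_defer=True)` supplies the length with its first PATCH before streaming chunks. A sketch with placeholder values:

```python
from storage3 import SyncStorageClient

# Placeholder credentials, bucket, and file name for illustration only.
client = SyncStorageClient(
    "https://example.supabase.co/storage/v1",
    {"apiKey": "<key>", "Authorization": "Bearer <key>"},
)
# No file size is given here, so the server is told the length comes later.
client.resumable.create_unique_link(bucketname="test", objectname="app.log")
link = client.resumable.get_link("app.log")
# The first PATCH carries Upload-Length; the chunks follow as usual.
client.resumable.upload("app.log", upload_defer=True, link=link, objectname="app.log")
```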