diff --git a/.editorconfig b/.editorconfig index 89a5079..7b619ae 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,23 +3,15 @@ root = true [Makefile] indent_style = tab -[*.{html,py,js,yml}] +[*.{py,yml}] charset = utf-8 -[*.js] -indent_style = space -indent_size = 2 - [*.yml] indent_style = space indent_size = 2 -[*.html] -indent_style = space -indent_size = 4 - [*.py] indent_style = space indent_size = 4 -line_length = 79 +line_length = 100 multi_line_output = 3 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml new file mode 100644 index 0000000..0e24a76 --- /dev/null +++ b/.github/workflows/pr.yml @@ -0,0 +1,73 @@ +name: pr + +on: + - pull_request + +permissions: + contents: read + pull-requests: read + checks: write + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + PYTHON_VERSION: "3.8" + POETRY_VERSION: "1.8.3" + RUFF_VERSION: "0.6.7" + +jobs: + lint: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - uses: actions/checkout@v4 + + - name: Install python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install dependencies + run: | + pip install --upgrade pip + pip install ruff==${{ env.RUFF_VERSION }} + + - name: Run Ruff + run: ruff check --output-format=github . 
+ + test: + runs-on: ubuntu-latest + strategy: + matrix: + python: ["3.8", "3.9", "3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + cache: 'pip' + + - name: Install Poetry + run: pip install poetry==${{ env.POETRY_VERSION }} + + - name: Restore dependencies from cache + uses: actions/cache@v4 + with: + path: ~/.cache/pypoetry + key: dependencies-cache-${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ env.POETRY_VERSION }} + restore-keys: | + dependencies-cache-${{ runner.os }}-${{ env.PYTHON_VERSION }}- + + - name: Install dependencies + if: steps.setup-python.outputs.cache-hit != 'true' + run: | + poetry config virtualenvs.create false + poetry install --no-root --no-interaction + + - name: Run Pytest on Python ${{ matrix.python }} + run: poetry run pytest -m "not integration" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..c80ec24 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,60 @@ +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +env: + PYTHON_VERSION: "3.8" + POETRY_CORE_VERSION: "1.9.0" + +jobs: + release-build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Build release distributions + run: | + python -m pip install build poetry-core==${{ env.POETRY_CORE_VERSION }} + python -m build + + - name: Upload distributions + uses: actions/upload-artifact@v4 + with: + name: release-dists + path: dist/ + + pypi-publish: + runs-on: ubuntu-latest + + needs: + - release-build + + permissions: + # IMPORTANT: this permission is mandatory for trusted publishing + id-token: write + + environment: + name: test_pypi + url: https://test.pypi.org/p/toggl_python + + steps: + - name: Retrieve release distributions + 
uses: actions/download-artifact@v4 + with: + name: release-dists + path: dist/ + + - name: Publish release distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ diff --git a/.gitignore b/.gitignore index be19f30..ca4cf5c 100644 --- a/.gitignore +++ b/.gitignore @@ -47,39 +47,9 @@ coverage.xml .hypothesis/ .pytest_cache/ -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - # pyenv .python-version -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - # dotenv .env @@ -88,23 +58,13 @@ celerybeat-schedule venv/ ENV/ -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - # mypy .mypy_cache/ +.ruff_cache + # IDE settings .vscode/ local_*.py .idea/ - -# readthedocs sphinx generated documentation -_build/ +pyrightconfig.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 061c326..e187218 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,17 +1,43 @@ default_language_version: python: python3.8 +default_install_hook_types: + - pre-commit + - pre-push + repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: mixed-line-ending + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.6.7 + hooks: + - id: ruff + args: [ --fix ] + # Spell Checker + - repo: https://github.com/crate-ci/typos + rev: v1.24.6 + hooks: + - id: typos + # Git commit linter + - repo: https://github.com/jorisroovers/gitlint + rev: v0.19.1 + hooks: + - id: gitlint + # Detect hardcoded secrets + - repo: https://github.com/zricethezav/gitleaks + rev: v8.19.2 + hooks: + - id: gitleaks - 
repo: local hooks: - - id: black - name: black - entry: poetry run black . - language: python - types: [python] - - - id: isort - name: isort - entry: poetry run isort . + - id: test + name: test + entry: poetry run nox + pass_filenames: false + stages: [pre-push] language: python types: [python] diff --git a/LICENSE b/LICENSE index bc8b0bc..82d3e1b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2020, Ivlev Denis +Copyright (c) 2024, Evrone.com Inc Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index 5af51c6..d2698ed 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,140 @@ -# Toggl Python API +# toggl-python -![https://pypi.python.org/pypi/toggl_python](https://img.shields.io/pypi/v/toggl_python.svg) [![Supported python versions](https://img.shields.io/pypi/pyversions/toggl_python.svg?style=flat-square)](https://pypi.python.org/pypi/toggl_python) [![MIT License](https://img.shields.io/pypi/l/aiogram.svg?style=flat-square)](https://opensource.org/licenses/MIT) +![https://pypi.python.org/pypi/toggl_python](https://img.shields.io/pypi/v/toggl_python.svg) ![Downloads](https://img.shields.io/pypi/dm/toggl-python) [![Supported python versions](https://img.shields.io/pypi/pyversions/toggl_python.svg?style=flat-square)](https://pypi.python.org/pypi/toggl_python) [![MIT License](https://img.shields.io/pypi/l/aiogram.svg?style=flat-square)](https://opensource.org/licenses/MIT) +Typed `Toggl API` Python wrapper with pre-validation to avoid extra network usage. 
-* Based on open [Toggl API documentation](https://engineering.toggl.com/docs/) -* [Documentation](https://toggl-python.readthedocs.io) +* Based on [Toggl API](https://engineering.toggl.com/docs/) +* [Documentation](https://toggl-python.readthedocs.io) -## Warning -The package is currently broken because it uses **deprecated** Toggl API V8. Migration to V9 is currently in progress. +## Important Note + +Migration to API V9 is currently in progress. Many methods are not implemented yet. Feel free to open an issue to escalate their development. + +## Install + +`pip install toggl-python` + +## Usage + +Fetch information about current user via `TokenAuth` (`TOGGL_TOKEN` is required): + +```python +from toggl_python.auth import TokenAuth +from toggl_python.entities.user import CurrentUser + + +if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).me() +``` + +`Basic Auth` is also supported: + + +```python +from toggl_python.auth import BasicAuth +from toggl_python.entities.user import CurrentUser + + +if __name__ == "__main__": + auth = BasicAuth(username="username", password="password") + CurrentUser(auth=auth).me() + +``` + +Package supports different input formats for `datetime` arguments: + +* `str`: + +```python +from toggl_python.auth import TokenAuth +from toggl_python.entities.user import CurrentUser + + +if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).get_time_entries( + start_date="2024-01-01", + end_date="2024-02-01T15:00:00-02:00", + ) +``` + +- `datetime`: + +```python +from datetime import datetime, timezone + +from toggl_python.auth import TokenAuth +from toggl_python.entities.user import CurrentUser + + +if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).get_time_entries( + start_date=datetime(2024, 1, 1, tzinfo=timezone.utc), + end_date=datetime(2024, 2, 1, 15, tzinfo=timezone.utc), + ) +``` + +Query params are available as well: + 
+```python +from toggl_python.auth import TokenAuth +from toggl_python.entities.workspace import Workspace + + +if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + workspace_id = "WORKSPACE_ID" + Workspace(auth=auth).get_projects(active=True) +``` + +Pre-validation to avoid extra network usage: + +```python +from datetime import datetime, timezone + +from toggl_python.auth import TokenAuth +from toggl_python.entities.workspace import Workspace + + +if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + workspace_id = "WORKSPACE_ID" + since = datetime(2024, 1, 20, tzinfo=timezone.utc) + # Assume that datetime.now is 2024-05-01 + Workspace(auth=auth).list(since=since) + + # ValidationError: Since cannot be older than 3 months +``` + +## Development + +`poetry` is required during local setup. + +Run `poetry install --no-root` to setup local environment. `pre-commit install` is also advisable. + + +### Unit Testing + +In order to run tests using different Python versions, please follow these steps: +* Install `pyenv` +* Install all supported Python versions - `pyenv install 3.8.* 3.9.* ...` +* Run `pyenv local 3.8.* 3.9.* ...` +* Run `poetry run nox` + +To run classic unit tests, execute `pytest -m "not integration"` + +### Integration Testing + +Pre-defined `Workspace` and `Project` are required to have in `Toggl` system. + +Command `TOGGL_TOKEN=... WORKSPACE_ID=... PROJECT_ID=... USER_ID=... TOGGL_PASSWORD=... pytest -m integration` ## Credits -------- -This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage) project template. +This package follows [evrone-python-guidelines](https://github.com/evrone/evrone-python-guidelines) and uses configs from [evrone-django-template](https://github.com/evrone/evrone-django-template). 
-[](https://evrone.com/?utm_source=github.com) +[](https://evrone.com/?utm_source=github.com) diff --git a/docs/index.rst b/docs/index.rst old mode 100644 new mode 100755 index e4ad884..e431b55 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,46 +1,160 @@ -Toggl Python API -================ +toggl-python +============ -.. image:: https://evrone.com/logo/evrone-sponsored-logo.png - :width: 231 - :alt: Sponsored by evrone.com - :target: https://evrone.com/?utm_source=github.com +|https://pypi.python.org/pypi/toggl_python| |Supported python versions| +|MIT License| -Based on open `Toggl API documentation `_ +Typed python wrapper for ``Toggl API`` with pre-validation to avoid +extra network usage. -Installation -============ -`pip install toggl-python` or use `poetry `_ `poetry add toggl-python` +- Based on `Toggl API `__ +- `Documentation `__ + +Important Note +-------------- + +Migration to API V9 is currently in progress. Many methods are not +implemented yet. Feel free to open an issue to escalate their +implementation. + +Install +------- + +``pip install toggl-python`` + +Usage +----- + +Fetch information about current user via ``TokenAuth`` (``TOGGL_TOKEN`` +is required): + +.. code:: python + + from toggl_python.auth import TokenAuth + from toggl_python.entities.user import CurrentUser + + + if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).me() + +``Basic Auth`` is also supported. + +.. code:: python + + from toggl_python.auth import BasicAuth + from toggl_python.entities.user import CurrentUser + + + if __name__ == "__main__": + auth = BasicAuth(username="username", password="password") + CurrentUser(auth=auth).me() + +Package supports different input formats for ``datetime`` arguments: + +- ``str``: + +.. 
code:: python + + from toggl_python.auth import TokenAuth + from toggl_python.entities.user import CurrentUser + + + if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).get_time_entries( + start_date="2024-01-01", + end_date="2024-02-01T15:00:00-02:00", + ) + +- ``datetime``: + +.. code:: python + + from datetime import datetime, timezone + + from toggl_python.auth import TokenAuth + from toggl_python.entities.user import CurrentUser + + + if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + CurrentUser(auth=auth).get_time_entries( + start_date=datetime(2024, 1, 1, tzinfo=timezone.utc), + end_date=datetime(2024, 2, 1, 15, tzinfo=timezone.utc), + ) + +Query params are available as well: + +.. code:: python + + from toggl_python.auth import TokenAuth + from toggl_python.entities.workspace import Workspace + + + if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + workspace_id = "WORKSPACE_ID" + Workspace(auth=auth).get_projects(active=True) + +Pre-validation to avoid extra network usage: + +.. code:: python + + from datetime import datetime, timezone + + from toggl_python.auth import TokenAuth + from toggl_python.entities.workspace import Workspace + + + if __name__ == "__main__": + auth = TokenAuth(token="TOGGL_TOKEN") + workspace_id = "WORKSPACE_ID" + since = datetime(2024, 1, 20, tzinfo=timezone.utc) + # Assume that datetime.now is 2024-05-01 + Workspace(auth=auth).list(since=since) + + # ValidationError: Since cannot be older than 3 months -Usage example -============= +Development +----------- -Get authenticated user time entries: +``poetry`` is required during local setup. -.. code-block:: python +Run ``poetry install --no-root`` to setup local environment. +``pre-commit install`` is also advisable. 
- from toggl_python import TokenAuth, TimeEntries +Unit Testing +~~~~~~~~~~~~ - if __name__ == "__main__": - auth = TokenAuth('AUTH_TOKEN') - print(TimeEntries(auth=auth).list()) +In order to run tests using different Python versions, please follow +these steps: \* Install ``pyenv`` \* Install all supported Python +versions - ``pyenv install 3.8.* 3.9.* ...`` \* Run +``pyenv local 3.8.* 3.9.* ...`` \* Run ``poetry run nox`` -Get information about authenticated user: +To run classic unit tests, execute ``pytest -m "not integration"`` -.. code-block:: python +Integration Testing +~~~~~~~~~~~~~~~~~~~ - from toggl_python import TokenAuth, Users +Pre-defined ``Workspace`` and ``Project`` are required to have in +``Toggl`` system. - if __name__ == "__main__": - auth = TokenAuth('AUTH_TOKEN') - print(Users(auth=auth).me()) +Command +``TOGGL_TOKEN=... WORKSPACE_ID=... PROJECT_ID=... USER_ID=... TOGGL_PASSWORD=... pytest -m integration`` -Get information about authenticated user workspaces: +Credits +------- -.. code-block:: python +This package follows +`evrone-python-guidelines `__ +and uses configs from +`evrone-django-template `__. - from toggl_python import TokenAuth, Workspaces +` `__ - if __name__ == "__main__": - auth = TokenAuth('AUTH_TOKEN') - print(Workspaces(auth=auth).list()) +.. |https://pypi.python.org/pypi/toggl_python| image:: https://img.shields.io/pypi/v/toggl_python.svg +.. |Supported python versions| image:: https://img.shields.io/pypi/pyversions/toggl_python.svg?style=flat-square + :target: https://pypi.python.org/pypi/toggl_python +.. 
|MIT License| image:: https://img.shields.io/pypi/l/aiogram.svg?style=flat-square + :target: https://opensource.org/licenses/MIT diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 91590b1..0000000 --- a/mypy.ini +++ /dev/null @@ -1,6 +0,0 @@ -[mypy] -ignore_missing_imports = True -allow_untyped_globals = True - -[mypy-*.migrations.*] -ignore_errors = True diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 0000000..6a8412a --- /dev/null +++ b/noxfile.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING + +import nox + + +if TYPE_CHECKING: + from nox.sessions import Session + +python_versions = ["3.8", "3.9", "3.10", "3.11", "3.12"] + +@nox.session(python=python_versions, reuse_venv=True) +def tests(session: "Session") -> None: + session.install(".") + _ = session.run("pytest", "-m", "not integration") diff --git a/poetry.lock b/poetry.lock index f68053e..f2a2deb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,107 +1,92 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "annotated-types" -version = "0.6.0" +version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" files = [ - {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, - {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + [[package]] name = "anyio" -version = "4.3.0" +version = "4.5.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, + {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, ] [package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", 
"pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] -name = "asttokens" -version = "2.4.1" -description = "Annotate AST trees with source code positions" +name = "argcomplete" +version = "3.5.0" +description = "Bash tab completion for argparse" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "argcomplete-3.5.0-py3-none-any.whl", hash = "sha256:d4bcf3ff544f51e16e54228a7ac7f486ed70ebf2ecfe49a63a91171c76bf029b"}, + {file = "argcomplete-3.5.0.tar.gz", hash = "sha256:4349400469dccfb7950bb60334a680c58d88699bff6159df61251878dc6bf74b"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] -name = "black" -version = "23.12.1" -description = "The uncompromising code formatter." 
+name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = 
"black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = 
"sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - 
[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] +tzdata = ["tzdata"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -115,20 +100,6 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "colorama" version = "0.4.6" @@ -141,79 +112,21 @@ files = [ ] [[package]] -name = "coverage" -version = "7.4.4" -description = "Code coverage measurement for Python" +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = 
"coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = 
"coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, ] -[package.extras] -toml = ["tomli"] +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] [[package]] name = "distlib" @@ -248,13 +161,13 @@ wmi = ["wmi (>=1.5.1)"] [[package]] name = "email-validator" -version = "2.1.1" +version = "2.2.0" description = "A robust email address syntax and deliverability validation library." 
optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.1.1-py3-none-any.whl", hash = "sha256:97d882d174e2a65732fb43bfce81a3a834cbc1bde8bf419e30ef5ea976370a05"}, - {file = "email_validator-2.1.1.tar.gz", hash = "sha256:200a70680ba08904be6d1eef729205cc0d687634399a5924d842533efb824b84"}, + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, ] [package.dependencies] @@ -262,50 +175,48 @@ dnspython = ">=2.0.0" idna = ">=2.0.0" [[package]] -name = "executing" -version = "2.0.1" -description = "Get the currently executing AST node of a frame, and other information" +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +test = ["pytest (>=6)"] [[package]] -name = "filelock" -version = "3.13.1" -description = "A platform independent file lock." +name = "faker" +version = "28.4.1" +description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "Faker-28.4.1-py3-none-any.whl", hash = "sha256:e59c01d1e8b8e20a83255ab8232c143cb2af3b4f5ab6a3f5ce495f385ad8ab4c"}, + {file = "faker-28.4.1.tar.gz", hash = "sha256:4294d169255a045990720d6f3fa4134b764a4cdf46ef0d3c7553d2506f1adaa1"}, ] -[package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] -typing = ["typing-extensions (>=4.8)"] +[package.dependencies] +python-dateutil = ">=2.4" [[package]] -name = "flake8" -version = "6.1.0" -description = "the modular source code checker: pep8 pyflakes and co" +name = "filelock" +version = "3.16.1" +description = "A platform independent file lock." 
optional = false -python-versions = ">=3.8.1" +python-versions = ">=3.8" files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "h11" @@ -346,13 +257,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.4" +version = "1.0.5" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.4-py3-none-any.whl", hash = "sha256:ac418c1db41bade2ad53ae2f3834a3a0f5ae76b56cf5aa497d2d033384fc7d73"}, - {file = "httpcore-1.0.4.tar.gz", hash = "sha256:cb2839ccfcba0d2d3c1131d3c3e26dfc327326fbe7a5dc0dbfe9f6c9151bb022"}, + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, ] [package.dependencies] @@ -363,17 +274,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.25.0)"] +trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.25.2" +version = "0.27.2" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -389,6 +300,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hyperframe" @@ -403,13 +315,13 @@ files = [ [[package]] name = "identify" -version = "2.5.35" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = 
"sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -417,15 +329,18 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.6" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -437,245 +352,75 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "ipython" -version = "8.22.2" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.10" -files = [ - {file = "ipython-8.22.2-py3-none-any.whl", hash = "sha256:3c86f284c8f3d8f2b6c662f885c4889a91df7cd52056fd02b7d8d6195d7f56e9"}, - {file = "ipython-8.22.2.tar.gz", hash = "sha256:2dcaad9049f9056f1fef63514f176c7d41f930daa78d05b82a176202818f2c14"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == 
\"win32\""} -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5.13.0" - -[package.extras] -all = ["ipython[black,doc,kernel,nbconvert,nbformat,notebook,parallel,qtconsole,terminal]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "jedi" -version = "0.19.1" -description = "An autocompletion tool for Python that can be used for text editors." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, -] - -[package.dependencies] -parso = ">=0.8.3,<0.9.0" - -[package.extras] -docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] -qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] - -[[package]] -name = "matplotlib-inline" -version = "0.1.6" -description = "Inline Matplotlib backend for Jupyter" -optional = false -python-versions = ">=3.5" -files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, -] - -[package.dependencies] -traitlets = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = 
"sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy" -version = "1.9.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = 
"mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = 
"mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - [[package]] name = "nodeenv" -version = "1.8.0" +version = "1.9.1" description = "Node.js virtual environment builder" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, - {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[package.dependencies] -setuptools = "*" - [[package]] -name = "packaging" -version = "24.0" -description = "Core utilities for Python packages" +name = "nox" +version = "2024.4.15" +description = "Flexible test automation." 
optional = false python-versions = ">=3.7" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "nox-2024.4.15-py3-none-any.whl", hash = "sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565"}, + {file = "nox-2024.4.15.tar.gz", hash = "sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f"}, ] -[[package]] -name = "parso" -version = "0.8.3" -description = "A Python Parser" -optional = false -python-versions = ">=3.6" -files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] +[package.dependencies] +argcomplete = ">=1.9.4,<4.0" +colorlog = ">=2.6.1,<7.0.0" +packaging = ">=20.9" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.14.1" [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +tox-to-nox = ["jinja2", "tox"] +uv = ["uv (>=0.1.6)"] [[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -description = "Pexpect allows easy control of interactive console applications." 
-optional = false -python-versions = "*" -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -[package.dependencies] -ptyprocess = ">=0.5" - [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" 
-version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -684,13 +429,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.6.2" +version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" files = [ - {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"}, - {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"}, + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, ] [package.dependencies] @@ -700,302 +445,270 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" -[[package]] -name = "prompt-toolkit" -version = "3.0.43" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = 
"sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -optional = false -python-versions = "*" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.2" -description = "Safely evaluate AST nodes without side effects" -optional = false -python-versions = "*" -files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] - -[package.extras] -tests = ["pytest"] - -[[package]] -name = "pycodestyle" -version = "2.11.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, -] - [[package]] name = "pydantic" -version = "2.6.4" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = 
"pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" +annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.16.3" -typing-extensions = ">=4.6.1" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = 
"pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - 
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = 
"pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] -name = "pyflakes" -version = "3.1.0" -description = "passive checker of Python programs" +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" optional = false -python-versions = ">=3.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +six 
= ">=1.5" [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, 
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file 
= "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = 
"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "respx" -version = "0.20.2" +version = "0.21.1" description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
optional = false python-versions = ">=3.7" files = [ - {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"}, - {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"}, + {file = "respx-0.21.1-py2.py3-none-any.whl", hash = "sha256:05f45de23f0c785862a2c92a3e173916e8ca88e4caad715dd5f68584d6053c20"}, + {file = "respx-0.21.1.tar.gz", hash = "sha256:0bd7fe21bfaa52106caa1223ce61224cf30786985f17c63c5d71eff0307ee8af"}, ] [package.dependencies] httpx = ">=0.21.0" [[package]] -name = "setuptools" -version = "69.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +name = "ruff" +version = "0.5.7" +description = "An extremely fast Python linter and code formatter, written in Rust." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1019,59 +732,36 @@ files = [ ] [[package]] -name = "stack-data" -version = "0.6.3" -description = "Extract data from python stack frames and tracebacks for informative displays" -optional = false -python-versions = "*" -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[package.dependencies] -asttokens = ">=2.1.0" -executing = ">=1.2.0" -pure-eval = "*" - -[package.extras] -tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] - -[[package]] -name = "traitlets" -version = "5.14.2" -description = "Traitlets Python configuration system" +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, - {file = 
"traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] - [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] 
[package.dependencies] @@ -1080,21 +770,10 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - [metadata] lock-version = "2.0" -python-versions = "^3.12" -content-hash = "4ba5e8bf5b236f5bb34558eb1d1455b5e8e0bee57b553e85f7167a57ad9bf146" +python-versions = "^3.8.18" +content-hash = "a2b61a43ef988732f818a1b057fe090a119205848f0085ccbbcd6c11cfec2c2f" diff --git a/pyproject.toml b/pyproject.toml index 7c1fb53..69a2903 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,67 +1,109 @@ [tool.poetry] name = "toggl_python" -version = "0.2.9" -description = "Python wrapper for Toggl API." -authors = ["Ivlev Denis "] +version = "0.3.0" +description = "Typed `Toggl API` Python wrapper with pre-validation to avoid extra network usage." 
+authors = ["Evrone "] +maintainers = ["Nifadev Vadim "] readme = "README.md" homepage = "https://github.com/evrone/toggl_python" repository = "https://github.com/evrone/toggl_python" documentation = "https://toggl-python.readthedocs.io" classifiers = [ - "Development Status :: 2 - Pre-Alpha", + "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", + "Operating System :: OS Independent", "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Typing :: Typed", ] -[tool.poetry.dependencies] -python = "^3.12" -httpx = {extras=["http2"], version="^0.25.1"} -pydantic = {extras = ["email"], version = "^2.5.1"} -[tool.poetry.dev-dependencies] -pytest = "^7.4.3" -respx = "^0.20.2" -black = "^23.11.0" -ipython = "^8.17.2" -mypy = "^1.7.0" -coverage = "^7.3.2" -flake8 = "^6.1.0" -isort = "^5.8.0" -pre-commit = "^3.5.0" +[project] +name = "toggl_python" +requires-python = ">=3.8" -[tool.black] -line-length = 100 -target-version = ['py38'] -exclude = ''' -( - \.eggs - |\.git - |\.hg - |\.mypy_cache - |\.nox - |\.tox - |\.venv - |_build - |buck-out - |build - |dist -) -''' +[tool.poetry.dependencies] +python = "^3.8.18" +httpx = {extras = ["http2"], version = "^0.27.2"} +# Necessary for listing available timezones +# use zoneinfo.available_timezones() +backports-zoneinfo = {version = "^0.2.1", python = "3.8"} +pydantic = {extras = ["email"], version = "^2.9.2"} -[tool.isort] -line_length = 100 -sections = ["FUTURE", "STDLIB", "DJANGO", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] -multi_line_output = 3 -known_django = "django" -profile = "django" -src_paths = "toggl_python" -lines_after_imports = 2 +[tool.poetry.group.dev.dependencies] +pytest = 
"^8.3.3" +nox = "^2024.4.15" +respx = "^0.21.1" +ruff = "^0.5.7" +pre-commit = "3.5.0" +faker = "^28.4.1" [build-system] -requires = ["poetry>=0.12"] -build-backend = "poetry.masonry.api" \ No newline at end of file +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +src = ["toggl_python"] +line-length = 99 +unsafe-fixes = true +extend-exclude = [ + "docs/*", + ".venv", + "venv", +] + +[tool.ruff.lint] +select = ["ALL"] +ignore = [ + "COM812", # Trailing comma missing + "D100", # Missing docstring in public module + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D103", # Missing docstring in public function + "D104", # Missing docstring in public package + "D105", # Missing docstring in magic method + "D106", # Missing docstring in public nested class + "D107", # Missing docstring in __init__ + "UP006", # Use `list` instead of `List` for type annotation + "UP007", # Use `X | Y` for type annotations (not supported on current Python version - 3.8) + # Ignored because function args emulates query args which could be boolean + "FBT001", # Boolean-typed positional argument in function definition + "FBT002", # Boolean default positional argument in function definition + "N818", # Exception name should be named with Error suffix + "PT004", # Deprecated error, will be removed in future release +] + +[tool.ruff.lint.per-file-ignores] +"tests/*" = [ + "S101", + "S106", # Possible hardcoded password assigned to argument +] +# Move standard/third-party library import into a type-checking block +# Necessary for valid Pydantic schemas +"toggl_python/schemas/*" = ["TCH002", "TCH003"] + +[tool.ruff.lint.isort] +lines-after-imports = 2 + +[tool.ruff.lint.pylint] +max-args = 12 +max-public-methods = 10 +max-locals = 16 + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.pytest.ini_options] +markers = [ + "integration: make API calls during 
testing (deselect with '-m \"not integration\"')", +] diff --git a/tests/conftest.py b/tests/conftest.py index 1f2c168..0183772 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,30 +1,49 @@ +from typing import Generator + import pytest -from tests.fixtures import REPORT_TIME_ENTRIES_RESPONSE, TIME_ENTRIES_RESPONSE +from faker import Faker +from respx import MockRouter +from respx import mock as respx_mock +from toggl_python.api import ROOT_URL +from toggl_python.auth import TokenAuth +from toggl_python.entities.report_time_entry import REPORT_ROOT_URL, ReportTimeEntry +from toggl_python.entities.user import CurrentUser +from toggl_python.entities.workspace import Workspace + +from tests.responses.me_get import FAKE_TOKEN + -from toggl_python import ReportTimeEntries, TimeEntries +fake = Faker() @pytest.fixture -def patch_report_time_entries(monkeypatch): - class MockResponse: - def __init__(self, *args, **kwargs): - pass +def response_mock() -> Generator[MockRouter, None, None]: + with respx_mock(base_url=ROOT_URL) as mock_with_base_url: + yield mock_with_base_url - @staticmethod - def json(): - return REPORT_TIME_ENTRIES_RESPONSE - monkeypatch.setattr(ReportTimeEntries, "get", MockResponse, raising=False) +@pytest.fixture +def response_report_mock() -> Generator[MockRouter, None, None]: + with respx_mock(base_url=REPORT_ROOT_URL) as mock_with_base_url: + yield mock_with_base_url @pytest.fixture -def patch_time_entries(monkeypatch): - class MockResponse: - def __init__(self, *args, **kwargs): - pass +def authed_current_user() -> CurrentUser: + auth = TokenAuth(token=FAKE_TOKEN) + + return CurrentUser(auth=auth) + - @staticmethod - def json(): - return TIME_ENTRIES_RESPONSE +@pytest.fixture +def authed_workspace() -> Workspace: + auth = TokenAuth(token=FAKE_TOKEN) + + return Workspace(auth=auth) + + +@pytest.fixture +def authed_report_time_entry() -> ReportTimeEntry: + auth = TokenAuth(token=FAKE_TOKEN) - monkeypatch.setattr(TimeEntries, "get", 
MockResponse, raising=False) + return ReportTimeEntry(auth=auth) diff --git a/tests/factories/__init__.py b/tests/factories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/factories/time_entry.py b/tests/factories/time_entry.py new file mode 100644 index 0000000..ae058ba --- /dev/null +++ b/tests/factories/time_entry.py @@ -0,0 +1,112 @@ +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import TYPE_CHECKING, Dict, List, Optional, Union + +from tests.conftest import fake + + +if TYPE_CHECKING: + from backports.zoneinfo import ZoneInfo + from pydantic_core import TzInfo + + +try: + import zoneinfo +except ImportError: + from backports import zoneinfo + + +def _datetime_repr_factory(timezone: Union[ZoneInfo, TzInfo, None] = None) -> str: + if not timezone: + timezone_name = fake.timezone() + timezone = zoneinfo.ZoneInfo(timezone_name) + + return fake.date_time_this_decade(tzinfo=timezone).isoformat(timespec="seconds") + + +def _stop_datetime_repr_factory( + duration: int, start_repr: str, timezone: Union[ZoneInfo, TzInfo] +) -> str: + if duration: + start_datetime = datetime.fromisoformat(start_repr) + stop_datetime = start_datetime + timedelta(seconds=duration) + else: + stop_datetime = fake.date_time_this_decade(tzinfo=timezone) + + return stop_datetime.isoformat(timespec="seconds") + + +def time_entry_request_factory(workspace_id: Optional[int] = None) -> Dict[str, Union[str, int]]: + return { + "created_with": fake.color_name(), + "start": _datetime_repr_factory(), + "workspace_id": workspace_id or fake.random_int(), + } + + +def time_entry_extended_request_factory( + workspace_id: Optional[int] = None, +) -> Dict[str, Union[str, bool, int, None, List[Union[str, int]]]]: + timezone_name = fake.timezone() + timezone = zoneinfo.ZoneInfo(timezone_name) + duration = fake.random_int(min=-1) + start = _datetime_repr_factory(timezone) + + return { + "created_with": fake.color_name(), + "billable": 
fake.boolean(), + "description": fake.text(max_nb_chars=100), + "duration": duration, + "project_id": fake.random_int(), + "start": start, + "stop": _stop_datetime_repr_factory(duration, start, timezone), + "tag_ids": [fake.random_int() for _ in range(fake.random_int(min=0, max=20))], + "tags": [fake.word() for _ in range(fake.random_int(min=0, max=20))], + "task_id": fake.random_int(), + "user_id": fake.random_int(), + "workspace_id": workspace_id or fake.random_int(), + } + + +def time_entry_response_factory( + workspace_id: int, + start: Optional[str] = None, + billable: Optional[bool] = None, + description: Optional[str] = None, + duration: Optional[int] = None, + stop: Optional[str] = None, + project_id: Optional[int] = None, + tag_ids: Optional[List[int]] = None, + tags: Optional[List[str]] = None, + task_id: Optional[int] = None, + user_id: Optional[int] = None, +) -> Dict[str, Union[str, bool, int, None, List[Union[str, int]]]]: + if start: + tz = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S%z").tzinfo + else: + timezone_name = fake.timezone() + tz = zoneinfo.ZoneInfo(timezone_name) + + return { + "at": _datetime_repr_factory(tz), + "billable": billable or fake.boolean(), + "description": description or fake.text(max_nb_chars=100), + "duration": duration or fake.random_int(), + "duronly": fake.boolean(), + "id": fake.random_number(digits=11, fix_len=True), + "permissions": None, + "project_id": project_id or fake.random_int(), + "server_deleted_at": ( + fake.date_time_this_month(tzinfo=tz).isoformat(timespec="seconds") + if fake.boolean() + else None + ), + "start": start or _datetime_repr_factory(tz), + "stop": stop or _datetime_repr_factory(tz), + "tag_ids": tag_ids or [], + "tags": tags or [], + "task_id": task_id or fake.random_int(), + "user_id": user_id or fake.random_int(), + "workspace_id": workspace_id, + } diff --git a/tests/fixtures.py b/tests/fixtures.py deleted file mode 100644 index e96ef60..0000000 --- a/tests/fixtures.py +++ /dev/null @@ 
-1,47 +0,0 @@ -REPORT_TIME_ENTRIES_RESPONSE = { - "total_grand": 0, - "total_billable": None, - "total_currencies": [{"currency": None, "amount": None}], - "total_count": 1, - "per_page": 50, - "data": [ - { - "id": 45675345, - "pid": 44556545, - "tid": None, - "uid": 123456, - "description": "", - "start": "2020-08-24T15:43:11+03:00", - "end": "2020-08-24T15:43:19+03:00", - "updated": "2020-08-24T15:43:19+03:00", - "dur": 8000, - "user": "Test User", - "use_stop": True, - "client": "test-client", - "project": "test project", - "project_color": "0", - "project_hex_color": "#990099", - "task": None, - "billable": None, - "is_billable": False, - "cur": None, - "tags": [], - } - ], -} - -TIME_ENTRIES_RESPONSE = [ - { - "id": 45675345, - "guid": "3ab9166aac16cbf1374edda1cb652f69", - "wid": 4581172, - "pid": 44556545, - "billable": False, - "start": "2020-08-24T12:43:11+00:00", - "stop": "2020-08-24T12:43:19+00:00", - "duration": 8, - "duronly": False, - "at": "2020-08-24T12:43:19+00:00", - "uid": 123456, - }, -] diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..d774481 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1,4 @@ +import pytest + + +pytestmark = [pytest.mark.integration] diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..694651c --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import os +import time +from typing import TYPE_CHECKING, Generator + +import pytest +from toggl_python.auth import TokenAuth +from toggl_python.entities.report_time_entry import ReportTimeEntry +from toggl_python.entities.user import CurrentUser +from toggl_python.entities.workspace import Workspace + + +if TYPE_CHECKING: + from toggl_python.schemas.current_user import MePreferencesResponse, MeResponse + + +@pytest.fixture(scope="session") +def i_authed_user() -> CurrentUser: + token = 
os.environ["TOGGL_TOKEN"] + auth = TokenAuth(token=token) + + return CurrentUser(auth=auth) + + +@pytest.fixture(scope="session") +def i_authed_workspace() -> Workspace: + token = os.environ["TOGGL_TOKEN"] + auth = TokenAuth(token=token) + + return Workspace(auth=auth) + + +@pytest.fixture(scope="session") +def i_authed_report_time_entry() -> ReportTimeEntry: + token = os.environ["TOGGL_TOKEN"] + auth = TokenAuth(token=token) + + return ReportTimeEntry(auth=auth) + + +@pytest.fixture +def me_response(i_authed_user: CurrentUser) -> MeResponse: + return i_authed_user.me() + + +@pytest.fixture +def me_preferences_response(i_authed_user: CurrentUser) -> MePreferencesResponse: + return i_authed_user.preferences() + + +@pytest.fixture(autouse=True) +def slow_down_tests() -> Generator[None, None, None]: + """Wait for some time between separate tests to avoid API rate limiting. + + Toggl API uses Leaky Bucket, recommended rate is 1 request per second per API token. + """ + yield + time.sleep(1) diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py new file mode 100644 index 0000000..2a23902 --- /dev/null +++ b/tests/integration/test_project.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +import os +from typing import TYPE_CHECKING + +from toggl_python.schemas.project import ProjectResponse + +# Necessary to mark all tests in module as integration +from tests.integration import pytestmark # noqa: F401 - imported but unused + + +if TYPE_CHECKING: + from toggl_python.entities.workspace import Workspace + + +def test_get_projects__without_query_params(i_authed_workspace: Workspace) -> None: + # Later Create project and init and delete it at the end + # Now this actions are not implemented + workspace_id = int(os.environ["WORKSPACE_ID"]) + expected_result = set(ProjectResponse.model_fields.keys()) + + result = i_authed_workspace.get_projects(workspace_id) + + assert result[0].model_fields_set == expected_result + + +def 
test_get_project_by_id(i_authed_workspace: Workspace) -> None: + # Later Create project and init and delete it at the end + # Now this actions are not implemented + workspace_id = int(os.environ["WORKSPACE_ID"]) + project_id = int(os.environ["PROJECT_ID"]) + expected_result = set(ProjectResponse.model_fields.keys()) + + result = i_authed_workspace.get_project(workspace_id, project_id) + + assert result.model_fields_set == expected_result diff --git a/tests/integration/test_report_time_entry.py b/tests/integration/test_report_time_entry.py new file mode 100644 index 0000000..a3f777c --- /dev/null +++ b/tests/integration/test_report_time_entry.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +import os +from datetime import timedelta +from typing import TYPE_CHECKING + +import pytest +from toggl_python.schemas.report_time_entry import ( + SearchReportTimeEntriesResponse, +) + +from tests.conftest import fake + +# Necessary to mark all tests in module as integration +from tests.integration import pytestmark # noqa: F401 - imported but unused + + +if TYPE_CHECKING: + from toggl_python.entities.report_time_entry import ReportTimeEntry + from toggl_python.entities.workspace import Workspace + + +try: + import zoneinfo +except ImportError: + from backports import zoneinfo + + +@pytest.mark.parametrize( + argnames="use_dates_repr", + argvalues=(True, False), + ids=("str date arguments", "date date arguments"), +) +def test_search_report_time_entries__with_start_and_end_dates( + use_dates_repr: bool, + i_authed_report_time_entry: ReportTimeEntry, + i_authed_workspace: Workspace, +) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + timezone_name = fake.timezone() + tz = zoneinfo.ZoneInfo(timezone_name) + start_date = fake.date_this_decade() + delta = fake.random_int(min=1, max=364) + end_date = start_date + timedelta(days=delta) + time_entry = i_authed_workspace.create_time_entry( + workspace_id, + start_datetime=fake.date_time_between_dates(start_date, 
end_date, tzinfo=tz), + created_with=fake.word(), + ) + + expected_result = set(SearchReportTimeEntriesResponse.model_fields.keys()) + + result = i_authed_report_time_entry.search( + workspace_id, + start_date=start_date.isoformat() if use_dates_repr else start_date, + end_date=end_date.isoformat() if use_dates_repr else end_date, + ) + + assert result[0].model_fields_set == expected_result + + _ = i_authed_workspace.delete_time_entry(workspace_id, time_entry.id) + + +def test_search_report_time_entries__not_found( + i_authed_report_time_entry: ReportTimeEntry, +) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + # Set explicit date range to avoid finding unexpected existing test TimeEntries + time_entry_start_date = fake.date_between(start_date="-15y", end_date="-2y") + delta = fake.random_int(min=1, max=364) + end_date = time_entry_start_date + timedelta(days=delta) + start_date = fake.date_between_dates(time_entry_start_date, end_date) + + result = i_authed_report_time_entry.search( + workspace_id, + start_date=start_date, + end_date=end_date, + ) + + assert result == [] diff --git a/tests/integration/test_time_entry.py b/tests/integration/test_time_entry.py new file mode 100644 index 0000000..8d17d87 --- /dev/null +++ b/tests/integration/test_time_entry.py @@ -0,0 +1,131 @@ +from __future__ import annotations + +import os +from datetime import timedelta +from typing import TYPE_CHECKING + +from toggl_python.schemas.time_entry import MeTimeEntryResponse + +from tests.conftest import fake +from tests.factories.time_entry import ( + time_entry_extended_request_factory, + time_entry_request_factory, +) + +# Necessary to mark all tests in module as integration +from tests.integration import pytestmark # noqa: F401 - imported but unused + + +try: + import zoneinfo +except ImportError: + from backports import zoneinfo + + +if TYPE_CHECKING: + from toggl_python.entities.user import CurrentUser + from toggl_python.entities.workspace import Workspace + + +def 
test_create_time_entry__only_necessary_fields(i_authed_workspace: Workspace) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + request_body = time_entry_request_factory(workspace_id) + expected_result = set(MeTimeEntryResponse.model_fields.keys()) + + result = i_authed_workspace.create_time_entry( + workspace_id, + start_datetime=request_body["start"], + created_with=request_body["created_with"], + ) + + assert result.model_fields_set == expected_result + + _ = i_authed_workspace.delete_time_entry(workspace_id=workspace_id, time_entry_id=result.id) + + +def test_create_time_entry__all_fields(i_authed_workspace: Workspace) -> None: + """Create TimeEntry without fields `tag_ids` and `task_id`. + + `tag_ids` requires existing Tags and it is complicated to test + and `task_id` is available on paid plan. + """ + workspace_id = int(os.environ["WORKSPACE_ID"]) + request_body = time_entry_extended_request_factory(workspace_id) + expected_result = set(MeTimeEntryResponse.model_fields.keys()) + + result = i_authed_workspace.create_time_entry( + workspace_id, + start_datetime=request_body["start"], + created_with=request_body["created_with"], + billable=request_body["billable"], + description=request_body["description"], + duration=request_body["duration"], + project_id=os.environ["PROJECT_ID"], + stop=request_body["stop"], + tags=request_body["tags"], + user_id=os.environ["USER_ID"], + ) + + assert result.model_fields_set == expected_result + + _ = i_authed_workspace.delete_time_entry(workspace_id=workspace_id, time_entry_id=result.id) + + +def test_list_time_entries__with_start_and_end_date__datetime( + i_authed_user: CurrentUser, i_authed_workspace: Workspace +) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + timezone_name = fake.timezone() + tz = zoneinfo.ZoneInfo(timezone_name) + start_date = fake.date_time_this_month(tzinfo=tz, before_now=True) + delta = fake.random_int(min=1, max=999999) + end_date = start_date + timedelta(seconds=delta) + 
time_entry = i_authed_workspace.create_time_entry( + workspace_id, + start_datetime=fake.date_time_between_dates(start_date, end_date, tzinfo=tz), + created_with=fake.word(), + ) + + expected_result = set(MeTimeEntryResponse.model_fields.keys()) + + result = i_authed_user.get_time_entries(start_date=start_date, end_date=end_date) + + assert result[0].model_fields_set == expected_result + + _ = i_authed_workspace.delete_time_entry(workspace_id, time_entry.id) + + +def test_list_time_entries__with_start_and_end_date__str( + i_authed_user: CurrentUser, i_authed_workspace: Workspace +) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + start_date = fake.date_this_month(before_today=True) + delta = fake.random_int(min=1, max=999) + end_date = start_date + timedelta(days=delta) + timezone_name = fake.timezone() + tz = zoneinfo.ZoneInfo(timezone_name) + time_entry = i_authed_workspace.create_time_entry( + workspace_id, + start_datetime=fake.date_time_between_dates(start_date, end_date, tzinfo=tz), + created_with=fake.word(), + ) + + expected_result = set(MeTimeEntryResponse.model_fields.keys()) + + result = i_authed_user.get_time_entries( + start_date=start_date.isoformat(), end_date=end_date.isoformat() + ) + + assert result[0].model_fields_set == expected_result + + _ = i_authed_workspace.delete_time_entry(workspace_id, time_entry.id) + + +def test_list_time_entries__no_results(i_authed_user: CurrentUser) -> None: + start_date = fake.date_time_between(start_date="-6m", end_date="-3m") + delta = fake.random_int(min=0, max=999) + end_date = start_date + timedelta(days=delta) + + result = i_authed_user.get_time_entries(start_date=start_date, end_date=end_date) + + assert result == [] diff --git a/tests/integration/test_user.py b/tests/integration/test_user.py new file mode 100644 index 0000000..ce15af4 --- /dev/null +++ b/tests/integration/test_user.py @@ -0,0 +1,188 @@ +from __future__ import annotations + +import os +from typing import Union + +import pytest 
+from toggl_python.auth import TokenAuth +from toggl_python.entities.user import CurrentUser +from toggl_python.exceptions import BadRequest +from toggl_python.schemas.current_user import ( + AlphaFeatureResponse, + DateFormat, + DurationFormat, + MeFeatureResponse, + MePreferencesResponse, + MeResponse, + MeResponseWithRelatedData, + TimeFormat, +) + +from tests.conftest import fake + +# Necessary to mark all tests in module as integration +from tests.integration import pytestmark # noqa: F401 - imported but unused + + +def test_logged(i_authed_user: CurrentUser) -> None: + result = i_authed_user.logged() + + assert result is True + + +def test_logged__unauthorized() -> None: + token_auth = TokenAuth(token="invalid token") + user = CurrentUser(auth=token_auth) + error_message = "Incorrect username and/or password" + + with pytest.raises(BadRequest, match=error_message): + _ = user.logged() + + +def test_me__ok(i_authed_user: CurrentUser) -> None: + expected_result = set(MeResponse.model_fields.keys()) + + result = i_authed_user.me() + + assert result.model_fields_set == expected_result + + +def test_me__with_related_data(i_authed_user: CurrentUser) -> None: + expected_result = set(MeResponseWithRelatedData.model_fields.keys()) + expected_result.remove("clients") + + result = i_authed_user.me(with_related_data=True) + + assert result.model_fields_set == expected_result + assert result.workspaces[0] is not None + + +@pytest.mark.parametrize( + argnames=("field_name", "field_value"), + argvalues=[ + ("beginning_of_week", fake.random_int(min=0, max=6)), + ("country_id", fake.random_int(min=100, max=200)), + ("email", fake.email()), + ("fullname", fake.name()), + ("timezone", fake.timezone()), + ], + ids=( + "beginning_of_week", + "country_id", + "email", + "fullname", + "timezone", + ), +) +def test_update_me__ok( + i_authed_user: CurrentUser, + me_response: MeResponse, + field_name: str, + field_value: Union[str, int], +) -> None: + # default_workspace_id is not 
tested because it requires method to create and delete workspace + update_body = {field_name: field_value} + current_state_body = {field_name: getattr(me_response, field_name)} + + result = i_authed_user.update_me(**update_body) + + assert getattr(result, field_name) == field_value + + _ = i_authed_user.update_me(**current_state_body) + + +def test_update_me__email_is_already_used(i_authed_user: CurrentUser) -> None: + error_message = "user with this email already exists" + used_email = "test@gmail.com" + + with pytest.raises(BadRequest, match=error_message): + _ = i_authed_user.update_me(email=used_email) + + +def test_update_me__unavailable_default_workspace_id(i_authed_user: CurrentUser) -> None: + error_message = "Invalid default_workspace_id" + invalid_default_workspace_id = fake.random_int(min=2, max=999) + + with pytest.raises(BadRequest, match=error_message): + _ = i_authed_user.update_me(default_workspace_id=invalid_default_workspace_id) + + +def test_change_password__ok(i_authed_user: CurrentUser) -> None: + current_password = os.environ["TOGGL_PASSWORD"] + new_password = fake.password() + + result = i_authed_user.change_password(current_password, new_password) + + assert result is True + + result = i_authed_user.change_password( + current_password=new_password, new_password=current_password + ) + + +def test_change_password__invalid_current_password(i_authed_user: CurrentUser) -> None: + current_password = fake.password() + new_password = fake.password() + error_message = "Current password is not valid" + + with pytest.raises(BadRequest, match=error_message): + _ = i_authed_user.change_password(current_password, new_password) + + +def test_features__ok(i_authed_user: CurrentUser) -> None: + expected_result = set(MeFeatureResponse.model_fields.keys()) + + result = i_authed_user.features() + + assert result[0].workspace_id is not None + assert len(result[0].features) > 0 + assert result[0].features[0].model_fields_set == expected_result + + +def 
test_preferences__ok(i_authed_user: CurrentUser) -> None: + expected_result = set(MePreferencesResponse.model_fields.keys()) + expected_alpha_feature = set(AlphaFeatureResponse.model_fields.keys()) + + result = i_authed_user.preferences() + + assert result.model_fields_set == expected_result + assert len(result.alpha_features) > 0 + assert result.alpha_features[0].model_fields_set == expected_alpha_feature + + +@pytest.mark.parametrize( + argnames=("field_name", "schema_field_name", "field_value"), + argvalues=[ + ( + "date_format", + "date_format", + fake.random_element(item.value for item in DateFormat), + ), + ( + "duration_format", + "duration_format", + fake.random_element(item.value for item in DurationFormat), + ), + ( + "time_format", + "timeofday_format", + fake.random_element(item.value for item in TimeFormat), + ), + ], + ids=("date_format", "duration_format", "time_format"), +) +def test_update_preferences__ok( + i_authed_user: CurrentUser, + me_preferences_response: MePreferencesResponse, + field_name: str, + schema_field_name: str, + field_value: Union[DateFormat, DurationFormat, TimeFormat], +) -> None: + update_body = {field_name: field_value} + current_state_body = {field_name: getattr(me_preferences_response, schema_field_name)} + + result = i_authed_user.update_preferences(**update_body) + + assert result is True + + _ = i_authed_user.update_preferences(**current_state_body) diff --git a/tests/integration/test_workspace.py b/tests/integration/test_workspace.py new file mode 100644 index 0000000..bb9df20 --- /dev/null +++ b/tests/integration/test_workspace.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import os +from typing import TYPE_CHECKING + +from toggl_python.schemas.workspace import WorkspaceResponse + +# Necessary to mark all tests in module as integration +from tests.integration import pytestmark # noqa: F401 - imported but unused + + +if TYPE_CHECKING: + from toggl_python.entities.workspace import Workspace + + +def 
test_get_workspace_by_id(i_authed_workspace: Workspace) -> None: + workspace_id = int(os.environ["WORKSPACE_ID"]) + expected_result = set(WorkspaceResponse.model_fields.keys()) + + result = i_authed_workspace.get(workspace_id) + + assert result.model_fields_set == expected_result + + +def test_get_workspaces__without_query_params(i_authed_workspace: Workspace)-> None: + expected_result = set(WorkspaceResponse.model_fields.keys()) + + result = i_authed_workspace.list() + + assert result[0].model_fields_set == expected_result diff --git a/tests/responses/__init__.py b/tests/responses/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/responses/me_get.py b/tests/responses/me_get.py new file mode 100644 index 0000000..2611db2 --- /dev/null +++ b/tests/responses/me_get.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +from typing import Dict, Final, List, Union + + +FLAT_RESPONSE_TYPE = Dict[str, Union[str, int, bool, None]] +NESTED_RESPONSE_TYPE = Dict[str, Union[str, int, bool, List, None]] + +FAKE_TOKEN: str = "flstsapa42cdwiueii2tjg2t08f91kdr" + +ME_RESPONSE_SHORT: Final[FLAT_RESPONSE_TYPE] = { + "at": "2024-07-24T09:42:55.391879Z", + "authorization_updated_at": "2024-07-02T16:13:32.109174Z", + "beginning_of_week": 1, + "country_id": None, + "created_at": "2024-05-16T12:01:04.834085Z", + "default_workspace_id": 43644207, + "email": "example@mail.com", + "fullname": "Test User", + "has_password": True, + "id": 30809356, + "image_url": "https://assets.track.toggl.com/images/profile.png", + "openid_email": None, + "openid_enabled": False, + "timezone": "Europe/London", + "toggl_accounts_id": "uWGsHAeXZGhJvQ3XjdY63h", + "updated_at": "2024-05-16T12:01:24.447981Z", +} + +ME_RESPONSE: NESTED_RESPONSE_TYPE = { + "api_token": FAKE_TOKEN, + "at": "2024-07-24T09:42:55.391879Z", + "authorization_updated_at": "2024-07-02T16:13:32.109174Z", + "beginning_of_week": 1, + "country_id": None, + "created_at": "2024-05-16T12:01:04.834085Z", + 
"default_workspace_id": 43644207, + "email": "example@mail.com", + "fullname": "Test User", + "has_password": True, + "id": 30809356, + "image_url": "https://assets.track.toggl.com/images/profile.png", + "intercom_hash": "78hcsq59lsca33ivsd5iwy42yu3gdf0sctutuku5gvjfk1qbj71puu7r1z74dzdp", + "openid_email": None, + "openid_enabled": False, + "timezone": "Europe/London", + "toggl_accounts_id": "uWGsHAeXZGhJvQ3XjdY63h", + "updated_at": "2024-05-16T12:01:24.447981Z", +} + +ME_RESPONSE_WITH_RELATED_DATA: NESTED_RESPONSE_TYPE = { + "api_token": FAKE_TOKEN, + "at": "2024-07-24T09:42:55.391879Z", + "authorization_updated_at": "2024-07-02T16:13:32.109174Z", + "beginning_of_week": 1, + "country_id": None, + "created_at": "2024-05-16T12:01:04.834085Z", + "default_workspace_id": 43644207, + "email": "example@mail.com", + "fullname": "Test User", + "has_password": True, + "id": 30809356, + "image_url": "https://assets.track.toggl.com/images/profile.png", + "intercom_hash": "78hcsq59lsca33ivsd5iwy42yu3gdf0sctutuku5gvjfk1qbj71puu7r1z74dzdp", + "openid_email": None, + "openid_enabled": False, + "timezone": "Europe/London", + "toggl_accounts_id": "uWGsHAeXZGhJvQ3XjdY63h", + "updated_at": "2024-05-16T12:01:24.447981Z", + "workspaces": [], +} + +ME_FEATURES_RESPONSE: List[Dict[str, Union[int, List[Dict]]]] = [ + { + "features": [ + {"enabled": True, "feature_id": 0, "name": "free"}, + {"enabled": False, "feature_id": 13, "name": "pro"}, + {"enabled": False, "feature_id": 15, "name": "business"}, + {"enabled": False, "feature_id": 55, "name": "tracking_reminders"}, + {"enabled": False, "feature_id": 64, "name": "tasks"}, + {"enabled": False, "feature_id": 65, "name": "project_dashboard"}, + ], + "workspace_id": 43644207, + } +] + +ME_PREFERENCES_RESPONSE: Dict[str, Union[int, str, List[Dict]]] = { + "BeginningOfWeek": 1, + "alpha_features": [ + {"code": "paging_project_list", "enabled": False}, + {"code": "jira_v2", "enabled": False}, + {"code": "alerts_v2", "enabled": True}, + 
{"code": "analytics", "enabled": True}, + ], + "date_format": "MM/DD/YYYY", + "duration_format": "improved", + "pg_time_zone_name": "Europe/Moscow", + "record_timeline": False, + "send_product_emails": False, + "send_timer_notifications": True, + "send_weekly_report": False, + "timeofday_format": "H:mm", +} + +ME_WEB_TIMER_RESPONSE: Dict[str, Union[None, List[Dict]]] = { + "clients": None, + "projects": [], + "tags": [], + "tasks": None, + "time_entries": [ + { + "billable": False, + "deleted": None, + "description": "test timer", + "duration_in_seconds": 52, + "ignore_start_and_stop": True, + "planned_task_id": None, + "project_id": 202793182, + "tag_ids": [16501871], + "task_id": 3545645770, + "updated_at": "2024-07-30T08:14:38+00:00", + "user_id": 30809356, + "utc_start": "2024-07-30T08:13:46+00:00", + "utc_stop": "2024-07-30T08:14:38+00:00", + "workspace_id": 43644207, + }, + ], +} diff --git a/tests/responses/me_put.py b/tests/responses/me_put.py new file mode 100644 index 0000000..2d7e2e7 --- /dev/null +++ b/tests/responses/me_put.py @@ -0,0 +1,26 @@ +from typing import Dict, List, Union + +from tests.responses.me_get import FAKE_TOKEN + + +NESTED_RESPONSE_TYPE = Dict[str, Union[str, int, bool, List, None]] + +UPDATE_ME_RESPONSE: NESTED_RESPONSE_TYPE = { + "api_token": FAKE_TOKEN, + "at": "2024-07-24T09:42:55.391879Z", + "beginning_of_week": 1, + "country_id": None, + "created_at": "2024-05-16T12:01:04.834085Z", + "default_workspace_id": 43644207, + "email": "example@mail.com", + "fullname": "Test User", + "has_password": True, + "id": 30809356, + "image_url": "https://assets.track.toggl.com/images/profile.png", + "openid_email": None, + "openid_enabled": False, + "timezone": "Europe/London", + "toggl_accounts_id": "uWGsHAeXZGhJvQ3XjdY63h", + "toggl_accounts_updated_at": "2024-05-16T12:01:24.447981Z", + "updated_at": "2024-05-16T12:01:24.447981Z", +} diff --git a/tests/responses/project_get.py b/tests/responses/project_get.py new file mode 100644 index 
0000000..c2ad9f1 --- /dev/null +++ b/tests/responses/project_get.py @@ -0,0 +1,38 @@ +from __future__ import annotations + +from typing import Dict, List, Union + + +PROJECT_RESPONSE: Dict[str, Union[str, bool, int, None, List]] = { + "active": True, + "actual_hours": 0, + "actual_seconds": 83, + "at": "2024-05-16T12:40:29+00:00", + "auto_estimates": None, + "billable": False, + "can_track_time": True, + "cid": None, + "client_id": None, + "color": "#c9806b", + "created_at": "2024-05-16T12:40:29+00:00", + "currency": None, + "estimated_hours": None, + "estimated_seconds": None, + "fixed_fee": None, + "id": 202793181, + "is_private": True, + "is_shared": False, + "name": "test project", + "permissions": None, + "rate": None, + "rate_last_updated": None, + "recurring": False, + "recurring_parameters": None, + "server_deleted_at": None, + "start_date": "2024-05-16", + "status": "active", + "template": None, + "template_id": None, + "wid": 43644207, + "workspace_id": 43644207 +} diff --git a/tests/responses/report_time_entry_post.py b/tests/responses/report_time_entry_post.py new file mode 100644 index 0000000..16d7c80 --- /dev/null +++ b/tests/responses/report_time_entry_post.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from typing import Dict, List, Union + + +SEARCH_REPORT_TIME_ENTRY_RESPONSE: Dict[str, Union[bool, None, str, int, List]] = { + "billable": False, + "billable_amount_in_cents": None, + "currency": "USD", + "description": "sample description", + "hourly_rate_in_cents": None, + "project_id": 202793182, + "row_number": 1, + "tag_ids": [16501871], + "task_id": None, + "time_entries": [ + { + "at": "2024-07-30T08:14:38+00:00", + "at_tz": "2024-07-30T11:14:38+03:00", + "id": 3545645770, + "seconds": 52, + "start": "2024-07-30T11:13:46+03:00", + "stop": "2024-07-30T11:14:38+03:00", + } + ], + "user_id": 30809356, + "username": "test user", +} diff --git a/tests/responses/time_entry_get.py b/tests/responses/time_entry_get.py new file mode 100644 
index 0000000..246508a --- /dev/null +++ b/tests/responses/time_entry_get.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +from typing import Dict, Union + + +ME_TIME_ENTRY_RESPONSE: Dict[str, Union[str, int, bool, list, None]] = { + "at": "2024-07-29T12:28:56+00:00", + "billable": False, + "description": "test timer", + "duration": 22, + "duronly": True, + "id": 3544298808, + "permissions": None, + "project_id": None, + "server_deleted_at": None, + "start": "2024-07-29T12:28:33+00:00", + "stop": "2024-07-29T12:28:55+00:00", + "tag_ids": [], + "tags": [], + "task_id": None, + "user_id": 30809356, + "workspace_id": 43644207, +} + +ME_TIME_ENTRY_WITH_META_RESPONSE: Dict[str, Union[str, int, bool, list, None]] = { + "at": "2024-07-29T12:28:56+00:00", + "billable": False, + "description": "test timer", + "duration": 22, + "duronly": True, + "id": 3544298808, + "permissions": None, + "project_id": None, + "server_deleted_at": None, + "start": "2024-07-29T12:28:33+00:00", + "stop": "2024-07-29T12:28:55+00:00", + "tag_ids": [], + "tags": [], + "task_id": None, + "user_avatar_url": "", + "user_id": 30809356, + "user_name": "Test User", + "workspace_id": 43644207, +} diff --git a/tests/responses/time_entry_put_and_patch.py b/tests/responses/time_entry_put_and_patch.py new file mode 100644 index 0000000..c08b77d --- /dev/null +++ b/tests/responses/time_entry_put_and_patch.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from typing import Dict, List, Union + + +BULK_EDIT_TIME_ENTRIES_RESPONSE: Dict[str, List[Union[int, Dict[str, Union[int, str]]]]] = { + "success": [3544298808], + "failure": [ + { + "id": 202793182, + "message": "Time entry with ID: 202793182 was not found/is not accessible", + } + ], +} diff --git a/tests/responses/workspace_get.py b/tests/responses/workspace_get.py new file mode 100644 index 0000000..766f8eb --- /dev/null +++ b/tests/responses/workspace_get.py @@ -0,0 +1,41 @@ +from typing import Final + +from tests.responses.me_get 
import FLAT_RESPONSE_TYPE + + +WORKSPACE_RESPONSE: Final[FLAT_RESPONSE_TYPE] = { + "admin": True, + "api_token": None, + "at": "2024-07-30T08:13:46+00:00", + "business_ws": False, + "csv_upload": None, + "default_currency": "USD", + "default_hourly_rate": None, + "hide_start_end_times": False, + "ical_enabled": False, + "ical_url": None, + "id": 43644207, + "last_modified": None, + "logo_url": "https://assets.track.toggl.com/images/workspace.jpg", + "name": "test workspace", + "only_admins_may_create_projects": False, + "only_admins_may_create_tags": False, + "only_admins_see_billable_rates": False, + "only_admins_see_team_dashboard": False, + "organization_id": 8364520, + "permissions": None, + "premium": False, + "profile": 0, + "projects_billable_by_default": True, + "projects_enforce_billable": False, + "projects_private_by_default": True, + "rate_last_updated": None, + "reports_collapse": False, + "role": "admin", + "rounding": 1, + "rounding_minutes": 0, + "server_deleted_at": None, + "subscription": None, + "suspended_at": None, + "working_hours_in_minutes": None +} diff --git a/tests/test_api.py b/tests/test_api.py deleted file mode 100644 index 46c20cd..0000000 --- a/tests/test_api.py +++ /dev/null @@ -1,19 +0,0 @@ -from unittest import mock - -import httpx -import pytest - -from toggl_python import ReportTimeEntries, TokenAuth -from toggl_python.exceptions import TooManyRequests - - -def test_raises_too_many_requests(): - auth = TokenAuth("token") - report_time_entries_api = ReportTimeEntries(auth=auth) - with mock.patch.object( - report_time_entries_api.client, - "get", - mock.MagicMock(__name__="get", return_value=httpx.Response(status_code=429, text="test")), - ): - with pytest.raises(TooManyRequests): - report_time_entries_api.list() diff --git a/tests/test_entities.py b/tests/test_entities.py deleted file mode 100644 index 53bbe52..0000000 --- a/tests/test_entities.py +++ /dev/null @@ -1,93 +0,0 @@ -from datetime import datetime - -import pydantic 
-import pytest - -from toggl_python import BaseEntity, Client, Group, Project, ProjectUser, Tag, Task, TimeEntry - - -def test_base_entity(): - BaseEntity(id=1) - BaseEntity(id="1") - BaseEntity(id=1, at=datetime.now()) - - with pytest.raises(pydantic.ValidationError): - BaseEntity(id="foo") - - -def test_client_entity(): - Client(name="foo", wid=1) - Client(name="foo", wid=1, notes="some string") - - with pytest.raises(pydantic.ValidationError): - Client(name="foo") - with pytest.raises(pydantic.ValidationError): - Client(wid=1) - with pytest.raises(pydantic.ValidationError): - Client(name=1, wid=None) - - -def test_group_entity(): - Group(name="foo", workspace_id=1) - Group(name="foo", workspace_id=1, notes="some string") - - with pytest.raises(pydantic.ValidationError): - Group(name="foo") - with pytest.raises(pydantic.ValidationError): - Group(workspace_id=1) - with pytest.raises(pydantic.ValidationError): - Group(name=1, wid=None) - - -def test_project_entity(): - Project(name="foo", wid=1) - Project(name="foo", wid=1, notes="some string") - - with pytest.raises(pydantic.ValidationError): - Project(name="foo") - with pytest.raises(pydantic.ValidationError): - Project(wid=1) - with pytest.raises(pydantic.ValidationError): - Project(name=1, wid=None) - - -def test_project_user_entity(): - ProjectUser(pid=1, uid=1, wid=1) - - with pytest.raises(pydantic.ValidationError): - ProjectUser(pid=1, uid=1) - with pytest.raises(pydantic.ValidationError): - ProjectUser(pid=1, wid=1) - with pytest.raises(pydantic.ValidationError): - ProjectUser(uid=1, wid=1) - with pytest.raises(pydantic.ValidationError): - ProjectUser(pid="foo", uid=1, wid=1) - - -def test_tag_entity(): - Tag(name="foo", workspace_id=1) - - with pytest.raises(pydantic.ValidationError): - Tag(name="foo") - with pytest.raises(pydantic.ValidationError): - Tag(workspace_id=1) - - -def test_task_entity(): - Task(name="foo", project_id=1, workspace_id=1) - - with pytest.raises(pydantic.ValidationError): - 
Task(name="foo", project_id=1) - with pytest.raises(pydantic.ValidationError): - Task(name="foo", workspace_id=1) - with pytest.raises(pydantic.ValidationError): - Task(project_id=1, workspace_id=1) - - -def test_time_entry_entity(): - TimeEntry(wid=1, duration=1) - - with pytest.raises(pydantic.ValidationError): - TimeEntry(wid=1) - with pytest.raises(pydantic.ValidationError): - TimeEntry(duration=1) diff --git a/tests/test_project.py b/tests/test_project.py new file mode 100644 index 0000000..a7d0615 --- /dev/null +++ b/tests/test_project.py @@ -0,0 +1,99 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Dict, Union +from unittest.mock import Mock, patch + +import pytest +from httpx import Response as HttpxResponse +from pydantic import ValidationError +from toggl_python.schemas.project import ProjectResponse + +from tests.responses.project_get import PROJECT_RESPONSE + + +if TYPE_CHECKING: + from respx import MockRouter + from toggl_python.entities.workspace import Workspace + + +def test_get_project_by_id(response_mock: MockRouter, authed_workspace: Workspace) -> None: + workspace_id = 123 + project_id = 123 + mocked_route = response_mock.get(f"/workspaces/{workspace_id}/projects/{project_id}").mock( + return_value=HttpxResponse(status_code=200, json=PROJECT_RESPONSE), + ) + expected_result = ProjectResponse.model_validate(PROJECT_RESPONSE) + + result = authed_workspace.get_project(workspace_id=workspace_id, project_id=project_id) + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_projects__without_query_params( + response_mock: MockRouter, authed_workspace: Workspace +) -> None: + workspace_id = 123 + mocked_route = response_mock.get(f"/workspaces/{workspace_id}/projects").mock( + return_value=HttpxResponse(status_code=200, json=[PROJECT_RESPONSE]), + ) + expected_result = [ProjectResponse.model_validate(PROJECT_RESPONSE)] + + result = 
authed_workspace.get_projects(workspace_id=workspace_id) + + assert mocked_route.called is True + assert result == expected_result + + +@patch("toggl_python.schemas.base.datetime") +def test_get_projects__too_old_since_value( + mocked_datetime: Mock, authed_workspace: Workspace +) -> None: + error_message = "Since cannot be older than 3 months" + since = datetime(2020, 1, 1, tzinfo=timezone.utc) + mocked_datetime.now.return_value = datetime(2020, 4, 1, tzinfo=timezone.utc) + + with pytest.raises(ValidationError, match=error_message): + _ = authed_workspace.get_projects(workspace_id=123, since=since) + + +@patch("toggl_python.schemas.base.datetime") +@pytest.mark.parametrize( + argnames="query_params", + argvalues=( + {"active": False}, + {"since": int(datetime(2024, 5, 10, tzinfo=timezone.utc).timestamp())}, + {"billable": True}, + {"user_ids": [1234567]}, + {"client_ids": [209327532]}, + {"group_ids": [214327]}, + {"statuses": "active"}, + {"name": "random project name"}, + {"page": 1}, + {"per_page": 10}, + {"sort_field": "billable"}, + {"sort_order": "DESC"}, + {"only_templates": True}, + {"only_me": True}, + ), +) +def test_get_projects__with_query_params( + mocked_datetime: Mock, + query_params: Dict[str, Union[str, int]], + response_mock: MockRouter, + authed_workspace: Workspace, +) -> None: + mocked_datetime.now.return_value = datetime(2024, 7, 20, tzinfo=timezone.utc) + workspace_id = 123 + mocked_route = response_mock.get( + f"/workspaces/{workspace_id}/projects", params=query_params + ).mock( + return_value=HttpxResponse(status_code=200, json=[PROJECT_RESPONSE]), + ) + expected_result = [ProjectResponse.model_validate(PROJECT_RESPONSE)] + + result = authed_workspace.get_projects(workspace_id=workspace_id, **query_params) + + assert mocked_route.called is True + assert result == expected_result diff --git a/tests/test_report_time_entry.py b/tests/test_report_time_entry.py new file mode 100644 index 0000000..785a3cc --- /dev/null +++ 
b/tests/test_report_time_entry.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Union + +import pytest +from httpx import Response +from pydantic import ValidationError +from toggl_python.schemas.report_time_entry import SearchReportTimeEntriesResponse + +from tests.conftest import fake +from tests.responses.report_time_entry_post import SEARCH_REPORT_TIME_ENTRY_RESPONSE + + +if TYPE_CHECKING: + from datetime import date + + from respx import MockRouter + from toggl_python.entities.report_time_entry import ReportTimeEntry + + +def test_search_report_time_entries__without_params( + authed_report_time_entry: ReportTimeEntry, +) -> None: + error_message = "At least one parameter must be set" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_report_time_entry.search(workspace_id=123) + + +@pytest.mark.parametrize( + argnames="start_date, end_date", + argvalues=( + ( + fake.date_this_decade(before_today=True).isoformat(), + fake.date_this_decade(before_today=True).isoformat(), + ), + ( + fake.date_this_decade(before_today=True), + fake.date_this_decade(before_today=True), + ), + ), +) +def test_search_report_time_entries__with_start_and_end_date( + start_date: Union[date, str], + end_date: Union[date, str], + response_report_mock: MockRouter, + authed_report_time_entry: ReportTimeEntry, +) -> None: + fake_workspace_id = 123 + uri = f"/{fake_workspace_id}/search/time_entries" + request_body = { + "start_date": start_date if isinstance(start_date, str) else start_date.isoformat(), + "end_date": end_date if isinstance(end_date, str) else end_date.isoformat(), + } + mocked_route = response_report_mock.post(uri, json=request_body).mock( + return_value=Response(status_code=200, json=[SEARCH_REPORT_TIME_ENTRY_RESPONSE]), + ) + expected_result = [ + SearchReportTimeEntriesResponse.model_validate(SEARCH_REPORT_TIME_ENTRY_RESPONSE) + ] + + result = authed_report_time_entry.search( + 
workspace_id=fake_workspace_id, + start_date=start_date, + end_date=end_date, + ) + + assert mocked_route.called is True + assert result == expected_result + + +def test_search_report_time_entries__with_all_params( + response_report_mock: MockRouter, + authed_report_time_entry: ReportTimeEntry, +) -> None: + fake_workspace_id = fake.random_int(min=1) + page_size = fake.random_int(min=1, max=100) + request_body = { + "start_date": fake.date(), + "end_date": fake.date(), + "user_ids": [fake.random_int()], + "project_ids": [fake.random_int()], + "page_size": page_size, + "first_row_number": page_size + 1, + } + uri = f"/{fake_workspace_id}/search/time_entries" + mocked_route = response_report_mock.post(uri, json=request_body).mock( + return_value=Response(status_code=200, json=[SEARCH_REPORT_TIME_ENTRY_RESPONSE]), + ) + expected_result = [ + SearchReportTimeEntriesResponse.model_validate(SEARCH_REPORT_TIME_ENTRY_RESPONSE) + ] + + result = authed_report_time_entry.search( + workspace_id=fake_workspace_id, + start_date=request_body["start_date"], + end_date=request_body["end_date"], + user_ids=request_body["user_ids"], + project_ids=request_body["project_ids"], + page_size=request_body["page_size"], + page_number=1, + ) + + assert mocked_route.called is True + assert result == expected_result diff --git a/tests/test_repositories.py b/tests/test_repositories.py deleted file mode 100644 index d687963..0000000 --- a/tests/test_repositories.py +++ /dev/null @@ -1,25 +0,0 @@ -from tests.fixtures import REPORT_TIME_ENTRIES_RESPONSE, TIME_ENTRIES_RESPONSE - -from toggl_python import ReportTimeEntries, ReportTimeEntry, TimeEntries, TimeEntry, TokenAuth - - -def test_report_time_entries_pagination(patch_report_time_entries): - auth = TokenAuth("token") - report_time_entries = ReportTimeEntries(auth=auth).list() - total_count = REPORT_TIME_ENTRIES_RESPONSE["total_count"] - per_page = REPORT_TIME_ENTRIES_RESPONSE["per_page"] - assert report_time_entries.total_count == total_count 
- assert report_time_entries.per_page == per_page - assert len(report_time_entries) == len(REPORT_TIME_ENTRIES_RESPONSE["data"]) - for report_time_entry in report_time_entries: - assert isinstance(report_time_entry, ReportTimeEntry) - - -def test_time_entries_no_pagination(patch_time_entries): - auth = TokenAuth("token") - time_entries = TimeEntries(auth=auth).list() - assert not hasattr(time_entries, "total_count") - assert not hasattr(time_entries, "per_page") - assert len(time_entries) == len(TIME_ENTRIES_RESPONSE) - for time_entry in time_entries: - assert isinstance(time_entry, TimeEntry) diff --git a/tests/test_time_entry.py b/tests/test_time_entry.py new file mode 100644 index 0000000..3a541fe --- /dev/null +++ b/tests/test_time_entry.py @@ -0,0 +1,524 @@ +from __future__ import annotations + +from datetime import date, datetime, timezone +from random import randint +from typing import TYPE_CHECKING, Dict, List, Union +from unittest.mock import Mock, patch + +import pytest +from httpx import Response +from pydantic import ValidationError +from toggl_python.exceptions import BadRequest +from toggl_python.schemas.time_entry import ( + BulkEditTimeEntriesFieldNames, + BulkEditTimeEntriesOperation, + BulkEditTimeEntriesOperations, + BulkEditTimeEntriesResponse, + MeTimeEntryResponse, + MeTimeEntryWithMetaResponse, + MeWebTimerResponse, +) + +from tests.conftest import fake +from tests.factories.time_entry import ( + time_entry_extended_request_factory, + time_entry_request_factory, + time_entry_response_factory, +) +from tests.responses.me_get import ME_WEB_TIMER_RESPONSE +from tests.responses.time_entry_get import ME_TIME_ENTRY_RESPONSE, ME_TIME_ENTRY_WITH_META_RESPONSE +from tests.responses.time_entry_put_and_patch import BULK_EDIT_TIME_ENTRIES_RESPONSE + + +if TYPE_CHECKING: + from respx import MockRouter + from toggl_python.entities.user import CurrentUser + from toggl_python.entities.workspace import Workspace + + +def 
test_create_time_entry__only_required_fields( + response_mock: MockRouter, authed_workspace: Workspace +) -> None: + workspace_id = fake.random_int() + request_body = time_entry_request_factory(workspace_id) + fake_response = time_entry_response_factory(workspace_id, request_body["start"]) + mocked_route = response_mock.post( + f"/workspaces/{workspace_id}/time_entries", json=request_body + ).mock( + return_value=Response(status_code=200, json=fake_response), + ) + expected_result = MeTimeEntryResponse.model_validate(fake_response) + + result = authed_workspace.create_time_entry( + workspace_id, + start_datetime=request_body["start"], + created_with=request_body["created_with"], + ) + + assert mocked_route.called is True + assert result == expected_result + + +def test_create_time_entry__all_fields( + response_mock: MockRouter, authed_workspace: Workspace +) -> None: + workspace_id = fake.random_int() + request_body = time_entry_extended_request_factory(workspace_id) + fake_response = time_entry_response_factory( + workspace_id, + start=request_body["start"], + billable=request_body["billable"], + description=request_body["description"], + duration=request_body["duration"], + project_id=request_body["project_id"], + stop=request_body["stop"], + tag_ids=request_body["tag_ids"], + tags=request_body["tags"], + task_id=request_body["task_id"], + user_id=request_body["user_id"], + ) + mocked_route = response_mock.post( + f"/workspaces/{workspace_id}/time_entries", json=request_body + ).mock( + return_value=Response(status_code=200, json=fake_response), + ) + expected_result = MeTimeEntryResponse.model_validate(fake_response) + + result = authed_workspace.create_time_entry( + workspace_id, + start_datetime=request_body["start"], + created_with=request_body["created_with"], + billable=request_body["billable"], + description=request_body["description"], + duration=request_body["duration"], + project_id=request_body["project_id"], + stop=request_body["stop"], + 
tag_ids=request_body["tag_ids"], + tags=request_body["tags"], + task_id=request_body["task_id"], + user_id=request_body["user_id"], + ) + + assert mocked_route.called is True + assert result == expected_result + + +def test_create_time_entry__invalid_start_stop_and_duration(authed_workspace: Workspace) -> None: + workspace_id = fake.random_int() + request_body = time_entry_extended_request_factory(workspace_id) + error_message = ( + r"`start`, `stop` and `duration` must be consistent - `start` \+ `duration` == `stop`" + ) + + with pytest.raises(ValidationError, match=error_message): + _ = authed_workspace.create_time_entry( + workspace_id, + start_datetime=request_body["start"], + created_with=request_body["created_with"], + duration=request_body["duration"] + fake.random_int(), + stop=request_body["stop"], + ) + +def test_get_time_entry__without_query_params( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + fake_time_entry_id = 123 + mocked_route = response_mock.get(f"/me/time_entries/{fake_time_entry_id}").mock( + return_value=Response(status_code=200, json=ME_TIME_ENTRY_RESPONSE), + ) + expected_result = MeTimeEntryResponse.model_validate(ME_TIME_ENTRY_RESPONSE) + + result = authed_current_user.get_time_entry(fake_time_entry_id) + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_time_entry__with_meta_query_param( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + fake_time_entry_id = 123 + mocked_route = response_mock.get(f"/me/time_entries/{fake_time_entry_id}?meta=true").mock( + return_value=Response(status_code=200, json=ME_TIME_ENTRY_WITH_META_RESPONSE), + ) + expected_result = MeTimeEntryWithMetaResponse.model_validate(ME_TIME_ENTRY_WITH_META_RESPONSE) + + result = authed_current_user.get_time_entry(fake_time_entry_id, meta=True) + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_current_time_entry__ok( + response_mock: 
MockRouter, authed_current_user: CurrentUser +) -> None: + mocked_route = response_mock.get("/me/time_entries/current").mock( + return_value=Response(status_code=200, json=ME_TIME_ENTRY_RESPONSE), + ) + expected_result = MeTimeEntryResponse.model_validate(ME_TIME_ENTRY_RESPONSE) + + result = authed_current_user.get_current_time_entry() + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_current_time_entry__no_current_entry( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + mocked_route = response_mock.get("/me/time_entries/current").mock( + return_value=Response(status_code=200, json={}), + ) + + result = authed_current_user.get_current_time_entry() + + assert mocked_route.called is True + assert result is None + + +def test_get_time_entries__without_query_params( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + fake_response = [ME_TIME_ENTRY_RESPONSE] + mocked_route = response_mock.get("/me/time_entries").mock( + return_value=Response(status_code=200, json=fake_response), + ) + expected_result = [MeTimeEntryResponse.model_validate(ME_TIME_ENTRY_RESPONSE)] + + result = authed_current_user.get_time_entries() + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_time_entries__with_meta_query_param( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + mocked_route = response_mock.get("/me/time_entries", params={"meta": True}).mock( + return_value=Response(status_code=200, json=[ME_TIME_ENTRY_WITH_META_RESPONSE]), + ) + expected_result = [ + MeTimeEntryWithMetaResponse.model_validate(ME_TIME_ENTRY_WITH_META_RESPONSE) + ] + + result = authed_current_user.get_time_entries(meta=True) + + assert mocked_route.called is True + assert result == expected_result + + +@patch("toggl_python.schemas.base.datetime") +@patch("toggl_python.schemas.time_entry.datetime") +@pytest.mark.parametrize( + argnames="query_params, 
method_kwargs", + argvalues=( + ( + {"since": 1715299200}, + {"since": int(datetime(2024, 5, 10, tzinfo=timezone.utc).timestamp())}, + ), + ({"since": 1718755200}, {"since": 1718755200}), + ({"before": "2024-07-28"}, {"before": "2024-07-28"}), + ( + {"before": "2023-01-01"}, + {"before": datetime(2023, 1, 1, tzinfo=timezone.utc)}, + ), + ( + {"start_date": "2024-03-27", "end_date": "2024-04-12"}, + {"start_date": "2024-03-27T00:00:00-03:00", "end_date": "2024-04-12T00:00:00-01:00"}, + ), + ), +) +def test_get_time_entries__with_datetime_query_params( + mocked_datetime: Mock, + mocked_time_entry_datetime: Mock, + query_params: Dict[str, Union[int, str]], + method_kwargs: Dict[str, Union[datetime, str]], + response_mock: MockRouter, + authed_current_user: CurrentUser, +) -> None: + query_params["meta"] = False + # Required to pass `since` query param validation + mocked_datetime.now.return_value = datetime(2024, 6, 20, tzinfo=timezone.utc) + mocked_time_entry_datetime.now.return_value = datetime(2024, 6, 20, tzinfo=timezone.utc) + mocked_route = response_mock.get("/me/time_entries", params=query_params).mock( + return_value=Response(status_code=200, json=[ME_TIME_ENTRY_RESPONSE]), + ) + expected_result = [MeTimeEntryResponse.model_validate(ME_TIME_ENTRY_RESPONSE)] + + result = authed_current_user.get_time_entries(**method_kwargs) + + assert mocked_route.called is True + assert result == expected_result + + +@patch("toggl_python.schemas.time_entry.datetime") +@pytest.mark.parametrize( + argnames="query_params", + argvalues=( + {"end_date": "2015-08-14"}, + {"since": 17223107204, "before": "2015-07-28"}, + { + "since": 17223107204, + "start_date": "2015-09-20", + "end_date": "2015-09-21", + }, + { + "before": "2015-07-11", + "start_date": "2015-07-12", + "end_date": "2015-07-16", + }, + { + "since": 17223107204, + "before": "2015-07-11", + "start_date": "2015-07-12", + "end_date": "2015-07-16", + }, + ), +) +def test_get_time_entries__invalid_query_params( + 
mocked_datetime: Mock, + query_params: Dict[str, Union[int, str]], + response_mock: MockRouter, + authed_current_user: CurrentUser, +) -> None: + mocked_datetime.now.return_value = datetime(2015, 9, 22, tzinfo=timezone.utc) + error_message = "can not be present simultaneously" + _ = response_mock.get("/me/time_entries", params=query_params).mock( + return_value=Response(status_code=400, json=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_current_user.get_time_entries(**query_params) + + +@patch("toggl_python.schemas.time_entry.datetime") +@patch("toggl_python.schemas.base.datetime") +@pytest.mark.parametrize( + argnames="arg_name", + argvalues=("start_date", "end_date"), +) +@pytest.mark.parametrize( + argnames="value", + argvalues=( + "2020-01-01T00:00:00+08:00", + "2020-01-01", + fake.date(end_datetime=date(2020, 1, 1)), + fake.date_time(end_datetime=date(2020, 1, 1)), + ), +) +def test_get_time_entries__too_old_dates( + mocked_time_entry_datetime: Mock, + mocked_datetime: Mock, + arg_name: str, + value: Union[str, datetime], + authed_current_user: CurrentUser, +) -> None: + mocked_time_entry_datetime.now.return_value = datetime(2020, 4, 1, tzinfo=timezone.utc) + mocked_datetime.now.return_value = datetime(2020, 4, 1, tzinfo=timezone.utc) + error_message = "Start and end dates must not be earlier than 2020-01-02" + + with pytest.raises(ValueError, match=error_message): + _ = authed_current_user.get_time_entries(**{arg_name: value}) + + +@patch("toggl_python.schemas.base.datetime") +def test_get_time_entries__too_old_since_value( + mocked_datetime: Mock, authed_current_user: CurrentUser +) -> None: + error_message = "Since cannot be older than 3 months" + since = datetime(2020, 1, 1, tzinfo=timezone.utc) + mocked_datetime.now.return_value = datetime(2020, 4, 1, tzinfo=timezone.utc) + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.get_time_entries(since=since) + + +def 
test_get_web_timer__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.get("/me/web-timer").mock( + return_value=Response(status_code=200, json=ME_WEB_TIMER_RESPONSE), + ) + expected_result = MeWebTimerResponse.model_validate(ME_WEB_TIMER_RESPONSE) + + result = authed_current_user.get_web_timer() + + assert mocked_route.called is True + assert result == expected_result + + +@pytest.mark.parametrize( + argnames=("field_name", "field_value"), + argvalues=[ + ("billable", True), + ("description", "updated description"), + ("duration", -1), + ("project_id", 757542305), + ("shared_with_user_ids", [1243543643, 676586868]), + ("start", "2020-11-11T09:30:00-04:00"), + ("stop", "2010-01-29T19:50:00+02:00"), + ("tag_ids", [24032, 354742502]), + ("tags", ["new tag"]), + ("task_id", 1593268409), + ("user_id", 573250897), + ], +) +def test_workspace_update_time_entry__ok( + field_name: str, + field_value: Union[bool, str, int, List[int]], + response_mock: MockRouter, + authed_workspace: Workspace, +) -> None: + workspace_id = 123 + time_entry_id = 98765 + payload = {field_name: field_value} + fake_response = ME_TIME_ENTRY_RESPONSE.copy() + fake_response.update(**payload) + mocked_route = response_mock.put( + f"/workspaces/{workspace_id}/time_entries/{time_entry_id}" + ).mock( + return_value=Response(status_code=200, json=fake_response), + ) + expected_result = MeTimeEntryResponse.model_validate(fake_response) + + result = authed_workspace.update_time_entry(workspace_id, time_entry_id, **payload) + + assert mocked_route.called is True + assert result == expected_result + + +def test_update_time_entry__user_cannot_access_project( + response_mock: MockRouter, authed_workspace: Workspace +) -> None: + workspace_id = 123 + time_entry_id = 98765 + error_message = "User cannot access the selected project" + mocked_route = response_mock.put( + f"/workspaces/{workspace_id}/time_entries/{time_entry_id}" + ).mock( + 
return_value=Response(status_code=400, text=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_workspace.update_time_entry(workspace_id, time_entry_id, project_id=125872350) + + assert mocked_route.called is True + + +def test_delete_time_entry__ok(response_mock: MockRouter, authed_workspace: Workspace) -> None: + workspace_id = 123 + time_entry_id = 98765 + mocked_route = response_mock.delete( + f"/workspaces/{workspace_id}/time_entries/{time_entry_id}" + ).mock( + return_value=Response(status_code=200), + ) + + result = authed_workspace.delete_time_entry(workspace_id, time_entry_id) + + assert mocked_route.called is True + assert result is True + + +def test_bulk_edit_time_entries__too_much_ids(authed_workspace: Workspace) -> None: + workspace_id = 123 + time_entry_ids = [randint(100000, 999999) for _ in range(101)] # noqa: S311 + error_message = "Limit to max TimeEntry IDs exceeded. " + + with pytest.raises(ValueError, match=error_message): + _ = authed_workspace.bulk_edit_time_entries(workspace_id, time_entry_ids, operations=[]) + + +def test_bulk_edit_time_entries__empty_time_entry_ids(authed_workspace: Workspace) -> None: + workspace_id = 123 + error_message = "Specify at least one TimeEntry ID" + + with pytest.raises(ValueError, match=error_message): + _ = authed_workspace.bulk_edit_time_entries(workspace_id, time_entry_ids=[], operations=[]) + + +def test_bulk_edit_time_entries__empty_operations(authed_workspace: Workspace) -> None: + workspace_id = 123 + time_entry_ids = [12345677] + error_message = "Specify at least one edit operation" + + with pytest.raises(ValueError, match=error_message): + _ = authed_workspace.bulk_edit_time_entries(workspace_id, time_entry_ids, operations=[]) + + +@pytest.mark.parametrize( + argnames=("operation"), argvalues=[item.value for item in BulkEditTimeEntriesOperations] +) +@pytest.mark.parametrize( + argnames=("field_name", "field_value"), + argvalues=[ + 
(BulkEditTimeEntriesFieldNames.billable.value, True), + (BulkEditTimeEntriesFieldNames.description.value, "updated description"), + (BulkEditTimeEntriesFieldNames.duration.value, -1), + (BulkEditTimeEntriesFieldNames.project_id.value, 757542305), + (BulkEditTimeEntriesFieldNames.shared_with_user_ids.value, [1243543643, 676586868]), + (BulkEditTimeEntriesFieldNames.start.value, datetime(2024, 5, 10, tzinfo=timezone.utc)), + (BulkEditTimeEntriesFieldNames.stop.value, datetime(2022, 4, 15, tzinfo=timezone.utc)), + (BulkEditTimeEntriesFieldNames.tag_ids.value, [24032, 354742502]), + (BulkEditTimeEntriesFieldNames.tags.value, ["new tag"]), + (BulkEditTimeEntriesFieldNames.task_id.value, 1593268409), + (BulkEditTimeEntriesFieldNames.user_id.value, 573250897), + ], +) +def test_bulk_edit_time_entries__ok( + field_name: BulkEditTimeEntriesFieldNames, + field_value: Union[str, int], + operation: BulkEditTimeEntriesOperations, + response_mock: MockRouter, + authed_workspace: Workspace, +) -> None: + workspace_id = 123 + time_entry_ids = [98765, 43210] + edit_operation = BulkEditTimeEntriesOperation( + operation=operation, field_name=field_name, field_value=field_value + ) + mocked_route = response_mock.patch( + f"/workspaces/{workspace_id}/time_entries/{time_entry_ids}" + ).mock( + return_value=Response(status_code=200, json=BULK_EDIT_TIME_ENTRIES_RESPONSE), + ) + expected_result = BulkEditTimeEntriesResponse.model_validate(BULK_EDIT_TIME_ENTRIES_RESPONSE) + + result = authed_workspace.bulk_edit_time_entries( + workspace_id, time_entry_ids, operations=[edit_operation] + ) + + assert mocked_route.called is True + assert result == expected_result + + +def test_stop_time_entry__ok(response_mock: MockRouter, authed_workspace: Workspace) -> None: + workspace_id = 123 + time_entry_id = 98765 + mocked_route = response_mock.patch( + f"/workspaces/{workspace_id}/time_entries/{time_entry_id}/stop" + ).mock( + return_value=Response(status_code=200, json=ME_TIME_ENTRY_RESPONSE), + ) + 
expected_result = MeTimeEntryResponse.model_validate(ME_TIME_ENTRY_RESPONSE) + + result = authed_workspace.stop_time_entry(workspace_id, time_entry_id) + + assert mocked_route.called is True + assert result == expected_result + + +def test_stop_time_entry__already_stopped( + response_mock: MockRouter, authed_workspace: Workspace +) -> None: + workspace_id = 123 + time_entry_id = 98765 + error_message = "Time entry already stopped" + mocked_route = response_mock.patch( + f"/workspaces/{workspace_id}/time_entries/{time_entry_id}/stop" + ).mock( + return_value=Response(status_code=409, text=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_workspace.stop_time_entry(workspace_id, time_entry_id) + + assert mocked_route.called is True diff --git a/tests/test_user.py b/tests/test_user.py new file mode 100644 index 0000000..51107d1 --- /dev/null +++ b/tests/test_user.py @@ -0,0 +1,364 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Union + +import httpx +import pytest +from pydantic import ValidationError +from toggl_python.auth import BasicAuth +from toggl_python.entities.user import CurrentUser +from toggl_python.exceptions import BadRequest +from toggl_python.schemas.current_user import ( + DateFormat, + DurationFormat, + MeFeaturesResponse, + MePreferencesResponse, + MeResponse, + MeResponseWithRelatedData, + TimeFormat, + UpdateMeResponse, +) + +from tests.conftest import fake +from tests.responses.me_get import ( + ME_FEATURES_RESPONSE, + ME_PREFERENCES_RESPONSE, + ME_RESPONSE, + ME_RESPONSE_SHORT, + ME_RESPONSE_WITH_RELATED_DATA, +) +from tests.responses.me_put import UPDATE_ME_RESPONSE + + +if TYPE_CHECKING: + from respx import MockRouter + + +def test_logged__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.get("/me/logged").mock( + return_value=httpx.Response(status_code=200), + ) + + result = authed_current_user.logged() + + assert 
mocked_route.called is True + assert result is True + + +def test_logged__exception_is_raised( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + mocked_route = response_mock.get("/me/logged").mock( + return_value=httpx.Response(status_code=403), + ) + + with pytest.raises(BadRequest): + _ = authed_current_user.logged() + + assert mocked_route.called is True + + +def test_me__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.get("/me").mock( + return_value=httpx.Response(status_code=200, json=ME_RESPONSE), + ) + expected_result = MeResponse.model_validate(ME_RESPONSE) + + result = authed_current_user.me() + + assert mocked_route.called is True + assert result == expected_result + + +def test_me__ok__with_empty_fields(response_mock: MockRouter) -> None: + mocked_route = response_mock.get("/me").mock( + return_value=httpx.Response(status_code=200, json=ME_RESPONSE_SHORT), + ) + auth = BasicAuth(username="username", password="pass") + user = CurrentUser(auth=auth) + expected_result = MeResponse.model_validate(ME_RESPONSE_SHORT) + + result = user.me() + + assert mocked_route.called is True + assert result == expected_result + + +def test_me__ok_with_related_data( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + mocked_route = response_mock.get("/me").mock( + return_value=httpx.Response(status_code=200, json=ME_RESPONSE_WITH_RELATED_DATA), + ) + expected_result = MeResponseWithRelatedData.model_validate(ME_RESPONSE_WITH_RELATED_DATA) + + result = authed_current_user.me(with_related_data=True) + + assert mocked_route.called is True + assert result == expected_result + + +@pytest.mark.parametrize( + argnames=("field_name", "field_value"), + argvalues=[ + ("beginning_of_week", 0), + ("country_id", 123), + ("default_workspace_id", 12345678), + ("email", "new_user@mail.com"), + ("fullname", "New User"), + ("timezone", "Europe/Moscow"), + ], + ids=( + "beginning_of_week", 
+ "country_id", + "default_workspace_id", + "email", + "fullname", + "timezone", + ), +) +def test_update_me__ok( + response_mock: MockRouter, + authed_current_user: CurrentUser, + field_name: str, + field_value: Union[str, int], +) -> None: + payload = {field_name: field_value} + fake_response = UPDATE_ME_RESPONSE.copy() + fake_response.update(**payload) + mocked_route = response_mock.put("/me").mock( + return_value=httpx.Response(status_code=200, json=fake_response), + ) + expected_result = UpdateMeResponse.model_validate(fake_response) + + result = authed_current_user.update_me(**payload) + + assert mocked_route.called is True + assert result == expected_result + + +def test_update_me__invalid_email(authed_current_user: CurrentUser) -> None: + """Raise default Pydantic ValidationError on invalid email. + + Later, it will be wrapped into custom exception with clear error message. + """ + error_message = "value is not a valid email address" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(email="invalid_mail@@mail.com") + + +def test_update_me__email_is_already_used( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + error_message = "user with this email already exists" + mocked_route = response_mock.put("/me").mock( + return_value=httpx.Response(status_code=400, text=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_current_user.update_me(email="existing_address@mail.com") + + assert mocked_route.called is True + + +@pytest.mark.parametrize( + argnames=("value"), + argvalues=["Canada", "Europe/Beerlin", "Materic/City"], + ids=("No city", "Typo", "Not existing timezone"), +) +def test_update_me__invalid_timezone(authed_current_user: CurrentUser, value: str) -> None: + error_message = f"Specified timezone {value} is invalid" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(timezone=value) + + +def 
test_update_me__invalid_default_workspace_id(authed_current_user: CurrentUser) -> None: + error_message = "Input should be greater than or equal to 1" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(default_workspace_id=0) + + +def test_update_me__unavailable_default_workspace_id( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + """Raise if user is trying to set unavailable Workspace.""" + error_message = "Invalid default_workspace_id" + mocked_route = response_mock.put("/me").mock( + return_value=httpx.Response(status_code=400, text=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_current_user.update_me(default_workspace_id=11111111) + + assert mocked_route.called is True + + +@pytest.mark.parametrize( + argnames=("value", "error_message"), + argvalues=[ + (-1, "Input should be greater than or equal to 0"), + (7, "Input should be less than or equal to 6"), + ], + ids=("Negative", "More than max allowed value"), +) +def test_update_me__invalid_beginning_of_week( + authed_current_user: CurrentUser, value: int, error_message: str +) -> None: + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(beginning_of_week=value) + + +def test_update_me__invalid_country_id(authed_current_user: CurrentUser) -> None: + error_message = "Input should be greater than or equal to 1" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(country_id=0) + + +def test_update_me__invalid_fullname(authed_current_user: CurrentUser) -> None: + error_message = "String should have at least 1 character" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_me(fullname="") + + +def test_change_password__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.put("/me").mock( + 
return_value=httpx.Response(status_code=200, json=UPDATE_ME_RESPONSE), + ) + + result = authed_current_user.change_password( + current_password="paSsw0rd", + new_password="neW_passw0rd", + ) + + assert mocked_route.called is True + assert result is True + + +def test_change_password__equal_current_and_new_passwords( + authed_current_user: CurrentUser, +) -> None: + error_message = "New password should differ from current password" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.change_password( + current_password="current_Passw0rd", new_password="current_Passw0rd" + ) + + +def test_change_password__invalid_current_password( + response_mock: MockRouter, authed_current_user: CurrentUser +) -> None: + error_message = "Current password is not valid" + mocked_route = response_mock.put("/me").mock( + return_value=httpx.Response(status_code=400, text=error_message), + ) + + with pytest.raises(BadRequest, match=error_message): + _ = authed_current_user.change_password( + current_password="4incorrect_passworD", + new_password="New_passw0rd", + ) + + assert mocked_route.called is True + + +@pytest.mark.parametrize( + argnames=("value"), + argvalues=["1", "12345678", "12345Qw"], + ids=("Too short", "No symbols and chars", "No symbols"), +) +def test_change_password__weak_new_password(authed_current_user: CurrentUser, value: str) -> None: + error_message = "Password is too weak" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.change_password( + current_password="current_password", + new_password=value, + ) + + +def test_features__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.get("/me/features").mock( + return_value=httpx.Response(status_code=200, json=ME_FEATURES_RESPONSE), + ) + expected_result = [ + MeFeaturesResponse.model_validate(workspace_features) + for workspace_features in ME_FEATURES_RESPONSE + ] + + result = 
authed_current_user.features() + + assert mocked_route.called is True + assert result == expected_result + + +def test_preferences__ok(response_mock: MockRouter, authed_current_user: CurrentUser) -> None: + mocked_route = response_mock.get("/me/preferences").mock( + return_value=httpx.Response(status_code=200, json=ME_PREFERENCES_RESPONSE), + ) + expected_result = MePreferencesResponse.model_validate(ME_PREFERENCES_RESPONSE) + + result = authed_current_user.preferences() + + assert mocked_route.called is True + assert result == expected_result + + +@pytest.mark.parametrize( + argnames=("field_name", "field_value"), + argvalues=[ + ("date_format", fake.random_element({item.value for item in DateFormat})), + ("duration_format", fake.random_element({item.value for item in DurationFormat})), + ("time_format", fake.random_element({item.value for item in TimeFormat})), + ], +) +def test_update_preferences__ok( + response_mock: MockRouter, + authed_current_user: CurrentUser, + field_name: str, + field_value: str, +) -> None: + payload = {field_name: field_value} + mocked_route = response_mock.post("/me/preferences").mock( + return_value=httpx.Response(status_code=200), + ) + + result = authed_current_user.update_preferences(**payload) + + assert mocked_route.called is True + assert result is True + + +def test_update_preferences__invalid_duration_format(authed_current_user: CurrentUser) -> None: + all_values = ", ".join(f"'{item.value}'" for item in DurationFormat) + last_value = DurationFormat.decimal.value + allowed_values = all_values.replace(f", '{last_value}'", f" or '{last_value}'") + error_message = f"Input should be {allowed_values}" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_preferences(duration_format="extended") + + +def test_update_preferences__invalid_time_format(authed_current_user: CurrentUser) -> None: + all_values = ", ".join(f"'{item.value}'" for item in TimeFormat) + last_value = 
TimeFormat.hour_24.value + allowed_values = all_values.replace(f", '{last_value}'", f" or '{last_value}'") + error_message = f"Input should be {allowed_values}" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_preferences(time_format="hh:mm B") + + +def test_update_preferences__invalid_date_format(authed_current_user: CurrentUser) -> None: + all_values = ", ".join(f"'{item.value}'" for item in DateFormat) + last_value = DateFormat.dmy_dot.value + allowed_values = all_values.replace(f", '{last_value}'", f" or '{last_value}'") + error_message = f"Input should be {allowed_values}" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_current_user.update_preferences(date_format="DDMMYY") diff --git a/tests/test_workspace.py b/tests/test_workspace.py new file mode 100644 index 0000000..4ca3434 --- /dev/null +++ b/tests/test_workspace.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Dict, Union +from unittest.mock import Mock, patch + +import pytest +from httpx import Response as HttpxResponse +from pydantic import ValidationError +from toggl_python.schemas.workspace import WorkspaceResponse + +from tests.responses.workspace_get import WORKSPACE_RESPONSE + + +if TYPE_CHECKING: + from respx import MockRouter + from toggl_python.entities.workspace import Workspace + + +def test_get_workspace_by_id(response_mock: MockRouter, authed_workspace: Workspace) -> None: + workspace_id = 123 + mocked_route = response_mock.get(f"/workspaces/{workspace_id}").mock( + return_value=HttpxResponse(status_code=200, json=WORKSPACE_RESPONSE), + ) + expected_result = WorkspaceResponse.model_validate(WORKSPACE_RESPONSE) + + result = authed_workspace.get(workspace_id=workspace_id) + + assert mocked_route.called is True + assert result == expected_result + + +def test_get_workspaces__without_query_params( + response_mock: MockRouter, 
authed_workspace: Workspace +) -> None: + mocked_route = response_mock.get("/workspaces").mock( + return_value=HttpxResponse(status_code=200, json=[WORKSPACE_RESPONSE]), + ) + expected_result = [WorkspaceResponse.model_validate(WORKSPACE_RESPONSE)] + + result = authed_workspace.list() + + assert mocked_route.called is True + assert result == expected_result + + +@patch("toggl_python.schemas.base.datetime") +@pytest.mark.parametrize( + argnames="query_params, method_kwargs", + argvalues=( + ( + {"since": 1721433600}, + {"since": datetime(2024, 7, 20, tzinfo=timezone.utc)}, + ), + ({"since": 1718755200}, {"since": 1718755200}), + ), +) +def test_get_workspaces__with_query_param_since( + mocked_datetime: Mock, + query_params: Dict[str, int], + method_kwargs: Dict[str, Union[datetime, int]], + response_mock: MockRouter, + authed_workspace: Workspace, +) -> None: + mocked_datetime.now.return_value = datetime(2024, 8, 20, tzinfo=timezone.utc) + mocked_route = response_mock.get("/workspaces", params=query_params).mock( + return_value=HttpxResponse(status_code=200, json=[WORKSPACE_RESPONSE]), + ) + expected_result = [WorkspaceResponse.model_validate(WORKSPACE_RESPONSE)] + + result = authed_workspace.list(**method_kwargs) + + assert mocked_route.called is True + assert result == expected_result + + +@patch("toggl_python.schemas.base.datetime") +def test_get_workspaces__too_old_since_value( + mocked_datetime: Mock, authed_workspace: Workspace +) -> None: + since = datetime(2024, 5, 20, tzinfo=timezone.utc) + mocked_datetime.now.return_value = datetime(2024, 8, 21, tzinfo=timezone.utc) + error_message = "Since cannot be older than 3 months" + + with pytest.raises(ValidationError, match=error_message): + _ = authed_workspace.list(since=since) diff --git a/toggl_python/__init__.py b/toggl_python/__init__.py index 7303e8b..d94f395 100644 --- a/toggl_python/__init__.py +++ b/toggl_python/__init__.py @@ -1,63 +1,35 @@ -__version__ = "0.2.7" +__version__ = "0.3.0" -from .auth 
import BasicAuth, TokenAuth # noqa: F401 -from .entities import Dashboard # noqa: F401 -from .entities import ( - Activity, - BaseEntity, - Client, - Group, - MostActiveUser, - Project, - ProjectUser, - ReportTimeEntry, - Tag, - Task, - TimeEntry, - User, - Workspace, - WorkspaceUser, -) -from .repository import ( - Clients, - Dashboards, - Groups, - ProjectUsers, - ReportTimeEntries, - Tags, - TimeEntries, - Users, - Workspaces, - WorkspaceUsers, +from .auth import BasicAuth, TokenAuth +from .entities.report_time_entry import ReportTimeEntry +from .entities.user import CurrentUser +from .entities.workspace import Workspace +from .exceptions import BadRequest, TogglException +from .schemas.current_user import MeResponse +from .schemas.project import ProjectQueryParams, ProjectResponse +from .schemas.report_time_entry import ( + SearchReportTimeEntriesRequest, + SearchReportTimeEntriesResponse, ) +from .schemas.time_entry import MeTimeEntryResponse, TimeEntryCreateRequest, TimeEntryRequest +from .schemas.workspace import WorkspaceResponse -__all__ = [ - "Activity", +__all__ = ( "BasicAuth", - "BaseEntity", - "Client", - "Clients", - "Dashboard", - "Dashboards", - "Group", - "Groups", - "MostActiveUser", - "Project", - "ProjectUser", - "ProjectUsers", - "ReportTimeEntry", - "ReportTimeEntries", - "Tag", - "Tags", - "Task", - "TimeEntries", - "TimeEntry", "TokenAuth", - "User", - "Users", + "CurrentUser", "Workspace", - "Workspaces", - "WorkspaceUser", - "WorkspaceUsers", -] + "ReportTimeEntry", + "SearchReportTimeEntriesResponse", + "SearchReportTimeEntriesRequest", + "WorkspaceResponse", + "ProjectResponse", + "ProjectQueryParams", + "MeTimeEntryResponse", + "TimeEntryRequest", + "TimeEntryCreateRequest", + "MeResponse", + "BadRequest", + "TogglException", +) diff --git a/toggl_python/api.py b/toggl_python/api.py index 89791f5..feed985 100644 --- a/toggl_python/api.py +++ b/toggl_python/api.py @@ -1,84 +1,31 @@ -"""Module contains simple Web API wraper.""" +from 
__future__ import annotations -import typing -from functools import partial -from typing import Any, Dict, Optional +from typing import TYPE_CHECKING -import httpx +from httpx import Client, HTTPStatusError, Response -from .auth import BasicAuth, TokenAuth -from .exceptions import raise_from_response +from toggl_python.exceptions import BadRequest -class Api: - """ - Simple api wrapper. - Allow to interact with official Toggl API via httpx. - """ +if TYPE_CHECKING: + from toggl_python.auth import BasicAuth, TokenAuth - BASE_URL: httpx.URL = httpx.URL("https://api.track.toggl.com/api/v9/") - HEADERS = { - "content-type": "application/json", - "user_agent": "toggl-python", - } +COMMON_HEADERS: dict[str, str] = {"content-type": "application/json"} +ROOT_URL: str = "https://api.track.toggl.com/api/v9" - def __init__( - self, - base_url: typing.Optional[str] = None, - auth: Optional[typing.Union[BasicAuth, TokenAuth]] = None, - ): - if base_url: - self.BASE_URL = httpx.URL(base_url) - self.client = httpx.Client(base_url=self.BASE_URL, auth=auth) - def __getattr__(self, httpmethod: str) -> Any: - """ - Checking existence of `httpmethod` method in httpx-client - and `partial` it to our client `api_method` - - :param httpmethod: method name we trying to serve - :return: - """ - try: - method = getattr(self.client, httpmethod) - except AttributeError: - raise AttributeError(f"No such http method ({httpmethod})!") - - return partial(self.api_method, method) - - def api_method( - self, - method: Any, - url: str, - params: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - files: Optional[Dict[str, Any]] = None, - ) -> Any: - """ - Call httpx method with specified url and params - - :param method: method we throwed from httpx-client via `__getattr__` - :param url: target url we nesting on `BASE_URL` - :param params: params to pass - :param data: json-data to pass - :param files: files to pass - :return: - """ - - _url = self.BASE_URL.join(url) - - 
response = ( - method(_url, params=params, headers=self.HEADERS) - if method.__name__ == "get" - else method( - _url, - params=params, - json=data, - files=files, - headers=self.HEADERS, - ) +class ApiWrapper: + def __init__(self, auth: BasicAuth | TokenAuth, base_url: str = ROOT_URL) -> None: + self.client = Client( + base_url=base_url, + auth=auth, + headers=COMMON_HEADERS, + http2=True, ) - raise_from_response(response) - - return response + def raise_for_status(self, response: Response) -> None: + """Disable exception chaining to avoid huge not informative traceback.""" + try: + _ = response.raise_for_status() + except HTTPStatusError as base_exception: + raise BadRequest(base_exception.response.text) from None diff --git a/toggl_python/auth.py b/toggl_python/auth.py index fad9731..bcf35e0 100644 --- a/toggl_python/auth.py +++ b/toggl_python/auth.py @@ -1,16 +1,15 @@ -import typing +from typing import Final -import httpx +from httpx import BasicAuth as HttpxBasicAuth -class BasicAuth(httpx.BasicAuth): - """Httpx basic auth class""" +class BasicAuth(HttpxBasicAuth): + pass -class TokenAuth(httpx.BasicAuth): - """Httpx basic auth class with token insertion on class init""" +class TokenAuth(HttpxBasicAuth): + SECRET: Final[str] = "api_token" - SECRET: str = "api_token" - - def __init__(self, token: typing.Union[str, bytes]): - super().__init__(token, self.SECRET) + def __init__(self, token: str) -> None: + """Render `Authorization` header with required by Toggl API format `:api_token`.""" + super().__init__(username=token, password=self.SECRET) diff --git a/toggl_python/entities.py b/toggl_python/entities.py deleted file mode 100644 index b962693..0000000 --- a/toggl_python/entities.py +++ /dev/null @@ -1,150 +0,0 @@ -from datetime import datetime -from typing import Callable, List, Optional, Union - -from pydantic import BaseModel, EmailStr, Field, HttpUrl - - -class BaseEntity(BaseModel): - id: Optional[int] = None - at: Optional[datetime] = None - - -class 
Client(BaseEntity): - name: str - wid: int - notes: Optional[str] = None - - -class Group(BaseEntity): - name: str - wid: int = Field(alias="workspace_id") - - -class Project(BaseEntity): - name: str - wid: int - cid: Optional[int] = None - active: bool = True - is_private: bool = True - template: Optional[bool] = None - template_id: Optional[int] = None - billable: Optional[bool] = True - auto_estimates: Optional[bool] = False - estimated_hours: Optional[int] = None - color: Union[str, int] = 0 - rate: Optional[float] = None - created_at: Optional[datetime] = None - - -class ProjectUser(BaseEntity): - pid: int - uid: int - wid: int - notes: Optional[str] = None - manage: Optional[bool] = False - rate: Optional[float] = None - full_name: Optional[float] = None - - -class Tag(BaseEntity): - name: str - wid: int = Field(alias="workspace_id") - - -class Task(BaseEntity): - name: str - pid: int = Field(alias="project_id") - wid: int = Field(alias="workspace_id") - uid: Optional[int] = Field(alias="user_id", default=None) - estimated_seconds: Optional[int] = None - tracked_seconds: Optional[int] = None - active: Optional[bool] = True - - -class TimeEntry(BaseEntity): - wid: int - pid: Optional[int] = None - tid: Optional[int] = None - description: Optional[str] = None - billable: Optional[bool] = False - start: Union[datetime, Callable[[], datetime]] = datetime.now - stop: Optional[Union[datetime, Callable[[], datetime]]] = None - duration: int - created_with: Optional[str] = None - tags: List[str] = [] - duronly: Optional[bool] = None - - -class ReportTimeEntry(BaseEntity): - wid: Optional[int] = None - pid: Optional[int] = None - tid: Optional[int] = None - uid: Optional[int] = None - description: Optional[str] = None - billable: Optional[int] = False - is_billable: Optional[bool] = False - cur: Optional[Union[str, bool]] = False - start: Union[datetime, Callable[[], datetime]] = datetime.now - end: Optional[Union[datetime, Callable[[], datetime]]] = None - dur: int - 
tags: List[str] = [] - - -class Workspace(BaseEntity): - name: str - premium: bool - admin: bool - default_hourly_rate: Optional[float] = None - default_currency: str - only_admins_may_create_projects: bool - only_admins_see_billable_rates: bool - rounding: int - rounding_minutes: int - logo_url: Optional[HttpUrl] = None - - -class User(BaseEntity): - api_token: Optional[str] = None - default_wid: Optional[int] = Field(alias="default_workspace_id", default=None) - email: EmailStr - fullname: str - beginning_of_week: int = 0 - image_url: Optional[HttpUrl] = None - openid_enabled: Optional[bool] = None - timezone: Optional[str] = None - country_id: Optional[int] = None - projects: Optional[Project] = None - tags: Optional[Tag] = None - tasks: Optional[Task] = None - time_entries: Optional[TimeEntry] = None - updated_at: str - workspaces: Optional[Workspace] = None - - -class WorkspaceUser(BaseEntity): - uid: int - wid: int - admin: bool - active: bool - name: Optional[str] = None - email: Optional[EmailStr] = None - invite_url: Optional[HttpUrl] = None - - -class Activity(BaseEntity): - user_id: int - project_id: int - duration: int - description: str - stop: datetime - tid: int - - -class MostActiveUser(BaseEntity): - user_id: int - duration: int - - -class Dashboard(BaseEntity): - most_active_user: List[MostActiveUser] = [] - activity: List[Activity] diff --git a/toggl_python/entities/__init__.py b/toggl_python/entities/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/toggl_python/entities/report_time_entry.py b/toggl_python/entities/report_time_entry.py new file mode 100644 index 0000000..9d17ee1 --- /dev/null +++ b/toggl_python/entities/report_time_entry.py @@ -0,0 +1,61 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, List, Optional, Union + +from toggl_python.api import ApiWrapper +from toggl_python.schemas.report_time_entry import ( + SearchReportTimeEntriesRequest, + SearchReportTimeEntriesResponse, +) + + +if 
TYPE_CHECKING: + from datetime import date + + from toggl_python.auth import BasicAuth, TokenAuth + +REPORT_ROOT_URL: str = "https://api.track.toggl.com/reports/api/v3/workspace" +DEFAULT_PAGE_SIZE: int = 50 + + +class ReportTimeEntry(ApiWrapper): + def __init__(self, auth: Union[BasicAuth, TokenAuth]) -> None: + super().__init__(auth, base_url=REPORT_ROOT_URL) + + def search( + self, + workspace_id: int, + start_date: Union[date, str, None] = None, + end_date: Union[date, str, None] = None, + user_ids: Optional[List[int]] = None, + project_ids: Optional[List[int]] = None, + page_size: Optional[int] = None, + page_number: Optional[int] = None, + ) -> List[SearchReportTimeEntriesResponse]: + """Return TimeEntries grouped by common values.""" + # API does not support page number but allows to specify first row number on current page + # So pagination is achieved by changing its value + if page_number: + current_page_size = page_size or DEFAULT_PAGE_SIZE + first_row_number = page_number * current_page_size + 1 + else: + first_row_number = None + + payload_schema = SearchReportTimeEntriesRequest( + start_date=start_date, + end_date=end_date, + user_ids=user_ids, + project_ids=project_ids, + page_size=page_size, + first_row_number=first_row_number, + ) + payload = payload_schema.model_dump(mode="json", exclude_none=True, exclude_unset=True) + + response = self.client.post(url=f"/{workspace_id}/search/time_entries", json=payload) + self.raise_for_status(response) + + response_body = response.json() + return [ + SearchReportTimeEntriesResponse.model_validate(report_time_entry_data) + for report_time_entry_data in response_body + ] diff --git a/toggl_python/entities/user.py b/toggl_python/entities/user.py new file mode 100644 index 0000000..62df654 --- /dev/null +++ b/toggl_python/entities/user.py @@ -0,0 +1,201 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, List, Optional, Union + +from toggl_python.api import ApiWrapper +from 
toggl_python.schemas.current_user import ( + DateFormat, + DurationFormat, + MeFeaturesResponse, + MePreferencesResponse, + MeResponse, + MeResponseWithRelatedData, + TimeFormat, + UpdateMePasswordRequest, + UpdateMePreferencesRequest, + UpdateMeRequest, + UpdateMeResponse, +) +from toggl_python.schemas.time_entry import ( + MeTimeEntryQueryParams, + MeTimeEntryResponse, + MeTimeEntryWithMetaResponse, + MeWebTimerResponse, +) + + +if TYPE_CHECKING: + from datetime import datetime + + from pydantic import EmailStr + + +class CurrentUser(ApiWrapper): + prefix: str = "/me" + + def logged(self) -> bool: + response = self.client.get(url=f"{self.prefix}/logged") + self.raise_for_status(response) + + # Returns 200 OK and empty response body + return response.is_success + + def me(self, with_related_data: bool = False) -> MeResponse: + response_schema = MeResponseWithRelatedData if with_related_data else MeResponse + response = self.client.get( + url=self.prefix, + params={"with_related_data": with_related_data}, + ) + self.raise_for_status(response) + + response_body = response.json() + + return response_schema.model_validate(response_body) + + def update_me( + self, + beginning_of_week: Optional[int] = None, + country_id: Optional[int] = None, + default_workspace_id: Optional[int] = None, + email: Optional[EmailStr] = None, + fullname: Optional[str] = None, + timezone: Optional[str] = None, + ) -> UpdateMeResponse: + """Update instance without validating if new value is equal to current one. + + So API request will be sent anyway. 
+ """ + payload_schema = UpdateMeRequest( + beginning_of_week=beginning_of_week, + country_id=country_id, + default_workspace_id=default_workspace_id, + email=email, + fullname=fullname, + timezone=timezone, + ) + payload = payload_schema.model_dump(mode="json", exclude_none=True, exclude_unset=True) + + response = self.client.put(url=self.prefix, json=payload) + self.raise_for_status(response) + + response_body = response.json() + return UpdateMeResponse.model_validate(response_body) + + def change_password(self, current_password: str, new_password: str) -> bool: + """Validate and change user password. + + API response does not indicate about successful password change, + that is why return if response is successful. + """ + payload_schema = UpdateMePasswordRequest( + current_password=current_password, new_password=new_password + ) + payload = payload_schema.model_dump_json() + + response = self.client.put(url=self.prefix, content=payload) + self.raise_for_status(response) + + return response.is_success + + def features(self) -> List[MeFeaturesResponse]: + response = self.client.get(url=f"{self.prefix}/features") + self.raise_for_status(response) + response_body = response.json() + + return [ + MeFeaturesResponse.model_validate(workspace_features) + for workspace_features in response_body + ] + + def preferences(self) -> MePreferencesResponse: + response = self.client.get(url=f"{self.prefix}/preferences") + self.raise_for_status(response) + response_body = response.json() + + return MePreferencesResponse.model_validate(response_body) + + def update_preferences( + self, + date_format: Optional[DateFormat] = None, + duration_format: Optional[DurationFormat] = None, + time_format: Optional[TimeFormat] = None, + ) -> bool: + """Update different formats using pre-defined Enums. + + API documentation is not up to date, available fields to update are found manually. 
+ """ + payload_schema = UpdateMePreferencesRequest( + date_format=date_format, + duration_format=duration_format, + timeofday_format=time_format, + ) + payload = payload_schema.model_dump_json(exclude_none=True, exclude_unset=True) + + response = self.client.post(url=f"{self.prefix}/preferences", content=payload) + self.raise_for_status(response) + + return response.is_success + + def get_time_entry( + self, time_entry_id: int, meta: bool = False + ) -> Union[MeTimeEntryResponse, MeTimeEntryWithMetaResponse]: + """Intentionally use the same schema for requests with `include_sharing=true`. + + Tested responses do not differ from requests with `include_sharing=false` + that is why there is no `include_sharing` method argument. + """ + response = self.client.get( + url=f"{self.prefix}/time_entries/{time_entry_id}", + params={"meta": meta}, + ) + self.raise_for_status(response) + + response_schema = MeTimeEntryWithMetaResponse if meta else MeTimeEntryResponse + + response_body = response.json() + return response_schema.model_validate(response_body) + + def get_current_time_entry(self) -> Optional[MeTimeEntryResponse]: + """Return empty response if there is no running TimeEntry.""" + response = self.client.get(url=f"{self.prefix}/time_entries/current") + self.raise_for_status(response) + + response_body = response.json() + return MeTimeEntryResponse.model_validate(response_body) if response_body else None + + def get_time_entries( + self, + meta: bool = False, + since: Union[int, datetime, None] = None, + before: Union[str, datetime, None] = None, + start_date: Union[str, datetime, None] = None, + end_date: Union[str, datetime, None] = None, + ) -> List[Union[MeTimeEntryResponse, MeTimeEntryWithMetaResponse]]: + """Intentionally use the same schema for requests with `include_sharing=true`. + + Tested responses do not differ from requests with `include_sharing=false` + that is why there is no `include_sharing` method argument. 
+ """ + payload_schema = MeTimeEntryQueryParams( + meta=meta, + since=since, + before=before, + start_date=start_date, + end_date=end_date, + ) + payload = payload_schema.model_dump(mode="json", exclude_none=True) + + response = self.client.get(url=f"{self.prefix}/time_entries", params=payload) + self.raise_for_status(response) + + response_schema = MeTimeEntryWithMetaResponse if meta else MeTimeEntryResponse + + response_body = response.json() + return [response_schema.model_validate(time_entry) for time_entry in response_body] + + def get_web_timer(self) -> MeWebTimerResponse: + response = self.client.get(url=f"{self.prefix}/web-timer") + self.raise_for_status(response) + + response_body = response.json() + return MeWebTimerResponse.model_validate(response_body) diff --git a/toggl_python/entities/workspace.py b/toggl_python/entities/workspace.py new file mode 100644 index 0000000..252ae79 --- /dev/null +++ b/toggl_python/entities/workspace.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, List, Optional, Union + +from toggl_python.api import ApiWrapper +from toggl_python.schemas.project import ProjectQueryParams, ProjectResponse +from toggl_python.schemas.time_entry import ( + BulkEditTimeEntriesOperation, + BulkEditTimeEntriesResponse, + MeTimeEntryResponse, + TimeEntryCreateRequest, + TimeEntryRequest, +) +from toggl_python.schemas.workspace import GetWorkspacesQueryParams, WorkspaceResponse + + +if TYPE_CHECKING: + from datetime import datetime + + +class Workspace(ApiWrapper): + prefix: str = "/workspaces" + + def get(self, workspace_id: int) -> WorkspaceResponse: + response = self.client.get(url=f"{self.prefix}/{workspace_id}") + self.raise_for_status(response) + + response_body = response.json() + + return WorkspaceResponse.model_validate(response_body) + + def list(self, since: Union[int, datetime, None] = None) -> List[WorkspaceResponse]: + payload_schema = GetWorkspacesQueryParams(since=since) + params = 
payload_schema.model_dump(mode="json", exclude_none=True) + + response = self.client.get(url=self.prefix, params=params) + self.raise_for_status(response) + + response_body = response.json() + + return [ + WorkspaceResponse.model_validate(workspace_data) for workspace_data in response_body + ] + + def get_project(self, workspace_id: int, project_id: int) -> ProjectResponse: + response = self.client.get(url=f"{self.prefix}/{workspace_id}/projects/{project_id}") + self.raise_for_status(response) + + response_body = response.json() + + return ProjectResponse.model_validate(response_body) + + def get_projects( # noqa: PLR0913 - Too many arguments in function definition (15 > 12) + self, + workspace_id: int, + active: Optional[bool] = None, + billable: Optional[bool] = None, + user_ids: Optional[List[int]] = None, + client_ids: Optional[List[int]] = None, + group_ids: Optional[List[int]] = None, + statuses: Optional[str] = None, + since: Union[int, datetime, None] = None, + name: Optional[str] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, + sort_field: Optional[str] = None, + sort_order: Optional[str] = None, + only_templates: Optional[bool] = None, + only_me: Optional[bool] = None, + ) -> List[ProjectResponse]: + payload_schema = ProjectQueryParams( + active=active, + billable=billable, + user_ids=user_ids, + client_ids=client_ids, + group_ids=group_ids, + statuses=statuses, + since=since, + name=name, + page=page, + per_page=per_page, + sort_field=sort_field, + sort_order=sort_order, + only_templates=only_templates, + only_me=only_me, + ) + payload = payload_schema.model_dump(mode="json", exclude_none=True) + + response = self.client.get(url=f"{self.prefix}/{workspace_id}/projects", params=payload) + self.raise_for_status(response) + + response_body = response.json() + + return [ProjectResponse.model_validate(project_data) for project_data in response_body] + + def create_time_entry( + self, + workspace_id: int, + start_datetime: 
Union[datetime, str], + created_with: str, + billable: Optional[bool] = None, + description: Optional[str] = None, + duration: Optional[int] = None, + stop: Optional[str] = None, + project_id: Optional[int] = None, + tag_ids: Optional[List[int]] = None, + tags: Optional[List[str]] = None, + task_id: Optional[int] = None, + user_id: Optional[int] = None, + ) -> MeTimeEntryResponse: + request_body_schema = TimeEntryCreateRequest( + created_with=created_with, + start=start_datetime, + workspace_id=workspace_id, + billable=billable, + description=description, + duration=duration, + stop=stop, + project_id=project_id, + tag_ids=tag_ids, + tags=tags, + task_id=task_id, + user_id=user_id, + ) + request_body = request_body_schema.model_dump( + mode="json", exclude_none=True, exclude_unset=True + ) + + response = self.client.post( + url=f"{self.prefix}/{workspace_id}/time_entries", json=request_body + ) + self.raise_for_status(response) + + response_body = response.json() + + return MeTimeEntryResponse.model_validate(response_body) + + def update_time_entry( # noqa: PLR0913 - Too many arguments in function definition (13 > 12) + self, + workspace_id: int, + time_entry_id: int, + billable: Optional[bool] = None, + description: Optional[str] = None, + duration: Optional[int] = None, + project_id: Optional[int] = None, + shared_with_user_ids: Optional[List[int]] = None, + start: Optional[datetime] = None, + stop: Optional[datetime] = None, + tag_ids: Optional[List[int]] = None, + tags: Optional[List[str]] = None, + task_id: Optional[int] = None, + user_id: Optional[int] = None, + ) -> MeTimeEntryResponse: + """Some params from docs are not listed because API don't use them to change object.""" + request_body_schema = TimeEntryRequest( + billable=billable, + description=description, + duration=duration, + project_id=project_id, + shared_with_user_ids=shared_with_user_ids, + start=start, + stop=stop, + tag_ids=tag_ids, + tags=tags, + task_id=task_id, + user_id=user_id, + ) + 
request_body = request_body_schema.model_dump(mode="json", exclude_none=True) + + response = self.client.put( + url=f"{self.prefix}/{workspace_id}/time_entries/{time_entry_id}", json=request_body + ) + self.raise_for_status(response) + + response_body = response.json() + + return MeTimeEntryResponse.model_validate(response_body) + + def delete_time_entry(self, workspace_id: int, time_entry_id: int) -> bool: + response = self.client.delete( + url=f"{self.prefix}/{workspace_id}/time_entries/{time_entry_id}" + ) + self.raise_for_status(response) + + return response.is_success + + def bulk_edit_time_entries( + self, + workspace_id: int, + time_entry_ids: List[int], + operations: List[BulkEditTimeEntriesOperation], + ) -> MeTimeEntryResponse: + if not time_entry_ids: + error_message = "Specify at least one TimeEntry ID" + raise ValueError(error_message) + + max_time_entries_ids = 100 + if len(time_entry_ids) > max_time_entries_ids: + error_message = ( + f"Limit to max TimeEntry IDs exceeded. 
" + f"Max {max_time_entries_ids} ids per request are allowed" + ) + raise ValueError(error_message) + if not operations: + error_message = "Specify at least one edit operation" + raise ValueError(error_message) + + request_body = [ + operation.model_dump(mode="json", exclude_none=True) for operation in operations + ] + + response = self.client.patch( + url=f"{self.prefix}/{workspace_id}/time_entries/{time_entry_ids}", json=request_body + ) + self.raise_for_status(response) + + response_body = response.json() + + return BulkEditTimeEntriesResponse.model_validate(response_body) + + def stop_time_entry(self, workspace_id: int, time_entry_id: int) -> MeTimeEntryResponse: + response = self.client.patch( + url=f"{self.prefix}/{workspace_id}/time_entries/{time_entry_id}/stop" + ) + self.raise_for_status(response) + + response_body = response.json() + + return MeTimeEntryResponse.model_validate(response_body) diff --git a/toggl_python/exceptions.py b/toggl_python/exceptions.py index 81fa562..7a911b1 100644 --- a/toggl_python/exceptions.py +++ b/toggl_python/exceptions.py @@ -1,52 +1,6 @@ -import httpx - - class TogglException(Exception): pass class BadRequest(TogglException): pass - - -class Unauthorized(TogglException): - pass - - -class Forbidden(TogglException): - pass - - -class NotFound(TogglException): - pass - - -class MethodNotAllowed(TogglException): - pass - - -class NotSupported(TogglException): - pass - - -class TooManyRequests(TogglException): - pass - - -STATUS_2_EXCEPTION = { - 400: BadRequest, - 401: Unauthorized, - 403: Forbidden, - 404: NotFound, - 405: MethodNotAllowed, - 429: TooManyRequests, -} - - -def raise_from_response(response: httpx.Response) -> None: - """Raise exception based on the response status code.""" - if response.status_code < 400: - return - - exception_cls = STATUS_2_EXCEPTION.get(response.status_code, TogglException) - raise exception_cls(response.text) diff --git a/toggl_python/repository.py b/toggl_python/repository.py deleted file 
mode 100644 index 06c05bb..0000000 --- a/toggl_python/repository.py +++ /dev/null @@ -1,269 +0,0 @@ -from functools import partial -from typing import Any, Dict, Optional, Tuple, Type, Union - -import httpx - -from .api import Api -from .auth import BasicAuth, TokenAuth -from .entities import ( - BaseEntity, - Client, - Dashboard, - Group, - Project, - ProjectUser, - ReportTimeEntry, - Tag, - Task, - TimeEntry, - User, - Workspace, - WorkspaceUser, -) -from .exceptions import MethodNotAllowed, NotSupported -from .response import ListResponse, ReportTimeEntriesList - - -class BaseRepository(Api): - """ - Base API-class for managing entities on Toggl side: - list, detail them and etc. - """ - - LIST_URL = "" - DETAIL_URL: Optional[str] = None - ENTITY_CLASS: Type[BaseEntity] = BaseEntity - ADDITIONAL_METHODS: Dict[str, Dict[str, Any]] = {} - EXCLUDED_METHODS: Optional[Tuple[str, ...]] = None - ADDITIONAL_PARAMS: Dict[str, Dict[str, Any]] = {} - DATA_CONTAINER: Dict[str, Optional[str]] = {} - LIST_RESPONSE: Optional[Type[ListResponse]] = None - - def __init__( - self, - base_url: Optional[str] = None, - auth: Optional[Union[BasicAuth, TokenAuth]] = None, - ) -> None: - super().__init__(base_url=base_url, auth=auth) - if not self.DETAIL_URL: - self.DETAIL_URL = self.LIST_URL + "/{id}" - - def __getattr__(self, attr: str) -> Any: - """ - Trying to get `partial`ed method `attr` from httpx-client. - In case of fail -> try to get own method from `ADDITIONAL_METHODS` - and `partial`ing it as well from payload of `ADDITIONAL_METHODS` item. 
- :param attr: - :return: - """ - if self.EXCLUDED_METHODS and attr in self.EXCLUDED_METHODS: - raise MethodNotAllowed - try: - method = super().__getattr__(attr) - except AttributeError: - if attr in self.ADDITIONAL_METHODS.keys(): - method = partial(self.additionat_method, **self.ADDITIONAL_METHODS[attr]) - else: - raise AttributeError(f"No such method ({attr})!") - return method - - def additionat_method( - self, - url: str, - _id: Optional[int] = None, - additional_id: Optional[int] = None, - entity: Any = None, - detail: bool = False, - single_item: bool = False, - data_key: Optional[str] = None, - params: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - files: Optional[Dict[str, Any]] = None, - ) -> Any: - """ - Call additional method with specified url and params. - - :param url: url we use to build a target url - :param _id: - :param additional_id: - :param entity: - :param detail: - :param single_item: - :param data_key: - :param params: params to pass - :param data: json-data to pass - :param files: files to pass - :return: - """ - if detail: - if not self.DETAIL_URL: - raise AttributeError("Not defined DETAIL_URL") - _url = (self.DETAIL_URL + "/" + url + "/{additional_id}").format( - id=_id, additional_id=additional_id - ) - return self._retrieve(_url, entity, headers=self.HEADERS, params=params) - elif _id: - if not self.DETAIL_URL: - raise AttributeError("Not defined DETAIL_URL") - _url = (self.DETAIL_URL + "/" + url).format(id=_id) - return self._list(_url, entity, headers=self.HEADERS, param=params, data_key=data_key) - elif single_item: - _url = str(self.BASE_URL) + f"{url}" - return self._retrieve( - _url, - entity, - headers=self.HEADERS, - params=params, - ) - else: - raise NotSupported - - def _retrieve( - self, - _url: Union[str, httpx.URL], - entity_class: Any, - data_key: Optional[str] = None, - **kwargs: Any, - ) -> Any: - params = kwargs - params.update(self.ADDITIONAL_PARAMS.get("retrieve", {})) - - response = 
self.get(_url, params=params) - data = response.json() - data_key = data_key or self.DATA_CONTAINER.get("retrieve", None) - if data_key: - data = data[data_key] - if data: - return entity_class(**data) - - def retrieve(self, id: Optional[int] = None) -> Any: - if not self.DETAIL_URL: - raise AttributeError("Not defined DETAIL_URL") - full_url = self.BASE_URL.join(self.DETAIL_URL.format(id=id)) - return self._retrieve(full_url, self.ENTITY_CLASS) - - def _list( - self, - _url: Union[str, httpx.URL], - entity_class: Any, - data_key: Optional[str] = None, - **kwargs: Any, - ) -> Any: - params = kwargs - params.update(self.ADDITIONAL_PARAMS.get("list", {})) - - response = self.get(_url, params=params) - response_body = response.json() - - data = response_body - data_key = data_key or self.DATA_CONTAINER.get("list", None) - if data_key: - data = data[data_key] - if data: - value = [entity_class(**entity) for entity in data] - if self.LIST_RESPONSE: - value = self.LIST_RESPONSE(value, response_body) - return value - - def list(self, **kwargs: Any) -> Any: - if self.EXCLUDED_METHODS and "list" in self.EXCLUDED_METHODS: - raise MethodNotAllowed - full_url = self.BASE_URL.join(self.LIST_URL) - return self._list(full_url, self.ENTITY_CLASS, **kwargs) - - def create(self, entity: Any, **kwargs: Any) -> Any: - if self.EXCLUDED_METHODS and "create" in self.EXCLUDED_METHODS: - raise MethodNotAllowed - full_url = self.BASE_URL.join(self.LIST_URL) - response = self.post(full_url, data=entity.dict(), **kwargs) - return self.ENTITY_CLASS(**response.json()) - - def update(self, entity: Any, **kwargs: Any) -> Any: - if self.EXCLUDED_METHODS and "update" in self.EXCLUDED_METHODS: - raise MethodNotAllowed - if not self.DETAIL_URL: - raise AttributeError("Not defined DETAIL_URL") - full_url = self.BASE_URL.join(self.DETAIL_URL.format(id=entity.id)) - response = self.put(full_url, data=entity.dict(), **kwargs) - return self.ENTITY_CLASS(**response.json()) - - def partial_update(self, 
entity: Any, **kwargs: Any) -> Any: - if self.EXCLUDED_METHODS and "partial_update" in self.EXCLUDED_METHODS: - raise MethodNotAllowed - if not self.DETAIL_URL: - raise AttributeError("Not defined DETAIL_URL") - full_url = self.BASE_URL.join(self.DETAIL_URL.format(id=entity.id)) - response = self.patch(full_url, data=entity.dict(), **kwargs) - return self.ENTITY_CLASS(**response.json()) - - -class Clients(BaseRepository): - LIST_URL = "clients" - ENTITY_CLASS = Client - - -class Groups(BaseRepository): - LIST_URL = "groups" - ENTITY_CLASS = Group - - -class ProjectUsers(BaseRepository): - LIST_URL = "project_users" - ENTITY_CLASS = ProjectUser - - -class Tags(BaseRepository): - LIST_URL = "tags" - ENTITY_CLASS = Tag - - -class TimeEntries(BaseRepository): - LIST_URL = "me/time_entries" - ENTITY_CLASS = TimeEntry - - -class ReportTimeEntries(BaseRepository): - BASE_URL: httpx.URL = httpx.URL("https://api.track.toggl.com/reports/api/v2/") - ADDITIONAL_PARAMS = {"list": {"user_agent": "toggl_python"}} - DATA_CONTAINER = {"list": "data"} - LIST_URL = "details" - ENTITY_CLASS = ReportTimeEntry - LIST_RESPONSE = ReportTimeEntriesList - - -class Users(BaseRepository): - EXCLUDED_METHODS = ("list", "create", "update", "partial_update") - LIST_URL = "users" - ENTITY_CLASS = User - ADDITIONAL_METHODS = { - "me": {"url": "me", "entity": User, "single_item": True}, - } - - -class Workspaces(BaseRepository): - LIST_URL = "workspaces" - ENTITY_CLASS = Workspace - ADDITIONAL_METHODS = { - "projects": {"url": "projects", "entity": Project, "detail": False}, - "detail_project": {"url": "projects", "entity": Project, "detail": True}, - "users": {"url": "users", "entity": User, "detail": False}, - "clients": {"url": "clients", "entity": Client, "detail": False}, - "groups": {"url": "groups", "entity": Group, "detail": False}, - "tasks": {"url": "tasks", "entity": Task, "data_key": "data", "detail": False}, - "tags": {"url": "tags", "entity": Tag, "detail": False}, - 
"workspace_users": { - "url": "workspace_users", - "entity": WorkspaceUser, - "detail": False, - }, - } - - -class WorkspaceUsers(BaseRepository): - LIST_URL = "workspace_users" - ENTITY_CLASS = WorkspaceUser - - -class Dashboards(BaseRepository): - LIST_URL = "dashboards" - ENTITY_CLASS = Dashboard diff --git a/toggl_python/response.py b/toggl_python/response.py deleted file mode 100644 index a008ee4..0000000 --- a/toggl_python/response.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Any, List, Tuple - - -class ListResponse(List[Any]): - """ - Base class for list response based from mutable sequence - """ - - response_parameters: Tuple[str, ...] = () - - def __init__(self, value: Any, response_body: Any): - super(ListResponse, self).__init__(value) - - for parameter in self.response_parameters: - if parameter in response_body: - setattr(self, parameter, response_body[parameter]) - - -class ReportTimeEntriesList(ListResponse): - """ - Simple ListResponse, extended with params `response_parameters` - from `response_body` on init - """ - - response_parameters = ( - "total_count", - "per_page", - "total_grand", - "total_billable", - "total_currencies", - ) diff --git a/toggl_python/schemas/__init__.py b/toggl_python/schemas/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/toggl_python/schemas/base.py b/toggl_python/schemas/base.py new file mode 100644 index 0000000..69099a7 --- /dev/null +++ b/toggl_python/schemas/base.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from typing import Optional + +from pydantic import AwareDatetime, BaseModel, field_serializer, field_validator + + +class BaseSchema(BaseModel): + pass + + +class SinceParamSchemaMixin(BaseSchema): + since: Optional[AwareDatetime] + + @field_validator("since") + @classmethod + def check_if_since_is_too_old(cls, value: Optional[datetime]) -> Optional[datetime]: + if not value: + return value + + now = 
datetime.now(tz=timezone.utc) + three_months = timedelta(days=90) + utc_value = value.astimezone(tz=timezone.utc) + + if now - three_months > utc_value: + error_message = "Since cannot be older than 3 months" + raise ValueError(error_message) + + return value + + @field_serializer("since", when_used="json") + def serialize_since(self, value: Optional[datetime]) -> Optional[int]: + if not value: + return value + + return int(value.timestamp()) diff --git a/toggl_python/schemas/current_user.py b/toggl_python/schemas/current_user.py new file mode 100644 index 0000000..9cd162b --- /dev/null +++ b/toggl_python/schemas/current_user.py @@ -0,0 +1,183 @@ +from __future__ import annotations + +from enum import Enum + +from toggl_python.schemas.base import BaseSchema + + +try: + import zoneinfo +except ImportError: + from backports import zoneinfo +from datetime import datetime +from typing import Dict, List, Optional + +from pydantic import ( + EmailStr, + SecretStr, + field_serializer, + field_validator, + model_validator, +) +from pydantic.fields import Field +from pydantic_core import Url + +# Move application import `toggl_python.schemas.project.ProjectResponse` into a type-checking block +from toggl_python.schemas.project import ProjectResponse # noqa: TCH001 +from toggl_python.schemas.time_entry import MeTimeEntryResponse # noqa: TCH001 +from toggl_python.schemas.workspace import WorkspaceResponse # noqa: TCH001 + + +class DateFormat(str, Enum): + mdy_slash = "MM/DD/YYYY" + dmy_dash = "DD-MM-YYYY" + mdy_dash = "MM-DD-YYYY" + ymd_dash = "YYYY-MM-DD" + dmy_slash = "DD/MM/YYYY" + dmy_dot = "DD.MM.YYYY" + + +class DurationFormat(str, Enum): + classic = "classic" + improved = "improved" + decimal = "decimal" + + +class TimeFormat(str, Enum): + hour_12 = "h:mm A" + hour_24 = "H:mm" + + +class MeResponseBase(BaseSchema): + api_token: Optional[str] = Field(default=None, min_length=32, max_length=32) + at: datetime + beginning_of_week: int = Field(ge=0, le=6, description="0 
equals to Sunday, 1 - Monday, etc.") + country_id: Optional[int] = None + created_at: datetime + default_workspace_id: int + email: EmailStr + fullname: str + has_password: bool + id: int + image_url: Url + openid_email: Optional[EmailStr] = None + openid_enabled: bool + timezone: str + toggl_accounts_id: str = Field(min_length=22, max_length=22) + updated_at: datetime + + +class MeResponse(MeResponseBase): + authorization_updated_at: datetime + intercom_hash: Optional[str] = Field(default=None, min_length=64, max_length=64) + + +class UpdateMeRequest(BaseSchema): + beginning_of_week: Optional[int] = Field(default=None, ge=0, le=6) + country_id: Optional[int] = Field( + default=None, + ge=1, + description="Cannot validate, api documentation does not contain available county_ids", + ) + default_workspace_id: Optional[int] = Field( + default=None, + ge=1, + description="Cannot check workspace_id availability without making API request", + ) + email: Optional[EmailStr] = None # Cannot check uniqueness without making API request + fullname: Optional[str] = Field(default=None, min_length=1) + timezone: Optional[str] = None + + @field_validator("timezone") + @classmethod + def check_if_timezone_exists(cls, value: Optional[str]) -> Optional[str]: + if not value or value in zoneinfo.available_timezones(): + return value + + error_message = f"Specified timezone {value} is invalid" + raise ValueError(error_message) + + +class UpdateMeResponse(MeResponseBase): + toggl_accounts_updated_at: datetime + + +class MeResponseWithRelatedData(MeResponse): + clients: Optional[List] = None + projects: Optional[List[ProjectResponse]] = None + tags: Optional[List] = None + time_entries: Optional[List[MeTimeEntryResponse]] = None + workspaces: List[WorkspaceResponse] # Default workspace is created after signup, + + +class UpdateMePasswordRequest(BaseSchema): + current_password: SecretStr + password: SecretStr = Field(alias="new_password") + + @model_validator(mode="before") + @classmethod 
+ def check_if_passwords_are_equal(cls, data: Dict[str, str]) -> Dict[str, str]: + if data["current_password"] == data["new_password"]: + error_message = "New password should differ from current password" + raise ValueError(error_message) + + return data + + @field_validator("password") + @classmethod + def check_if_password_is_weak(cls, value: SecretStr) -> SecretStr: + min_password_length = 8 + new_password = value.get_secret_value() + + if ( + len(new_password) >= min_password_length + and any(char.isupper() for char in new_password) + and any(char.islower() for char in new_password) + ): + return value + + error_message = ( + "Password is too weak. Strong password should contain min 8 characters, " + "at least 1 uppercase and 1 lowercase letters" + ) + raise ValueError(error_message) + + @field_serializer("current_password", "password", when_used="json") + def reveal_secret(self, value: SecretStr) -> str: + """Reveal secrets on `model_dump_json` call.""" + return value.get_secret_value() + + +class MeFeaturesResponse(BaseSchema): + workspace_id: int + features: List[MeFeatureResponse] + + +class MeFeatureResponse(BaseSchema): + feature_id: int + enabled: bool + name: str + + +class MePreferencesResponse(BaseSchema): + beginning_of_week: int = Field(alias="BeginningOfWeek") + alpha_features: List[AlphaFeatureResponse] + date_format: str + duration_format: str + pg_time_zone_name: str + record_timeline: bool + send_product_emails: bool + send_timer_notifications: bool + send_weekly_report: bool + timeofday_format: str + + +class AlphaFeatureResponse(BaseSchema): + code: str + enabled: bool + + +class UpdateMePreferencesRequest(BaseSchema): + date_format: Optional[DateFormat] = None + duration_format: Optional[DurationFormat] = None + timeofday_format: Optional[TimeFormat] = None diff --git a/toggl_python/schemas/project.py b/toggl_python/schemas/project.py new file mode 100644 index 0000000..6e5d26d --- /dev/null +++ b/toggl_python/schemas/project.py @@ -0,0 
+1,54 @@ +from __future__ import annotations + +from datetime import datetime +from typing import List, Optional + +from toggl_python.schemas.base import BaseSchema, SinceParamSchemaMixin + + +class ProjectResponse(BaseSchema): + active: bool + actual_hours: Optional[int] + actual_seconds: Optional[int] + at: datetime + auto_estimates: Optional[bool] + billable: Optional[bool] + can_track_time: bool + client_id: Optional[int] + color: str + created_at: datetime + currency: Optional[str] + estimated_hours: Optional[int] + estimated_seconds: Optional[int] + fixed_fee: Optional[int] + id: int + is_private: bool + is_shared: bool + name: str + permissions: Optional[str] + rate: Optional[int] + rate_last_updated: Optional[datetime] + recurring: bool + recurring_parameters: Optional[List] + server_deleted_at: Optional[datetime] + start_date: datetime + status: Optional[str] = None + template: Optional[bool] + template_id: Optional[int] + workspace_id: int + + +class ProjectQueryParams(SinceParamSchemaMixin, BaseSchema): + active: Optional[bool] + billable: Optional[bool] + user_ids: Optional[List[int]] + client_ids: Optional[List[int]] + group_ids: Optional[List[int]] + statuses: Optional[str] + name: Optional[str] + page: Optional[int] + per_page: Optional[int] + sort_field: Optional[str] + sort_order: Optional[str] + only_templates: Optional[bool] + only_me: Optional[bool] diff --git a/toggl_python/schemas/report_time_entry.py b/toggl_python/schemas/report_time_entry.py new file mode 100644 index 0000000..fefb59c --- /dev/null +++ b/toggl_python/schemas/report_time_entry.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from datetime import date +from typing import Dict, List, Optional + +from pydantic import AwareDatetime, field_serializer, model_validator + +from toggl_python.schemas.base import BaseSchema + + +class SearchReportTimeEntriesResponse(BaseSchema): + billable: bool + billable_amount_in_cents: Optional[int] + currency: str + description: 
Optional[str] + hourly_rate_in_cents: Optional[int] + project_id: Optional[int] + row_number: int + tag_ids: List[int] + task_id: Optional[int] + time_entries: List[ReportTimeEntryItem] + user_id: int + username: str + + +class ReportTimeEntryItem(BaseSchema): + at: AwareDatetime + at_tz: AwareDatetime + id: int + seconds: int + start: AwareDatetime + stop: AwareDatetime + + +class SearchReportTimeEntriesRequest(BaseSchema): + start_date: Optional[date] = None + end_date: Optional[date] = None + project_ids: Optional[List[int]] = None + user_ids: Optional[List[int]] = None + page_size: Optional[int] = None + first_row_number: Optional[int] = None + + @model_validator(mode="before") + @classmethod + def check_if_at_least_one_param_is_set(cls, data: Dict[str, str]) -> Dict[str, str]: + if any(data.values()): + return data + + error_message = "At least one parameter must be set" + raise ValueError(error_message) + + @field_serializer("start_date", "end_date", when_used="json") + def serialize_datetimes(self, value: Optional[date]) -> Optional[str]: + if not value: + return value + + return value.isoformat() diff --git a/toggl_python/schemas/time_entry.py b/toggl_python/schemas/time_entry.py new file mode 100644 index 0000000..ca5b790 --- /dev/null +++ b/toggl_python/schemas/time_entry.py @@ -0,0 +1,197 @@ +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from enum import Enum +from typing import Dict, List, Optional, Union + +from pydantic import ( + AwareDatetime, + field_serializer, + field_validator, + model_serializer, + model_validator, +) +from typing_extensions import Self + +from toggl_python.schemas.base import BaseSchema, SinceParamSchemaMixin +from toggl_python.schemas.project import ProjectResponse # noqa: TCH001 + + +class BulkEditTimeEntriesOperations(str, Enum): + add = "add" + remove = "remove" + # Renamed to avoid using system keyword + change = "replace" + + +class BulkEditTimeEntriesFieldNames(str, Enum): + 
billable = "billable" + description = "description" + duration = "duration" + project_id = "project_id" + shared_with_user_ids = "shared_with_user_ids" + start = "start" + stop = "stop" + tag_ids = "tag_ids" + tags = "tags" + task_id = "task_id" + user_id = "user_id" + + +class MeTimeEntryResponseBase(BaseSchema): + billable: bool + description: Optional[str] + project_id: Optional[int] + tag_ids: Optional[List[int]] + task_id: Optional[int] + user_id: int + workspace_id: int + + +class MeTimeEntryResponse(MeTimeEntryResponseBase): + at: datetime + duration: int + duronly: bool + id: int + permissions: Optional[List] + server_deleted_at: Optional[datetime] + start: datetime + stop: Optional[datetime] + tags: Optional[List[str]] + + +class MeTimeEntryWithMetaResponse(MeTimeEntryResponse): + user_avatar_url: str + user_name: str + + +class MeTimeEntryQueryParams(SinceParamSchemaMixin, BaseSchema): + meta: bool + before: Optional[datetime] = None + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + + @field_serializer("before", "start_date", "end_date", when_used="json") + def serialize_datetimes(self, value: Optional[datetime]) -> Optional[str]: + if not value: + return value + + return value.date().isoformat() + + @field_validator("start_date", "end_date") + @classmethod + def check_if_dates_are_too_old(cls, value: Optional[datetime]) -> Optional[datetime]: + if not value: + return value + + now = datetime.now(tz=timezone.utc) + three_months = timedelta(days=90) + utc_value = value.astimezone(tz=timezone.utc) + + if now - three_months > utc_value: + first_allowed_date = (now - three_months).date().isoformat() + error_message = f"Start and end dates must not be earlier than {first_allowed_date}" + raise ValueError(error_message) + + return value + + +class WebTimerTimeEntryResponse(MeTimeEntryResponseBase): + deleted: Optional[datetime] + duration_in_seconds: int + ignore_start_and_stop: bool + planned_task_id: Optional[int] + updated_at: 
datetime + utc_start: datetime + utc_stop: datetime + + +class MeWebTimerResponse(BaseSchema): + clients: Optional[List] + projects: Optional[List[ProjectResponse]] + tags: List + time_entries: List[WebTimerTimeEntryResponse] + + +class TimeEntryRequest(BaseSchema): + billable: Optional[bool] + description: Optional[str] + project_id: Optional[int] + tag_ids: Optional[List[int]] + task_id: Optional[int] + user_id: Optional[int] + duration: Optional[int] + start: Optional[AwareDatetime] + stop: Optional[AwareDatetime] + shared_with_user_ids: Optional[List[int]] + tags: Optional[List[str]] + + @field_serializer("start", "stop", when_used="json") + def serialize_datetimes(self, value: Optional[datetime]) -> Optional[str]: + if not value: + return value + + return value.isoformat() + + +class TimeEntryCreateRequest(BaseSchema): + created_with: str + start: AwareDatetime + workspace_id: int + billable: Optional[bool] = None + description: Optional[str] = None + duration: Optional[int] = None + stop: Optional[AwareDatetime] = None + project_id: Optional[int] = None + tag_ids: Optional[List[int]] = None + tags: Optional[List[str]] = None + task_id: Optional[int] = None + user_id: Optional[int] = None + + @field_serializer("start", "stop", when_used="json") + def serialize_datetimes(self, value: Optional[datetime]) -> Optional[str]: + if not value: + return value + + return value.isoformat() + + @model_validator(mode="after") + def validate_stop_and_duration(self) -> Self: + if ( + self.duration + and self.stop + and (self.start + timedelta(seconds=self.duration) != self.stop) + ): + error_message = ( + "`start`, `stop` and `duration` must be consistent - " + "`start` + `duration` == `stop`" + ) + raise ValueError(error_message) + + return self + + +class BulkEditTimeEntriesOperation(BaseSchema): + operation: BulkEditTimeEntriesOperations + field_name: BulkEditTimeEntriesFieldNames + field_value: Union[bool, str, int, AwareDatetime, List[int], List[str]] + + 
@model_serializer(when_used="json") + def serialize_schema( + self, + ) -> Dict[str, Union[bool, str, int, AwareDatetime, List[int], List[str]]]: + return { + "op": self.operation, + "path": f"/{self.field_name}", + "value": self.field_value, + } + + +class BulkEditTimeEntriesResponseFailure(BaseSchema): + id: int + message: str + + +class BulkEditTimeEntriesResponse(BaseSchema): + success: List[int] + failure: List[BulkEditTimeEntriesResponseFailure] diff --git a/toggl_python/schemas/workspace.py b/toggl_python/schemas/workspace.py new file mode 100644 index 0000000..0494ba8 --- /dev/null +++ b/toggl_python/schemas/workspace.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from datetime import datetime +from typing import List, Optional + +from pydantic.fields import Field + +from toggl_python.schemas.base import BaseSchema, SinceParamSchemaMixin + + +class WorkspaceResponseBase(BaseSchema): + admin: bool + api_token: Optional[str] = Field(default=None, deprecated=True) + at: datetime + business_ws: bool = Field(description="Is workspace on Premium subscription") + csv_upload: Optional[List] + default_currency: str + default_hourly_rate: Optional[float] + hide_start_end_times: bool + ical_enabled: bool + ical_url: Optional[str] + id: int + last_modified: Optional[datetime] + logo_url: str + name: str + only_admins_may_create_projects: bool + only_admins_may_create_tags: bool + only_admins_see_billable_rates: bool + only_admins_see_team_dashboard: bool + organization_id: int + permissions: Optional[List[str]] + premium: bool + profile: int = Field(deprecated=True) + projects_billable_by_default: bool + projects_enforce_billable: bool + projects_private_by_default: bool + rate_last_updated: Optional[datetime] + reports_collapse: bool + role: str + rounding: int = Field(le=1, ge=-1) + rounding_minutes: int + server_deleted_at: Optional[datetime] + subscription: Optional[List] + suspended_at: Optional[datetime] + working_hours_in_minutes: Optional[int] + 
class WorkspaceResponse(WorkspaceResponseBase):
    """Workspace payload returned by the Toggl API.

    Exposes exactly the fields declared on ``WorkspaceResponseBase``;
    kept as a separate name so request/response types stay symmetric.
    """


class GetWorkspacesQueryParams(SinceParamSchemaMixin, BaseSchema):
    """Query parameters for listing workspaces.

    The only supported filter is the inherited ``since`` timestamp
    (validated and serialized by ``SinceParamSchemaMixin``).
    """