From 0b81865fc321ae8ad1ae16c04652b81cc51edc26 Mon Sep 17 00:00:00 2001 From: Javed Habib <100477031+JaeAeich@users.noreply.github.com> Date: Sun, 19 May 2024 17:04:28 +0530 Subject: [PATCH] feat: clean up & refactor majorly (#171) Co-authored-by: Alex Kanitz --- .dockerignore | 24 +- .github/ISSUE_TEMPLATE/general-purpose.md | 33 + ...ild.yaml => build_and_publish_charts.yaml} | 65 +- ...filer.yml => build_and_publish_filer.yaml} | 5 +- ....yml => build_and_publish_taskmaster.yaml} | 4 +- .github/workflows/code_quality.yaml | 102 + .github/workflows/code_test_unit.yaml | 42 + .github/workflows/tox.yml | 25 - .github/workflows/validation_pr.yaml | 45 + .github/workflows/vulnerabilities.yaml | 56 + .safety-policy.yml | 49 + MANIFEST.in | 1 - Makefile | 197 ++ PULL_REQUEST_TEMPLATE.md | 26 + README.md | 34 +- cloudbuild.yaml | 6 - cloudbuild_testing.yaml | 6 - containers/filer.Dockerfile | 25 - containers/taskmaster.Dockerfile | 26 - {charts => deployment/charts}/tesk/.gitignore | 0 .../charts}/tesk/.helmignore | 0 {charts => deployment/charts}/tesk/Chart.yaml | 0 {charts => deployment/charts}/tesk/README.md | 2 +- .../charts}/tesk/ftp/.netrc-TEMPLATE | 0 .../charts}/tesk/s3-config/config-TEMPLATE | 0 .../tesk/s3-config/credentials-TEMPLATE | 0 .../tesk/service-info/service-info.yaml | 0 .../templates/common/oauth-client-secret.yaml | 0 .../templates/common/service-info-cm.yaml | 0 .../templates/common/taskmaster-rbac.yaml | 0 .../templates/common/tesk-deployment.yaml | 0 .../tesk/templates/common/tesk-svc.yaml | 0 .../tesk/templates/ftp/ftp-endpoint.yaml | 0 .../tesk/templates/ftp/ftp-secret.yaml | 0 .../tesk/templates/ftp/ftp-service.yaml | 0 .../tesk/templates/ftp/netrc-secret.yaml | 0 .../tesk/templates/ingress/ingress-rules.yaml | 0 .../tesk/templates/openshift/oc-route.yaml | 0 .../tesk/templates/storage/aws-secret.yaml | 0 .../tesk/templates/storage/openstack.yaml | 0 .../charts}/tesk/tls_secret_name.yml-TEMPLATE | 0 .../charts}/tesk/values.yaml | 2 +- deployment/containers/filer.Dockerfile | 39 + deployment/containers/taskmaster.Dockerfile | 39 + .../documentation}/deployment.md | 4 +- .../documentation}/integrated_wes_tes.md | 0 .../documentation}/local_ftp.md | 0 dockerBuild | 1 - dockerRun | 1 - {examples => docs/examples}/inputFile.json | 0 .../examples}/inputHelloWorld.json | 0 {examples => docs/examples}/inputHttp.json | 0 .../examples}/localftp/taskWithIO.json | 0 .../examples}/resources/cpu.json | 0 .../resources/more_cpu_than_nodes.json | 0 {examples => docs/examples}/taskCreate | 0 {examples => docs/examples}/taskList | 0 .../examples}/transferPvc/Readme.md | 0 {examples => docs/examples}/transferPvc/clean | 0 .../examples}/transferPvc/minikubeStart | 0 .../examples}/transferPvc/pod.yaml | 0 .../examples}/transferPvc/pv.yaml | 0 .../examples}/transferPvc/pvc.yaml | 0 {doc => docs}/taskmaster_architecture.png | Bin {documentation => docs}/tesintro.md | 0 {documentation/img => images}/TESKlogo.png | Bin .../img => images}/TESKlogowfont.png | Bin .../img => images}/architecture.png | Bin .../img => images}/project-architecture.png | Bin init | 8 - install | 4 - mypy.ini | 8 + poetry.lock | 2119 +++++++++++++++++ pyproject.toml | 106 + pytest.ini | 9 - scripts/dockerBuild | 31 - scripts/dockerRun | 12 - scripts/run | 2 + scripts/taskmaster | 3 + setup.cfg | 4 - setup.py | 93 - src/tesk_core/Util.py | 7 - src/tesk_core/exception.py | 10 - src/tesk_core/filer.py | 500 ---- src/tesk_core/filer_class.py | 180 -- src/tesk_core/filer_s3.py | 117 - src/tesk_core/job.py | 78 - 
src/tesk_core/path.py | 73 - src/tesk_core/pvc.py | 56 - src/tesk_core/taskmaster.py | 339 --- src/tesk_core/transput.py | 81 - taskmaster | 3 - tesk/__init__.py | 3 + {src/tesk_core => tesk/services}/README.md | 0 tesk/services/__init__.py | 3 + tesk/services/constants.py | 3 + tesk/services/exceptions.py | 29 + tesk/services/filer.py | 510 ++++ tesk/services/filer_class.py | 178 ++ tesk/services/filer_s3.py | 146 ++ tesk/services/job.py | 101 + tesk/services/path.py | 60 + tesk/services/pvc.py | 64 + tesk/services/taskmaster.py | 332 +++ tesk/services/transput.py | 76 + tesk/services/utils.py | 5 + tests/FilerClassTest.py | 155 -- tests/TaskMasterTest.py | 83 - tests/assertThrows.py | 15 - tests/test_filer.py | 251 -- tests/test_filer_ftp_pytest.py | 226 -- tests/test_filer_general_pytest.py | 64 - tests/test_filer_http_pytest.py | 129 - tests/test_job.py | 288 --- tests/test_s3_filer.py | 174 -- tests/test_taskmaster.py | 133 -- .../test_unit/test_services/FilerClassTest.py | 142 ++ .../test_unit/test_services/TaskMasterTest.py | 93 + tests/test_unit/test_services/assertThrows.py | 7 + .../resources/copyDirTest/dst1}/3.txt | 0 .../resources/copyDirTest/dst1}/a/1.txt | 0 .../resources/copyDirTest/dst1}/a/2.txt | 0 .../resources/copyDirTest/dst2/3.txt | 0 .../resources/copyDirTest/dst2/a/1.txt | 0 .../resources/copyDirTest/dst2/a/2.txt | 0 .../resources/copyDirTest/src/3.txt | 0 .../resources/copyDirTest/src/a/1.txt | 0 .../resources/copyDirTest/src/a/2.txt | 0 .../test_services}/resources/inputFile.json | 0 .../test_services}/resources/test_config | 0 tests/test_unit/test_services/test_filer.py | 260 ++ .../test_services/test_filer_ftp_pytest.py | 213 ++ .../test_filer_general_pytest.py | 62 + .../test_services/test_filer_http_pytest.py | 115 + tests/test_unit/test_services/test_job.py | 437 ++++ .../test_unit/test_services/test_s3_filer.py | 231 ++ .../test_services/test_taskmaster.py | 185 ++ tox.ini | 25 - 138 files changed, 6214 insertions(+), 3286 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/general-purpose.md rename .github/workflows/{build.yaml => build_and_publish_charts.yaml} (51%) rename .github/workflows/{docker-build-publish-filer.yml => build_and_publish_filer.yaml} (89%) rename .github/workflows/{docker-build-publish-taskmaster.yml => build_and_publish_taskmaster.yaml} (92%) create mode 100644 .github/workflows/code_quality.yaml create mode 100644 .github/workflows/code_test_unit.yaml delete mode 100644 .github/workflows/tox.yml create mode 100644 .github/workflows/validation_pr.yaml create mode 100644 .github/workflows/vulnerabilities.yaml create mode 100644 .safety-policy.yml delete mode 100644 MANIFEST.in create mode 100644 Makefile create mode 100644 PULL_REQUEST_TEMPLATE.md delete mode 100644 cloudbuild.yaml delete mode 100644 cloudbuild_testing.yaml delete mode 100644 containers/filer.Dockerfile delete mode 100644 containers/taskmaster.Dockerfile rename {charts => deployment/charts}/tesk/.gitignore (100%) rename {charts => deployment/charts}/tesk/.helmignore (100%) rename {charts => deployment/charts}/tesk/Chart.yaml (100%) rename {charts => deployment/charts}/tesk/README.md (95%) rename {charts => deployment/charts}/tesk/ftp/.netrc-TEMPLATE (100%) rename {charts => deployment/charts}/tesk/s3-config/config-TEMPLATE (100%) rename {charts => deployment/charts}/tesk/s3-config/credentials-TEMPLATE (100%) rename {charts => deployment/charts}/tesk/service-info/service-info.yaml (100%) rename {charts => 
deployment/charts}/tesk/templates/common/oauth-client-secret.yaml (100%) rename {charts => deployment/charts}/tesk/templates/common/service-info-cm.yaml (100%) rename {charts => deployment/charts}/tesk/templates/common/taskmaster-rbac.yaml (100%) rename {charts => deployment/charts}/tesk/templates/common/tesk-deployment.yaml (100%) rename {charts => deployment/charts}/tesk/templates/common/tesk-svc.yaml (100%) rename {charts => deployment/charts}/tesk/templates/ftp/ftp-endpoint.yaml (100%) rename {charts => deployment/charts}/tesk/templates/ftp/ftp-secret.yaml (100%) rename {charts => deployment/charts}/tesk/templates/ftp/ftp-service.yaml (100%) rename {charts => deployment/charts}/tesk/templates/ftp/netrc-secret.yaml (100%) rename {charts => deployment/charts}/tesk/templates/ingress/ingress-rules.yaml (100%) rename {charts => deployment/charts}/tesk/templates/openshift/oc-route.yaml (100%) rename {charts => deployment/charts}/tesk/templates/storage/aws-secret.yaml (100%) rename {charts => deployment/charts}/tesk/templates/storage/openstack.yaml (100%) rename {charts => deployment/charts}/tesk/tls_secret_name.yml-TEMPLATE (100%) rename {charts => deployment/charts}/tesk/values.yaml (97%) create mode 100644 deployment/containers/filer.Dockerfile create mode 100644 deployment/containers/taskmaster.Dockerfile rename {documentation => deployment/documentation}/deployment.md (98%) rename {documentation => deployment/documentation}/integrated_wes_tes.md (100%) rename {documentation => deployment/documentation}/local_ftp.md (100%) delete mode 120000 dockerBuild delete mode 120000 dockerRun rename {examples => docs/examples}/inputFile.json (100%) rename {examples => docs/examples}/inputHelloWorld.json (100%) rename {examples => docs/examples}/inputHttp.json (100%) rename {examples => docs/examples}/localftp/taskWithIO.json (100%) rename {examples => docs/examples}/resources/cpu.json (100%) rename {examples => docs/examples}/resources/more_cpu_than_nodes.json (100%) rename {examples => docs/examples}/taskCreate (100%) rename {examples => docs/examples}/taskList (100%) rename {examples => docs/examples}/transferPvc/Readme.md (100%) rename {examples => docs/examples}/transferPvc/clean (100%) rename {examples => docs/examples}/transferPvc/minikubeStart (100%) rename {examples => docs/examples}/transferPvc/pod.yaml (100%) rename {examples => docs/examples}/transferPvc/pv.yaml (100%) rename {examples => docs/examples}/transferPvc/pvc.yaml (100%) rename {doc => docs}/taskmaster_architecture.png (100%) rename {documentation => docs}/tesintro.md (100%) rename {documentation/img => images}/TESKlogo.png (100%) rename {documentation/img => images}/TESKlogowfont.png (100%) rename {documentation/img => images}/architecture.png (100%) rename {documentation/img => images}/project-architecture.png (100%) delete mode 100755 init delete mode 100755 install create mode 100644 mypy.ini create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100755 pytest.ini delete mode 100755 scripts/dockerBuild delete mode 100755 scripts/dockerRun create mode 100755 scripts/taskmaster delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 src/tesk_core/Util.py delete mode 100644 src/tesk_core/exception.py delete mode 100755 src/tesk_core/filer.py delete mode 100644 src/tesk_core/filer_class.py delete mode 100644 src/tesk_core/filer_s3.py delete mode 100644 src/tesk_core/job.py delete mode 100644 src/tesk_core/path.py delete mode 100644 src/tesk_core/pvc.py delete mode 100755 
src/tesk_core/taskmaster.py delete mode 100644 src/tesk_core/transput.py delete mode 100755 taskmaster create mode 100644 tesk/__init__.py rename {src/tesk_core => tesk/services}/README.md (100%) create mode 100644 tesk/services/__init__.py create mode 100644 tesk/services/constants.py create mode 100644 tesk/services/exceptions.py create mode 100755 tesk/services/filer.py create mode 100644 tesk/services/filer_class.py create mode 100644 tesk/services/filer_s3.py create mode 100644 tesk/services/job.py create mode 100644 tesk/services/path.py create mode 100644 tesk/services/pvc.py create mode 100755 tesk/services/taskmaster.py create mode 100644 tesk/services/transput.py create mode 100644 tesk/services/utils.py delete mode 100644 tests/FilerClassTest.py delete mode 100644 tests/TaskMasterTest.py delete mode 100644 tests/assertThrows.py delete mode 100644 tests/test_filer.py delete mode 100755 tests/test_filer_ftp_pytest.py delete mode 100755 tests/test_filer_general_pytest.py delete mode 100755 tests/test_filer_http_pytest.py delete mode 100644 tests/test_job.py delete mode 100644 tests/test_s3_filer.py delete mode 100644 tests/test_taskmaster.py create mode 100644 tests/test_unit/test_services/FilerClassTest.py create mode 100644 tests/test_unit/test_services/TaskMasterTest.py create mode 100644 tests/test_unit/test_services/assertThrows.py rename tests/{resources/copyDirTest/src => test_unit/test_services/resources/copyDirTest/dst1}/3.txt (100%) rename tests/{resources/copyDirTest/src => test_unit/test_services/resources/copyDirTest/dst1}/a/1.txt (100%) rename tests/{resources/copyDirTest/src => test_unit/test_services/resources/copyDirTest/dst1}/a/2.txt (100%) rename src/tesk_core/__init__.py => tests/test_unit/test_services/resources/copyDirTest/dst2/3.txt (100%) create mode 100644 tests/test_unit/test_services/resources/copyDirTest/dst2/a/1.txt create mode 100644 tests/test_unit/test_services/resources/copyDirTest/dst2/a/2.txt create mode 100644 tests/test_unit/test_services/resources/copyDirTest/src/3.txt create mode 100644 tests/test_unit/test_services/resources/copyDirTest/src/a/1.txt create mode 100644 tests/test_unit/test_services/resources/copyDirTest/src/a/2.txt rename tests/{ => test_unit/test_services}/resources/inputFile.json (100%) rename tests/{ => test_unit/test_services}/resources/test_config (100%) create mode 100644 tests/test_unit/test_services/test_filer.py create mode 100755 tests/test_unit/test_services/test_filer_ftp_pytest.py create mode 100755 tests/test_unit/test_services/test_filer_general_pytest.py create mode 100755 tests/test_unit/test_services/test_filer_http_pytest.py create mode 100644 tests/test_unit/test_services/test_job.py create mode 100644 tests/test_unit/test_services/test_s3_filer.py create mode 100644 tests/test_unit/test_services/test_taskmaster.py delete mode 100644 tox.ini diff --git a/.dockerignore b/.dockerignore index 72617693..23d6a048 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,21 @@ -.coverage -.pytest_cache/ -.tox/ +# Cache and dot files +.coverage/ .eggs/ -tesk-core/containers/ +.github/ +.gitignore +.mypy_cache/ +.pytest_cache/ +.ruff/ +.ruff_cache/ +.venv/ +# Docs and artifacts +./deployment +./docs +./images +# Build +./build +./dist +# Scripts and tools +./scripts +Makefile +PULL_REQUEST_TEMPLATE.md \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/general-purpose.md b/.github/ISSUE_TEMPLATE/general-purpose.md new file mode 100644 index 00000000..3c1ab457 --- /dev/null +++ 
b/.github/ISSUE_TEMPLATE/general-purpose.md @@ -0,0 +1,33 @@ +Your issue may already be reported! +Please search on the [issue tracker](https://github.com/elixir-cloud-aai/TESK/issues) before creating one. + +## Expected Behavior + + + +## Current Behavior + + + +## Possible Solution + + + +## Steps to Reproduce (for bugs) + + +1. +2. +3. +4. + +## Context + + + +## Your Environment + +* Version used: +* Browser Name and version: +* Operating System and version (desktop or mobile): +* Link to your project: diff --git a/.github/workflows/build.yaml b/.github/workflows/build_and_publish_charts.yaml similarity index 51% rename from .github/workflows/build.yaml rename to .github/workflows/build_and_publish_charts.yaml index 95432c8b..30f5e731 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build_and_publish_charts.yaml @@ -1,12 +1,18 @@ -name: Helm Chart Testing +name: Build and Publish Helm Charts on: - pull_request: - branches: [master] + push: + branches: + - main + pull_request: + branches: + - main jobs: - helm: + create-cluster: + name: Create k3s Cluster runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v3.3.0 @@ -20,31 +26,47 @@ jobs: - name: Create namespace run: kubectl create ns tesk - + + helm-deps: + name: Helm Dependencies + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3.3.0 + with: + fetch-depth: 0 + - name: Helm Deps run: | - for dir in $(ls -d charts/*); do + for dir in $(ls -d deployment/charts/*); do helm dependency update $dir; done - + + helm-lint: + name: Helm Lint + runs-on: ubuntu-latest + needs: [helm-deps] + steps: + - name: Checkout + uses: actions/checkout@v3.3.0 + with: + fetch-depth: 0 + - name: Helm Lint run: | - for dir in $(ls -d charts/*); do + for dir in $(ls -d deployment/charts/*); do helm lint $dir done - - - name: Apply Helm file - run: helm install -n tesk tesk . -f values.yaml - working-directory: charts/tesk - - - name: Sleep for 30 seconds - run: sleep 30 - - - name: Get Helm and k8s - run: helm list -n tesk && kubectl get all -n tesk - - - name: curl URL - run: curl localhost -vL + + chart-releaser: + name: Run chart-releaser + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3.3.0 + with: + fetch-depth: 0 - name: Configure Git run: | @@ -57,4 +79,3 @@ jobs: skip_existing: true env: CR_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - \ No newline at end of file diff --git a/.github/workflows/docker-build-publish-filer.yml b/.github/workflows/build_and_publish_filer.yaml similarity index 89% rename from .github/workflows/docker-build-publish-filer.yml rename to .github/workflows/build_and_publish_filer.yaml index 51885296..b43f12ff 100644 --- a/.github/workflows/docker-build-publish-filer.yml +++ b/.github/workflows/build_and_publish_filer.yaml @@ -1,4 +1,4 @@ -name: tesk-core-filer +name: Build and Publish Filer on: push: @@ -14,6 +14,7 @@ on: jobs: build-from-source: + name: Build and publish Docker image runs-on: ubuntu-latest steps: - name: Checkout elixir-cloud-aai/tesk-core @@ -43,7 +44,7 @@ with: context: .
push: true - file: ./containers/filer.Dockerfile + file: ./deployment/containers/filer.Dockerfile tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/docker-build-publish-taskmaster.yml b/.github/workflows/build_and_publish_taskmaster.yaml similarity index 92% rename from .github/workflows/docker-build-publish-taskmaster.yml rename to .github/workflows/build_and_publish_taskmaster.yaml index f638b22d..482c8a2b 100644 --- a/.github/workflows/docker-build-publish-taskmaster.yml +++ b/.github/workflows/build_and_publish_taskmaster.yaml @@ -1,4 +1,4 @@ -name: tesk-core-taskmaster +name: Build and Publish Taskmaster on: push: @@ -43,7 +43,7 @@ jobs: with: context: . push: true - file: ./containers/taskmaster.Dockerfile + file: ./deployment/containers/taskmaster.Dockerfile tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/code_quality.yaml b/.github/workflows/code_quality.yaml new file mode 100644 index 00000000..e63af734 --- /dev/null +++ b/.github/workflows/code_quality.yaml @@ -0,0 +1,102 @@ +name: Code Quality + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + lint: + name: Lint + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python + uses: actions/setup-python@v5 + id: cq + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install lint dependencies + run: poetry install --only=lint --no-interaction --no-root + + - name: Check code quality + run: poetry run ruff check . + + format: + name: Format + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python + uses: actions/setup-python@v5 + id: cq + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install lint dependencies + run: poetry install --only=lint --no-interaction --no-root + + - name: Check code style + run: poetry run ruff format --check + + type-check: + name: Type Check + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up python + id: cqtc + uses: actions/setup-python@v5 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install types and mypy dependencies + run: poetry install --only=types --no-interaction --no-root + + - name: Check types + run: poetry run mypy tesk/ + + spell-check: + name: Spell Check + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python + uses: actions/setup-python@v5 + id: cq + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install lint dependencies + run: poetry install --only=lint --no-interaction --no-root + + - name: Check spellings + run: poetry run typos . 
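The four quality gates above (lint, format, type check, spell check) can be reproduced locally before pushing. A minimal sketch, assuming the same poetry dependency groups are installed; the helper script name `run_quality_checks.py` is hypothetical and not part of this PR:

```python
"""run_quality_checks.py -- hypothetical local mirror of the code_quality workflow."""
import subprocess
import sys

# Commands mirror the workflow steps above; poetry plus the `lint` and
# `types` groups from pyproject.toml are assumed to be installed.
CHECKS = [
    ["poetry", "run", "ruff", "check", "."],         # lint
    ["poetry", "run", "ruff", "format", "--check"],  # code style
    ["poetry", "run", "mypy", "tesk/"],              # type check
    ["poetry", "run", "typos", "."],                 # spell check
]

def main() -> int:
    # Run every check and report overall failure, like the separate CI jobs.
    failed = False
    for cmd in CHECKS:
        print("->", " ".join(cmd))
        failed |= subprocess.run(cmd).returncode != 0
    return int(failed)

if __name__ == "__main__":
    sys.exit(main())
```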
diff --git a/.github/workflows/code_test_unit.yaml b/.github/workflows/code_test_unit.yaml new file mode 100644 index 00000000..119b452d --- /dev/null +++ b/.github/workflows/code_test_unit.yaml @@ -0,0 +1,42 @@ +name: Code Test Unit + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + unit-test: + name: Unit Test + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up Python + uses: actions/setup-python@v5 + id: ct + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install test dependencies + run: poetry install --only=test --no-interaction + + - name: Run tests and generate coverage as test_unit.xml + run: poetry run pytest --cov-report term --cov-report xml:test_unit.xml --cov=tests/test_unit + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + flags: test_unit + files: ./test_unit.xml + fail_ci_if_error: true + verbose: true diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml deleted file mode 100644 index 6e9cb975..00000000 --- a/.github/workflows/tox.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Python package - -on: - - push - - pull_request - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] - - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install tox tox-gh-actions - - name: Test with tox - run: tox diff --git a/.github/workflows/validation_pr.yaml b/.github/workflows/validation_pr.yaml new file mode 100644 index 00000000..5c958d6b --- /dev/null +++ b/.github/workflows/validation_pr.yaml @@ -0,0 +1,45 @@ +name: PR Evaluation + +on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review, edited] + branches: ['main'] + +jobs: + semantic_pr: + name: Semantic PR title + runs-on: ubuntu-latest + if: ${{ github.event.action != 'edited' || github.event.changes.title != null }} + steps: + - uses: amannn/action-semantic-pull-request@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: | + fix + feat + docs + style + refactor + perf + test + build + ci + chore + revert + subjectPattern: ^(?![A-Z])(?=.{1,50}$).+$ + subjectPatternError: | + The subject "{subject}" found in the pull request title "{title}" + didn't match the configured pattern. Please ensure that the subject + doesn't start with an uppercase character and is no more than 50 characters long.
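The `subjectPattern` above can be sanity-checked locally. A quick sketch using Python's `re`; the sample subjects are illustrative only:

```python
import re

# Pattern copied verbatim from subjectPattern in the workflow above:
# the subject must not start with an uppercase letter and must be
# between 1 and 50 characters long.
SUBJECT_PATTERN = re.compile(r"^(?![A-Z])(?=.{1,50}$).+$")

# Subjects are the part of the PR title after the type prefix ("feat:", ...).
# Prints: True for the first subject, False for the other two.
for subject in ["clean up & refactor majorly", "Clean up", "x" * 51]:
    print(bool(SUBJECT_PATTERN.match(subject)), repr(subject))
```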
+ + detect-unresolved-conflicts: + name: Detect unresolved merge conflicts + runs-on: ubuntu-latest + needs: semantic_pr + steps: + - uses: actions/checkout@v3 + - name: List files with merge conflict markers + run: git --no-pager grep "<<<<<<<" ":(exclude).github/" || true + - name: Fail or succeed job if any files with merge conflict markers have been checked in + run: exit $(git grep "<<<<<<<" ":(exclude).github/" | wc --lines) diff --git a/.github/workflows/vulnerabilities.yaml b/.github/workflows/vulnerabilities.yaml new file mode 100644 index 00000000..91263471 --- /dev/null +++ b/.github/workflows/vulnerabilities.yaml @@ -0,0 +1,56 @@ +name: Vulnerability Test + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + code-vulnerabilities: + name: Code + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up python + id: vt + uses: actions/setup-python@v5 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install vulnerabilities check dependencies + run: poetry install --only=security --no-interaction --no-root + + - name: Check code vulnerabilities with bandit + run: poetry run bandit -c pyproject.toml -r tesk/ + + dependency-vulnerabilities: + name: Dependencies + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v4 + + - name: Install poetry + run: pipx install poetry + + - name: Set up python + id: vt + uses: actions/setup-python@v5 + with: + python-version: '3.11' + cache: 'poetry' + + - name: Install vulnerabilities check dependencies + run: poetry install --only=security --no-interaction --no-root + + - name: Check dependency vulnerabilities with safety + run: poetry run safety check --full-report diff --git a/.safety-policy.yml b/.safety-policy.yml new file mode 100644 index 00000000..e4762b67 --- /dev/null +++ b/.safety-policy.yml @@ -0,0 +1,49 @@ +version: '3.0' + +scanning-settings: + max-depth: 6 + exclude: [] + include-files: [] + system: + targets: [] + +security: + ignore-vulnerabilities: + 65213: + reason: 'Fixing requires downgrading the pyopenssl package to 21.0.0, which breaks all the tests.' + expires: '2025-5-18' + 67599: + reason: 'No fix available.' + expires: '2025-5-18' + 70612: + reason: 'No fix available.'
+ expires: '2025-5-18' + +report: + dependency-vulnerabilities: + enabled: true + auto-ignore-in-report: + python: + environment-results: true + unpinned-requirements: true + cvss-severity: [] + + +fail-scan-with-exit-code: + dependency-vulnerabilities: + enabled: true + fail-on-any-of: + cvss-severity: + - critical + - high + - medium + exploitability: + - critical + - high + - medium + +security-updates: + dependency-vulnerabilities: + auto-security-updates-limit: + - patch + diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index bb3ec5f0..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1 +0,0 @@ -include README.md diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..bd30878a --- /dev/null +++ b/Makefile @@ -0,0 +1,197 @@ +# Define variables +PYTHON_CMD := $(shell command -v python3 2> /dev/null) +BUILDAH_CMD := $(shell command -v buildah 2> /dev/null) +DOCKER_CMD := $(shell command -v docker 2> /dev/null) +POETRY_CMD := $(shell command -v poetry 2> /dev/null) +ELIXIR_CLOUD_REGISTRY := docker.io/elixircloud +DOCKER_FILE_PATH := deployment/containers + +# Define arguments +IMAGE ?= filer +TAG ?= testing + +default: help + +# Help message +.PHONY: help +help: + @echo "Usage: make [target]" + @echo "Available targets:" + @echo " \033[1mvenv \033[37m(v\033[0m)" + @echo " \033[36mCreate virtual environment\033[0m" + @echo " \033[1mclean-venv \033[37m(cv\033[0m)" + @echo " \033[36mRemove virtual environment\033[0m" + @echo " \033[1mclean-dot \033[37m(cd\033[0m)" + @echo " \033[36mRemove dot generated cache dirs\033[0m" + @echo " \033[1minstall \033[37m(i\033[0m)" + @echo " \033[36mInstall dependencies\033[0m" + @echo " \033[1mformat-lint \033[37m(fl\033[0m)" + @echo " \033[36mFormats and lints python files\033[0m" + @echo " \033[1mtest \033[37m(t\033[0m)" + @echo " \033[36mRun tests\033[0m" + @echo " \033[1mbuild-service-image \033[37m(bsi\033[0m)" + @echo " \033[36mBuild image for service (tesk_core)\033[0m" + @echo " \033[36mEg: make bsi IMAGE=filer TAG=1.1.0\033[0m" + @echo " \033[1mbuild-service-image-all \033[37m(bsia\033[0m)" + @echo " \033[36mBuild images for all services\033[0m" + @echo " \033[1mrun-service\033[0m" + @echo " \033[36mRun container for service (tesk_core)\033[0m" + @echo " \033[36mEg: make run-service IMAGE=filer TAG=testing\033[0m" + @echo " \033[1mclean-service-image \033[37m(csi\033[0m)" + @echo " \033[36mClean image for service (tesk_core)\033[0m" + @echo " \033[36mEg: make csi IMAGE=filer TAG=testing\033[0m" + @echo " \033[1mclean-service-image-all \033[37m(csia\033[0m)" + @echo " \033[36mClean images for all services of the given tag\033[0m" + @echo " \033[36mEg: make csia TAG=testing\033[0m" + @echo " \033[1mhelp\033[0m" + @echo " \033[36mDisplay this help message\033[0m" + +.PHONY: venv +venv: + @if [ -x "$(PYTHON_CMD)" ]; then \ + $(PYTHON_CMD) -m venv .venv; \ + echo "🙏 Virtual environment created. To activate, run:"; \ + echo "source .venv/bin/activate"; \ + else \ + echo "🐍 Please install 'python3' to create a virtual environment."; \ + exit 1; \ + fi + +.PHONY: v +v: venv + +.PHONY: clean-venv +clean-venv: + rm -rf .venv + +.PHONY: cv +cv: clean-venv + +.PHONY: clean-dot +clean-dot: + rm -rf .venv .mypy_cache .pytest_cache .coverage .ruff .ruff_cache .eggs __pycache__/ + find . -type d -name "*.egg-info" -exec rm -rf {} + + find .
-type d -name "__pycache__" -exec rm -rf {} + + +.PHONY: cd +cd: clean-dot + +.PHONY: install +install: + @if [ -x "$(POETRY_CMD)" ]; then \ + poetry install; \ + else \ + echo "🔏 Consider installing poetry to leverage poetry.lock."; \ + if [ -f .venv/bin/pip ]; then \ + .venv/bin/pip install .; \ + else \ + echo "⬇️ Install dependencies, create virtual environment using 'make v'."; \ + fi; \ + fi + +.PHONY: i +i: install + +.PHONY: wheel +wheel: + @if [ -x "$(POETRY_CMD)" ]; then \ + poetry build -f wheel; \ + else \ + echo "🔏 Poetry needed to build wheel."; \ + fi + +.PHONY: w +w: wheel + +.PHONY: format-lint +format-lint: + @if [ -f .venv/bin/ruff ]; then \ + ruff format; \ + ruff check; \ + else \ + echo "⬇️ Install deps, create venv using 'make v' and install using 'make i'."; \ + fi + +.PHONY: fl +fl: format-lint + +.PHONY: build-service-image +build-service-image: + @if [ -x "$(BUILDAH_CMD)" ]; then \ + $(BUILDAH_CMD) bud \ + -t $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) \ + --format=oci \ + --no-cache \ + -f $(DOCKER_FILE_PATH)/$(IMAGE).Dockerfile; \ + elif [ -x "$(DOCKER_CMD)" ]; then \ + $(DOCKER_CMD) build \ + -t $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) \ + -f $(DOCKER_FILE_PATH)/$(IMAGE).Dockerfile .; \ + else \ + echo "🐳 Please install buildah or docker to build images."; \ + exit 1; \ + fi + +.PHONY: bsi +bsi: build-service-image + +.PHONY: build-service-image-all +build-service-image-all: + @make build-service-image IMAGE=filer TAG=$(TAG) + @make build-service-image IMAGE=taskmaster TAG=$(TAG) + +.PHONY: bsia +bsia: build-service-image-all + +.PHONY: run-service +run-service: + @if [ -x "$(DOCKER_CMD)" ]; then \ + $(DOCKER_CMD) run \ + -it --rm $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG); \ + else \ + echo "🐳 Please install docker to run images."; \ + exit 1; \ + fi + +.PHONY: clean-service-image +clean-service-image: + @if [ -x "$(BUILDAH_CMD)" ]; then \ + if $(BUILDAH_CMD) inspect $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) > /dev/null 2>&1; then \ + $(BUILDAH_CMD) rmi $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG); \ + else \ + echo "🔍 Image $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) not found."; \ + fi; \ + elif [ -x "$(DOCKER_CMD)" ]; then \ + if $(DOCKER_CMD) inspect $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) > /dev/null 2>&1; then \ + $(DOCKER_CMD) rmi $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG); \ + else \ + echo "🔍 Image $(ELIXIR_CLOUD_REGISTRY)/tesk-core-$(IMAGE):$(TAG) not found."; \ + fi; \ + else \ + echo "🐳 Please install buildah or docker to clean images."; \ + exit 1; \ + fi + +.PHONY: csi +csi: clean-service-image + +.PHONY: clean-service-image-all +clean-service-image-all: + @make clean-service-image IMAGE=filer TAG=$(TAG) + @make clean-service-image IMAGE=taskmaster TAG=$(TAG) + +.PHONY: csia +csia: clean-service-image-all + +.PHONY: test +test: + @if [ -x "$(POETRY_CMD)" ]; then \ + $(POETRY_CMD) run pytest tests; \ + elif [ -f .venv/bin/pytest ]; then \ + .venv/bin/pytest tests; \ + else \ + echo "⬇️ Install dependencies, create virtual environment using 'make v', and install using 'make i'."; \ + fi + +.PHONY: t +t: test diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..df559d29 --- /dev/null +++ b/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,26 @@ +## Description + +Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
+ +Fixes # (issue) + +## Type of change + +Please delete options that are not relevant. + +- [ ] Bug fix (non-breaking change which fixes an issue) +- [ ] New feature (non-breaking change which adds functionality) +- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) +- [ ] Documentation update + +## Checklist: + +- [ ] My code follows the [style guidelines](https://github.com/elixir-cloud-aai/elixir-cloud-aai/blob/dev/resources/contributing_guidelines.md#language-specific-guidelines) of this project +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] My changes generate no new warnings +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] I have not reduced the existing code coverage +- [ ] I have added docstrings following the [Python style guidelines](https://github.com/elixir-cloud-aai/elixir-cloud-aai/blob/dev/resources/python.md) of this project to all new modules, classes, methods and functions; I have updated any previously existing docstrings, if applicable +- [ ] I have updated any sections of the app's documentation that are affected by the proposed changes, if applicable \ No newline at end of file diff --git a/README.md b/README.md index ff0d377c..9ffaa886 100644 --- a/README.md +++ b/README.md @@ -1,33 +1,41 @@ - +[![codecov](https://codecov.io/gh/elixir-cloud-aai/TESK/branch/main/graph/badge.svg)](https://codecov.io/gh/elixir-cloud-aai/TESK) +[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](./LICENSE) +[![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-311/) +[![Development Status](https://img.shields.io/badge/status-beta-yellow.svg)](https://github.com/elixir-cloud-aai/TESK) +[![GitHub contributors](https://img.shields.io/github/contributors/elixir-cloud-aai/TESK)](https://github.com/elixir-cloud-aai/TESK/graphs/contributors) +[![Bandit](https://img.shields.io/badge/security-bandit-yellow.svg)](https://bandit.readthedocs.io/en/latest/) +[![Safety](https://img.shields.io/badge/security-safety-orange.svg)](https://safetycli.com/product/safety-cli) +[![Ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://docs.astral.sh/ruff/) + -An implementation of a task execution engine based on the [TES standard](https://github.com/ga4gh/task-execution-schemas) running on `Kubernetes`. For more details on `TES`, see the (very) brief [introduction to TES](documentation/tesintro.md). +An implementation of a task execution engine based on the [TES standard](https://github.com/ga4gh/task-execution-schemas) running on `Kubernetes`. For more details on `TES`, see the (very) brief [introduction to TES](docs/tesintro.md). For organisational reasons, this project is split into 3 repositories: + - This one, which contains documentation and deployment files - [tesk-api](https://github.com/elixir-cloud-aai/tesk-api): Contains the service that implements the TES API and translates tasks into kubernetes batch calls -- [tesk-core](https://github.com/elixir-cloud-aai/tesk-core): Contains the code that is launched as images into the kubernetes cluster by tesk-api. +- [tesk-core](https://github.com/elixir-cloud-aai/tesk-core): Contains the code that is launched as images into the kubernetes cluster by tesk-api.
If the API is running on your cluster it will pull the images from our `docker.io` repository automatically. -`TESK` is designed with the goal to support any `Kubernetes` cluster, for its deployment please refer to the [deployment](documentation/deployment.md) page. - -The technical documentation is available in the [documentation](documentation) folder. +`TESK` is designed with the goal of supporting any `Kubernetes` cluster; for its deployment, please refer to the [deployment](deployment/documentation/deployment.md) page. +The technical documentation is available in the [documentation](deployment/documentation) folder. ## Architecture -As a diagram: -![TESK architecture](documentation/img/architecture.png) + -**Description**: The first pod in the task lifecycle is the API pod, a pod which runs a web server (`Tomcat`) and exposes the `TES` specified endpoints. It consumes `TES` requests, validates them and translates them to `Kubernetes` jobs. The API pod then creates a `task controller` pod, or `taskmaster`. +**Description**: The first pod in the task lifecycle is the API pod, a pod which runs a web server (`Tomcat`) and exposes the `TES` specified endpoints. It consumes `TES` requests, validates them and translates them to `Kubernetes` jobs. The API pod then creates a `task controller` pod, or `taskmaster`. The `taskmaster` consumes the executor jobs, inputs and outputs. It first creates the `filer` pod, which creates a persistent volume claim (PVC) to mount as scratch space. All mounts are initialized and all files are downloaded into the locations specified in the TES request; the populated PVC can then be used by each executor pod one after the other. After the `filer` has finished, the taskmaster goes through the executors and executes them as pods one by one. **Note**: Each TES task has a separate taskmaster, PVC and executor pods belonging to it; the only 'singleton' pod across tasks is the API pod. After the last executor, the `filer` is called once more to process the outputs and push them to remote locations from the PVC. The PVC is then scrubbed and deleted, and the taskmaster ends, completing the task. ## Requirements -- A working [Kubernetes](https://kubernetes.io/) cluster version 1.9 and later. -If you want TESK to handle tasks with I/O (and you probably want), you additionally need: -A default storage class, which TESK will use to create temporary PVCs. It is enough that the storage class supports the RWO mode. -And, if you want TESK to integrate with workflow managers, you additionally need either an FTP account or a PVC that can be accessed from within or from outside of the cluster by the workflow manager (more in the [deployment](documentation/deployment.md) page). + +- A working [Kubernetes](https://kubernetes.io/) cluster, version 1.9 or later. +- If you want TESK to handle tasks with I/O (and you probably do), you additionally need: +- A default storage class, which TESK will use to create temporary PVCs. It is enough that the storage class supports the RWO mode. +- And, if you want TESK to integrate with workflow managers, you additionally need either an FTP account or a PVC that can be accessed from within or from outside of the cluster by the workflow manager (more in the [deployment](deployment/documentation/deployment.md) page).
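To make the task lifecycle above concrete, here is a minimal sketch of submitting a TES task to a running TESK API with `requests`; the URL is a placeholder for wherever your installation is exposed, and only fields from the TES standard are used:

```python
import requests  # assumes the `requests` package is available

# Placeholder endpoint; substitute the address of your TESK installation.
TASKS_URL = "https://tes.example.org/v1/tasks"

# A minimal TES task: one executor, no inputs/outputs, so no filer transfers.
task = {
    "name": "hello-world",
    "executors": [
        {"image": "alpine:3.19", "command": ["echo", "hello TESK"]},
    ],
}

resp = requests.post(TASKS_URL, json=task, timeout=30)
resp.raise_for_status()
task_id = resp.json()["id"]

# The API pod spawns a taskmaster for this task; poll its state.
state = requests.get(f"{TASKS_URL}/{task_id}", timeout=30).json()["state"]
print(task_id, state)
```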
diff --git a/cloudbuild.yaml b/cloudbuild.yaml deleted file mode 100644 index c78069d5..00000000 --- a/cloudbuild.yaml +++ /dev/null @@ -1,6 +0,0 @@ -steps: -- name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'eu.gcr.io/tes-wes/taskmaster:$TAG_NAME', '-f', 'containers/taskmaster.Dockerfile', '.'] -- name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'eu.gcr.io/tes-wes/filer:$TAG_NAME', '-f', 'containers/filer.Dockerfile', '.'] -images: ['eu.gcr.io/tes-wes/taskmaster:$TAG_NAME', 'eu.gcr.io/tes-wes/filer:$TAG_NAME'] diff --git a/cloudbuild_testing.yaml b/cloudbuild_testing.yaml deleted file mode 100644 index fc85869a..00000000 --- a/cloudbuild_testing.yaml +++ /dev/null @@ -1,6 +0,0 @@ -steps: -- name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'eu.gcr.io/tes-wes/taskmaster:testing', '-f', 'containers/taskmaster.Dockerfile', '.'] -- name: 'gcr.io/cloud-builders/docker' - args: ['build', '-t', 'eu.gcr.io/tes-wes/filer:testing', '-f', 'containers/filer.Dockerfile', '.'] -images: ['eu.gcr.io/tes-wes/taskmaster:testing', 'eu.gcr.io/tes-wes/filer:testing'] diff --git a/containers/filer.Dockerfile b/containers/filer.Dockerfile deleted file mode 100644 index c9685121..00000000 --- a/containers/filer.Dockerfile +++ /dev/null @@ -1,25 +0,0 @@ -# Builder: produce wheels - -FROM alpine:3.10 as builder - -RUN apk add --no-cache python3 -RUN apk add --no-cache git -RUN python3 -m pip install --upgrade setuptools pip wheel - -WORKDIR /app/ -COPY . . - -RUN python3 setup.py bdist_wheel - -# Install: copy tesk-core*.whl and install it with dependencies - -FROM alpine:3.10 - -RUN apk add --no-cache python3 - -COPY --from=builder /app/dist/tesk*.whl /root/ -RUN python3 -m pip install --disable-pip-version-check --no-cache-dir /root/tesk*.whl - -USER 100 - -ENTRYPOINT ["filer"] diff --git a/containers/taskmaster.Dockerfile b/containers/taskmaster.Dockerfile deleted file mode 100644 index 5e5418d3..00000000 --- a/containers/taskmaster.Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -# Builder: produce wheels - -FROM alpine:3.10 as builder - -RUN apk add --no-cache python3 -RUN apk add --no-cache git -RUN python3 -m pip install --upgrade setuptools pip wheel - -WORKDIR /app/ -COPY . . - -RUN python3 setup.py bdist_wheel - -# Install: copy tesk-core*.whl and install it with dependencies - -FROM alpine:3.10 - -RUN apk add --no-cache python3 - -COPY --from=builder /app/dist/tesk*.whl /root/ -RUN python3 -m pip install --disable-pip-version-check --no-cache-dir /root/tesk*.whl - -RUN adduser --uid 100 -S taskmaster -USER 100 - -ENTRYPOINT ["taskmaster"] diff --git a/charts/tesk/.gitignore b/deployment/charts/tesk/.gitignore similarity index 100% rename from charts/tesk/.gitignore rename to deployment/charts/tesk/.gitignore diff --git a/charts/tesk/.helmignore b/deployment/charts/tesk/.helmignore similarity index 100% rename from charts/tesk/.helmignore rename to deployment/charts/tesk/.helmignore diff --git a/charts/tesk/Chart.yaml b/deployment/charts/tesk/Chart.yaml similarity index 100% rename from charts/tesk/Chart.yaml rename to deployment/charts/tesk/Chart.yaml diff --git a/charts/tesk/README.md b/deployment/charts/tesk/README.md similarity index 95% rename from charts/tesk/README.md rename to deployment/charts/tesk/README.md index 8f7af28e..1b83561f 100644 --- a/charts/tesk/README.md +++ b/deployment/charts/tesk/README.md @@ -82,5 +82,5 @@ See [`values.yaml`](values.yaml) for default values. 
| ingress.rules| boolean | Apply or not the ingress rule | | ingress.ingressClassName | string | Name of the Ingress Class | | ingress.path | string | | -| ingress.tls_secret_name | string | If no TLS secret name configured, TLS will be switched off. A template can be found at [deployment/tls_secret_name.yml-TEMPLATE](deployment/tls_secret_name.yml-TEMPLATE). If you are using cert-manager the secret will be created automatically.| +| ingress.tls_secret_name | string | If no TLS secret name configured, TLS will be switched off. A template can be found at [tls_secret_name.yml-TEMPLATE](tls_secret_name.yml-TEMPLATE). If you are using cert-manager the secret will be created automatically.| | ingress.annotations | string | Annotations for the ingress rules | diff --git a/charts/tesk/ftp/.netrc-TEMPLATE b/deployment/charts/tesk/ftp/.netrc-TEMPLATE similarity index 100% rename from charts/tesk/ftp/.netrc-TEMPLATE rename to deployment/charts/tesk/ftp/.netrc-TEMPLATE diff --git a/charts/tesk/s3-config/config-TEMPLATE b/deployment/charts/tesk/s3-config/config-TEMPLATE similarity index 100% rename from charts/tesk/s3-config/config-TEMPLATE rename to deployment/charts/tesk/s3-config/config-TEMPLATE diff --git a/charts/tesk/s3-config/credentials-TEMPLATE b/deployment/charts/tesk/s3-config/credentials-TEMPLATE similarity index 100% rename from charts/tesk/s3-config/credentials-TEMPLATE rename to deployment/charts/tesk/s3-config/credentials-TEMPLATE diff --git a/charts/tesk/service-info/service-info.yaml b/deployment/charts/tesk/service-info/service-info.yaml similarity index 100% rename from charts/tesk/service-info/service-info.yaml rename to deployment/charts/tesk/service-info/service-info.yaml diff --git a/charts/tesk/templates/common/oauth-client-secret.yaml b/deployment/charts/tesk/templates/common/oauth-client-secret.yaml similarity index 100% rename from charts/tesk/templates/common/oauth-client-secret.yaml rename to deployment/charts/tesk/templates/common/oauth-client-secret.yaml diff --git a/charts/tesk/templates/common/service-info-cm.yaml b/deployment/charts/tesk/templates/common/service-info-cm.yaml similarity index 100% rename from charts/tesk/templates/common/service-info-cm.yaml rename to deployment/charts/tesk/templates/common/service-info-cm.yaml diff --git a/charts/tesk/templates/common/taskmaster-rbac.yaml b/deployment/charts/tesk/templates/common/taskmaster-rbac.yaml similarity index 100% rename from charts/tesk/templates/common/taskmaster-rbac.yaml rename to deployment/charts/tesk/templates/common/taskmaster-rbac.yaml diff --git a/charts/tesk/templates/common/tesk-deployment.yaml b/deployment/charts/tesk/templates/common/tesk-deployment.yaml similarity index 100% rename from charts/tesk/templates/common/tesk-deployment.yaml rename to deployment/charts/tesk/templates/common/tesk-deployment.yaml diff --git a/charts/tesk/templates/common/tesk-svc.yaml b/deployment/charts/tesk/templates/common/tesk-svc.yaml similarity index 100% rename from charts/tesk/templates/common/tesk-svc.yaml rename to deployment/charts/tesk/templates/common/tesk-svc.yaml diff --git a/charts/tesk/templates/ftp/ftp-endpoint.yaml b/deployment/charts/tesk/templates/ftp/ftp-endpoint.yaml similarity index 100% rename from charts/tesk/templates/ftp/ftp-endpoint.yaml rename to deployment/charts/tesk/templates/ftp/ftp-endpoint.yaml diff --git a/charts/tesk/templates/ftp/ftp-secret.yaml b/deployment/charts/tesk/templates/ftp/ftp-secret.yaml similarity index 100% rename from charts/tesk/templates/ftp/ftp-secret.yaml rename 
to deployment/charts/tesk/templates/ftp/ftp-secret.yaml diff --git a/charts/tesk/templates/ftp/ftp-service.yaml b/deployment/charts/tesk/templates/ftp/ftp-service.yaml similarity index 100% rename from charts/tesk/templates/ftp/ftp-service.yaml rename to deployment/charts/tesk/templates/ftp/ftp-service.yaml diff --git a/charts/tesk/templates/ftp/netrc-secret.yaml b/deployment/charts/tesk/templates/ftp/netrc-secret.yaml similarity index 100% rename from charts/tesk/templates/ftp/netrc-secret.yaml rename to deployment/charts/tesk/templates/ftp/netrc-secret.yaml diff --git a/charts/tesk/templates/ingress/ingress-rules.yaml b/deployment/charts/tesk/templates/ingress/ingress-rules.yaml similarity index 100% rename from charts/tesk/templates/ingress/ingress-rules.yaml rename to deployment/charts/tesk/templates/ingress/ingress-rules.yaml diff --git a/charts/tesk/templates/openshift/oc-route.yaml b/deployment/charts/tesk/templates/openshift/oc-route.yaml similarity index 100% rename from charts/tesk/templates/openshift/oc-route.yaml rename to deployment/charts/tesk/templates/openshift/oc-route.yaml diff --git a/charts/tesk/templates/storage/aws-secret.yaml b/deployment/charts/tesk/templates/storage/aws-secret.yaml similarity index 100% rename from charts/tesk/templates/storage/aws-secret.yaml rename to deployment/charts/tesk/templates/storage/aws-secret.yaml diff --git a/charts/tesk/templates/storage/openstack.yaml b/deployment/charts/tesk/templates/storage/openstack.yaml similarity index 100% rename from charts/tesk/templates/storage/openstack.yaml rename to deployment/charts/tesk/templates/storage/openstack.yaml diff --git a/charts/tesk/tls_secret_name.yml-TEMPLATE b/deployment/charts/tesk/tls_secret_name.yml-TEMPLATE similarity index 100% rename from charts/tesk/tls_secret_name.yml-TEMPLATE rename to deployment/charts/tesk/tls_secret_name.yml-TEMPLATE diff --git a/charts/tesk/values.yaml b/deployment/charts/tesk/values.yaml similarity index 97% rename from charts/tesk/values.yaml rename to deployment/charts/tesk/values.yaml index fe72083b..9da933fd 100644 --- a/charts/tesk/values.yaml +++ b/deployment/charts/tesk/values.yaml @@ -74,7 +74,7 @@ ftp: #netrc_secret: netrc-secret # If you install FTP locally, but outside of k8s and need a DNS entry for it (because your workflow manager might not like the IP address) # one way of getting a DNS entry for your FTP service is to use a k8s "service without a selector" - # Put the IP under which your pods see see services running on your host (differs depending on the way you installes K8s) + # Put the IP under which your pods see services running on your host (differs depending on the way you installed K8s) # For virtualBox, it is 192.168.99.1 and your ftp service will be visible under ftp name # You will be able to use it like this: ftp://ftp/file hostip: diff --git a/deployment/containers/filer.Dockerfile b/deployment/containers/filer.Dockerfile new file mode 100644 index 00000000..a70e6ee2 --- /dev/null +++ b/deployment/containers/filer.Dockerfile @@ -0,0 +1,39 @@ +################################################### +# Stage 1: Build wheel # +################################################### +FROM python:3.11-alpine AS builder + +# Set work directory +WORKDIR /app + +# Install poetry +RUN pip install poetry + +# Copy source code +COPY . .
+ +# Build wheel +RUN poetry build -f wheel + +################################################### +# Stage 2: Install wheel and create user # +################################################### +FROM python:3.11-alpine AS runner + +# Copy built wheel from the builder stage +COPY --from=builder /app/dist/*.whl /dist/ + +# Install the application with dependencies +RUN pip install /dist/*.whl + +# Create a non-root user +RUN adduser -D -u 1000 filerUser + +# Switch to the non-root user +USER filerUser + +# Set the working directory +WORKDIR /app + +# Entrypoint command +ENTRYPOINT ["filer"] diff --git a/deployment/containers/taskmaster.Dockerfile b/deployment/containers/taskmaster.Dockerfile new file mode 100644 index 00000000..fb06408b --- /dev/null +++ b/deployment/containers/taskmaster.Dockerfile @@ -0,0 +1,39 @@ +################################################### +# Stage 1: Build wheel # +################################################### +FROM python:3.11-alpine AS builder + +# Set work directory +WORKDIR /app + +# Install poetry +RUN pip install poetry + +# Copy source code +COPY . . + +# Build wheel +RUN poetry build -f wheel + +################################################### +# Stage 2: Install wheel and create user # +################################################### +FROM python:3.11-alpine AS runner + +# Copy built wheel from the builder stage +COPY --from=builder /app/dist/*.whl /dist/ + +# Install the application with dependencies +RUN pip install /dist/*.whl + +# Create a non-root user +RUN adduser -D -u 1000 taskmasterUser + +# Switch to the non-root user +USER taskmasterUser + +# Set the working directory +WORKDIR /app + +# Entrypoint command +ENTRYPOINT ["taskmaster"] diff --git a/documentation/deployment.md b/deployment/documentation/deployment.md similarity index 98% rename from documentation/deployment.md rename to deployment/documentation/deployment.md index 91885ac5..bb99f878 100644 --- a/documentation/deployment.md +++ b/deployment/documentation/deployment.md @@ -10,7 +10,7 @@ ## Installing TESK ### Helm TESK can be installed using Helm 3 (tested with v3.0.0) using [this chart](../charts/tesk). It is best to create a dedicated namespace for TESK, although for test or development clusters it is fine to use the `default` namespace. -The documentation of the chart gives a desciption of all configuration options and below the most common installation scenarios have been described. +The documentation of the chart gives a description of all configuration options and below the most common installation scenarios have been described. TESK installation consists of a single API installed as a K8s deployment and exposed as a K8s service. Additionally, TESK API requires access to the K8s API in order to create K8s Jobs and PVCs. That is why the installation additionally creates objects such as service accounts, roles and role bindings. The chart does not provide a way to install the default storage class and that needs to be done independently by the cluster administrator. @@ -90,7 +90,7 @@ https://tes.ebi.ac.uk/v1/tasks #### Shared file system TESK can exchange Inputs and Outputs with the external world using the local/shared storage. You need to create a PVC that will be reachable for your workflow manager and for TESK at the same time. 
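As an illustrative sketch only (not part of this PR), such a ReadWriteMany claim could be created with the kubernetes Python client; the claim name, namespace and size below are placeholders:

```python
from kubernetes import client, config  # assumes the `kubernetes` package

config.load_kube_config()  # use load_incluster_config() inside the cluster

# Placeholder metadata; the chosen storage class must support RWX access.
pvc = client.V1PersistentVolumeClaim(
    metadata=client.V1ObjectMeta(name="tesk-shared"),
    spec=client.V1PersistentVolumeClaimSpec(
        access_modes=["ReadWriteMany"],
        resources=client.V1ResourceRequirements(requests={"storage": "10Gi"}),
    ),
)
client.CoreV1Api().create_namespaced_persistent_volume_claim(
    namespace="tesk", body=pvc
)
```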
If the workflow manager (or anything else that produces paths to your inputs and outputs) is installed inside the same K8s cluster, you may use a PVC of a storage class providing RWX access and mount it to the pod where the workflow manager is installed in the directory where the manager will be creating/orchestrating inputs/outputs. Depending on the workflow manager, it may be a working directory of your workflow manager process. -If the workflow manager is installed outside of the cluster, you may be able to use a volume mounting storage visible outside of the cluster (hostPath, NFS, etc) and a PVC bound to that volume. We used Minikube with the hostPath type of storage in this secenario successfuly. +If the workflow manager is installed outside of the cluster, you may be able to use a volume mounting storage visible outside of the cluster (hostPath, NFS, etc) and a PVC bound to that volume. We used Minikube with the hostPath type of storage in this scenario successfully. Creating the shared PVC is not handled by the TESK Helm chart. Finally, you have to set up the following values in the chart: ```yaml diff --git a/documentation/integrated_wes_tes.md b/deployment/documentation/integrated_wes_tes.md similarity index 100% rename from documentation/integrated_wes_tes.md rename to deployment/documentation/integrated_wes_tes.md diff --git a/documentation/local_ftp.md b/deployment/documentation/local_ftp.md similarity index 100% rename from documentation/local_ftp.md rename to deployment/documentation/local_ftp.md diff --git a/dockerBuild b/dockerBuild deleted file mode 120000 index 32bbcf34..00000000 --- a/dockerBuild +++ /dev/null @@ -1 +0,0 @@ -scripts//dockerBuild \ No newline at end of file diff --git a/dockerRun b/dockerRun deleted file mode 120000 index a087702b..00000000 --- a/dockerRun +++ /dev/null @@ -1 +0,0 @@ -scripts//dockerRun \ No newline at end of file diff --git a/examples/inputFile.json b/docs/examples/inputFile.json similarity index 100% rename from examples/inputFile.json rename to docs/examples/inputFile.json diff --git a/examples/inputHelloWorld.json b/docs/examples/inputHelloWorld.json similarity index 100% rename from examples/inputHelloWorld.json rename to docs/examples/inputHelloWorld.json diff --git a/examples/inputHttp.json b/docs/examples/inputHttp.json similarity index 100% rename from examples/inputHttp.json rename to docs/examples/inputHttp.json diff --git a/examples/localftp/taskWithIO.json b/docs/examples/localftp/taskWithIO.json similarity index 100% rename from examples/localftp/taskWithIO.json rename to docs/examples/localftp/taskWithIO.json diff --git a/examples/resources/cpu.json b/docs/examples/resources/cpu.json similarity index 100% rename from examples/resources/cpu.json rename to docs/examples/resources/cpu.json diff --git a/examples/resources/more_cpu_than_nodes.json b/docs/examples/resources/more_cpu_than_nodes.json similarity index 100% rename from examples/resources/more_cpu_than_nodes.json rename to docs/examples/resources/more_cpu_than_nodes.json diff --git a/examples/taskCreate b/docs/examples/taskCreate similarity index 100% rename from examples/taskCreate rename to docs/examples/taskCreate diff --git a/examples/taskList b/docs/examples/taskList similarity index 100% rename from examples/taskList rename to docs/examples/taskList diff --git a/examples/transferPvc/Readme.md b/docs/examples/transferPvc/Readme.md similarity index 100% rename from examples/transferPvc/Readme.md rename to docs/examples/transferPvc/Readme.md diff --git
a/examples/transferPvc/clean b/docs/examples/transferPvc/clean similarity index 100% rename from examples/transferPvc/clean rename to docs/examples/transferPvc/clean diff --git a/examples/transferPvc/minikubeStart b/docs/examples/transferPvc/minikubeStart similarity index 100% rename from examples/transferPvc/minikubeStart rename to docs/examples/transferPvc/minikubeStart diff --git a/examples/transferPvc/pod.yaml b/docs/examples/transferPvc/pod.yaml similarity index 100% rename from examples/transferPvc/pod.yaml rename to docs/examples/transferPvc/pod.yaml diff --git a/examples/transferPvc/pv.yaml b/docs/examples/transferPvc/pv.yaml similarity index 100% rename from examples/transferPvc/pv.yaml rename to docs/examples/transferPvc/pv.yaml diff --git a/examples/transferPvc/pvc.yaml b/docs/examples/transferPvc/pvc.yaml similarity index 100% rename from examples/transferPvc/pvc.yaml rename to docs/examples/transferPvc/pvc.yaml diff --git a/doc/taskmaster_architecture.png b/docs/taskmaster_architecture.png similarity index 100% rename from doc/taskmaster_architecture.png rename to docs/taskmaster_architecture.png diff --git a/documentation/tesintro.md b/docs/tesintro.md similarity index 100% rename from documentation/tesintro.md rename to docs/tesintro.md diff --git a/documentation/img/TESKlogo.png b/images/TESKlogo.png similarity index 100% rename from documentation/img/TESKlogo.png rename to images/TESKlogo.png diff --git a/documentation/img/TESKlogowfont.png b/images/TESKlogowfont.png similarity index 100% rename from documentation/img/TESKlogowfont.png rename to images/TESKlogowfont.png diff --git a/documentation/img/architecture.png b/images/architecture.png similarity index 100% rename from documentation/img/architecture.png rename to images/architecture.png diff --git a/documentation/img/project-architecture.png b/images/project-architecture.png similarity index 100% rename from documentation/img/project-architecture.png rename to images/project-architecture.png diff --git a/init b/init deleted file mode 100755 index 96c61f2d..00000000 --- a/init +++ /dev/null @@ -1,8 +0,0 @@ - -# Creating virtualenv -virtualenv --clear -p python2.7 .venv - -# Activating virtualenv -. .venv/bin/activate - -./install diff --git a/install b/install deleted file mode 100755 index eefa8240..00000000 --- a/install +++ /dev/null @@ -1,4 +0,0 @@ - - -pip install . - diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..073e2f60 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,8 @@ +# Global options: + +[mypy] +warn_return_any = True +warn_unused_configs = True + +[mypy-urlparse] +ignore_missing_imports = True diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..abf94639 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,2119 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = "*" +files = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] + +[[package]] +name = "authlib" +version = "1.3.0" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Authlib-1.3.0-py2.py3-none-any.whl", hash = "sha256:9637e4de1fb498310a56900b3e2043a206b03cb11c05422014b0302cbc814be3"}, + {file = "Authlib-1.3.0.tar.gz", hash = "sha256:959ea62a5b7b5123c5059758296122b57cd2585ae2ed1c0622c21b371ffdae06"}, +] + +[package.dependencies] +cryptography = "*" + +[[package]] +name = "bandit" +version = "1.7.8" +description = "Security oriented static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, + {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + +[[package]] +name = "boto3" +version = "1.34.104" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.34.104-py3-none-any.whl", hash = "sha256:bec91a3bca63320e5f68a25b5eaa7bab65e35bb9253a544875c2e03679f1d5fb"}, + {file = "boto3-1.34.104.tar.gz", hash = "sha256:5b37c8f4ea6f408147994a6e230c49ca755da57f5964ccea8b8fd4ff5f11759e"}, +] + +[package.dependencies] +botocore = ">=1.34.104,<1.35.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "boto3-stubs" +version = "1.34.108" +description = "Type annotations for boto3 1.34.108 generated with mypy-boto3-builder 7.24.0" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3_stubs-1.34.108-py3-none-any.whl", hash = "sha256:105bde543b499662d30e51b7d32b2dce6b61e21effd98303810eb89532813292"}, + {file = "boto3_stubs-1.34.108.tar.gz", hash = "sha256:e13e55b181eec0ec190f4aa91e0c4ab99eca4c1c4174e16aedc54bbb280e6179"}, +] + +[package.dependencies] +botocore-stubs = "*" +types-s3transfer = "*" +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[package.extras] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] +account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] +acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", "mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify 
(>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", "mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-artifact (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup (>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-backupstorage (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chatbot (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codeconnections (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", "mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend 
(>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", "mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-deadline (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect (>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", "mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis (>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-honeycode (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", 
"mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", "mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra (>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb (>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace 
(>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata (>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", "mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", "mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53profiles (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", 
"mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", "mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-supplychain (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 (>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] +amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] +amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] +appconfigdata = ["mypy-boto3-appconfigdata 
(>=1.34.0,<1.35.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] +appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.34.0,<1.35.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] +appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] +appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] +artifact = ["mypy-boto3-artifact (>=1.34.0,<1.35.0)"] +athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] +backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.34.0,<1.35.0)"] +batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] +boto3 = ["boto3 (==1.34.108)", "botocore (==1.34.108)"] +braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] +budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] +ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] +chatbot = ["mypy-boto3-chatbot (>=1.34.0,<1.35.0)"] +chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] 
+codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] +codeconnections = ["mypy-boto3-codeconnections (>=1.34.0,<1.35.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.34.0,<1.35.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] +codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.34.0,<1.35.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] +config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] +connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] +controlcatalog = ["mypy-boto3-controlcatalog (>=1.34.0,<1.35.0)"] +controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] +cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] +databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] +datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] +datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] +dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] +deadline = ["mypy-boto3-deadline (>=1.34.0,<1.35.0)"] +detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] +discovery = ["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] +dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] +dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] +docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] +drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] +ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] +ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] +ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] +ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] +efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] +eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] +elasticbeanstalk = 
["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] +elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] +emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.34.0,<1.35.0)"] +es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] +essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] +evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] +finspace = ["mypy-boto3-finspace (>=1.34.0,<1.35.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] +firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] +fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] +fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] +forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] +freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] +fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] +glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] +glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] +grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] +health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.34.0,<1.35.0)"] +iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] +importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] +inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] +iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.34.0,<1.35.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] +iottwinmaker = 
["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] +ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] +kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] +kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] +kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] +lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] +location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] +logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] +m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] +medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] 
+mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] +mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] +mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] +mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] +mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] +mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] +neptune = ["mypy-boto3-neptune (>=1.34.0,<1.35.0)"] +neptune-graph = ["mypy-boto3-neptune-graph (>=1.34.0,<1.35.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] +networkmonitor = ["mypy-boto3-networkmonitor (>=1.34.0,<1.35.0)"] +nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] +oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] +omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] +organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] +osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] +outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] +panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] +personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] +pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] +pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] +polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] +pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] +proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] +qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] +ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] +rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] +rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] +redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] +redshift-serverless = 
["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.34.0,<1.35.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] +route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] +route53profiles = ["mypy-boto3-route53profiles (>=1.34.0,<1.35.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] +rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] +s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] +s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] +schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] +sdb = ["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] +ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] +shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] +signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] +sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] +snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] +sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] +sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] +sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] +sso-admin = ["mypy-boto3-sso-admin 
(>=1.34.0,<1.35.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] +sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] +supplychain = ["mypy-boto3-supplychain (>=1.34.0,<1.35.0)"] +support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] +support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] +swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] +textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] +timestream-influxdb = ["mypy-boto3-timestream-influxdb (>=1.34.0,<1.35.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.34.0,<1.35.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.34.0,<1.35.0)"] +tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] +transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] +translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] +waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] +worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] +workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] +xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] + +[[package]] +name = "botocore" +version = "1.34.108" +description = "Low-level, data-driven core of boto 3." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.34.108-py3-none-any.whl", hash = "sha256:b1b9d00804267669c5fcc36489269f7e9c43580c30f0885fbf669cf73cec720b"}, + {file = "botocore-1.34.108.tar.gz", hash = "sha256:384c9408c447631475dc41fdc9bf2e0f30c29c420d96bfe8b468bdc2bace3e13"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.20.9)"] + +[[package]] +name = "botocore-stubs" +version = "1.34.94" +description = "Type annotations and code completion for botocore" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "botocore_stubs-1.34.94-py3-none-any.whl", hash = "sha256:b0345f55babd8b901c53804fc5c326a4a0bd2e23e3b71f9ea5d9f7663466e6ba"}, + {file = "botocore_stubs-1.34.94.tar.gz", hash = "sha256:64d80a3467e3b19939e9c2750af33328b3087f8f524998dbdf7ed168227f507d"}, +] + +[package.dependencies] +types-awscrt = "*" + +[package.extras] +botocore = ["botocore"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.5.1" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, + {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, + {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, + {file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, + {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, + {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, + {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = 
"sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, + {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, + {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, + {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, + {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, + {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, + {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, + {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, + {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, + {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, + {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, + {file = 
"coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, + {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, + {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, + {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, + {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, + {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, + {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, + {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, + {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, + {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = 
"sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, + {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dparse" +version = "0.6.4b0" +description = "A parser for Python dependency files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dparse-0.6.4b0-py3-none-any.whl", hash = "sha256:592ff183348b8a5ea0a18442a7965e29445d3a26063654ec2c7e8ef42cd5753c"}, + {file = "dparse-0.6.4b0.tar.gz", hash = "sha256:f8d49b41a527f3d16a269f854e6665245b325e50e41d2c213810cb984553e5c8"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +all = ["dparse[conda]", "dparse[pipenv]", "dparse[poetry]"] +conda = ["pyyaml"] +pipenv = ["pipenv"] +poetry = ["poetry"] + +[[package]] +name = "fs" +version = "2.4.16" +description = "Python's filesystem abstraction layer" +optional = false +python-versions = "*" +files = [ + {file = "fs-2.4.16-py2.py3-none-any.whl", hash = "sha256:660064febbccda264ae0b6bace80a8d1be9e089e0a5eb2427b7d517f9a91545c"}, + {file = "fs-2.4.16.tar.gz", hash = "sha256:ae97c7d51213f4b70b6a958292530289090de3a7e15841e108fbe144f069d313"}, +] + +[package.dependencies] +appdirs = ">=1.4.3,<1.5.0" +setuptools = "*" +six = ">=1.10,<2.0" + +[package.extras] +scandir = ["scandir (>=1.5,<2.0)"] + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = 
"sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "kubernetes" +version = "29.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, + {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "kubernetes-stubs" +version = "22.6.0.post1" +description = "Type stubs for the Kubernetes Python API client" +optional = false +python-versions = "*" +files = [ + {file = "kubernetes-stubs-22.6.0.post1.tar.gz", hash = "sha256:9f4de86ef3c5aeb8ca555164f7427e8d909b00ad0b2081cf2bf17dc44cfb63e9"}, + {file = "kubernetes_stubs-22.6.0.post1-py2.py3-none-any.whl", hash = "sha256:46a4d6fc30458f245c54d2f5777dcb2ecc16bc86258fb37c7b87c631d2ac61da"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of 
markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.2" 
+description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.2-py3-none-any.whl", hash = "sha256:70b54a6282f4704d12c0a41599682c5c5450e843b9ec406308653b47c59648a1"}, + {file = "marshmallow-3.21.2.tar.gz", hash = "sha256:82408deadd8b33d56338d2182d455db632c6313aa2af61916672146bb32edc56"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "moto" +version = "5.0.7" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "moto-5.0.7-py2.py3-none-any.whl", hash = "sha256:c0214c1361fb1dc85f587d9ce17cd988c6f69ff0ed54d43789654022e0e744f2"}, + {file = "moto-5.0.7.tar.gz", hash = "sha256:f2cde691dc4bc675e318a65f018902ac7f89d61bf2646052f7df215d212f069e"}, +] + +[package.dependencies] +boto3 = ">=1.9.201" +botocore = ">=1.14.0" +cryptography = ">=3.3.1" +Jinja2 = ">=2.10.1" +python-dateutil = ">=2.1,<3.0.0" +requests = ">=2.5" +responses = ">=0.15.0" +werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +apigateway = ["PyYAML (>=5.1)", "joserfc (>=0.9.0)", "openapi-spec-validator (>=0.5.0)"] +apigatewayv2 = ["PyYAML (>=5.1)", "openapi-spec-validator (>=0.5.0)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=3.0.0)"] +batch = ["docker (>=3.0.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +cognitoidp = ["joserfc (>=0.9.0)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"] +glue = ["pyparsing (>=3.0.7)"] +iotdata = ["jsondiff (>=1.1.2)"] +proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.5.5)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.5.5)"] 
+server = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"] +ssm = ["PyYAML (>=5.1)"] +stepfunctions = ["antlr4-python3-runtime", "jsonpath-ng"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "mypy" +version = "1.10.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + 
+[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.7.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, + {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, + {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, 
+ {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, + {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, + {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, + {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, + {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, + {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, + {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, + {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, + {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, + {file = 
"pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, + {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, + {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, + {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, + {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, + {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, + {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, + {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, + {file = 
"pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, + {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyfakefs" +version = "5.5.0" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.5.0-py3-none-any.whl", hash = "sha256:8dbf203ab7bef1529f11f7d41b9478b898e95bf9f3b71262163aac07a518cd76"}, + {file = "pyfakefs-5.5.0.tar.gz", hash = "sha256:7448aaa07142f892d0a4eb52a5ed3206a9f02c6599e686cd97d624c18979c154"}, +] + +[[package]] +name = "pyftpdlib" +version = "1.5.9" +description = "Very fast asynchronous FTP server library" +optional = false +python-versions = "*" +files = [ + {file = "pyftpdlib-1.5.9.tar.gz", hash = "sha256:323d4c42f1406aedb4df18faf680f64f32c080ff66f6c26090ba592f5bfc4a0f"}, +] + +[package.extras] +ssl = ["PyOpenSSL"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyopenssl" +version = "24.1.0" +description = "Python wrapper module around the OpenSSL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyOpenSSL-24.1.0-py3-none-any.whl", hash = "sha256:17ed5be5936449c5418d1cd269a1a9e9081bc54c17aed272b45856a3d3dc86ad"}, + {file = "pyOpenSSL-24.1.0.tar.gz", hash = "sha256:cabed4bfaa5df9f1a16c0ef64a0cb65318b5cd077a7eda7d6970131ca2f41a6f"}, +] + +[package.dependencies] +cryptography = ">=41.0.5,<43" + +[package.extras] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] +test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] + +[[package]] +name = "pytest" +version = "8.2.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest 
plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-localftpserver" +version = "1.3.1" +description = "A PyTest plugin which provides an FTP fixture for your tests" +optional = false +python-versions = "*" +files = [ + {file = "pytest_localftpserver-1.3.1-py3-none-any.whl", hash = "sha256:dbb2fcc5ea950231a60c506d3039873cf9c322871618de2de52d5075c98247fc"}, + {file = "pytest_localftpserver-1.3.1.tar.gz", hash = "sha256:8d9fbbceb2cca4ec212b0bb81655f73f5281bab606d1df7d7e43a2197d20c9fd"}, +] + +[package.dependencies] +pyftpdlib = ">=1.2.0" +PyOpenSSL = "*" +pytest = "*" +wget = "*" + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "responses" +version = "0.25.0" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.25.0-py3-none-any.whl", hash = "sha256:2f0b9c2b6437db4b528619a77e5d565e4ec2a9532162ac1a131a83529db7be1a"}, + {file = "responses-0.25.0.tar.gz", hash = "sha256:01ae6a02b4f34e39bffceb0fc6786b67a25eae919c6368d05eabc8d9576c2a66"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and 
python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "ruff" +version = "0.4.4" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.4.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:29d44ef5bb6a08e235c8249294fa8d431adc1426bfda99ed493119e6f9ea1bf6"}, + {file = "ruff-0.4.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c4efe62b5bbb24178c950732ddd40712b878a9b96b1d02b0ff0b08a090cbd891"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c8e2f1e8fc12d07ab521a9005d68a969e167b589cbcaee354cb61e9d9de9c15"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60ed88b636a463214905c002fa3eaab19795679ed55529f91e488db3fe8976ab"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b90fc5e170fc71c712cc4d9ab0e24ea505c6a9e4ebf346787a67e691dfb72e85"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8e7e6ebc10ef16dcdc77fd5557ee60647512b400e4a60bdc4849468f076f6eef"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9ddb2c494fb79fc208cd15ffe08f32b7682519e067413dbaf5f4b01a6087bcd"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c51c928a14f9f0a871082603e25a1588059b7e08a920f2f9fa7157b5bf08cfe9"}, + {file = "ruff-0.4.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5eb0a4bfd6400b7d07c09a7725e1a98c3b838be557fee229ac0f84d9aa49c36"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b1867ee9bf3acc21778dcb293db504692eda5f7a11a6e6cc40890182a9f9e595"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1aecced1269481ef2894cc495647392a34b0bf3e28ff53ed95a385b13aa45768"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9da73eb616b3241a307b837f32756dc20a0b07e2bcb694fec73699c93d04a69e"}, + {file = "ruff-0.4.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:958b4ea5589706a81065e2a776237de2ecc3e763342e5cc8e02a4a4d8a5e6f95"}, + {file = "ruff-0.4.4-py3-none-win32.whl", hash = "sha256:cb53473849f011bca6e754f2cdf47cafc9c4f4ff4570003a0dad0b9b6890e876"}, + {file = "ruff-0.4.4-py3-none-win_amd64.whl", hash = "sha256:424e5b72597482543b684c11def82669cc6b395aa8cc69acc1858b5ef3e5daae"}, + {file = "ruff-0.4.4-py3-none-win_arm64.whl", hash = 
"sha256:39df0537b47d3b597293edbb95baf54ff5b49589eb7ff41926d8243caa995ea6"}, + {file = "ruff-0.4.4.tar.gz", hash = "sha256:f87ea42d5cdebdc6a69761a9d0bc83ae9b3b30d0ad78952005ba6568d6c022af"}, +] + +[[package]] +name = "s3transfer" +version = "0.10.1" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:ceb252b11bcf87080fb7850a224fb6e05c8a776bab8f2b64b7f25b969464839d"}, + {file = "s3transfer-0.10.1.tar.gz", hash = "sha256:5683916b4c724f799e600f41dd9e10a9ff19871bf87623cc8f491cb4f5fa0a19"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + +[[package]] +name = "safety" +version = "3.2.0" +description = "Checks installed dependencies for known vulnerabilities and licenses." +optional = false +python-versions = ">=3.7" +files = [ + {file = "safety-3.2.0-py3-none-any.whl", hash = "sha256:a432fc9d17e79a4386c4f093656b617c56f839cde022649cfa796d72c7a544de"}, + {file = "safety-3.2.0.tar.gz", hash = "sha256:8bd5cab5f3d8a61ce0ea6e98f267c1006d056097c45c644fee7afeff7d5949c1"}, +] + +[package.dependencies] +Authlib = ">=1.2.0" +Click = ">=8.0.2" +dparse = ">=0.6.4b0" +jinja2 = ">=3.1.0" +marshmallow = ">=3.15.0" +packaging = ">=21.0" +pydantic = ">=1.10.12" +requests = "*" +rich = "*" +"ruamel.yaml" = ">=0.17.21" +safety-schemas = ">=0.0.2" +setuptools = ">=65.5.1" +typer = "*" +typing-extensions = ">=4.7.1" +urllib3 = ">=1.26.5" + +[package.extras] +github = ["pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] +spdx = ["spdx-tools (>=0.8.2)"] + +[[package]] +name = "safety-schemas" +version = "0.0.2" +description = "Schemas for Safety tools" +optional = false +python-versions = ">=3.7" +files = [ + {file = "safety_schemas-0.0.2-py3-none-any.whl", hash = "sha256:277c077ce6e53221874a87c29515ffdd2f3773a6db4d035a9f67cc98db3b8c7f"}, + {file = "safety_schemas-0.0.2.tar.gz", hash = "sha256:7d1b040ec06480f05cff6b45ea7a93e09c8942df864fb0d01ddeb67c323cfa8c"}, +] + +[package.dependencies] +dparse = ">=0.6.4b0" +packaging = ">=21.0" +pydantic = "*" +ruamel-yaml = ">=0.17.21" +typing-extensions = ">=4.7.1" + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", 
"jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "stevedore" +version = "5.2.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "typer" +version = "0.12.3" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +files = [ + {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"}, + {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "types-awscrt" +version = "0.20.9" +description = "Type annotations and code completion for awscrt" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "types_awscrt-0.20.9-py3-none-any.whl", hash = "sha256:3ae374b553e7228ba41a528cf42bd0b2ad7303d806c73eff4aaaac1515e3ea4e"}, + {file = "types_awscrt-0.20.9.tar.gz", hash = "sha256:64898a2f4a2468f66233cb8c29c5f66de907cf80ba1ef5bb1359aef2f81bb521"}, +] + +[[package]] +name = "types-botocore" +version = "1.0.2" +description = "Proxy package for botocore-stubs" +optional = false +python-versions = "*" +files = [ + {file = "types-botocore-1.0.2.tar.gz", hash = "sha256:885d4ceb8d0594b73d08d5feb12b6bbd1ef9e7333531acb80477dc7904f02304"}, + {file = "types_botocore-1.0.2-py3-none-any.whl", hash = "sha256:453fc52a1da8ac162793323d5f0f0dbaa40a3cd9dad915f5a3de79a07fd65674"}, +] + +[package.dependencies] +botocore-stubs = "*" + +[[package]] +name = "types-requests" +version = "2.31.0.20240406" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "types-s3transfer" +version = "0.10.1" +description = "Type annotations 
and code completion for s3transfer" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "types_s3transfer-0.10.1-py3-none-any.whl", hash = "sha256:49a7c81fa609ac1532f8de3756e64b58afcecad8767933310228002ec7adff74"}, + {file = "types_s3transfer-0.10.1.tar.gz", hash = "sha256:02154cce46528287ad76ad1a0153840e0492239a0887e8833466eccf84b98da0"}, +] + +[[package]] +name = "types-urllib3" +version = "1.26.25.14" +description = "Typing stubs for urllib3" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, + {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "typos" +version = "1.21.0" +description = "Source Code Spelling Correction" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typos-1.21.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9b592145137a113cf5b631ddca55944405441a43fedb0fb3332ddfdba5d0040c"}, + {file = "typos-1.21.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e159df29a5af13cb42e2f0d0c61356c9ed68999a15682447d183ee0a5c1faa24"}, + {file = "typos-1.21.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e7ef2dde5ae496f145bc1d0c2b4380dbc5410d0be083ff5cd7136d00eeb9a69"}, + {file = "typos-1.21.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3806c3e8f11b0e9734491ce656597cd06ad11847c4cecada2a1781a71319a753"}, + {file = "typos-1.21.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56859b6938c970cdbf9f168189665a1459ab9813c40bbba7a9183b5d0643f949"}, + {file = "typos-1.21.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ebeac8b75e226354920567026cd45c4926dcf01d6d6def2b88fba5d3e4ae838a"}, + {file = "typos-1.21.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:326eeaf541d5dc9097e3550a52c1bc814ea2b0315f1b83a96ff5665b941742fc"}, + {file = "typos-1.21.0-py3-none-win32.whl", hash = "sha256:9e05ac854507b121921e242db0b8b80eb7d2968fa5f1ad1be507941627a95c71"}, + {file = "typos-1.21.0-py3-none-win_amd64.whl", hash = "sha256:f576f250e6e8dc8d4c65e74502fdaa3dff0877ae3e1713b45d0f96a141d1d906"}, + {file = "typos-1.21.0.tar.gz", hash = "sha256:8955e93edf9b901a8821840c19c17adc77611eeb9623ab6158126b193f5452b8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wget" +version = "3.2" +description = "pure python download utility" +optional = false +python-versions = "*" +files = [ + {file = "wget-3.2.zip", hash = "sha256:35e630eca2aa50ce998b9b1a127bb26b30dfee573702782aa982f875e3f16061"}, +] + +[[package]] +name = "xmltodict" +version = "0.13.0" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.4" +files = [ + {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, + {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "32828e61e4649c4baa1cb381d148c064695478e66e6ee8719299f3d697a9cff0" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..eb685fbf --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,106 @@ +[tool.poetry] +name = "tesk" +version = "0.1.0" +description = "An implementation of a task execution engine based on the GA4GH Task Execution Service (TES) API standard running on Kubernetes." 
+authors = ["Elixir Cloud AAI "] +maintainers = ["Elixir Cloud AAI "] +license = "Apache-2.0" +readme = "README.md" +repository = "https://github.com/elixir-cloud-aai/TESK" +keywords = ["TES", "Kubernetes", "Task Execution", "ebi", "TESK", "workflow", "automation"] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.11" +] + +[tool.poetry.scripts] +filer = 'tesk.services.filer:main' +taskmaster = 'tesk.services.taskmaster:main' + +[tool.poetry.dependencies] +python = "^3.11" +kubernetes = "^29.0.0" +requests = ">=2.20.0" +urllib3 = "^2.2.1" +boto3 = "1.34.104" + +[tool.poetry.group.dev.dependencies] +pytest = "*" +pyfakefs = "*" +pytest-mock = "*" +fs = "*" +moto = "*" +pytest-localftpserver = "*" +ruff = "^0.4.4" +mypy = "^1.10.0" +typos = "^1.21.0" +bandit = "^1.7.8" +safety = "^3.2.0" +pytest-cov = "^5.0.0" + +[tool.poetry.group.lint.dependencies] +ruff = "^0.4.4" +typos = "^1.21.0" + +[tool.poetry.group.types.dependencies] +mypy = "^1.10.0" +kubernetes-stubs = "^22.6.0.post1" +types-urllib3 = "^1.26.25.14" +types-botocore = "^1.0.2" +boto3-stubs = "^1.34.108" +types-requests = "^2.31.0.20240406" + +[tool.poetry.group.security.dependencies] +bandit = "^1.7.8" +safety = "^3.2.0" + +[tool.poetry.group.test.dependencies] +pytest = "*" +pyfakefs = "*" +pytest-mock = "*" +fs = "*" +moto = "*" +pytest-localftpserver = "*" +kubernetes = "*" +pytest-cov = "^5.0.0" + +[tool.ruff.lint] +select = [ + "E", # pycodestyle + "F", # Pyflakes + "UP", # pyupgrade + "B", # flake8-bugbear + "SIM", # flake8-simplify + "I", # isort + "PL" # pylint +] + +[tool.ruff.format] +quote-style = "single" +indent-style = "tab" +docstring-code-format = true + +[tool.typos.default.extend-words] +mke = 'mke' + +[tool.pytest_env] +FTP_USER="user" +FTP_PASS="pass" +TESK_FTP_USERNAME="user" +TESK_FTP_PASSWORD="pass" +FTP_HOME ="/tmp" +FTP_FIXTURE_SCOPE="function" +FTP_PORT = 2111 + +[tool.bandit] +skips = [ + "B321", # FTP-related functions are being called. + "B402", # A FTP-related module is being imported. +] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/pytest.ini b/pytest.ini deleted file mode 100755 index a9d56aa8..00000000 --- a/pytest.ini +++ /dev/null @@ -1,9 +0,0 @@ -[pytest] -env = - FTP_USER=user - FTP_PASS=pass - TESK_FTP_USERNAME=user - TESK_FTP_PASSWORD=pass - FTP_HOME =/tmp - FTP_FIXTURE_SCOPE=function - FTP_PORT = 2111 \ No newline at end of file diff --git a/scripts/dockerBuild b/scripts/dockerBuild deleted file mode 100755 index 6f9c3f92..00000000 --- a/scripts/dockerBuild +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -# -# Usage: -# -# buildDockerImage filer or -# buildDockerImage taskmaster -# - -IMAGE=$1 - -if [ -z "$IMAGE" ]; -then - echo "Use: $0 [tag]" - exit 12 -fi - -TAG=$2 - -if [ -z "$TAG" ]; -then - TAG=testing -fi - -if command -V buildah; -then - buildah bud -t "docker.io/elixircloud/tesk-core-$IMAGE:$TAG" \ - --format=docker --no-cache \ - -f "containers/$IMAGE.Dockerfile" -else - docker build -t "docker.io/elixircloud/tesk-core-$1:$TAG" -f "containers/$1.Dockerfile" . 
-fi diff --git a/scripts/dockerRun b/scripts/dockerRun deleted file mode 100755 index 4bc8709b..00000000 --- a/scripts/dockerRun +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -# -# Usage: -# -# buildRun filer or -# buildRun taskmaster -# - -imageName="$1" -shift - -docker run "docker.io/elixircloud/tesk-core-$imageName:testing" "$@" diff --git a/scripts/run b/scripts/run index f7e11994..c3baaba3 100755 --- a/scripts/run +++ b/scripts/run @@ -1,3 +1,5 @@ +# TODO: Remove once the API is stable and tested on the cluster. +# This script could also be added to the Makefile. # # Usage: diff --git a/scripts/taskmaster b/scripts/taskmaster new file mode 100755 index 00000000..c1a5be21 --- /dev/null +++ b/scripts/taskmaster @@ -0,0 +1,3 @@ +# TODO: Add to the Makefile after understanding what it's doing 😕 + +PYTHONPATH="tesk" python tesk/services/taskmaster.py "$@" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 10e6d8ae..00000000 --- a/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[metadata] -description-file=README.md -[aliases] -test=pytest diff --git a/setup.py b/setup.py deleted file mode 100644 index b66eb91e..00000000 --- a/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -import codecs -from os import path -from setuptools import setup, find_packages - -HERE = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with codecs.open(path.join(HERE, 'README.md'), encoding='utf-8') as f: - LONG_DESC = f.read() - -INSTALL_DEPS = ['kubernetes==9.0.0', - 'requests>=2.20.0', - 'urllib3==1.26.18', - 'boto3==1.16.18', - ] -TEST_DEPS = [ 'pytest', - 'pyfakefs', - 'pytest-mock' - , 'fs', - 'moto', - 'pytest-localftpserver' - ] - -DEV_DEPS = [] - -setup( - name='teskcore', - - # https://pypi.python.org/pypi/setuptools_scm - use_scm_version=True, - - description='TES on Kubernetes', - long_description=LONG_DESC, - long_description_content_type="text/markdown", - - url='https://github.com/EMBL-EBI-TSI/TESK', - - author='Erik van der Bergh', - author_email='evdbergh@ebi.ac.uk', - - license='Apache License 2.0', - - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 4 - Beta', - - 'Intended Audience :: System Administrators', - - 'License :: OSI Approved :: Apache Software License', - - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7' - ], - - # What does your project relate to? - keywords='tes kubernetes ebi', - - packages = find_packages('src'), - package_dir = {'': 'src'}, - - entry_points={ - 'console_scripts' : [ - 'filer = tesk_core.filer:main', - 'taskmaster = tesk_core.taskmaster:main' - ] - }, - test_suite='tests', - - # List run-time dependencies here. These will be installed by pip when - # your project is installed. For an analysis of "install_requires" vs pip's - # requirements files see: - # https://packaging.python.org/en/latest/requirements.html - install_requires=INSTALL_DEPS, - - setup_requires=['setuptools_scm'], - - tests_require=TEST_DEPS, - - python_requires='>=3.5, <4.0', - - # List additional groups of dependencies here (e.g. development - # dependencies).
You can install these using the following syntax, - # for example: - # $ pip install -e .[dev,test] - extras_require={ - 'dev': DEV_DEPS, - 'test': TEST_DEPS - }, -) diff --git a/src/tesk_core/Util.py b/src/tesk_core/Util.py deleted file mode 100644 index 0a8b89cc..00000000 --- a/src/tesk_core/Util.py +++ /dev/null @@ -1,7 +0,0 @@ -import json - - -def pprint(data): - - return json.dumps(data, indent=4) - diff --git a/src/tesk_core/exception.py b/src/tesk_core/exception.py deleted file mode 100644 index 89ae498c..00000000 --- a/src/tesk_core/exception.py +++ /dev/null @@ -1,10 +0,0 @@ - - -class UnknownProtocol(Exception): - pass - -class FileProtocolDisabled(Exception): - pass - -class InvalidHostPath(Exception): - pass diff --git a/src/tesk_core/filer.py b/src/tesk_core/filer.py deleted file mode 100755 index d2fef8b8..00000000 --- a/src/tesk_core/filer.py +++ /dev/null @@ -1,500 +0,0 @@ -#!/usr/bin/env python3 - -from ftplib import FTP -import ftplib -import argparse -import sys -import json -import re -import os -import distutils.dir_util -import logging -import netrc -import requests -import gzip -from tesk_core.exception import UnknownProtocol, FileProtocolDisabled -import shutil -from glob import glob -from tesk_core.path import containerPath, getPath, fileEnabled -from tesk_core.transput import Type, Transput, urlparse -from tesk_core.filer_s3 import S3Transput - - - - -class HTTPTransput(Transput): - def __init__(self, path, url, ftype): - Transput.__init__(self, path, url, ftype) - - def download_file(self): - req = requests.get(self.url) - - if req.status_code < 200 or req.status_code >= 300: - logging.error('Got status code: %d', req.status_code) - logging.error(req.text) - return 1 - logging.debug('OK, got status code: %d', req.status_code) - - with open(self.path, 'wb') as file: - file.write(req.content) - return 0 - - def upload_file(self): - with open(self.path, 'r') as file: - file_contents = file.read() - req = requests.put(self.url, data=file_contents) - - if req.status_code < 200 or req.status_code >= 300: - logging.error('Got status code: %d', req.status_code) - logging.error(req.text) - return 1 - logging.debug('OK, got status code: %d', req.status_code) - - return 0 - - def upload_dir(self): - to_upload = [] - for listing in os.listdir(self.path): - file_path = self.path + '/' + listing - if os.path.isdir(file_path): - ftype = Type.Directory - elif os.path.isfile(file_path): - ftype = Type.File - else: - return 1 - to_upload.append( - HTTPTransput(file_path, self.url + '/' + listing, ftype)) - - # return 1 if any upload failed - return min(sum([transput.upload() for transput in to_upload]), 1) - - def download_dir(self): - logging.error( - 'Won\'t crawl http directory, so unable to download url: %s', - self.url) - return 1 - - -def copyContent(src, dst, symlinks=False, ignore=None): - ''' - https://stackoverflow.com/a/12514470/1553043 - ''' - - for item in os.listdir(src): - s = os.path.join(src, item) - d = os.path.join(dst, item) - if os.path.isdir(s): - shutil.copytree(s, d, symlinks, ignore) - else: - shutil.copy2(s, d) - - -def copyDir(src, dst): - ''' - Limitation of shutil.copytree: - - > The destination directory, named by dst, must not already exist; it will be created as well as missing parent directories. - ''' - - if os.path.exists(dst): - - copyContent(src, dst) - - else: - - shutil.copytree(src, dst) - -def copyFile(src, dst): - ''' - Limitations of shutil.copy: - - It does not interpret * as a glob, but as a character. 
-    '''
-
-    # If there is any * in 'dst', use only the dirname (base path)
-    p = re.compile('.*\*.*')
-    if p.match(dst):
-        dst=os.path.dirname(dst)
-
-    for file in glob(src):
-        shutil.copy(file, dst)
-
-
-class FileTransput(Transput):
-    def __init__(self, path, url, ftype):
-        Transput.__init__(self, path, url, ftype)
-
-        self.urlContainerPath = containerPath(getPath(self.url))
-
-    def transfer(self, copyFn, src, dst):
-        logging.debug("Copying {src} to {dst}".format(**locals()))
-        copyFn(src, dst)
-
-    def download_file(self): self.transfer(shutil.copy , self.urlContainerPath , self.path)
-    def download_dir(self):  self.transfer(copyDir     , self.urlContainerPath , self.path)
-    def upload_file(self):   self.transfer(copyFile    , self.path , self.urlContainerPath)
-    def upload_dir(self):    self.transfer(copyDir     , self.path , self.urlContainerPath)
-
-
-class FTPTransput(Transput):
-    def __init__(self, path, url, ftype, ftp_conn=None):
-        Transput.__init__(self, path, url, ftype)
-
-        self.connection_owner = ftp_conn is None
-        self.ftp_connection = FTP() if ftp_conn is None else ftp_conn
-
-    # entice users to use contexts when using this class
-    def __enter__(self):
-        if self.connection_owner:
-            self.ftp_connection.connect(self.netloc)
-            ftp_login(self.ftp_connection, self.netloc, self.netrc_file)
-        return self
-
-    def upload_dir(self):
-        for file in os.listdir(self.path):
-            file_path = self.path + '/' + file
-            file_url = self.url + '/' + file
-
-            if os.path.isdir(file_path):
-                ftype = Type.Directory
-            elif os.path.isfile(file_path):
-                ftype = Type.File
-            else:
-                logging.error(
-                    'Directory listing in is neither file nor directory: "%s"',
-                    file_url
-                )
-                return 1
-
-            logging.debug('Uploading %s\t"%s"', ftype.value, file_path)
-
-            # We recurse into new transputs, ending with files which are uploaded
-            # Downside is nothing happens with empty dirs.
-            with FTPTransput(file_path, file_url, ftype) as transfer:
-                if transfer.upload():
-                    return 1
-        return 0
-
-    def upload_file(self):
-        error = ftp_make_dirs(self.ftp_connection,
-                              os.path.dirname(self.url_path))
-        if error:
-            logging.error(
-                'Unable to create remote directories needed for %s',
-                self.url
-            )
-            return 1
-
-        if not ftp_check_directory(self.ftp_connection, self.url_path):
-            return 1
-
-        return ftp_upload_file(self.ftp_connection, self.path, self.url_path)
-
-    def download_dir(self):
-        logging.debug('Processing ftp dir: %s target: %s', self.url, self.path)
-        self.ftp_connection.cwd(self.url_path)
-
-        # This is horrible and I'm sorry but it works flawlessly.
-        # Credit to Chris Haas for writing this
-        # See https://stackoverflow.com/questions/966578/parse-response-from-ftp-list-command-syntax-variations
-        # for attribution
-        ftp_command = re.compile(
-            r'^(?P<dir>[\-ld])(?P<permissions>([\-r][\-w][\-xs]){3})\s+(?P<filecode>\d+)\s+(?P<owner>\w+)\s+(?P<group>\w+)\s+(?P<size>\d+)\s+(?P<timestamp>((\w{3})\s+(\d{2})\s+(\d{1,2}):(\d{2}))|((\w{3})\s+(\d{1,2})\s+(\d{4})))\s+(?P<name>.+)$')
-
-        lines = []
-        self.ftp_connection.retrlines('LIST', lines.append)
-
-        for line in lines:
-            matches = ftp_command.match(line)
-            dirbit = matches.group('dir')
-            name = matches.group('name')
-
-            file_path = self.path + '/' + name
-            file_url = self.url + '/' + name
-
-            if dirbit == 'd':
-                ftype = Type.Directory
-            else:
-                ftype = Type.File
-
-            # We recurse into new transputs, ending with files which are downloaded
-            # Downside is nothing happens with empty dirs.
- with FTPTransput(file_path, file_url, ftype, - self.ftp_connection) as transfer: - if transfer.download(): - return 1 - return 0 - - def download_file(self): - logging.debug('Downloading ftp file: "%s" Target: %s', self.url, - self.path) - basedir = os.path.dirname(self.path) - distutils.dir_util.mkpath(basedir) - - return ftp_download_file(self.ftp_connection, self.url_path, self.path) - - def delete(self): - if self.connection_owner: - self.ftp_connection.close() - - -def ftp_login(ftp_connection, netloc, netrc_file): - user = None - if netrc_file is not None: - creds = netrc_file.authenticators(netloc) - if creds: - user, _, password = creds - elif 'TESK_FTP_USERNAME' in os.environ and 'TESK_FTP_PASSWORD' in os.environ: - user = os.environ['TESK_FTP_USERNAME'] - password = os.environ['TESK_FTP_PASSWORD'] - - if user: - try: - ftp_connection.login(user, password) - except ftplib.error_perm: - ftp_connection.login() - else: - ftp_connection.login() - - -def ftp_check_directory(ftp_connection, path): - """ - Following convention with the rest of the code, - return 0 if it is a directory, 1 if it is not or failed to do the check - """ - response = ftp_connection.pwd() - if response == '': - return 1 - original_directory = response - - # We are NOT scp, so we won't create a file when filename is not - # specified (mirrors input behaviour) - try: - ftp_connection.cwd(path) - logging.error( - 'Path "%s" at "%s" already exists and is a folder. \ - Please specify a target filename and retry', - path, ftp_connection.host) - is_directory = True - except ftplib.error_perm: - is_directory = False - except (ftplib.error_reply, ftplib.error_temp): - logging.exception('Could not check if path "%s" in "%s" is directory', - path, ftp_connection.host) - return 1 - try: - ftp_connection.cwd(original_directory) - except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): - logging.exception( - 'Error when checking if "%s" in "%s" was a directory', - path, ftp_connection.host) - return 1 - - return 0 if is_directory else 1 - - -def ftp_upload_file(ftp_connection, local_source_path, - remote_destination_path): - try: - with open(local_source_path, 'r+b') as file: - ftp_connection.storbinary("STOR /" + remote_destination_path, file) - except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): - logging.exception( - 'Unable to upload file "%s" to "%s" as "%s"', - local_source_path, - ftp_connection.host, - remote_destination_path) - return 1 - return 0 - - -def ftp_download_file(ftp_connection, remote_source_path, - local_destination_path): - try: - with open(local_destination_path, 'w+b') as file: - ftp_connection.retrbinary("RETR " + remote_source_path, file.write) - except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): - logging.exception( - 'Unable to download file "%s" from "%s" as "%s"', - remote_source_path, - ftp_connection.host, - local_destination_path - ) - return 1 - return 0 - - -def subfolders_in(whole_path): - """ - Returns all subfolders in a path, in order - - >>> subfolders_in('/') - ['/'] - - >>> subfolders_in('/this/is/a/path') - ['/this', '/this/is', '/this/is/a', '/this/is/a/path'] - - >>> subfolders_in('this/is/a/path') - ['this', 'this/is', 'this/is/a', 'this/is/a/path'] - """ - path_fragments = whole_path.lstrip('/').split('/') - if whole_path.startswith('/'): - path_fragments[0] = '/' + path_fragments[0] - path = path_fragments[0] - subfolders = [path] - for fragment in path_fragments[1:]: - path += '/' + fragment - subfolders.append(path) - return 
subfolders - - -def ftp_make_dirs(ftp_connection, path): - response = ftp_connection.pwd() - if response == '': - return 1 - original_directory = response - - # if directory exists do not do anything else - try: - ftp_connection.cwd(path) - return 0 - except (ftplib.error_perm, ftplib.error_temp): - pass - except ftplib.error_reply: - logging.exception('Unable to create directory "%s" at "%s"', - path, ftp_connection.host) - return 1 - - for subfolder in subfolders_in(path): - try: - ftp_connection.cwd(subfolder) - except (ftplib.error_perm, ftplib.error_temp): - try: - ftp_connection.mkd(subfolder) - except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): - logging.exception('Unable to create directory "%s" at "%s"', - subfolder, ftp_connection.host) - return 1 - except ftplib.error_reply: - logging.exception('Unable to create directory "%s" at "%s"', - path, ftp_connection.host) - return 1 - - try: - ftp_connection.cwd(original_directory) - except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): - logging.exception('Unable to create directory "%s" at "%s"', - path, ftp_connection.host) - return 1 - return 0 - - -def file_from_content(filedata): - with open(filedata['path'], 'w') as file: - file.write(str(filedata['content'])) - return 0 - - - -def newTransput(scheme, netloc): - def fileTransputIfEnabled(): - - if fileEnabled(): - return FileTransput - else: - raise FileProtocolDisabled("'file:' protocol disabled\n" - "To enable it, both '{}' and '{}' environment variables must be defined." - .format('HOST_BASE_PATH', - 'CONTAINER_BASE_PATH') - ) - - if scheme == 'ftp': - return FTPTransput - elif scheme == 'file': - return fileTransputIfEnabled() - elif scheme in ['http', 'https']: - return HTTPTransput - elif scheme == 's3': - return S3Transput - else: - raise UnknownProtocol("Unknown protocol: '{scheme}'".format(**locals())) - - -def process_file(ttype, filedata): - ''' - @param ttype: str - Can be 'inputs' or 'outputs' - ''' - - if 'content' in filedata: - return file_from_content(filedata) - parsed_url = urlparse(filedata['url']) - scheme = parsed_url.scheme - netloc = parsed_url.netloc - if scheme == '': - logging.info('Could not determine protocol for url: "%s", assuming "file"', filedata['url']) - scheme='file' - - trans = newTransput(scheme, netloc) - - with trans(filedata['path'], filedata['url'], - Type(filedata['type'])) as transfer: - if ttype == 'inputs': - return transfer.download() - if ttype == 'outputs': - return transfer.upload() - - logging.info('There was no action to do with %s', filedata['path']) - return 0 - - -def logConfig(loglevel): - logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%m/%d/%Y %I:%M:%S', - level=loglevel, - stream=sys.stdout) - - -def main(): - parser = argparse.ArgumentParser( - description='Filer script for down- and uploading files') - parser.add_argument( - 'transputtype', - help='transput to handle, either \'inputs\' or \'outputs\' ') - parser.add_argument( - 'data', - help='file description data, see docs for structure') - parser.add_argument( - '--debug', - '-d', - help='debug logging', - action='store_true') - args = parser.parse_args() - - if args.debug: - loglevel = logging.DEBUG - else: - loglevel = logging.ERROR - - logConfig(loglevel) - - logging.info('Starting %s filer...', args.transputtype) - - if args.data.endswith('.gz'): - with gzip.open(args.data, 'rb') as fh: - data = json.loads(fh.read()) - else: - data = json.loads(args.data) - - for afile in data[args.transputtype]: - 
logging.debug('Processing file: %s', afile['path']) - if process_file(args.transputtype, afile): - logging.error('Unable to process file, aborting') - return 1 - logging.debug('Processed file: %s', afile['path']) - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/src/tesk_core/filer_class.py b/src/tesk_core/filer_class.py deleted file mode 100644 index 980adbbf..00000000 --- a/src/tesk_core/filer_class.py +++ /dev/null @@ -1,180 +0,0 @@ -import json -from tesk_core import path -from tesk_core.path import fileEnabled - - -class Filer: - - def getVolumes(self): return self.spec['spec']['template']['spec']['volumes'] - - def getContainer(self, i): return self.spec['spec']['template']['spec']['containers'][i] - - def getVolumeMounts(self): return self.getContainer(0)['volumeMounts'] - def getEnv(self): return self.getContainer(0)['env'] - def getImagePullPolicy(self): return self.getContainer(0)['imagePullPolicy'] - - - def __init__(self, name, data, filer_name='eu.gcr.io/tes-wes/filer', filer_version='v0.5', pullPolicyAlways = False, json_pvc=None): - self.name = name - self.json_pvc = json_pvc - self.spec = { - "kind": "Job", - "apiVersion": "batch/v1", - "metadata": {"name": name}, - "spec": { - "template": { - "metadata": {"name": "tesk-filer"}, - "spec": { - "containers": [{ - "name": "filer", - "image": "%s:%s" % (filer_name, filer_version), - "args": [], - "env": [], - "volumeMounts": [], - "imagePullPolicy": 'Always' if pullPolicyAlways else 'IfNotPresent' - } - ], - "volumes": [], - "restartPolicy": "Never" - } - } - } - } - - env = self.getEnv() - if json_pvc is None: - env.append({"name": "JSON_INPUT", "value": json.dumps(data)}) - env.append({"name": "HOST_BASE_PATH", "value": path.HOST_BASE_PATH}) - env.append( - {"name": "CONTAINER_BASE_PATH", "value": path.CONTAINER_BASE_PATH}) - - if json_pvc: - self.getVolumeMounts().append({ - "name" : 'jsoninput' - , 'mountPath' : '/jsoninput' - }) - self.getVolumes().append({ - "name" : 'jsoninput' - , "configMap" : { 'name' : json_pvc } - }) - - if fileEnabled(): - self.getVolumeMounts().append({ - - "name" : 'transfer-volume' - , 'mountPath' : path.CONTAINER_BASE_PATH - }) - - self.getVolumes().append({ - - "name" : 'transfer-volume' - , 'persistentVolumeClaim' : { 'claimName' : path.TRANSFER_PVC_NAME } - - }) - - self.add_s3_mount() - - def add_s3_mount(self): - """ Mounts the s3 configuration file. The secret name is hardcoded and - set to 'aws-secret'. - """ - - env = self.getEnv() - env.append({"name": "AWS_CONFIG_FILE", "value": "/aws/config"}) - env.append( - { - "name": "AWS_SHARED_CREDENTIALS_FILE", - "value": "/aws/credentials" - } - ) - - self.getVolumeMounts().append( - { - "name": "s3-conf", - "mountPath": "/aws", - "readOnly": True, - } - ) - self.getVolumes().append( - { - "name": "s3-conf", - "secret": { - "secretName": "aws-secret", - "items": [ - { - "key": "credentials", - "path": "credentials" - }, - { - "key": "config", - "path": "config" - } - ], - "optional": True, - } - } - ) - - def set_ftp(self, user, pw): - env = self.getEnv() - env.append({"name": "TESK_FTP_USERNAME", "value": user}) - env.append({"name": "TESK_FTP_PASSWORD", "value": pw}) - - def set_backoffLimit(self, limit): - """Set a number of retries of a job execution (default value is 6). Use the environment variable - TESK_API_TASKMASTER_ENVIRONMENT_FILER_BACKOFF_LIMIT to explicitly set this value. - - Args: - limit: The number of retries before considering a Job as failed. 
- """ - self.spec['spec'].update({"backoffLimit": limit}) - - def add_volume_mount(self, pvc): - self.getVolumeMounts().extend(pvc.volume_mounts) - self.getVolumes().append({"name": "task-volume", - "persistentVolumeClaim": { - "claimName": pvc.name}}) - - - def add_netrc_mount(self, netrc_name='netrc'): - ''' - Sets $HOME to an arbitrary location (to prevent its change as a result of runAsUser), currently hardcoded to `/opt/home` - Mounts the secret netrc into that location: $HOME/.netrc. - ''' - - self.getVolumeMounts().append({"name" : 'netrc', - "mountPath" : '/opt/home/.netrc', - "subPath" : ".netrc" - }) - self.getVolumes().append({"name" : "netrc", - "secret" : { - "secretName" : netrc_name, - "defaultMode" : 420, - "items" : [ - { - "key": ".netrc", - "path": ".netrc" - } - ] - } - }) - self.getEnv().append({"name": "HOME", - "value": "/opt/home" - }) - - - def get_spec(self, mode, debug=False): - if self.json_pvc is None: - self.spec['spec']['template']['spec']['containers'][0]['args'] = [ - mode, "$(JSON_INPUT)"] - else: - self.spec['spec']['template']['spec']['containers'][0]['args'] = [ - mode, "/jsoninput/JSON_INPUT.gz"] - - if debug: - self.spec['spec']['template']['spec']['containers'][0][ - 'args'].append( - '-d') - - self.spec['spec']['template']['metadata']['name'] = self.name - return self.spec diff --git a/src/tesk_core/filer_s3.py b/src/tesk_core/filer_s3.py deleted file mode 100644 index 5ed50157..00000000 --- a/src/tesk_core/filer_s3.py +++ /dev/null @@ -1,117 +0,0 @@ -import sys -import os -import logging -import re -import botocore -import boto3 -from tesk_core.transput import Transput, Type - -class S3Transput(Transput): - def __init__(self, path, url, ftype): - Transput.__init__(self, path, url, ftype) - self.bucket, self.file_path = self.get_bucket_name_and_file_path() - self.bucket_obj = None - - def __enter__(self): - client = boto3.resource('s3', endpoint_url=self.extract_endpoint()) - if self.check_if_bucket_exists(client): - sys.exit(1) - self.bucket_obj = client.Bucket(self.bucket) - return self - - def extract_endpoint(self): - return boto3.client('s3').meta.endpoint_url - - def check_if_bucket_exists(self, client): - try: - client.meta.client.head_bucket(Bucket=self.bucket) - except botocore.exceptions.ClientError as e: - # If a client error is thrown, then check that it was a 404 error. - # If it was a 404 error, then the bucket does not exist. 
- logging.error('Got status code: %s', e.response['Error']['Code']) - if e.response['Error']['Code'] == "404": - logging.error("Failed to fetch Bucket, reason: %s", e.response['Error']['Message']) - return 1 - return 0 - - def get_bucket_name_and_file_path(self): - """ - If the S3 url is similar to s3://idr-bucket-1/README.txt format - """ - - bucket = self.netloc - file_path = self.url_path[1:] - - return bucket, file_path - - def download_file(self): - logging.debug('Downloading s3 object: "%s" Target: %s', self.bucket + "/" + self.file_path, self.path) - basedir = os.path.dirname(self.path) - os.makedirs(basedir, exist_ok=True) - return self.get_s3_file(self.path, self.file_path) - - def upload_file(self): - logging.debug('Uploading s3 object: "%s" Target: %s', self.path, self.bucket + "/" + self.file_path) - try: - self.bucket_obj.upload_file(Filename=self.path, Key=self.file_path) - except (botocore.exceptions.ClientError, OSError) as err: - logging.error("File upload failed for '%s'", self.bucket + "/" + self.file_path) - logging.error(err) - return 1 - return 0 - - def upload_dir(self): - logging.debug('Uploading s3 object: "%s" Target: %s', self.path, self.bucket + "/" + self.file_path) - try: - for item in os.listdir(self.path): - path = os.path.join(self.path,item) - if os.path.isdir(path): - file_type = Type.Directory - elif os.path.isfile(path): - file_type = Type.File - else: - # An exception is raised, if the object type is neither file or directory - logging.error("Object is neither file or directory : '%s' ",path) - raise IOError - file_path = os.path.join(self.url, item) - with S3Transput(path, file_path, file_type) as transfer: - if transfer.upload(): - return 1 - except OSError as err: - logging.error("File upload failed for '%s'", self.bucket + "/" + self.file_path) - logging.error(err) - return 1 - return 0 - - def download_dir(self): - logging.debug('Downloading s3 object: "%s" Target: %s', self.bucket + "/" + self.file_path, self.path) - client = boto3.client('s3', endpoint_url=self.extract_endpoint()) - if not self.file_path.endswith('/'): - self.file_path += '/' - objects = client.list_objects_v2(Bucket=self.bucket, Prefix=self.file_path) - - # If the file path does not exists in s3 bucket, 'Contents' key will not be present in objects - if "Contents" not in objects: - logging.error('Got status code: %s', 404) - logging.error("Invalid file path!.") - return 1 - - # Looping through the list of objects and downloading them - for obj in objects["Contents"]: - file_name = os.path.basename(obj["Key"]) - dir_name = os.path.dirname(obj["Key"]) - path_to_create = re.sub(r'^' + self.file_path.strip('/').replace('/', '\/') + '', "", dir_name).strip('/') - path_to_create = os.path.join(self.path, path_to_create) - os.makedirs(path_to_create, exist_ok=True) - if self.get_s3_file(os.path.join(path_to_create, file_name), obj["Key"]): - return 1 - return 0 - - def get_s3_file(self, file_name, key): - try: - self.bucket_obj.download_file(Filename=file_name, Key=key) - except botocore.exceptions.ClientError as err: - logging.error('Got status code: %s', err.response['Error']['Code']) - logging.error(err.response['Error']['Message']) - return 1 - return 0 diff --git a/src/tesk_core/job.py b/src/tesk_core/job.py deleted file mode 100644 index a1648314..00000000 --- a/src/tesk_core/job.py +++ /dev/null @@ -1,78 +0,0 @@ -import logging -import time -from datetime import datetime, timezone -from kubernetes import client, config -from kubernetes.client.rest import ApiException -from 
tesk_core.Util import pprint - - -logging.basicConfig(format='%(message)s', level=logging.INFO) -class Job: - def __init__(self, body, name='task-job', namespace='default'): - self.name = name - self.namespace = namespace - self.status = 'Initialized' - self.bv1 = client.BatchV1Api() - self.cv1 = client.CoreV1Api() - self.timeout = 240 - self.body = body - self.body['metadata']['name'] = self.name - - def run_to_completion(self, poll_interval, check_cancelled, pod_timeout): - - logging.debug("Creating job '{}'...".format(self.name)) - logging.debug(pprint(self.body)) - self.timeout = pod_timeout - try: - self.bv1.create_namespaced_job(self.namespace, self.body) - except ApiException as ex: - if ex.status == 409: - logging.debug(f"Reading existing job: {self.name} ") - self.bv1.read_namespaced_job(self.name, self.namespace) - else: - logging.debug(ex.body) - raise ApiException(ex.status, ex.reason) - is_all_pods_running = False - status, is_all_pods_running = self.get_status(is_all_pods_running) - while status == 'Running': - if check_cancelled(): - self.delete() - return 'Cancelled' - time.sleep(poll_interval) - status, is_all_pods_running = self.get_status(is_all_pods_running) - return status - - - def get_status(self, is_all_pods_runnning): - job = self.bv1.read_namespaced_job(self.name, self.namespace) - try: - if job.status.conditions[0].type == 'Complete' and job.status.conditions[0].status: - self.status = 'Complete' - elif job.status.conditions[0].type == 'Failed' and job.status.conditions[0].status: - self.status = 'Failed' - else: - self.status = 'Error' - except TypeError: # The condition is not initialized, so it is not complete yet, wait for it - self.status = 'Running' - job_duration = 0 - if job.status.active and job.status.start_time: - job_duration = (datetime.now(timezone.utc) - job.status.start_time).total_seconds() - if job_duration > self.timeout and not is_all_pods_runnning: - pods = (self.cv1.list_namespaced_pod(self.namespace - , label_selector='job-name={}'.format(self.name))).items - is_all_pods_runnning = True - for pod in pods: - if pod.status.phase == "Pending" and pod.status.start_time: - is_all_pods_runnning = False - delta = (datetime.now(timezone.utc) - pod.status.start_time).total_seconds() - if delta > self.timeout and \ - pod.status.container_statuses[0].state.waiting.reason == "ImagePullBackOff": - logging.info(pod.status.container_statuses[0].state.waiting) - return 'Error', is_all_pods_runnning - - return self.status, is_all_pods_runnning - - def delete(self): - logging.info("Removing failed jobs") - self.bv1.delete_namespaced_job( - self.name, self.namespace, body=client.V1DeleteOptions(propagation_policy="Background")) \ No newline at end of file diff --git a/src/tesk_core/path.py b/src/tesk_core/path.py deleted file mode 100644 index 00a60a44..00000000 --- a/src/tesk_core/path.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -from os.path import relpath -from tesk_core.exception import InvalidHostPath -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - - - - -def getEnv(varName): - - return os.environ.get(varName) - - -def getPathEnv(varName): - ''' - Gets a path from env var 'varName' and normalizes it - - e.g. removes trailing slashes. - This removes some cases from the rest of the code. 
- ''' - - varContent = getEnv(varName) - - return os.path.normpath(varContent) if varContent else None - - -HOST_BASE_PATH = getPathEnv('HOST_BASE_PATH') -CONTAINER_BASE_PATH = getPathEnv('CONTAINER_BASE_PATH') -TRANSFER_PVC_NAME = getEnv('TRANSFER_PVC_NAME') - -def fileEnabled(): - - return HOST_BASE_PATH is not None \ - and CONTAINER_BASE_PATH is not None - - -def getPath(url): - - parsed_url = urlparse(url) - - return parsed_url.path - - -def isDescendant(base, path): - ''' - Is 'path' is a descendant of 'base'? - ''' - - return os.path.commonprefix([base, path]) == base - - -def validatePath(path): - - if not isDescendant(HOST_BASE_PATH, path): - - raise InvalidHostPath("'{path}' is not a descendant of 'HOST_BASE_PATH' ({HOST_BASE_PATH})".format( path = path - , HOST_BASE_PATH = HOST_BASE_PATH - )) - - -def containerPath(path): - - validatePath(path) - - relPath = relpath(path, HOST_BASE_PATH) - - return os.path.join(CONTAINER_BASE_PATH, relPath) - - - diff --git a/src/tesk_core/pvc.py b/src/tesk_core/pvc.py deleted file mode 100644 index abf8e61b..00000000 --- a/src/tesk_core/pvc.py +++ /dev/null @@ -1,56 +0,0 @@ -from kubernetes import client, config -from kubernetes.client.rest import ApiException -from tesk_core.Util import pprint -import os -import logging - - -class PVC(): - - def __init__(self, name='task-pvc', size_gb=1, namespace='default'): - self.name = name - self.spec = {'apiVersion': 'v1', - 'kind': 'PersistentVolumeClaim', - 'metadata': {'name': name}, - 'spec': { - 'accessModes': ['ReadWriteOnce'], - 'resources': {'requests': {'storage': str(size_gb) + 'Gi'}} - } - } - - self.subpath_idx = 0 - self.namespace = namespace - self.cv1 = client.CoreV1Api() - - # The environment variable 'TESK_API_TASKMASTER_ENVIRONMENT_STORAGE_CLASS_NAME' - # can be set to the preferred, non-default, user-defined storageClass - if os.environ.get('STORAGE_CLASS_NAME') is not None: - self.spec['spec'].update({'storageClassName': os.environ.get('STORAGE_CLASS_NAME')}) - - def set_volume_mounts(self, mounts): - self.volume_mounts = mounts - - def get_subpath(self): - subpath = 'dir' + str(self.subpath_idx) - self.subpath_idx += 1 - return subpath - - def create(self): - - logging.debug('Creating PVC...') - logging.debug(pprint(self.spec)) - try: - return self.cv1.create_namespaced_persistent_volume_claim(self.namespace, self.spec) - except ApiException as ex: - if ex.status == 409: - logging.debug(f"Reading existing PVC: {self.name}") - return self.cv1.read_namespaced_persistent_volume_claim(self.name, self.namespace) - else: - logging.debug(ex.body) - raise ApiException(ex.status, ex.reason) - - - def delete(self): - cv1 = client.CoreV1Api() - cv1.delete_namespaced_persistent_volume_claim( - self.name, self.namespace, body=client.V1DeleteOptions()) diff --git a/src/tesk_core/taskmaster.py b/src/tesk_core/taskmaster.py deleted file mode 100755 index 0d1f182b..00000000 --- a/src/tesk_core/taskmaster.py +++ /dev/null @@ -1,339 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import json -import os -import re -import sys -import logging -import gzip -from kubernetes import client, config -from tesk_core.job import Job -from tesk_core.pvc import PVC -from tesk_core.filer_class import Filer - -created_jobs = [] -poll_interval = 5 -task_volume_basename = 'task-volume' -args = None -logger = None - -def run_executor(executor, namespace, pvc=None): - jobname = executor['metadata']['name'] - spec = executor['spec']['template']['spec'] - - if os.environ.get('EXECUTOR_BACKOFF_LIMIT') is not None: - 
executor['spec'].update({'backoffLimit': int(os.environ['EXECUTOR_BACKOFF_LIMIT'])}) - - if pvc is not None: - mounts = spec['containers'][0].setdefault('volumeMounts', []) - mounts.extend(pvc.volume_mounts) - volumes = spec.setdefault('volumes', []) - volumes.extend([{'name': task_volume_basename, 'persistentVolumeClaim': { - 'readonly': False, 'claimName': pvc.name}}]) - logger.debug('Created job: ' + jobname) - job = Job(executor, jobname, namespace) - logger.debug('Job spec: ' + str(job.body)) - - global created_jobs - created_jobs.append(job) - - status = job.run_to_completion(poll_interval, check_cancelled,args.pod_timeout) - if status != 'Complete': - if status == 'Error': - job.delete() - exit_cancelled('Got status ' + status) - -# TODO move this code to PVC class - - -def append_mount(volume_mounts, name, path, pvc): - - # Checks all mount paths in volume_mounts if the path given is already in - # there - duplicate = next( - (mount for mount in volume_mounts if mount['mountPath'] == path), - None) - # If not, add mount path - if duplicate is None: - subpath = pvc.get_subpath() - logger.debug(' '.join( - ['appending' + name + - 'at path' + path + - 'with subPath:' + subpath])) - volume_mounts.append( - {'name': name, 'mountPath': path, 'subPath': subpath}) - - -def dirname(iodata): - if iodata['type'] == 'FILE': - # strip filename from path - r = '(.*)/' - dirname = re.match(r, iodata['path']).group(1) - logger.debug('dirname of ' + iodata['path'] + 'is: ' + dirname) - elif iodata['type'] == 'DIRECTORY': - dirname = iodata['path'] - - return dirname - - -def generate_mounts(data, pvc): - volume_mounts = [] - - # gather volumes that need to be mounted, without duplicates - volume_name = task_volume_basename - for volume in data['volumes']: - append_mount(volume_mounts, volume_name, volume, pvc) - - # gather other paths that need to be mounted from inputs/outputs FILE and - # DIRECTORY entries - for aninput in data['inputs']: - dirnm = dirname(aninput) - append_mount(volume_mounts, volume_name, dirnm, pvc) - - for anoutput in data['outputs']: - dirnm = dirname(anoutput) - append_mount(volume_mounts, volume_name, dirnm, pvc) - - return volume_mounts - - -def init_pvc(data, filer): - task_name = data['executors'][0]['metadata']['labels']['taskmaster-name'] - pvc_name = task_name + '-pvc' - pvc_size = data['resources']['disk_gb'] - pvc = PVC(pvc_name, pvc_size, args.namespace) - - mounts = generate_mounts(data, pvc) - logging.debug(mounts) - logging.debug(type(mounts)) - pvc.set_volume_mounts(mounts) - filer.add_volume_mount(pvc) - - pvc.create() - # to global var for cleanup purposes - global created_pvc - created_pvc = pvc - - if os.environ.get('NETRC_SECRET_NAME') is not None: - filer.add_netrc_mount(os.environ.get('NETRC_SECRET_NAME')) - - filerjob = Job( - filer.get_spec('inputs', args.debug), - task_name + '-inputs-filer', - args.namespace) - - global created_jobs - created_jobs.append(filerjob) - # filerjob.run_to_completion(poll_interval) - status = filerjob.run_to_completion(poll_interval, check_cancelled, args.pod_timeout) - if status != 'Complete': - exit_cancelled('Got status ' + status) - - return pvc - - -def run_task(data, filer_name, filer_version, have_json_pvc=False): - task_name = data['executors'][0]['metadata']['labels']['taskmaster-name'] - pvc = None - - if have_json_pvc: - json_pvc = task_name - else: - json_pvc = None - - if data['volumes'] or data['inputs'] or data['outputs']: - - filer = Filer(task_name + '-filer', data, filer_name, filer_version, 
args.pull_policy_always, json_pvc) - - if os.environ.get('TESK_FTP_USERNAME') is not None: - filer.set_ftp( - os.environ['TESK_FTP_USERNAME'], - os.environ['TESK_FTP_PASSWORD']) - - if os.environ.get('FILER_BACKOFF_LIMIT') is not None: - filer.set_backoffLimit(int(os.environ['FILER_BACKOFF_LIMIT'])) - - pvc = init_pvc(data, filer) - - for executor in data['executors']: - run_executor(executor, args.namespace, pvc) - - # run executors - logging.debug("Finished running executors") - - # upload files and delete pvc - if data['volumes'] or data['inputs'] or data['outputs']: - filerjob = Job( - filer.get_spec('outputs', args.debug), - task_name + '-outputs-filer', - args.namespace) - - global created_jobs - created_jobs.append(filerjob) - - # filerjob.run_to_completion(poll_interval) - status = filerjob.run_to_completion(poll_interval, check_cancelled, args.pod_timeout) - if status != 'Complete': - exit_cancelled('Got status ' + status) - else: - pvc.delete() - - -def newParser(): - - parser = argparse.ArgumentParser(description='TaskMaster main module') - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument( - 'json', - help='string containing json TES request, required if -f is not given', - nargs='?') - group.add_argument( - '-f', - '--file', - help='TES request as a file or \'-\' for stdin, required if json is not given') - - parser.add_argument( - '-p', - '--poll-interval', - help='Job polling interval', - default=5) - parser.add_argument( - '-pt', - '--pod-timeout', - type=int, - help='Pod creation timeout', - default=240) - parser.add_argument( - '-fn', - '--filer-name', - help='Filer image version', - default='eu.gcr.io/tes-wes/filer') - parser.add_argument( - '-fv', - '--filer-version', - help='Filer image version', - default='v0.1.9') - parser.add_argument( - '-n', - '--namespace', - help='Kubernetes namespace to run in', - default='default') - parser.add_argument( - '-s', - '--state-file', - help='State file for state.py script', - default='/tmp/.teskstate') - parser.add_argument( - '-d', - '--debug', - help='Set debug mode', - action='store_true') - parser.add_argument( - '--localKubeConfig', - help='Read k8s configuration from localhost', - action='store_true') - parser.add_argument( - '--pull-policy-always', - help="set imagePullPolicy = 'Always'", - action='store_true') - - - return parser - - -def newLogger(loglevel): - logging.basicConfig( - format='%(asctime)s %(levelname)s: %(message)s', - datefmt='%m/%d/%Y %I:%M:%S', - level=loglevel) - logging.getLogger('kubernetes.client').setLevel(logging.CRITICAL) - logger = logging.getLogger(__name__) - - return logger - - - -def main(): - have_json_pvc = False - - parser = newParser() - global args - - args = parser.parse_args() - - poll_interval = args.poll_interval - - loglevel = logging.ERROR - if args.debug: - loglevel = logging.DEBUG - - global logger - logger = newLogger(loglevel) - logger.debug('Starting taskmaster') - - # Get input JSON - if args.file is None: - data = json.loads(args.json) - elif args.file == '-': - data = json.load(sys.stdin) - else: - if args.file.endswith('.gz'): - with gzip.open(args.file, 'rb') as fh: - data = json.loads(fh.read()) - have_json_pvc = True - else: - with open(args.file) as fh: - data = json.load(fh) - - # Load kubernetes config file - if args.localKubeConfig: - config.load_kube_config() - else: - config.load_incluster_config() - - global created_pvc - created_pvc = None - - # Check if we're cancelled during init - if check_cancelled(): - exit_cancelled('Cancelled 
during init') - - run_task(data, args.filer_name, args.filer_version, have_json_pvc) - - -def clean_on_interrupt(): - logger.debug('Caught interrupt signal, deleting jobs and pvc') - - for job in created_jobs: - job.delete() - - - -def exit_cancelled(reason='Unknown reason'): - logger.error('Cancelling taskmaster: ' + reason) - sys.exit(0) - - -def check_cancelled(): - - labelInfoFile = '/podinfo/labels' - - if not os.path.exists(labelInfoFile): - return False - - with open(labelInfoFile) as fh: - for line in fh.readlines(): - name, label = line.split('=') - logging.debug('Got label: ' + label) - if label == '"Cancelled"': - return True - - return False - - -if __name__ == "__main__": - try: - main() - except KeyboardInterrupt: - clean_on_interrupt() diff --git a/src/tesk_core/transput.py b/src/tesk_core/transput.py deleted file mode 100644 index 17494ca9..00000000 --- a/src/tesk_core/transput.py +++ /dev/null @@ -1,81 +0,0 @@ -import enum -import os -import netrc -import logging -try: - from urllib.parse import urlparse -except ImportError: - from urlparse import urlparse - - -@enum.unique -class Type(enum.Enum): - File = 'FILE' - Directory = 'DIRECTORY' - - -class Transput: - def __init__(self, path, url, ftype): - self.path = path - self.url = url - self.ftype = ftype - - parsed_url = urlparse(url) - self.netloc = parsed_url.netloc - self.url_path = parsed_url.path - self.netrc_file = None - try: - netrc_path = os.path.join(os.environ['HOME'], '.netrc') - except KeyError: - netrc_path = '/.netrc' - try: - self.netrc_file = netrc.netrc(netrc_path) - except IOError as fnfe: - logging.error(fnfe) - except netrc.NetrcParseError as err: - logging.error('netrc.NetrcParseError') - logging.error(err) - except Exception as er: - logging.error(er) - - def upload(self): - logging.debug('%s uploading %s %s', self.__class__.__name__, - self.ftype, self.url) - if self.ftype == Type.File: - return self.upload_file() - if self.ftype == Type.Directory: - return self.upload_dir() - return 1 - - def download(self): - logging.debug('%s downloading %s %s', self.__class__.__name__, - self.ftype, self.url) - if self.ftype == Type.File: - return self.download_file() - if self.ftype == Type.Directory: - return self.download_dir() - return 1 - - def delete(self): - pass - - def download_file(self): - raise NotImplementedError() - - def download_dir(self): - raise NotImplementedError() - - def upload_file(self): - raise NotImplementedError() - - def upload_dir(self): - raise NotImplementedError() - - # make it compatible with contexts (with keyword) - def __enter__(self): - return self - - def __exit__(self, error_type, error_value, traceback): - self.delete() - # Swallow all exceptions since the filer mostly works with error codes - return False diff --git a/taskmaster b/taskmaster deleted file mode 100755 index bafd8759..00000000 --- a/taskmaster +++ /dev/null @@ -1,3 +0,0 @@ - - -PYTHONPATH="src" python src/tesk_core/taskmaster.py "$@" diff --git a/tesk/__init__.py b/tesk/__init__.py new file mode 100644 index 00000000..168e28a8 --- /dev/null +++ b/tesk/__init__.py @@ -0,0 +1,3 @@ +""" +_summary_:tesk package initialization file. 
+""" diff --git a/src/tesk_core/README.md b/tesk/services/README.md similarity index 100% rename from src/tesk_core/README.md rename to tesk/services/README.md diff --git a/tesk/services/__init__.py b/tesk/services/__init__.py new file mode 100644 index 00000000..4df220ff --- /dev/null +++ b/tesk/services/__init__.py @@ -0,0 +1,3 @@ +""" +_summary_:service (tesk_core) package initialization file. +""" diff --git a/tesk/services/constants.py b/tesk/services/constants.py new file mode 100644 index 00000000..0c813c78 --- /dev/null +++ b/tesk/services/constants.py @@ -0,0 +1,3 @@ +import os + +TIMEOUT = os.getenv('TIMEOUT', '100') diff --git a/tesk/services/exceptions.py b/tesk/services/exceptions.py new file mode 100644 index 00000000..e801ca2c --- /dev/null +++ b/tesk/services/exceptions.py @@ -0,0 +1,29 @@ +class ServiceStatusCodes: + OK = 200 + REDIRECT = 300 + CREATED = 201 + CONFLICT = 409 + NOT_FOUND = 404 + BAD_REQUEST = 400 + UNAUTHORIZED = 401 + FORBIDDEN = 403 + INTERNAL_SERVER_ERROR = 500 + NOT_IMPLEMENTED = 501 + SERVICE_UNAVAILABLE = 503 + GATEWAY_TIMEOUT = 504 + + @classmethod + def get(cls, status_name): + return getattr(cls, status_name.upper(), None) + + +class UnknownProtocol(Exception): + pass + + +class FileProtocolDisabled(Exception): + pass + + +class InvalidHostPath(Exception): + pass diff --git a/tesk/services/filer.py b/tesk/services/filer.py new file mode 100755 index 00000000..54e3328d --- /dev/null +++ b/tesk/services/filer.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python3 + +import argparse +import distutils.dir_util +import ftplib +import gzip +import json +import logging +import os +import re +import shutil +import sys +from ftplib import FTP +from glob import glob + +import requests + +from tesk.services.constants import TIMEOUT +from tesk.services.exceptions import ( + FileProtocolDisabled, + ServiceStatusCodes, + UnknownProtocol, +) +from tesk.services.filer_s3 import S3Transput +from tesk.services.path import containerPath, fileEnabled, getPath +from tesk.services.transput import Transput, Type, urlparse + + +class HTTPTransput(Transput): + def __init__(self, path, url, ftype): + Transput.__init__(self, path, url, ftype) + + def download_file(self): + req = requests.get(self.url, timeout=TIMEOUT) + + if ( + req.status_code < ServiceStatusCodes.OK + or req.status_code >= ServiceStatusCodes.REDIRECT + ): + logging.error('Got status code: %d', req.status_code) + logging.error(req.text) + return 1 + logging.debug('OK, got status code: %d', req.status_code) + + with open(self.path, 'wb') as file: + file.write(req.content) + return 0 + + def upload_file(self): + with open(self.path) as file: + file_contents = file.read() + req = requests.put(self.url, data=file_contents, timeout=TIMEOUT) + + if ( + req.status_code < ServiceStatusCodes.OK + or req.status_code >= ServiceStatusCodes.REDIRECT + ): + logging.error('Got status code: %d', req.status_code) + logging.error(req.text) + return 1 + logging.debug('OK, got status code: %d', req.status_code) + + return 0 + + def upload_dir(self): + to_upload = [] + for listing in os.listdir(self.path): + file_path = f'{self.path}/{listing}' + if os.path.isdir(file_path): + ftype = Type.Directory + elif os.path.isfile(file_path): + ftype = Type.File + else: + return 1 + to_upload.append(HTTPTransput(file_path, f'{self.url}/{listing}', ftype)) + + # return 1 if any upload failed + return min(sum(transput.upload() for transput in to_upload), 1) + + def download_dir(self): + logging.error( + "Won't crawl http directory, so unable to 
+        )
+        return 1
+
+
+def copyContent(src, dst, symlinks=False, ignore=None):
+    """
+    https://stackoverflow.com/a/12514470/1553043
+    """
+
+    for item in os.listdir(src):
+        s = os.path.join(src, item)
+        d = os.path.join(dst, item)
+        if os.path.isdir(s):
+            shutil.copytree(s, d, symlinks, ignore)
+        else:
+            shutil.copy2(s, d)
+
+
+def copyDir(src, dst):
+    """
+    Limitation of shutil.copytree:
+
+    The destination directory, named by dst, must not already exist;
+    it will be created as well as missing parent directories.
+    """
+
+    if os.path.exists(dst):
+        copyContent(src, dst)
+
+    else:
+        shutil.copytree(src, dst)
+
+
+def copyFile(src, dst):
+    """
+    Limitations of shutil.copy:
+
+    It does not interpret * as a glob, but as a character.
+    """
+
+    # If there is any * in 'dst', use only the dirname (base path)
+    p = re.compile(r'.*\*.*')
+    if p.match(dst):
+        dst = os.path.dirname(dst)
+
+    for file in glob(src):
+        shutil.copy(file, dst)
+
+
+class FileTransput(Transput):
+    def __init__(self, path, url, ftype):
+        Transput.__init__(self, path, url, ftype)
+
+        self.urlContainerPath = containerPath(getPath(self.url))
+
+    def transfer(self, copyFn, src, dst):
+        logging.debug('Copying {src} to {dst}'.format(**locals()))
+        copyFn(src, dst)
+
+    def download_file(self):
+        self.transfer(shutil.copy, self.urlContainerPath, self.path)
+
+    def download_dir(self):
+        self.transfer(copyDir, self.urlContainerPath, self.path)
+
+    def upload_file(self):
+        self.transfer(copyFile, self.path, self.urlContainerPath)
+
+    def upload_dir(self):
+        self.transfer(copyDir, self.path, self.urlContainerPath)
+
+
+class FTPTransput(Transput):
+    def __init__(self, path, url, ftype, ftp_conn=None):
+        Transput.__init__(self, path, url, ftype)
+
+        self.connection_owner = ftp_conn is None
+        self.ftp_connection = FTP() if ftp_conn is None else ftp_conn
+
+    # entice users to use contexts when using this class
+    def __enter__(self):
+        if self.connection_owner:
+            self.ftp_connection.connect(self.netloc)
+            ftp_login(self.ftp_connection, self.netloc, self.netrc_file)
+        return self
+
+    def upload_dir(self):
+        for file in os.listdir(self.path):
+            file_path = f'{self.path}/{file}'
+            file_url = f'{self.url}/{file}'
+
+            if os.path.isdir(file_path):
+                ftype = Type.Directory
+            elif os.path.isfile(file_path):
+                ftype = Type.File
+            else:
+                logging.error(
+                    'Directory listing entry is neither file nor directory: "%s"',
+                    file_url,
+                )
+                return 1
+
+            logging.debug('Uploading %s\t"%s"', ftype.value, file_path)
+
+            # We recurse into new transputs, ending with files which are uploaded
+            # Downside is nothing happens with empty dirs.
+            with FTPTransput(file_path, file_url, ftype) as transfer:
+                if transfer.upload():
+                    return 1
+        return 0
+
+    def upload_file(self):
+        if ftp_make_dirs(self.ftp_connection, os.path.dirname(self.url_path)):
+            logging.error('Unable to create remote directories needed for %s', self.url)
+            return 1
+
+        if not ftp_check_directory(self.ftp_connection, self.url_path):
+            return 1
+
+        return ftp_upload_file(self.ftp_connection, self.path, self.url_path)
+
+    def download_dir(self):
+        logging.debug('Processing ftp dir: %s target: %s', self.url, self.path)
+        self.ftp_connection.cwd(self.url_path)
+
+        # This is horrible and I'm sorry but it works flawlessly.
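+        # Each LIST line is matched against a Unix-style `ls -l` layout:
+        # type flag, permissions, link count, owner, group, size, a timestamp
+        # in one of two formats, and finally the entry name. Only the named
+        # groups 'dir' and 'name' are consumed below.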
+        # Credit to Chris Haas for writing this
+        # See https://stackoverflow.com/questions/966578/parse-response-from-ftp-list-command-syntax-variations
+        # for attribution
+        ftp_command = re.compile(
+            r'^(?P<dir>[\-ld])(?P<permissions>([\-r][\-w][\-xs]){3})\s+(?P<filecode>\d+)\s+(?P<owner>\w+)\s+(?P<group>\w+)\s+(?P<size>\d+)\s+(?P<timestamp>((\w{3})\s+(\d{2})\s+(\d{1,2}):(\d{2}))|((\w{3})\s+(\d{1,2})\s+(\d{4})))\s+(?P<name>.+)$'
+        )
+
+        lines = []
+        self.ftp_connection.retrlines('LIST', lines.append)
+
+        for line in lines:
+            matches = ftp_command.match(line)
+            dirbit = matches['dir']
+            name = matches['name']
+
+            file_path = f'{self.path}/{name}'
+            file_url = f'{self.url}/{name}'
+
+            ftype = Type.Directory if dirbit == 'd' else Type.File
+
+            # We recurse into new transputs, ending with files which are downloaded
+            # Downside is nothing happens with empty dirs.
+            with FTPTransput(
+                file_path, file_url, ftype, self.ftp_connection
+            ) as transfer:
+                if transfer.download():
+                    return 1
+        return 0
+
+    def download_file(self):
+        logging.debug('Downloading ftp file: "%s" Target: %s', self.url, self.path)
+        basedir = os.path.dirname(self.path)
+        distutils.dir_util.mkpath(basedir)
+
+        return ftp_download_file(self.ftp_connection, self.url_path, self.path)
+
+    def delete(self):
+        if self.connection_owner:
+            self.ftp_connection.close()
+
+
+def ftp_login(ftp_connection, netloc, netrc_file):
+    user = None
+    if netrc_file is not None:
+        if creds := netrc_file.authenticators(netloc):
+            user, _, password = creds
+    elif 'TESK_FTP_USERNAME' in os.environ and 'TESK_FTP_PASSWORD' in os.environ:
+        user = os.environ['TESK_FTP_USERNAME']
+        password = os.environ['TESK_FTP_PASSWORD']
+
+    if user:
+        try:
+            ftp_connection.login(user, password)
+        except ftplib.error_perm:
+            ftp_connection.login()
+    else:
+        ftp_connection.login()
+
+
+def ftp_check_directory(ftp_connection, path):
+    """
+    Following convention with the rest of the code,
+    return 0 if it is a directory, 1 if it is not or failed to do the check
+    """
+    response = ftp_connection.pwd()
+    if response == '':
+        return 1
+    original_directory = response
+
+    # We are NOT scp, so we won't create a file when filename is not
+    # specified (mirrors input behaviour)
+    try:
+        ftp_connection.cwd(path)
+        logging.error(
+            'Path "%s" at "%s" already exists and is a folder. 
\ + Please specify a target filename and retry', + path, + ftp_connection.host, + ) + is_directory = True + except ftplib.error_perm: + is_directory = False + except (ftplib.error_reply, ftplib.error_temp): + logging.exception( + 'Could not check if path "%s" in "%s" is directory', + path, + ftp_connection.host, + ) + return 1 + try: + ftp_connection.cwd(original_directory) + except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): + logging.exception( + 'Error when checking if "%s" in "%s" was a directory', + path, + ftp_connection.host, + ) + return 1 + + return 0 if is_directory else 1 + + +def ftp_upload_file(ftp_connection, local_source_path, remote_destination_path): + try: + with open(local_source_path, 'r+b') as file: + ftp_connection.storbinary(f'STOR /{remote_destination_path}', file) + except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): + logging.exception( + 'Unable to upload file "%s" to "%s" as "%s"', + local_source_path, + ftp_connection.host, + remote_destination_path, + ) + return 1 + return 0 + + +def ftp_download_file(ftp_connection, remote_source_path, local_destination_path): + try: + with open(local_destination_path, 'w+b') as file: + ftp_connection.retrbinary(f'RETR {remote_source_path}', file.write) + except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): + logging.exception( + 'Unable to download file "%s" from "%s" as "%s"', + remote_source_path, + ftp_connection.host, + local_destination_path, + ) + return 1 + return 0 + + +def subfolders_in(whole_path): + """ + Returns all subfolders in a path, in order + + >>> subfolders_in('/') + ['/'] + + >>> subfolders_in('/this/is/a/path') + ['/this', '/this/is', '/this/is/a', '/this/is/a/path'] + + >>> subfolders_in('this/is/a/path') + ['this', 'this/is', 'this/is/a', 'this/is/a/path'] + """ + path_fragments = whole_path.lstrip('/').split('/') + if whole_path.startswith('/'): + path_fragments[0] = f'/{path_fragments[0]}' + path = path_fragments[0] + subfolders = [path] + for fragment in path_fragments[1:]: + path += f'/{fragment}' + subfolders.append(path) + return subfolders + + +def ftp_make_dirs(ftp_connection, path): # noqa: PLR0911 + response = ftp_connection.pwd() + if response == '': + return 1 + original_directory = response + + # if directory exists do not do anything else + try: + ftp_connection.cwd(path) + return 0 + except (ftplib.error_perm, ftplib.error_temp): + pass + except ftplib.error_reply: + logging.exception( + 'Unable to create directory "%s" at "%s"', path, ftp_connection.host + ) + return 1 + + for subfolder in subfolders_in(path): + try: + ftp_connection.cwd(subfolder) + except (ftplib.error_perm, ftplib.error_temp): + try: + ftp_connection.mkd(subfolder) + except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): + logging.exception( + 'Unable to create directory "%s" at "%s"', + subfolder, + ftp_connection.host, + ) + return 1 + except ftplib.error_reply: + logging.exception( + 'Unable to create directory "%s" at "%s"', path, ftp_connection.host + ) + return 1 + + try: + ftp_connection.cwd(original_directory) + except (ftplib.error_reply, ftplib.error_perm, ftplib.error_temp): + logging.exception( + 'Unable to create directory "%s" at "%s"', path, ftp_connection.host + ) + return 1 + return 0 + + +def file_from_content(filedata): + with open(filedata['path'], 'w') as file: + file.write(str(filedata['content'])) + return 0 + + +def newTransput(scheme, netloc): + def fileTransputIfEnabled(): + if fileEnabled(): + return FileTransput + raise 
FileProtocolDisabled( + "'file:' protocol disabled\nTo enable it, both HOST_BASE_PATH and CONTAINER_BASE_PATH environment variables must be defined." # noqa: E501 + ) + + if scheme == 'ftp': + return FTPTransput + elif scheme == 'file': + return fileTransputIfEnabled() + elif scheme in ['http', 'https']: + return HTTPTransput + elif scheme == 's3': + return S3Transput + else: + raise UnknownProtocol("Unknown protocol: '{scheme}'".format(**locals())) + + +def process_file(ttype, filedata): + """ + @param ttype: str + Can be 'inputs' or 'outputs' + """ + + if 'content' in filedata: + return file_from_content(filedata) + parsed_url = urlparse(filedata['url']) + scheme = parsed_url.scheme + netloc = parsed_url.netloc + if scheme == '': + logging.info( + 'Could not determine protocol for url: "%s", assuming "file"', + filedata['url'], + ) + scheme = 'file' + + trans = newTransput(scheme, netloc) + + with trans(filedata['path'], filedata['url'], Type(filedata['type'])) as transfer: + if ttype == 'inputs': + return transfer.download() + if ttype == 'outputs': + return transfer.upload() + + logging.info('There was no action to do with %s', filedata['path']) + return 0 + + +def logConfig(loglevel): + logging.basicConfig( + format='%(asctime)s %(levelname)s: %(message)s', + datefmt='%m/%d/%Y %I:%M:%S', + level=loglevel, + stream=sys.stdout, + ) + + +def main(): + parser = argparse.ArgumentParser( + description='Filer script for down- and uploading files' + ) + parser.add_argument( + 'transputtype', help="transput to handle, either 'inputs' or 'outputs' " + ) + parser.add_argument('data', help='file description data, see docs for structure') + parser.add_argument('--debug', '-d', help='debug logging', action='store_true') + args = parser.parse_args() + + loglevel = logging.DEBUG if args.debug else logging.ERROR + + logConfig(loglevel) + + logging.info('Starting %s filer...', args.transputtype) + + if args.data.endswith('.gz'): + with gzip.open(args.data, 'rb') as fh: + data = json.loads(fh.read()) + else: + data = json.loads(args.data) + + for afile in data[args.transputtype]: + logging.debug('Processing file: %s', afile['path']) + if process_file(args.transputtype, afile): + logging.error('Unable to process file, aborting') + return 1 + logging.debug('Processed file: %s', afile['path']) + + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/tesk/services/filer_class.py b/tesk/services/filer_class.py new file mode 100644 index 00000000..8b87657c --- /dev/null +++ b/tesk/services/filer_class.py @@ -0,0 +1,178 @@ +import json + +from tesk.services import path +from tesk.services.path import fileEnabled + + +class Filer: + def getVolumes(self): + return self.spec['spec']['template']['spec']['volumes'] + + def getContainer(self, i): + return self.spec['spec']['template']['spec']['containers'][i] + + def getVolumeMounts(self): + return self.getContainer(0)['volumeMounts'] + + def getEnv(self): + return self.getContainer(0)['env'] + + def getImagePullPolicy(self): + return self.getContainer(0)['imagePullPolicy'] + + def __init__( # noqa: PLR0913 + self, + name, + data, + filer_name='eu.gcr.io/tes-wes/filer', + filer_version='v0.5', + pullPolicyAlways=False, + json_pvc=None, + ): + self.name = name + self.json_pvc = json_pvc + self.spec = { + 'kind': 'Job', + 'apiVersion': 'batch/v1', + 'metadata': {'name': name}, + 'spec': { + 'template': { + 'metadata': {'name': 'tesk-filer'}, + 'spec': { + 'containers': [ + { + 'name': 'filer', + 'image': f'{filer_name}:{filer_version}', + 'args': 
[], + 'env': [], + 'volumeMounts': [], + 'imagePullPolicy': 'Always' + if pullPolicyAlways + else 'IfNotPresent', + } + ], + 'volumes': [], + 'restartPolicy': 'Never', + }, + } + }, + } + + env = self.getEnv() + if json_pvc is None: + env.append({'name': 'JSON_INPUT', 'value': json.dumps(data)}) + env.append({'name': 'HOST_BASE_PATH', 'value': path.HOST_BASE_PATH}) + env.append({'name': 'CONTAINER_BASE_PATH', 'value': path.CONTAINER_BASE_PATH}) + + if json_pvc: + self.getVolumeMounts().append( + {'name': 'jsoninput', 'mountPath': '/jsoninput'} + ) + self.getVolumes().append( + {'name': 'jsoninput', 'configMap': {'name': json_pvc}} + ) + + if fileEnabled(): + self.getVolumeMounts().append( + {'name': 'transfer-volume', 'mountPath': path.CONTAINER_BASE_PATH} + ) + + self.getVolumes().append( + { + 'name': 'transfer-volume', + 'persistentVolumeClaim': {'claimName': path.TRANSFER_PVC_NAME}, + } + ) + + self.add_s3_mount() + + def add_s3_mount(self): + """Mounts the s3 configuration file. The secret name is hardcoded and + set to 'aws-secret'. + """ + + env = self.getEnv() + env.append({'name': 'AWS_CONFIG_FILE', 'value': '/aws/config'}) + env.append({'name': 'AWS_SHARED_CREDENTIALS_FILE', 'value': '/aws/credentials'}) + + self.getVolumeMounts().append( + { + 'name': 's3-conf', + 'mountPath': '/aws', + 'readOnly': True, + } + ) + self.getVolumes().append( + { + 'name': 's3-conf', + 'secret': { + 'secretName': 'aws-secret', + 'items': [ + {'key': 'credentials', 'path': 'credentials'}, + {'key': 'config', 'path': 'config'}, + ], + 'optional': True, + }, + } + ) + + def set_ftp(self, user, pw): + env = self.getEnv() + env.append({'name': 'TESK_FTP_USERNAME', 'value': user}) + env.append({'name': 'TESK_FTP_PASSWORD', 'value': pw}) + + def set_backoffLimit(self, limit): + """Set a number of retries of a job execution (default value is 6). Use the + environment variable TESK_API_TASKMASTER_ENVIRONMENT_FILER_BACKOFF_LIMIT to + explicitly set this value. + + Args: + limit: The number of retries before considering a Job as failed. + """ + self.spec['spec'].update({'backoffLimit': limit}) + + def add_volume_mount(self, pvc): + self.getVolumeMounts().extend(pvc.volume_mounts) + self.getVolumes().append( + {'name': 'task-volume', 'persistentVolumeClaim': {'claimName': pvc.name}} + ) + + def add_netrc_mount(self, netrc_name='netrc'): + """ + Sets $HOME to an arbitrary location (to prevent its change as a result of + runAsUser), currently hardcoded to `/opt/home` Mounts the secret netrc into + that location: $HOME/.netrc. 
+ """ + + self.getVolumeMounts().append( + {'name': 'netrc', 'mountPath': '/opt/home/.netrc', 'subPath': '.netrc'} + ) + self.getVolumes().append( + { + 'name': 'netrc', + 'secret': { + 'secretName': netrc_name, + 'defaultMode': 420, + 'items': [{'key': '.netrc', 'path': '.netrc'}], + }, + } + ) + self.getEnv().append({'name': 'HOME', 'value': '/opt/home'}) + + def get_spec(self, mode, debug=False): + if self.json_pvc is None: + self.spec['spec']['template']['spec']['containers'][0]['args'] = [ + mode, + '$(JSON_INPUT)', + ] + else: + self.spec['spec']['template']['spec']['containers'][0]['args'] = [ + mode, + '/jsoninput/JSON_INPUT.gz', + ] + + if debug: + self.spec['spec']['template']['spec']['containers'][0]['args'].append('-d') + + self.spec['spec']['template']['metadata']['name'] = self.name + return self.spec diff --git a/tesk/services/filer_s3.py b/tesk/services/filer_s3.py new file mode 100644 index 00000000..11451787 --- /dev/null +++ b/tesk/services/filer_s3.py @@ -0,0 +1,146 @@ +import logging +import os +import re +import sys + +import boto3 +import botocore + +from tesk.services.transput import Transput, Type + + +class S3Transput(Transput): + def __init__(self, path, url, ftype): + Transput.__init__(self, path, url, ftype) + self.bucket, self.file_path = self.get_bucket_name_and_file_path() + self.bucket_obj = None + + def __enter__(self): + client = boto3.resource('s3', endpoint_url=self.extract_endpoint()) + if self.check_if_bucket_exists(client): + sys.exit(1) + self.bucket_obj = client.Bucket(self.bucket) + return self + + def extract_endpoint(self): + return boto3.client('s3').meta.endpoint_url + + def check_if_bucket_exists(self, client): + try: + client.meta.client.head_bucket(Bucket=self.bucket) + except botocore.exceptions.ClientError as e: + # If a client error is thrown, then check that it was a 404 error. + # If it was a 404 error, then the bucket does not exist. 
logging.error('Got status code: %s', e.response['Error']['Code'])
+            if e.response['Error']['Code'] == '404':
+                logging.error(
+                    'Failed to fetch Bucket, reason: %s', e.response['Error']['Message']
+                )
+                return 1
+        return 0
+
+    def get_bucket_name_and_file_path(self):
+        """
+        Extract the bucket name and the object's file path from an S3 URL of
+        the form s3://<bucket>/<key>, e.g. s3://idr-bucket-1/README.txt
+        """
+
+        bucket = self.netloc
+        file_path = self.url_path[1:]
+
+        return bucket, file_path
+
+    def download_file(self):
+        logging.debug(
+            'Downloading s3 object: "%s" Target: %s',
+            f'{self.bucket}/{self.file_path}',
+            self.path,
+        )
+        basedir = os.path.dirname(self.path)
+        os.makedirs(basedir, exist_ok=True)
+        return self.get_s3_file(self.path, self.file_path)
+
+    def upload_file(self):
+        logging.debug(
+            'Uploading s3 object: "%s" Target: %s',
+            self.path,
+            f'{self.bucket}/{self.file_path}',
+        )
+        try:
+            self.bucket_obj.upload_file(Filename=self.path, Key=self.file_path)
+        except (botocore.exceptions.ClientError, OSError) as err:
+            logging.error(
+                "File upload failed for '%s'", f'{self.bucket}/{self.file_path}'
+            )
+            logging.error(err)
+            return 1
+        return 0
+
+    def upload_dir(self):
+        logging.debug(
+            'Uploading s3 object: "%s" Target: %s',
+            self.path,
+            f'{self.bucket}/{self.file_path}',
+        )
+        try:
+            for item in os.listdir(self.path):
+                path = os.path.join(self.path, item)
+                if os.path.isdir(path):
+                    file_type = Type.Directory
+                elif os.path.isfile(path):
+                    file_type = Type.File
+                else:
+                    # An exception is raised if the object is neither a file
+                    # nor a directory
+                    logging.error("Object is neither a file nor a directory: '%s'", path)
+                    raise OSError
+                file_path = os.path.join(self.url, item)
+                with S3Transput(path, file_path, file_type) as transfer:
+                    if transfer.upload():
+                        return 1
+        except OSError as err:
+            logging.error(
+                "File upload failed for '%s'", f'{self.bucket}/{self.file_path}'
+            )
+            logging.error(err)
+            return 1
+        return 0
+
+    def download_dir(self):
+        logging.debug(
+            'Downloading s3 object: "%s" Target: %s',
+            f'{self.bucket}/{self.file_path}',
+            self.path,
+        )
+        client = boto3.client('s3', endpoint_url=self.extract_endpoint())
+        if not self.file_path.endswith('/'):
+            self.file_path += '/'
+        objects = client.list_objects_v2(Bucket=self.bucket, Prefix=self.file_path)
+
+        # If the file path does not exist in the s3 bucket, the 'Contents'
+        # key will not be present in objects
+        if 'Contents' not in objects:
+            logging.error('Got status code: %s', 404)
+            logging.error('Invalid file path!')
+            return 1
+
+        # Looping through the list of objects and downloading them
+        for obj in objects['Contents']:
+            file_name = os.path.basename(obj['Key'])
+            dir_name = os.path.dirname(obj['Key'])
+            # Escape the prefix so that regex metacharacters in the path are
+            # matched literally
+            path_to_create = re.sub(
+                r'^' + re.escape(self.file_path.strip('/')), '', dir_name
+            ).strip('/')
+            path_to_create = os.path.join(self.path, path_to_create)
+            os.makedirs(path_to_create, exist_ok=True)
+            if self.get_s3_file(os.path.join(path_to_create, file_name), obj['Key']):
+                return 1
+        return 0
+
+    def get_s3_file(self, file_name, key):
+        try:
+            self.bucket_obj.download_file(Filename=file_name, Key=key)
+        except botocore.exceptions.ClientError as err:
+            logging.error('Got status code: %s', err.response['Error']['Code'])
+            logging.error(err.response['Error']['Message'])
+            return 1
+        return 0
diff --git a/tesk/services/job.py b/tesk/services/job.py
new file mode 100644
index 00000000..2de163cb
--- /dev/null
+++ b/tesk/services/job.py
@@ -0,0 +1,101 @@
+import logging
+import time
+from datetime import datetime, timezone
+
+from kubernetes import client
+from kubernetes.client.exceptions import ApiException
+
+from tesk.services.exceptions import ServiceStatusCodes
+from tesk.services.utils import pprint
+
+logging.basicConfig(format='%(message)s', level=logging.INFO)
+
+
+class Job:
+    def __init__(self, body, name='task-job', namespace='default'):
+        self.name = name
+        self.namespace = namespace
+        self.status = 'Initialized'
+        self.bv1 = client.BatchV1Api()
+        self.cv1 = client.CoreV1Api()
+        self.timeout = 240
+        self.body = body
+        self.body['metadata']['name'] = self.name
+
+    def run_to_completion(self, poll_interval, check_cancelled, pod_timeout):
+        logging.debug(f"Creating job '{self.name}'...")
+        logging.debug(pprint(self.body))
+        self.timeout = pod_timeout
+        try:
+            self.bv1.create_namespaced_job(self.namespace, self.body)
+        except ApiException as ex:
+            if ex.status == ServiceStatusCodes.CONFLICT:
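+                # A 409 Conflict means a job with this name already exists
+                # (e.g. the taskmaster was restarted); reuse it rather than
+                # failing.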
+                logging.debug(f'Reading existing job: {self.name}')
+                self.bv1.read_namespaced_job(self.name, self.namespace)
+            else:
+                logging.debug(ex.body)
+                raise ApiException(ex.status, ex.reason) from None
+        is_all_pods_running = False
+        status, is_all_pods_running = self.get_status(is_all_pods_running)
+        while status == 'Running':
+            if check_cancelled():
+                self.delete()
+                return 'Cancelled'
+            time.sleep(poll_interval)
+            status, is_all_pods_running = self.get_status(is_all_pods_running)
+        return status
+
+    def get_status(self, is_all_pods_running):
+        job = self.bv1.read_namespaced_job(self.name, self.namespace)
+        try:
+            if (
+                job.status.conditions[0].type == 'Complete'
+                and job.status.conditions[0].status == 'True'
+            ):
+                self.status = 'Complete'
+            elif (
+                job.status.conditions[0].type == 'Failed'
+                and job.status.conditions[0].status == 'True'
+            ):
+                self.status = 'Failed'
+            else:
+                self.status = 'Error'
+        except TypeError:
+            # The condition is not initialized yet, so the job is not
+            # complete; keep waiting.
+            self.status = 'Running'
+        job_duration = 0
+        if job.status.active and job.status.start_time:
+            job_duration = (
+                datetime.now(timezone.utc) - job.status.start_time
+            ).total_seconds()
+        if job_duration > self.timeout and not is_all_pods_running:
+            pods = (
+                self.cv1.list_namespaced_pod(
+                    self.namespace, label_selector=f'job-name={self.name}'
+                )
+            ).items
+            is_all_pods_running = True
+            for pod in pods:
+                if pod.status.phase == 'Pending' and pod.status.start_time:
+                    is_all_pods_running = False
+                    delta = (
+                        datetime.now(timezone.utc) - pod.status.start_time
+                    ).total_seconds()
+                    if (
+                        delta > self.timeout
+                        and pod.status.container_statuses[0].state.waiting.reason
+                        == 'ImagePullBackOff'
+                    ):
+                        logging.info(pod.status.container_statuses[0].state.waiting)
+                        return 'Error', is_all_pods_running
+
+        return self.status, is_all_pods_running
+
+    def delete(self):
+        logging.info('Removing failed jobs')
+        self.bv1.delete_namespaced_job(
+            self.name,
+            self.namespace,
+            body=client.V1DeleteOptions(propagation_policy='Background'),
+        )
diff --git a/tesk/services/path.py b/tesk/services/path.py
new file mode 100644
index 00000000..4dee35e3
--- /dev/null
+++ b/tesk/services/path.py
@@ -0,0 +1,60 @@
+import os
+from os.path import relpath
+from urllib.parse import urlparse
+
+from tesk.services.exceptions import InvalidHostPath
+
+
+def getEnv(varName):
+    return os.environ.get(varName)
+
+
+def getPathEnv(varName):
+    """
+    Gets a path from env var 'varName' and normalizes it
+
+    e.g. removes trailing slashes.
+    This spares the rest of the code from handling both variants.
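+
+    Illustrative example: with HOST_BASE_PATH set to '/transfer/',
+    getPathEnv('HOST_BASE_PATH') returns '/transfer'.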
+ """ + + varContent = getEnv(varName) + + return os.path.normpath(varContent) if varContent else None + + +HOST_BASE_PATH = getPathEnv('HOST_BASE_PATH') +CONTAINER_BASE_PATH = getPathEnv('CONTAINER_BASE_PATH') +TRANSFER_PVC_NAME = getEnv('TRANSFER_PVC_NAME') + + +def fileEnabled(): + return HOST_BASE_PATH is not None and CONTAINER_BASE_PATH is not None + + +def getPath(url): + parsed_url = urlparse(url) + + return parsed_url.path + + +def isDescendant(base, path): + """ + Is 'path' is a descendant of 'base'? + """ + + return os.path.commonprefix([base, path]) == base + + +def validatePath(path): + if not isDescendant(HOST_BASE_PATH, path): + raise InvalidHostPath( + f"'{path}' is not a descendant of 'HOST_BASE_PATH' ({HOST_BASE_PATH})" + ) + + +def containerPath(path): + validatePath(path) + + relPath = relpath(path, HOST_BASE_PATH) + + return os.path.join(CONTAINER_BASE_PATH, relPath) diff --git a/tesk/services/pvc.py b/tesk/services/pvc.py new file mode 100644 index 00000000..07733580 --- /dev/null +++ b/tesk/services/pvc.py @@ -0,0 +1,64 @@ +import logging +import os + +from kubernetes import client +from kubernetes.client.exceptions import ApiException + +from tesk.services.exceptions import ServiceStatusCodes +from tesk.services.utils import pprint + + +class PVC: + def __init__(self, name='task-pvc', size_gb=1, namespace='default'): + self.name = name + self.spec = { + 'apiVersion': 'v1', + 'kind': 'PersistentVolumeClaim', + 'metadata': {'name': name}, + 'spec': { + 'accessModes': ['ReadWriteOnce'], + 'resources': {'requests': {'storage': f'{str(size_gb)}Gi'}}, + }, + } + + self.subpath_idx = 0 + self.namespace = namespace + self.cv1 = client.CoreV1Api() + + # The environment variable 'TESK_API_TASKMASTER_ENVIRONMENT_STORAGE_CLASS_NAME' + # can be set to the preferred, non-default, user-defined storageClass + if os.environ.get('STORAGE_CLASS_NAME') is not None: + self.spec['spec'].update( + {'storageClassName': os.environ.get('STORAGE_CLASS_NAME')} + ) + + def set_volume_mounts(self, mounts): + self.volume_mounts = mounts + + def get_subpath(self): + subpath = f'dir{str(self.subpath_idx)}' + self.subpath_idx += 1 + return subpath + + def create(self): + logging.debug('Creating PVC...') + logging.debug(pprint(self.spec)) + try: + return self.cv1.create_namespaced_persistent_volume_claim( + self.namespace, self.spec + ) + except ApiException as ex: + if ex.status == ServiceStatusCodes.CONFLICT: + logging.debug(f'Reading existing PVC: {self.name}') + return self.cv1.read_namespaced_persistent_volume_claim( + self.name, self.namespace + ) + else: + logging.debug(ex.body) + raise ApiException(ex.status, ex.reason) from None + + def delete(self): + cv1 = client.CoreV1Api() + cv1.delete_namespaced_persistent_volume_claim( + self.name, self.namespace, body=client.V1DeleteOptions() + ) diff --git a/tesk/services/taskmaster.py b/tesk/services/taskmaster.py new file mode 100755 index 00000000..a89a8d4c --- /dev/null +++ b/tesk/services/taskmaster.py @@ -0,0 +1,332 @@ +#!/usr/bin/env python3 + +import argparse +import gzip +import json +import logging +import os +import re +import sys + +from kubernetes import config + +from tesk.services.filer_class import Filer +from tesk.services.job import Job +from tesk.services.pvc import PVC + +created_jobs = [] +poll_interval = 5 +task_volume_basename = 'task-volume' +args = None +logger = None + + +def run_executor(executor, namespace, pvc=None): + jobname = executor['metadata']['name'] + spec = executor['spec']['template']['spec'] + + if 
+    if os.environ.get('EXECUTOR_BACKOFF_LIMIT') is not None:
+        executor['spec'].update(
+            {'backoffLimit': int(os.environ['EXECUTOR_BACKOFF_LIMIT'])}
+        )
+
+    if pvc is not None:
+        mounts = spec['containers'][0].setdefault('volumeMounts', [])
+        mounts.extend(pvc.volume_mounts)
+        volumes = spec.setdefault('volumes', [])
+        volumes.extend(
+            [
+                {
+                    'name': task_volume_basename,
+                    'persistentVolumeClaim': {'readOnly': False, 'claimName': pvc.name},
+                }
+            ]
+        )
+    logger.debug(f'Creating job: {jobname}')
+    job = Job(executor, jobname, namespace)
+    logger.debug(f'Job spec: {job.body}')
+
+    created_jobs.append(job)
+
+    status = job.run_to_completion(poll_interval, check_cancelled, args.pod_timeout)
+    if status != 'Complete':
+        if status == 'Error':
+            job.delete()
+        exit_cancelled(f'Got status {status}')
+
+
+# TODO move this code to PVC class
+
+
+def append_mount(volume_mounts, name, path, pvc):
+    # Check whether the given path is already among the mount paths in
+    # volume_mounts
+    duplicate = next(
+        (mount for mount in volume_mounts if mount['mountPath'] == path), None
+    )
+    # If not, add a mount for it
+    if duplicate is None:
+        subpath = pvc.get_subpath()
+        logger.debug(f'Appending {name} at path {path} with subPath: {subpath}')
+        volume_mounts.append({'name': name, 'mountPath': path, 'subPath': subpath})
+
+
+def dirname(iodata):
+    if iodata['type'] == 'FILE':
+        # strip the filename from the path
+        r = '(.*)/'
+        dirname = re.match(r, iodata['path'])[1]
+        logger.debug('dirname of ' + iodata['path'] + ' is: ' + dirname)
+    elif iodata['type'] == 'DIRECTORY':
+        dirname = iodata['path']
+
+    return dirname
+
+
+def generate_mounts(data, pvc):
+    volume_mounts = []
+
+    # gather volumes that need to be mounted, without duplicates
+    volume_name = task_volume_basename
+    for volume in data['volumes']:
+        append_mount(volume_mounts, volume_name, volume, pvc)
+
+    # gather other paths that need to be mounted from inputs/outputs FILE and
+    # DIRECTORY entries
+    for aninput in data['inputs']:
+        dirnm = dirname(aninput)
+        append_mount(volume_mounts, volume_name, dirnm, pvc)
+
+    for anoutput in data['outputs']:
+        dirnm = dirname(anoutput)
+        append_mount(volume_mounts, volume_name, dirnm, pvc)
+
+    return volume_mounts
+
+
+def init_pvc(data, filer):
+    task_name = data['executors'][0]['metadata']['labels']['taskmaster-name']
+    pvc_name = f'{task_name}-pvc'
+    pvc_size = data['resources']['disk_gb']
+    pvc = PVC(pvc_name, pvc_size, args.namespace)
+
+    mounts = generate_mounts(data, pvc)
+    logging.debug(mounts)
+    logging.debug(type(mounts))
+    pvc.set_volume_mounts(mounts)
+    filer.add_volume_mount(pvc)
+
+    pvc.create()
+    # to global var for cleanup purposes
+    global created_pvc  # noqa: PLW0603
+    created_pvc = pvc
+
+    if os.environ.get('NETRC_SECRET_NAME') is not None:
+        filer.add_netrc_mount(os.environ.get('NETRC_SECRET_NAME'))
+
+    filerjob = Job(
+        filer.get_spec('inputs', args.debug),
+        f'{task_name}-inputs-filer',
+        args.namespace,
+    )
+
+    created_jobs.append(filerjob)
+    # filerjob.run_to_completion(poll_interval)
+    status = filerjob.run_to_completion(
+        poll_interval, check_cancelled, args.pod_timeout
+    )
+    if status != 'Complete':
+        exit_cancelled(f'Got status {status}')
+
+    return pvc
+
+
+def run_task(data, filer_name, filer_version, have_json_pvc=False):
+    task_name = data['executors'][0]['metadata']['labels']['taskmaster-name']
+    pvc = None
+
+    json_pvc = task_name if have_json_pvc else None
+
+    if data['volumes'] or data['inputs'] or data['outputs']:
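+        # Input/output staging is delegated to a separate 'filer' Job built
+        # from this spec: init_pvc() runs the inputs filer before the
+        # executors, and the outputs filer runs after them (see below).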
+        filer = Filer(
+            f'{task_name}-filer',
+            data,
+            filer_name,
+            filer_version,
+            args.pull_policy_always,
+            json_pvc,
+        )
+
+        if os.environ.get('TESK_FTP_USERNAME') is not None:
+            filer.set_ftp(
+                os.environ['TESK_FTP_USERNAME'], os.environ['TESK_FTP_PASSWORD']
+            )
+
+        if os.environ.get('FILER_BACKOFF_LIMIT') is not None:
+            filer.set_backoffLimit(int(os.environ['FILER_BACKOFF_LIMIT']))
+
+        pvc = init_pvc(data, filer)
+
+    for executor in data['executors']:
+        run_executor(executor, args.namespace, pvc)
+
+    logging.debug('Finished running executors')
+
+    # upload files and delete pvc
+    if data['volumes'] or data['inputs'] or data['outputs']:
+        filerjob = Job(
+            filer.get_spec('outputs', args.debug),
+            f'{task_name}-outputs-filer',
+            args.namespace,
+        )
+
+        created_jobs.append(filerjob)
+
+        # filerjob.run_to_completion(poll_interval)
+        status = filerjob.run_to_completion(
+            poll_interval, check_cancelled, args.pod_timeout
+        )
+        if status != 'Complete':
+            exit_cancelled(f'Got status {status}')
+        else:
+            pvc.delete()
+
+
+def newParser():
+    parser = argparse.ArgumentParser(description='TaskMaster main module')
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument(
+        'json',
+        help='string containing json TES request, required if -f is not given',
+        nargs='?',
+    )
+    group.add_argument(
+        '-f',
+        '--file',
+        help="TES request as a file or '-' for stdin, required if json is not given",
+    )
+
+    parser.add_argument('-p', '--poll-interval', help='Job polling interval', default=5)
+    parser.add_argument(
+        '-pt', '--pod-timeout', type=int, help='Pod creation timeout', default=240
+    )
+    parser.add_argument(
+        '-fn',
+        '--filer-name',
+        help='Filer image name',
+        default='eu.gcr.io/tes-wes/filer',
+    )
+    parser.add_argument(
+        '-fv', '--filer-version', help='Filer image version', default='v0.1.9'
+    )
+    parser.add_argument(
+        '-n', '--namespace', help='Kubernetes namespace to run in', default='default'
+    )
+    parser.add_argument(
+        '-s',
+        '--state-file',
+        help='State file for state.py script',
+        default='/tmp/.teskstate',  # nosec: B108, false positive
+    )
+    parser.add_argument('-d', '--debug', help='Set debug mode', action='store_true')
+    parser.add_argument(
+        '--localKubeConfig',
+        help='Read k8s configuration from localhost',
+        action='store_true',
+    )
+    parser.add_argument(
+        '--pull-policy-always',
+        help="set imagePullPolicy = 'Always'",
+        action='store_true',
+    )
+
+    return parser
+
+
+def newLogger(loglevel):
+    logging.basicConfig(
+        format='%(asctime)s %(levelname)s: %(message)s',
+        datefmt='%m/%d/%Y %I:%M:%S',
+        level=loglevel,
+    )
+    logging.getLogger('kubernetes.client').setLevel(logging.CRITICAL)
+
+    return logging.getLogger(__name__)
+
+
+def main():
+    have_json_pvc = False
+
+    parser = newParser()
+    global args  # noqa: PLW0603
+
+    args = parser.parse_args()
+
+    loglevel = logging.DEBUG if args.debug else logging.ERROR
+    global logger  # noqa: PLW0603
+    logger = newLogger(loglevel)
+    logger.debug('Starting taskmaster')
+
+    # Get input JSON
+    if args.file is None:
+        data = json.loads(args.json)
+    elif args.file == '-':
+        data = json.load(sys.stdin)
+    elif args.file.endswith('.gz'):
+        with gzip.open(args.file, 'rb') as fh:
+            data = json.loads(fh.read())
+        have_json_pvc = True
+    else:
+        with open(args.file) as fh:
+            data = json.load(fh)
+
+    # Load kubernetes config file
+    if args.localKubeConfig:
+        config.load_kube_config()
+    else:
+        config.load_incluster_config()
+
+    global created_pvc  # noqa: PLW0603
+    created_pvc = None
+
+    # Check if we're cancelled during init
+    if check_cancelled():
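+        # Cancellation is signalled through the pod's labels file (mounted
+        # via the Kubernetes downward API); see check_cancelled() below.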
+        exit_cancelled('Cancelled during init')
+
+    run_task(data, args.filer_name, args.filer_version, have_json_pvc)
+
+
+def clean_on_interrupt():
+    logger.debug('Caught interrupt signal, deleting jobs and pvc')
+
+    for job in created_jobs:
+        job.delete()
+
+
+def exit_cancelled(reason='Unknown reason'):
+    logger.error(f'Cancelling taskmaster: {reason}')
+    sys.exit(0)
+
+
+def check_cancelled():
+    labelInfoFile = '/podinfo/labels'
+
+    if not os.path.exists(labelInfoFile):
+        return False
+
+    with open(labelInfoFile) as fh:
+        for line in fh:
+            # Strip the trailing newline; otherwise the comparison below
+            # never matches
+            name, label = line.strip().split('=', 1)
+            logging.debug(f'Got label: {label}')
+            if label == '"Cancelled"':
+                return True
+
+    return False
+
+
+if __name__ == '__main__':
+    try:
+        main()
+    except KeyboardInterrupt:
+        clean_on_interrupt()
diff --git a/tesk/services/transput.py b/tesk/services/transput.py
new file mode 100644
index 00000000..ded6db76
--- /dev/null
+++ b/tesk/services/transput.py
@@ -0,0 +1,76 @@
+import enum
+import logging
+import netrc
+import os
+from urllib.parse import urlparse
+
+
+@enum.unique
+class Type(enum.Enum):
+    File = 'FILE'
+    Directory = 'DIRECTORY'
+
+
+class Transput:
+    def __init__(self, path, url, ftype):
+        self.path = path
+        self.url = url
+        self.ftype = ftype
+
+        parsed_url = urlparse(url)
+        self.netloc = parsed_url.netloc
+        self.url_path = parsed_url.path
+        self.netrc_file = None
+        try:
+            netrc_path = os.path.join(os.environ['HOME'], '.netrc')
+        except KeyError:
+            netrc_path = '/.netrc'
+        try:
+            self.netrc_file = netrc.netrc(netrc_path)
+        except OSError as fnfe:
+            logging.error(fnfe)
+        except netrc.NetrcParseError as err:
+            logging.error('netrc.NetrcParseError')
+            logging.error(err)
+        except Exception as er:
+            logging.error(er)
+
+    def upload(self):
+        logging.debug(
+            '%s uploading %s %s', self.__class__.__name__, self.ftype, self.url
+        )
+        if self.ftype == Type.File:
+            return self.upload_file()
+        return self.upload_dir() if self.ftype == Type.Directory else 1
+
+    def download(self):
+        logging.debug(
+            '%s downloading %s %s', self.__class__.__name__, self.ftype, self.url
+        )
+        if self.ftype == Type.File:
+            return self.download_file()
+        return self.download_dir() if self.ftype == Type.Directory else 1
+
+    def delete(self):
+        pass
+
+    def download_file(self):
+        raise NotImplementedError()
+
+    def download_dir(self):
+        raise NotImplementedError()
+
+    def upload_file(self):
+        raise NotImplementedError()
+
+    def upload_dir(self):
+        raise NotImplementedError()
+
+    # make it compatible with contexts (with keyword)
+    def __enter__(self):
+        return self
+
+    def __exit__(self, error_type, error_value, traceback):
+        self.delete()
+        # Returning False does not suppress exceptions: they propagate to the
+        # caller; the filer mostly signals problems through return codes
+        return False
diff --git a/tesk/services/utils.py b/tesk/services/utils.py
new file mode 100644
index 00000000..d6913e28
--- /dev/null
+++ b/tesk/services/utils.py
@@ -0,0 +1,5 @@
+import json
+
+
+def pprint(data):
+    return json.dumps(data, indent=4)
diff --git a/tests/FilerClassTest.py b/tests/FilerClassTest.py
deleted file mode 100644
index 67868b8a..00000000
--- a/tests/FilerClassTest.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# encoding: utf-8
-
-import unittest
-import os
-from tesk_core.filer_class import Filer
-from tesk_core import path
-from tesk_core.Util import pprint
-
-try:
-    from unittest.mock import patch  # Python 3 @UnresolvedImport
-except:
-    from mock import patch
-
-
-
-
-@patch('tesk_core.path.HOST_BASE_PATH' , '/home/tfga/workspace/cwl-tes')
-@patch('tesk_core.path.CONTAINER_BASE_PATH' , '/transfer')
-@patch('tesk_core.path.TRANSFER_PVC_NAME' , 'transfer-pvc') -@patch.dict(os.environ, - { - "AWS_SHARED_CREDENTIALS_FILE": "/aws/credentials", - "AWS_CONFIG_FILE": "/aws/config", - }) -class FilerClassTest_env(unittest.TestCase): - - def test_env_vars(self): - - f = Filer('name', {'a': 1}) - f.set_backoffLimit(10) - - pprint(f.spec) - - self.assertEquals(f.getEnv(), [ - - { 'name': 'JSON_INPUT' , 'value': '{"a": 1}' } - ,{ 'name': 'HOST_BASE_PATH' , 'value': '/home/tfga/workspace/cwl-tes' } - ,{ 'name': 'CONTAINER_BASE_PATH' , 'value': '/transfer' } - ,{"name": "AWS_CONFIG_FILE", "value": "/aws/config"} - ,{"name": "AWS_SHARED_CREDENTIALS_FILE", "value": "/aws/credentials"}, - ]) - self.assertEquals(f.spec['spec']['backoffLimit'], 10) - - - def test_mounts(self): - ''' - kind: Pod - apiVersion: v1 - metadata: - name: tfga-pod - spec: - containers: - - name: tfga-container - image: eu.gcr.io/tes-wes/filer:testing - volumeMounts: - - mountPath: /transfer - name: transfer-volume - volumes: - - name: transfer-volume - hostPath: - path: /transferAtNode - # persistentVolumeClaim: - # claimName: task-pv-claim - ''' - - f = Filer('name', {'a': 1}) - - pprint(f.spec) - - pprint(f.getVolumeMounts()) - - self.assertEquals(f.getVolumeMounts(), [ - - { "name" : 'transfer-volume' - , 'mountPath' : path.CONTAINER_BASE_PATH, - }, - {'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True} - ]) - - self.assertEquals(f.getVolumes(), [ - - { "name" : 'transfer-volume' - , 'persistentVolumeClaim' : { 'claimName' : 'transfer-pvc' } - }, - { - "name": "s3-conf", - "secret": { - "secretName": "aws-secret", - "items": [ - { - "key": "credentials", - "path": "credentials" - }, - { - "key": "config", - "path": "config" - } - ], - "optional": True, - } - } - ]) - - -class FilerClassTest_no_env(unittest.TestCase): - - def test_mounts_file_disabled(self): - - f = Filer('name', {'a': 1}) - - pprint(f.spec) - - pprint(f.getVolumeMounts()) - - self.assertEquals(f.getVolumeMounts() , [ - {'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True} - ]) - self.assertEquals(f.getVolumes() , [ - { - "name": "s3-conf", - "secret": { - "secretName": "aws-secret", - "items": [ - { - "key": "credentials", - "path": "credentials" - }, - { - "key": "config", - "path": "config" - } - ], - "optional": True, - } - } - ]) - - - def test_image_pull_policy(self): - - f = Filer('name', {'a': 1}) - self.assertEquals(f.getImagePullPolicy() , 'IfNotPresent') - - f = Filer('name', {'a': 1}, pullPolicyAlways = True) - self.assertEquals(f.getImagePullPolicy() , 'Always') - - - - - - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/tests/TaskMasterTest.py b/tests/TaskMasterTest.py deleted file mode 100644 index 242740f5..00000000 --- a/tests/TaskMasterTest.py +++ /dev/null @@ -1,83 +0,0 @@ -import unittest -from tesk_core.taskmaster import newParser, run_task, newLogger -from argparse import Namespace -import json -import logging -try: - from unittest.mock import patch # Python 3 @UnresolvedImport -except: - from mock import patch - - - -def pvcCreateMock(self): print '[mock] Creating PVC...' -def pvcDeleteMock(self): print '[mock] Deleting PVC...' 
- -def jobRunToCompletionMock(job, b, c): - - print "[mock] Creating job '{}'...".format(job.name) - - return 'Complete' - - -class ParserTest(unittest.TestCase): - - - def test_defaults(self): - - parser = newParser() - - args = parser.parse_args(["json"]) - - print(args) - - self.assertEquals( args - , Namespace( debug=False, file=None, filer_version='v0.1.9', json='json', namespace='default', poll_interval=5, state_file='/tmp/.teskstate' - , localKubeConfig=False - , pull_policy_always=False - ) - ) - - - def test_localKubeConfig(self): - - parser = newParser() - - args = parser.parse_args(['json', '--localKubeConfig']) - - print(args) - - self.assertEquals( args - , Namespace( debug=False, file=None, filer_version='v0.1.9', json='json', namespace='default', poll_interval=5, state_file='/tmp/.teskstate' - , localKubeConfig=True - , pull_policy_always=False - ) - ) - - - def test_pullPolicyAlways(self): - - parser = newParser() - - self.assertEquals( parser.parse_args(['json' ]).pull_policy_always, False ) - self.assertEquals( parser.parse_args(['json', '--pull-policy-always']).pull_policy_always, True ) - - - - @patch('tesk_core.taskmaster.args' , Namespace(debug=True, namespace='default', pull_policy_always=True)) - @patch('tesk_core.taskmaster.logger' , newLogger(logging.DEBUG)) - @patch('tesk_core.taskmaster.PVC.create' , pvcCreateMock) - @patch('tesk_core.taskmaster.PVC.delete' , pvcDeleteMock) - @patch('tesk_core.taskmaster.Job.run_to_completion' , jobRunToCompletionMock) - def test_run_task(self): - - with open('tests/resources/inputFile.json') as fh: - data = json.load(fh) - - run_task(data, 'filer_version') - - - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file diff --git a/tests/assertThrows.py b/tests/assertThrows.py deleted file mode 100644 index 4e7af0ed..00000000 --- a/tests/assertThrows.py +++ /dev/null @@ -1,15 +0,0 @@ - - -class AssertThrowsMixin(object): - - def assertThrows(self, func, exceptionClass, errorMessage = None): - - with self.assertRaises(exceptionClass) as cm: - - func() - - if errorMessage: - - self.assertEqual(str(cm.exception), errorMessage) - - diff --git a/tests/test_filer.py b/tests/test_filer.py deleted file mode 100644 index 5aea8d0a..00000000 --- a/tests/test_filer.py +++ /dev/null @@ -1,251 +0,0 @@ -import unittest -import logging -import os -from tesk_core.filer import newTransput, FTPTransput, HTTPTransput, FileTransput,\ - process_file, logConfig, getPath, copyDir, copyFile, ftp_check_directory,\ - subfolders_in -from tesk_core.exception import UnknownProtocol, InvalidHostPath,\ - FileProtocolDisabled -from tesk_core.path import containerPath -from tesk_core.filer_s3 import S3Transput -from assertThrows import AssertThrowsMixin -from fs.opener import open_fs -from io import StringIO -from unittest.mock import patch - - - - - - - -def getTree(rootDir): - strio = StringIO() - with open_fs(rootDir) as dst1_fs: - dst1_fs.tree(file=strio) - treeTxt = strio.getvalue() - strio.close() - return treeTxt - - -def stripLines(txt): - return '\n'.join([line.strip() for line in txt.splitlines()[1:]]) - - -@patch('tesk_core.path.HOST_BASE_PATH', '/home/tfga/workspace/cwl-tes') -@patch('tesk_core.path.CONTAINER_BASE_PATH', '/transfer') -class FilerTest(unittest.TestCase, AssertThrowsMixin): - - @classmethod - def setUpClass(cls): - logConfig(logging.DEBUG) # Doesn't work... 
- - @patch('tesk_core.filer.copyDir') - @patch('tesk_core.filer.shutil.copy') - def test_download_file(self, copyMock, copyDirMock): - filedata = { - "url": "file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5", - "path": "/var/lib/cwl/stgda974802-fa81-4f0b-8fe4-341d5655af4b/md5", - - "type": "FILE", # File = 'FILE' - # Directory = 'DIRECTORY' - - "name": "md5", - "description": "cwl_input:md5" - } - - process_file('inputs', filedata) - - copyDirMock.assert_not_called() - - copyMock.assert_called_once_with('/transfer/tmphrtip1o8/md5', - '/var/lib/cwl/stgda974802-fa81-4f0b-' - '8fe4-341d5655af4b/md5') - - @patch('tesk_core.filer.copyDir') - @patch('tesk_core.filer.shutil.copy') - def test_download_dir(self, copyMock, copyDirMock): - filedata = { - "url": "file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/", - "path": "/TclSZU", - "type": "DIRECTORY", - "name": "workdir" - } - - process_file('inputs', filedata) - - copyMock.assert_not_called() - - copyDirMock.assert_called_once_with('/transfer/tmphrtip1o8', '/TclSZU') - - @patch('tesk_core.filer.copyDir') - @patch('tesk_core.filer.shutil.copy') - def test_upload_dir(self, copyMock, copyDirMock): - filedata = { - "url": "file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/", - "path": "/TclSZU", - "type": "DIRECTORY", - "name": "workdir" - } - - process_file('outputs', filedata) - - copyMock.assert_not_called() - - copyDirMock.assert_called_once_with('/TclSZU', '/transfer/tmphrtip1o8') - - @patch('tesk_core.filer.copyDir') - @patch('tesk_core.filer.copyFile') - def test_upload_file(self, copyFileMock, copyDirMock): - - filedata = { - "url": "file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5", - "path": "/TclSZU/md5", - "type": "FILE", - "name": "stdout" - } - - process_file('outputs', filedata) - - copyDirMock.assert_not_called() - - copyFileMock.assert_called_once_with( '/TclSZU/md5' - , '/transfer/tmphrtip1o8/md5') - - - @patch('tesk_core.filer.copyDir') - @patch('tesk_core.filer.copyFile') - def test_upload_file_glob(self, copyFileMock, copyDirMock): - - filedata = { - "url": "file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5*", - "path": "/TclSZU/md5*", - "type": "FILE", - "name": "stdout" - } - - process_file('outputs', filedata) - - copyDirMock.assert_not_called() - - copyFileMock.assert_called_once_with( '/TclSZU/md5*' - , '/transfer/tmphrtip1o8/md5*') - - - def test_copyDir(self): - def rmDir(d): - os.system('rm -r {}'.format(d)) - - baseDir = 'tests/resources/copyDirTest/' - src = os.path.join(baseDir, 'src') - dst1 = os.path.join(baseDir, 'dst1') - dst2 = os.path.join(baseDir, 'dst2') - - rmDir(dst1) - rmDir(dst2) - - self.assertTrue(os.path.exists(src)) # src should exist - self.assertFalse(os.path.exists(dst1)) # dst1 shouldn't - self.assertFalse(os.path.exists(dst2)) # dst2 shouldn't - - # Copying to existing dst --------------------------------------------- - # Let's create dst1 - os.mkdir(dst1) - self.assertTrue(os.path.exists(dst1)) # Now dst1 should exist - - # Let's try to copy - copyDir(src, dst1) - - - self.assertEqual(getTree(dst1), - stripLines(''' - |-- a - | |-- 1.txt - | `-- 2.txt - `-- 3.txt - ''' - ) - ) - - # Copying to non-existing dst ----------------------------------------- - self.assertFalse(os.path.exists(dst2)) # dst2 should not exist - - # Let's try to copy - copyDir(src, dst2) - - self.assertEqual(getTree(dst2), - stripLines(''' - |-- a - | |-- 1.txt - | `-- 2.txt - `-- 3.txt - ''' - ) - ) - - def test_getPath(self): - - self.assertEqual( getPath('file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5') - , 
'/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5') - - def test_getPathNoScheme(self): - - self.assertEquals( getPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5') - , '/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5') - - self.assertEqual( containerPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5') - , '/transfer/tmphrtip1o8/md5') - - def test_containerPath(self): - self.assertEqual( - containerPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5'), - '/transfer/tmphrtip1o8/md5') - - # What happens if 'path' is not a descendant of HOST_BASE_PATH? - self.assertThrows(lambda: containerPath('/someOtherFolder'), - InvalidHostPath, - "'/someOtherFolder' is not a descendant of " - "'HOST_BASE_PATH' (/home/tfga/workspace/cwl-tes)" - ) - - def test_newTransput(self): - self.assertEqual(newTransput('ftp', 'test.com'), FTPTransput) - self.assertEqual(newTransput('http', 'test.com'), HTTPTransput) - self.assertEqual(newTransput('https', 'test.com'), HTTPTransput) - self.assertEqual(newTransput('file', '/home/tfga/workspace/'), FileTransput) - self.assertEqual(newTransput('s3', '/home/tfga/workspace/'), S3Transput) - self.assertEqual(newTransput('http', 's3.aws.com'), HTTPTransput) - - self.assertThrows(lambda: newTransput('svn', 'example.com') - , UnknownProtocol - , "Unknown protocol: 'svn'" - ) - - @patch('ftplib.FTP') - def test_ftp_check_directory(self, conn): - """ Ensure that when the path provided is an existing directory, the - return value is 0.""" - path = os.path.curdir - self.assertEqual(ftp_check_directory(conn, path), 0) - - def test_subfolders_in(self): - """ Ensure the all the subfolders of a path are properly returned.""" - path = "/this/is/a/path" - subfldrs = ['/this', '/this/is', '/this/is/a', '/this/is/a/path'] - self.assertEqual(subfolders_in(path), subfldrs) - - - -class FilerTest_no_env(unittest.TestCase, AssertThrowsMixin): - - def test_newTransput_file_disabled(self): - self.assertThrows( lambda: newTransput('file','/home/user/test') - , FileProtocolDisabled - , "'file:' protocol disabled\n" - "To enable it, both 'HOST_BASE_PATH' and 'CONTAINER_BASE_PATH' environment variables must be defined." 
- ) - - -if __name__ == "__main__": - # import sys;sys.argv = ['', 'Test.testName'] - unittest.main() diff --git a/tests/test_filer_ftp_pytest.py b/tests/test_filer_ftp_pytest.py deleted file mode 100755 index 872b4892..00000000 --- a/tests/test_filer_ftp_pytest.py +++ /dev/null @@ -1,226 +0,0 @@ -""" Tests for 'filer.py' FTP functionalities using 'pytest'.""" - -from unittest import mock -import ftplib -import os - -from tesk_core.filer import ( - FTPTransput, - Type, - ftp_login, - ftp_upload_file, - ftp_download_file, - ftp_check_directory, - ftp_make_dirs -) - - -def test_ftp_login(mocker): - """ Ensure ftp_login detects ftp credentials and properly calls - ftplib.FTP.login.""" - - conn = mocker.patch('ftplib.FTP') - mock_login = mocker.patch('ftplib.FTP.login') - with mock.patch.dict( - 'os.environ', - { - 'TESK_FTP_USERNAME': 'test', - 'TESK_FTP_PASSWORD': 'test_pass', - } - ): - ftp_login(conn, None, None) - mock_login.assert_called_with('test', 'test_pass') - - -def test_ftp_upload_file_error(mocker, caplog): - """ Ensure that upon upload error, ftp_upload_file behaves correctly.""" - - conn = mocker.patch('ftplib.FTP') - mocker.patch('ftplib.FTP.storbinary', side_effect=ftplib.error_reply) - assert 1 == ftp_upload_file(conn, - 'tests/test_filer.py', - '/home/tesk/test_copy.py') - assert 'Unable to upload file' in caplog.text - - -def test_ftp_download_file_error(mocker, caplog): - """ Ensure that upon download error, ftp_download_file behaves correctly. - """ - - conn = mocker.patch('ftplib.FTP') - mocker.patch('ftplib.FTP.retrbinary', side_effect=ftplib.error_perm) - with mock.patch('builtins.open', mock.mock_open(), create=False) as m: - assert 1 == ftp_download_file(conn, - 'test_filer_ftp_pytest.py', - 'test_copy.py') - assert 'Unable to download file' in caplog.text - - -def test_ftp_download_file_success(mocker, caplog): - """ Ensure that upon successful download, the local destination file has - been created.""" - - conn = mocker.patch('ftplib.FTP') - mock_retrbin = mocker.patch('ftplib.FTP.retrbinary') - with mock.patch('builtins.open', mock.mock_open(), create=False) as m: - assert 0 == ftp_download_file(conn, - 'test_filer_ftp_pytest.py', - 'test_copy.py') - - mock_retrbin.assert_called_with( - "RETR " + "test_filer_ftp_pytest.py", - mock.ANY - ) - - m.assert_called_with('test_copy.py', 'w+b') - - # Since we want to avoid file creation in testing and we're using - # 'create=False', we cannot check whether a file exists or not (but - # it's not really necessary since we can assert that the necessary - # functions have been invoked. - # assert os.path.exists('test_copy.py') - - -def test_ftp_upload_dir(mocker, fs, ftpserver): - """ Check whether the upload of a directory through FTP completes - successfully. 
""" - - # Fake local nested directories with files - fs.create_dir('dir1') - fs.create_dir('dir1/dir2') - fs.create_file('dir1/file1', contents="this is random") - fs.create_file('dir1/dir2/file2', contents="not really") - fs.create_file('dir1/dir2/file4.txt', contents="took me a while") - - login_dict = ftpserver.get_login_data() - - conn = ftplib.FTP() - - mocker.patch('ftplib.FTP.connect', - side_effect=conn.connect( - host=login_dict['host'], - port=login_dict['port'] - ) - ) - mocker.patch( - 'ftplib.FTP.login', - side_effect=conn.login(login_dict['user'], login_dict['passwd']) - ) - mocker.patch('ftplib.FTP.pwd', side_effect=conn.pwd) - mocker.patch('ftplib.FTP.cwd', side_effect=conn.cwd) - mocker.patch('ftplib.FTP.mkd', side_effect=conn.mkd) - mock_storbinary = mocker.patch('ftplib.FTP.storbinary') - - ftp_obj = FTPTransput( - "dir1", - "ftp://" + login_dict['host'] + "/dir1", - Type.Directory, - ftp_conn=conn - ) - - ftp_obj.upload_dir() - - # We use mock.ANY since the 2nd argument of the 'ftplib.FTP.storbinary' is - # a file object and we can't have the same between the original and the - # mock calls - assert sorted(mock_storbinary.mock_calls) == sorted([ - mock.call('STOR /' + '/dir1/file1', mock.ANY), - mock.call('STOR /' + '/dir1/dir2/file2', mock.ANY), - mock.call('STOR /' + '/dir1/dir2/file4.txt', mock.ANY) - ]) - - -def test_ftp_download_dir(mocker, tmpdir, tmp_path, ftpserver): - """ Check whether the download of a directory through FTP completes - successfully. """ - - # Temporary nested directories with files - file1 = tmpdir.mkdir("dir1").join("file1") - file1.write("this is random") - file2 = tmpdir.mkdir("dir1/dir2").join("file2") - file2.write('not really') - file3 = tmpdir.join('dir1/dir2/file3') - file3.write('took me a while') - - # Temporary folder for download - tmpdir.mkdir('downloads') - - # Populate the server with the above files to later download - ftpserver.put_files({ - 'src': str(tmp_path) + '/dir1/file1', - 'dest': 'remote1/file1' - }) - ftpserver.put_files({ - 'src': str(tmp_path) + '/dir1/dir2/file2', - 'dest': 'remote1/remote2/file2' - }) - ftpserver.put_files({ - 'src': str(tmp_path) + '/dir1/dir2/file3', - 'dest': 'remote1/remote2/file3' - }) - - login_dict = ftpserver.get_login_data() - - conn = ftplib.FTP() - conn.connect(host=login_dict['host'], port=login_dict['port']) - conn.login(login_dict['user'], login_dict['passwd']) - - mock_retrbinary = mocker.patch( - 'ftplib.FTP.retrbinary', - side_effect=conn.retrbinary - ) - - ftp_obj = FTPTransput( - str(tmp_path) + "downloads", - "ftp://" + login_dict['host'], - Type.Directory, - ftp_conn=conn - ) - - ftp_obj.download_dir() - - # We use mock.ANY since the 2nd argument of the 'ftplib.FTP.storbinary' is - # a file object and we can't have the same between the original and the - # mock calls - assert sorted(mock_retrbinary.mock_calls) == sorted([ - mock.call('RETR ' + '/remote1/file1', mock.ANY), - mock.call('RETR ' + '/remote1/remote2/file2', mock.ANY), - mock.call('RETR ' + '/remote1/remote2/file3', mock.ANY) - ]) - - assert os.path.exists(str(tmp_path) + 'downloads/remote1/file1') - assert os.path.exists(str(tmp_path) + 'downloads/remote1/remote2/file2') - assert os.path.exists(str(tmp_path) + 'downloads/remote1/remote2/file3') - - -def test_ftp_check_directory_error(mocker, caplog): - """Ensure ftp_check_directory_error creates the proper error log - message in case of error.""" - - conn = mocker.patch('ftplib.FTP') - mocker.patch('ftplib.FTP.cwd', side_effect=ftplib.error_reply) - assert 1 == 
ftp_check_directory(conn, '/folder/file') - assert 'Could not check if path' in caplog.text - - -def test_ftp_make_dirs(mocker): - """ In case of existing directory, exit with 0. """ - - conn = mocker.patch('ftplib.FTP') - assert ftp_make_dirs(conn, os.curdir) == 0 - - -def test_ftp_make_dirs_error(mocker, ftpserver, caplog): - """ Ensure in case of 'ftplib.error_reply', both the return value - and the error message are correct. """ - - login_dict = ftpserver.get_login_data() - - conn = ftplib.FTP() - conn.connect(host=login_dict['host'], port=login_dict['port']) - conn.login(login_dict['user'], login_dict['passwd']) - - mocker.patch('ftplib.FTP.cwd', side_effect=ftplib.error_reply) - - assert ftp_make_dirs(conn, 'dir1') == 1 - assert 'Unable to create directory' in caplog.text diff --git a/tests/test_filer_general_pytest.py b/tests/test_filer_general_pytest.py deleted file mode 100755 index 49f0782b..00000000 --- a/tests/test_filer_general_pytest.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Tests for 'filer.py' general purpose functionalities using 'pytest'.""" - -# Note: In tests such as 'test_process_file_with_scheme' or -# 'test_copyContent_dir', only the outer function of each unit under testing is -# checked, since mocking a function apparently affects its output. Maybe -# there's a way to bypass that issue and test deeper down the call tree. - -import pytest - -from tesk_core.filer import ( - process_file, - copyContent, - FileProtocolDisabled -) - - -def test_process_file_no_scheme(caplog): - """ Ensure that when process_file is called without a scheme and no - 'HOST_BASE_PATH', 'CONTAINER_BASE_PATH' environment variables - set, the appropriate error is raised.""" - - filedata = {'url': 'www.foo.bar'} - - with pytest.raises(FileProtocolDisabled): - process_file('upload', filedata) - - -def test_process_file_with_scheme(mocker): - """ Ensure expected behaviour when 'process_file' is called with scheme. 
- In this test example, scheme is 'http', filedata:type is 'FILE' and - ttype is 'inputs'.""" - - filedata = { - 'url': 'http://www.foo.bar', - 'path': '.', - 'type': 'FILE', - } - mock_new_Trans = mocker.patch('tesk_core.filer.newTransput') - process_file('inputs', filedata) - - mock_new_Trans.assert_called_once_with('http','www.foo.bar') - - -def test_process_file_from_content(tmpdir, tmp_path): - """ Ensure 'process_file' behaves correctly when the file contents - should be drawn from the filedata content field.""" - - test_file = tmpdir.join("testfile") - filedata = { - 'path': str(tmp_path) + '/testfile', - 'content': 'This is some test content' - } - process_file('inputs', filedata) - - assert open(str(tmp_path) + '/testfile', 'r').read() == filedata['content'] - - -def test_copyContent_dir(mocker): - """Ensure that 'os.listdir' is called when 'copyContent' is called.""" - - mock_os_listdir = mocker.patch('os.listdir') - copyContent('.', '/test_dst') - - mock_os_listdir.assert_called_once_with('.') diff --git a/tests/test_filer_http_pytest.py b/tests/test_filer_http_pytest.py deleted file mode 100755 index 2013568c..00000000 --- a/tests/test_filer_http_pytest.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Tests for 'filer.py' HTTP functionalities using 'pytest'.""" - -from requests import Response, put -import os -from unittest import mock - -from tesk_core.filer import ( - HTTPTransput, - Type -) - -PATH_DOWN = 'test_download_file.txt' -PATH_UP = 'tests/test_filer_http_pytest.py' -SUCCESS = 200 -FAIL = 300 -URL = 'http://www.foo.bar' -FTYPE = 'FILE' - -resp = Response() -resp._content = b'{ "foo" : "bar" }' - - -def test_download_file(mocker): - """ Ensure a file gets properly downloaded.""" - - resp.status_code = SUCCESS - http_obj = HTTPTransput(PATH_DOWN, URL, FTYPE) - mocker.patch('requests.get', return_value=resp) - - with mock.patch( - 'builtins.open', - mock.mock_open(read_data=resp._content), - create=False - ) as m: - assert 0 == http_obj.download_file() - assert open(PATH_DOWN, 'rb').read() == resp._content - - -def test_download_file_error(mocker, caplog): - """ Ensure download error returns the correct value and log message.""" - - resp.status_code = FAIL - http_obj = HTTPTransput(PATH_DOWN, URL, FTYPE) - mocker.patch('requests.get', return_value=resp) - - assert 1 == http_obj.download_file() - assert 'Got status code: {}'.format(FAIL) in caplog.text - - -def test_upload_file(mocker): - """ Ensure a file gets properly uploaded.""" - - resp.status_code = SUCCESS - http_obj = HTTPTransput(PATH_UP, URL, FTYPE) - mocker.patch('requests.put', return_value=resp) - - assert 0 == http_obj.upload_file() - - -def test_upload_file_error(mocker, caplog): - """ Ensure upload error returns the correct value and log message.""" - - resp.status_code = FAIL - http_obj = HTTPTransput(PATH_UP, URL, FTYPE) - mocker.patch('requests.put', return_value=resp) - - assert 1 == http_obj.upload_file() - assert 'Got status code: {}'.format(FAIL) in caplog.text - - -def test_upload_dir(mocker, fs): - """ Ensure that each file inside nexted directories gets successfully - uploaded.""" - - # Tele2 Speedtest Service, free upload /download test server - endpoint = "http://speedtest.tele2.net/upload.php" - resp.status_code = 200 - - fs.create_dir('dir1') - fs.create_dir('dir1/dir2') - fs.create_file('dir1/file1', contents="this is random") - fs.create_file('dir1/dir2/file2', contents="not really") - fs.create_file('dir1/dir2/file4.txt', contents="took me a while") - - - mock_put = mocker.patch('requests.put', 
return_value=resp) - - http_obj = HTTPTransput( - "dir1", - endpoint + "/dir1", - Type.Directory - ) - - assert http_obj.upload_dir() == 0 - - # We emply the 'list.sorted' trick to ignore calls order because the - # 'assert_has_calls' method would not work in this setting - assert sorted(mock_put.mock_calls) == sorted([ - mock.call(endpoint + '/dir1/dir2/file2', data="not really"), - mock.call(endpoint + '/dir1/dir2/file4.txt', data="took me a while"), - mock.call(endpoint + '/dir1/file1', data="this is random"), - ]) - - - def test_upload_dir_error(mocker, fs): - """ Ensure 'upload_dir' error returns the correct value. """ - - fs.create_dir('dir2') - - # Tele2 Speedtest Service, free upload /download test server - endpoint1 = "http://speedtest.tele2.net/upload.php" - - # Non-existent endpoint - endpoint2 = "http://somerandomendpoint.fail" - - http_obj1 = HTTPTransput( - "dir1", - endpoint1 + "/dir1", - Type.Directory - ) - - http_obj2 = HTTPTransput( - "dir2", - endpoint2 + "/dir1", - Type.Directory - ) - - assert http_obj1.upload_dir() == 1 - assert http_obj2.upload_dir() == 1 diff --git a/tests/test_job.py b/tests/test_job.py deleted file mode 100644 index 0ca19ad1..00000000 --- a/tests/test_job.py +++ /dev/null @@ -1,288 +0,0 @@ -import unittest -import json -import os -import datetime -from unittest.mock import patch -from dateutil.tz import tzutc -from tesk_core import taskmaster -from tesk_core.job import Job -from argparse import Namespace -from datetime import timezone -from kubernetes.client.rest import ApiException - -START_TIME = datetime.datetime.now(timezone.utc) -class MockObject(object): - def __init__(self, dictionary): - for k, v in dictionary.items(): - if isinstance(v, dict): - self.__dict__[k] = MockObject(v) - else: - self.__dict__[k] = v - -def read_namespaced_job_error(name, namespace): - return_value = {'active': 1, - 'completion_time': None, - 'conditions': None, - 'failed': None, - 'start_time': START_TIME - datetime.timedelta(minutes=10), - 'succeeded': None} - return MockObject({"status":return_value}) - -def read_namespaced_job_pending(diff_time=5): - return_value = {'active': 1, - 'completion_time': None, - 'conditions': None, - 'failed': None, - 'start_time': START_TIME - datetime.timedelta(minutes=diff_time), - 'succeeded': None} - return MockObject({"status":return_value}) - -def read_namespaced_job_success(name, namespace): - return_value = {'active': None, - 'completion_time': datetime.datetime(2020, 7, 20, 5, 12, 42, tzinfo=tzutc()), - 'conditions': [MockObject({'last_probe_time': datetime.datetime(2020, 7, 20, 5, 12, 42, tzinfo=tzutc()), - 'last_transition_time': datetime.datetime(2020, 7, 20, 5, 12, 42, tzinfo=tzutc()), - 'message': None, - 'reason': None, - 'status': 'True', - 'type': 'Complete'})], - 'failed': None, - 'start_time': datetime.datetime(2020, 7, 20, 5, 12, 35, tzinfo=tzutc()), - 'succeeded': 1} - return MockObject({"status":return_value}) - -def read_namespaced_job_running(name, namespace): - """ - if the `conditions` value is `None`, its assumed that the pod is running. 
Hence the value of `conditions` is - kept as `None` - """ - return_value = {'active': None, - 'completion_time': None, - 'conditions': None, - 'failed': None, - 'start_time': datetime.datetime(2020, 7, 20, 5, 12, 35, tzinfo=tzutc()), - 'succeeded': None} - return MockObject({"status":return_value}) - -def list_namespaced_pod_error_ImagePullBackOff(diff_time=10): - return_value = {"status": {"conditions": [], - "container_statuses": [MockObject({"container_id": None, - "image": "ubuntu_mock_test_image", - "image_id": "", - "last_state": {"running": None, - "terminated": None, - "waiting": None}, - "name": "task-1000-ex-00", - "ready": False, - "restart_count": 0, - "state": {"running": None, - "terminated": None, - "waiting": {"message": "Back-off " - "pulling " - "image " - "ubuntu_mock_test_image", - "reason": "ImagePullBackOff"}}})], - "host_ip": "192.168.99.100", - "init_container_statuses": None, - "message": None, - "nominated_node_name": None, - "phase": "Pending", - "pod_ip": "172.17.0.5", - "qos_class": "BestEffort", - "reason": None, - "start_time": START_TIME- datetime.timedelta(minutes=diff_time)}} - return MockObject({"items":[MockObject(return_value)]}) - -def list_namespaced_pod_pending_unknown_error(diff_time=5): - return_value = {"status": {"conditions": [], - "container_statuses": [MockObject({"container_id": None, - "image": "ubuntu_mock_test_image", - "image_id": "", - "last_state": {"running": None, - "terminated": None, - "waiting": None}, - "name": "task-1000-ex-00", - "ready": False, - "restart_count": 0, - "state": {"running": None, - "terminated": None, - "waiting": {"message": "Unknown error", - "reason": "Unknown"}}})], - "host_ip": "192.168.99.100", - "init_container_statuses": None, - "message": None, - "nominated_node_name": None, - "phase": "Pending", - "pod_ip": "172.17.0.5", - "qos_class": "BestEffort", - "reason": None, - "start_time": START_TIME- datetime.timedelta(minutes=diff_time)}} - return MockObject({"items":[MockObject(return_value)]}) - -class JobTestCase(unittest.TestCase): - def setUp(self): - """ - Initialising - """ - self.data = json.loads(open(os.path.join(os.path.dirname(__file__), "resources/inputFile.json")).read()) - taskmaster.args = Namespace(debug=False, file=None, filer_version='v0.1.9', json='json' - , namespace='default', poll_interval=5, state_file='/tmp/.teskstate' - , localKubeConfig=False, pull_policy_always=False - , filer_name="eu.gcr.io/tes-wes/filer", pod_timeout=240 - ) - def test_job(self): - """ - Testing if Job object is getting created successfully - """ - job = Job({'metadata': {'name': 'test'}}) - self.assertEqual(job.name, 'task-job') - self.assertEqual(job.namespace, 'default') - - @patch("kubernetes.client.BatchV1Api.create_namespaced_job") - @patch("tesk_core.job.Job.get_status", side_effect=[("Running", True),("Running", True),("Running", True), - ("Running", True),("Running", True),("Complete", True)]) - def test_run_to_completion_success(self, mock_get_status, mock_create_namespaced_job): - """ - Checking if the Job runs is completed successfully - """ - for executor in self.data['executors']: - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status = job.run_to_completion(1, taskmaster.check_cancelled, - taskmaster.args.pod_timeout) - self.assertEqual(status, "Complete") - - @patch("tesk_core.job.Job.delete") - @patch("tesk_core.taskmaster.check_cancelled", return_value=True) - @patch("kubernetes.client.BatchV1Api.create_namespaced_job") - 
@patch("tesk_core.job.Job.get_status", side_effect=[("Running", True)]) - def test_run_to_completion_cancelled(self, mock_get_status, mock_create_namespaced_job, mock_check_cancelled, - mock_job_delete): - """ - Checking if the Job is cancelled - """ - for executor in self.data['executors']: - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status = job.run_to_completion(taskmaster.args.poll_interval, taskmaster.check_cancelled, - taskmaster.args.pod_timeout) - self.assertEqual(status, "Cancelled") - - @patch("tesk_core.taskmaster.check_cancelled", return_value=False) - @patch("kubernetes.client.BatchV1Api.create_namespaced_job", side_effect=ApiException(status=409,reason="conflict")) - @patch("tesk_core.job.Job.get_status", side_effect=[("Complete", True)]) - @patch("kubernetes.client.BatchV1Api.read_namespaced_job", side_effect=read_namespaced_job_running) - def test_run_to_completion_check_conflict_exception(self, mock_get_status, mock_read_namespaced_job, - mock_check_cancelled,mock_create_namespaced_job): - """ - Checking if the Job status is complete when an ApiException of 409 is raised - """ - for executor in self.data['executors']: - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status = job.run_to_completion(taskmaster.args.poll_interval, taskmaster.check_cancelled, - taskmaster.args.pod_timeout) - self.assertEqual(status, "Complete") - - @patch("kubernetes.client.BatchV1Api.create_namespaced_job", - side_effect=ApiException(status=500, reason="Random Exception")) - def test_run_to_completion_check_other_K8_exception(self,mock_create_namespaced_job): - """ - Checking if the an exception is raised when ApiException status is other than 409 - """ - for executor in self.data['executors']: - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - with self.assertRaises(ApiException): - job.run_to_completion(taskmaster.args.poll_interval, taskmaster.check_cancelled, - taskmaster.args.pod_timeout) - - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - @patch("kubernetes.client.BatchV1Api.read_namespaced_job", side_effect=read_namespaced_job_error) - @patch("tesk_core.job.Job.delete") - @patch("tesk_core.taskmaster.check_cancelled", return_value=False) - @patch("kubernetes.client.BatchV1Api.create_namespaced_job") - def test_run_to_completion_error(self, mock_create_namespaced_job, mock_check_cancelled, - mock_job_delete,mock_read_namespaced_job,mock_list_namespaced_pod ): - """ - Testing if the job state is 'error' when the status of the pod is in pending - state and reason is ImagePullBackOff - """ - mock_list_namespaced_pod.return_value = list_namespaced_pod_error_ImagePullBackOff(10) - for executor in self.data['executors']: - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status = job.run_to_completion(1, taskmaster.check_cancelled,120) - self.assertEqual(status, "Error") - - @patch("kubernetes.client.BatchV1Api.read_namespaced_job", side_effect=read_namespaced_job_error) - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - def test_get_job_status_ImagaPullBackoff_error(self,mock_list_namespaced_pod, mock_read_namespaced_job): - """ - Checking whether the job state is 'error', when the pod failed to start and if reason for pod failure - is ImagePullBackOff - """ - mock_list_namespaced_pod.return_value = list_namespaced_pod_error_ImagePullBackOff() - executor = 
self.data['executors'][0] - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - job.timeout = 50 - status, all_pods_running = job.get_status(False) - self.assertEqual(status,"Error") - - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - @patch("kubernetes.client.BatchV1Api.read_namespaced_job", side_effect=read_namespaced_job_success) - def test_get_status_success(self, mock_read_namespaced_job, mock_list_namespaced_pod): - """ - Checking if job status is complete - """ - executor = self.data['executors'][0] - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status, all_pods_running = job.get_status(False) - self.assertEqual(status, "Complete") - - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - @patch("kubernetes.client.BatchV1Api.read_namespaced_job", side_effect=read_namespaced_job_running) - def test_get_status_running(self, mock_read_namespaced_job, mock_list_namespaced_pod): - """ - Checking if the job is in running state in an ideal situation - """ - executor = self.data['executors'][0] - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status, all_pods_running = job.get_status(False) - self.assertEqual(status, "Running") - - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - @patch("kubernetes.client.BatchV1Api.read_namespaced_job") - def test_get_job_status_for_failed_pod(self, mock_read_namespaced_job, mock_list_namespaced_pod): - """ - Checking if the job status is 'running' when the pod failed to start with a reason other than ImagePullBackOff. - """ - mock_list_namespaced_pod.return_value = list_namespaced_pod_pending_unknown_error() - mock_read_namespaced_job.return_value = read_namespaced_job_pending() - executor = self.data['executors'][0] - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status, all_pods_running = job.get_status(False) - self.assertEqual(status, "Running") - - @patch("kubernetes.client.CoreV1Api.list_namespaced_pod") - @patch("kubernetes.client.BatchV1Api.read_namespaced_job") - def test_get_job_status_for_wrong_image(self, mock_read_namespaced_job, mock_list_namespaced_pod): - """ - Assuming image name is wrong, the testcase will check if job status returned from the method is "running" - during the default pod timeout. 
- """ - mock_list_namespaced_pod.return_value = list_namespaced_pod_error_ImagePullBackOff(2) - mock_read_namespaced_job.return_value = read_namespaced_job_pending(2) - executor = self.data['executors'][0] - jobname = executor['metadata']['name'] - job = Job(executor, jobname, taskmaster.args.namespace) - status, all_pods_running = job.get_status(False) - self.assertEqual(status, "Running") - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_s3_filer.py b/tests/test_s3_filer.py deleted file mode 100644 index ce5a5b61..00000000 --- a/tests/test_s3_filer.py +++ /dev/null @@ -1,174 +0,0 @@ -import os -import pytest -import boto3 -from tesk_core.filer_s3 import S3Transput -#from tesk_core.extract_endpoint import extract_endpoint -from moto import mock_s3 -from unittest.mock import patch, mock_open - -@pytest.fixture() -def moto_boto(): - with mock_s3(): - boto3.client('s3', endpoint_url="http://s3.amazonaws.com") - - client = boto3.resource('s3',endpoint_url="http://s3.amazonaws.com") - client.create_bucket(Bucket='tesk') - client.Bucket('tesk').put_object(Bucket='tesk', Key='folder/file.txt', Body='') - client.Bucket('tesk').put_object(Bucket='tesk', Key='folder1/folder2/file.txt', Body='') - yield - -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("/home/user/filer_test/file.txt", "s3://tesk/folder/file.txt","FILE", - ("tesk","folder/file.txt")), - ("/home/user/filer_test/file.txt", "s3://tesk/folder1/folder2","DIRECTORY", - ("tesk","folder1/folder2")), - ]) -def test_get_bucket_name_and_file_path( moto_boto, path, url, ftype,expected): - """ - Check if the bucket name and path is extracted correctly for file and folders - """ - trans = S3Transput(path, url, ftype) - assert trans.get_bucket_name_and_file_path() == expected - -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("/home/user/filer_test/file.txt", "s3://tesk/folder/file.txt","FILE",0), - ("/home/user/filer_test/file.txt", "s3://mybucket/folder/file.txt","FILE",1), - ("/home/user/filer_test/", "s3://tesk/folder1/folder2","DIRECTORY",0), - ("/home/user/filer_test/", "s3://mybucket/folder1/folder2","DIRECTORY",1) - ]) -def test_check_if_bucket_exists(moto_boto, path, url, ftype, expected): - """ - Check if the bucket exists - """ - client = boto3.resource('s3', endpoint_url="http://s3.amazonaws.com") - trans = S3Transput(path, url, ftype) - assert trans.check_if_bucket_exists(client) == expected - -# @patch('tesk_core.filer.os.makedirs') -# @patch('builtins.open') -# @patch('s3transfer.utils.OSUtils.rename_file') -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("/home/user/filer_test/file.txt", "s3://tesk/folder/file.txt","FILE",0), - ("/home/user/filer_test/file.txt", "s3://tesk/folder/file_new.txt","FILE",1), - ]) -def test_s3_download_file( moto_boto, path, url, ftype, expected, fs, caplog): - """ - Checking for successful/failed file download from Object storage server - """ - with S3Transput(path, url, ftype) as trans: - assert trans.download_file() == expected - if expected: - assert "Not Found" in caplog.text - else: - assert os.path.exists(path) == True - - - -@patch('tesk_core.filer.os.makedirs') -@patch('builtins.open') -@patch('s3transfer.utils.OSUtils.rename_file') -#@patch("tesk_core.filer_s3.extract_endpoint", return_value="http://s3.amazonaws.com") -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("filer_test/", "s3://tesk/folder1/","DIRECTORY",0), - ("filer_test/", "s3://tesk/folder10/folder20","DIRECTORY",1) - ]) -def test_s3_download_directory( 
mock_makedirs, mock_open, mock_rename, path, url, ftype, - expected, moto_boto, caplog): - """ - test case to check directory download from Object storage server - """ - with S3Transput(path, url, ftype) as trans: - assert trans.download_dir() == expected - print(mock_rename.mock_calls) - if expected: - assert "Invalid file path" in caplog.text - else: - ''' - s3 object path s3://tesk/folder1/ will contain 'folder2', checking if the 'folder2' - is present in the download folder. - ''' - mock_rename.assert_called_once_with('filer_test/folder2', exist_ok=True) - - -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("/home/user/filer_test/file.txt", "s3://tesk/folder/file.txt","FILE",0), - ("/home/user/filer_test/file_new.txt", "s3://tesk/folder/file.txt","FILE",1), - ]) -def test_s3_upload_file( moto_boto, path, url, ftype, expected,fs, caplog): - """ - Testing successful/failed file upload to object storage server - """ - fs.create_file("/home/user/filer_test/file.txt") - client = boto3.resource('s3', endpoint_url="http://s3.amazonaws.com") - trans = S3Transput(path, url, ftype) - trans.bucket_obj = client.Bucket(trans.bucket) - assert trans.upload_file() == expected - if expected: - assert "File upload failed for" in caplog.text - else: - ''' - Checking if the file was uploaded, if the object is found, load() method will return None - otherwise an exception will be raised. - ''' - assert client.Object('tesk', 'folder/file.txt').load() == None - - - -@pytest.mark.parametrize("path, url, ftype,expected", [ - ("tests", "s3://tesk/folder1/folder2","DIRECTORY",0), - ("/home/user/filer_test_new/", "s3://tesk/folder1/folder2","DIRECTORY",1) - ]) -def test_s3_upload_directory(path, url, ftype, expected, moto_boto, caplog): - """ - Checking for successful and failed Directory upload to object storage server - """ - client = boto3.resource('s3', endpoint_url="http://s3.amazonaws.com") - trans = S3Transput(path, url, ftype) - trans.bucket_obj = client.Bucket(trans.bucket) - assert trans.upload_dir() == expected - if expected: - assert "File upload failed for" in caplog.text - else: - ''' - Checking if the file was uploaded, if the object is found load() method will return None - otherwise an exception will be raised. - ''' - assert client.Object('tesk', 'folder1/folder2/test_filer.py').load() == None - -def test_upload_directory_for_unknown_file_type(moto_boto, fs, monkeypatch, caplog): - """ - Checking whether an exception is raised when the object type is neither file or directory - If the exception is raised, an error message will be logged. 
- """ - monkeypatch.setattr(os.path, 'isfile', lambda _:False) - fs.create_file("/home/user/filer_test/text.txt") - url, ftype = "s3://tesk/folder10/folder20","DIRECTORY" - path = "/home/user/filer_test/" - trans = S3Transput(path, url, ftype) - client = boto3.resource('s3', endpoint_url="http://s3.amazonaws.com") - trans.bucket_obj = client.Bucket(trans.bucket) - trans.upload_dir() - assert "Object is neither file or directory" in caplog.text - - -@patch("tesk_core.filer.os.path.exists", return_value=1) -def test_extract_url_from_config_file(mock_path_exists): - """ - Testing extraction of endpoint url from default file location - """ - read_data = '\n'.join(["[default]", "endpoint_url = http://s3-aws-region.amazonaws.com"]) - with patch("builtins.open", mock_open(read_data=read_data), create=True) as mock_file: - mock_file.return_value.__iter__.return_value = read_data.splitlines() - #assert extract_endpoint() == "http://s3-aws-region.amazonaws.com" - #mock_file.assert_called_once_with("~/.aws/config", encoding=None) - -@patch.dict(os.environ, {"AWS_CONFIG_FILE": "~/.aws/config"}) -def test_extract_url_from_environ_variable(): - """ - Testing successful extraction of endpoint url read from file path saved on enviornment variable - """ - read_data = '\n'.join(["[default]","endpoint_url = http://s3-aws-region.amazonaws.com"]) - with patch("builtins.open", mock_open(read_data=read_data),create=True) as mock_file: - mock_file.return_value.__iter__.return_value = read_data.splitlines() - #assert (extract_endpoint() == "http://s3-aws-region.amazonaws.com") - #mock_file.assert_called_once_with(os.environ["AWS_CONFIG_FILE"], encoding=None) diff --git a/tests/test_taskmaster.py b/tests/test_taskmaster.py deleted file mode 100644 index d780cb4b..00000000 --- a/tests/test_taskmaster.py +++ /dev/null @@ -1,133 +0,0 @@ -import json -import os -import unittest -from unittest.mock import patch -from argparse import Namespace -from tesk_core import taskmaster -from tesk_core.filer_class import Filer -from kubernetes.client.rest import ApiException -from tesk_core.taskmaster import init_pvc, PVC, run_executor,\ - generate_mounts, append_mount, dirname, run_task,newParser - - -class TaskmasterTest(unittest.TestCase): - - def setUp(self): - self.data = json.loads(open(os.path.join(os.path.dirname(__file__), "resources/inputFile.json")).read()) - self.task_name = self.data['executors'][0]['metadata']['labels']['taskmaster-name'] - taskmaster.args = Namespace( debug = False, file = None, filer_version = 'v0.1.9', json = 'json' - ,namespace='default', poll_interval=5, state_file='/tmp/.teskstate' - , localKubeConfig=False, pull_policy_always=False - , filer_name= "eu.gcr.io/tes-wes/filer", pod_timeout = 240 - ) - self.filer = Filer(self.task_name + '-filer', self.data, taskmaster.args.filer_name, - taskmaster.args.filer_version, taskmaster.args.pull_policy_always) - self.pvc = PVC(self.task_name + '-pvc', self.data['resources']['disk_gb'], taskmaster.args.namespace) - - taskmaster.created_jobs = [] - - @patch("tesk_core.taskmaster.PVC.create") - @patch("tesk_core.taskmaster.Job.run_to_completion", return_value="Complete") - @patch("tesk_core.taskmaster.logger") - def test_pvc_creation(self, mock_logger, mock_run_to_compl, mock_pvc_create): - """ - Testing to check if the PVC volume was created successfully - """ - self.assertIsInstance(init_pvc(self.data, self.filer), PVC) - - @patch("kubernetes.client.CoreV1Api.read_namespaced_persistent_volume_claim") - 
@patch("kubernetes.client.CoreV1Api.create_namespaced_persistent_volume_claim", side_effect=ApiException(status=409, - reason="conflict")) - def test_create_pvc_check_for_conflict_exception(self, mock_create_namespaced_pvc, - mock_read_namespaced_pvc): - self.pvc.create() - mock_read_namespaced_pvc.assert_called_once() - - @patch("kubernetes.client.CoreV1Api.create_namespaced_persistent_volume_claim", side_effect=ApiException(status=500, - reason="Random error")) - def test_create_pvc_check_for_other_exceptions(self, mock_create_namespaced_pvc): - with self.assertRaises(ApiException): - self.pvc.create() - - - @patch("tesk_core.taskmaster.PVC.delete") - @patch("tesk_core.taskmaster.PVC.create") - @patch("tesk_core.taskmaster.Job.run_to_completion", return_value="error") - @patch("tesk_core.taskmaster.logger") - def test_pvc_failure(self, mock_logger, run_to_compl, mock_pvc_create, mock_pvc_delete): - """ - Testcase for finding if the PVC creation failed with exit 0 - """ - - self.assertRaises(SystemExit, init_pvc, self.data, self.filer) - - @patch("tesk_core.taskmaster.PVC.delete") - @patch("tesk_core.taskmaster.Job.delete") - @patch("tesk_core.taskmaster.Job.run_to_completion", return_value="Error") - @patch("tesk_core.taskmaster.logger") - def test_run_executor_failure(self, mock_logger, mock_run_to_compl, mock_job_delete, mock_pvc_delete): - """ - - """ - self.assertRaises(SystemExit, run_executor, self.data['executors'][0],taskmaster.args.namespace) - - @patch("tesk_core.taskmaster.PVC") - @patch("tesk_core.taskmaster.Job.run_to_completion", return_value="Complete") - @patch("tesk_core.taskmaster.logger") - def test_run_executor_complete(self, mock_logger, mock_run_to_compl, mock_pvc): - """ - - """ - self.assertEqual(run_executor(self.data['executors'][0], taskmaster.args.namespace,mock_pvc),None) - - - - @patch("tesk_core.taskmaster.logger") - def test_generate_mount(self, mock_logger): - """ - - """ - self.assertIsInstance(generate_mounts(self.data, self.pvc),list) - - @patch("tesk_core.taskmaster.logger") - def test_append_mount(self, mock_logger): - """ - - """ - volume_mounts = [] - task_volume_name = 'task-volume' - for aninput in self.data['inputs']: - dirnm = dirname(aninput) - append_mount(volume_mounts, task_volume_name, dirnm, self.pvc) - self.assertEqual(volume_mounts,[{'name': task_volume_name, 'mountPath': '/some/volume', 'subPath': 'dir0'}]) - - - @patch('tesk_core.taskmaster.logger') - @patch('tesk_core.taskmaster.PVC.create') - @patch('tesk_core.taskmaster.PVC.delete') - @patch('tesk_core.taskmaster.Job.run_to_completion', return_value='Complete' ) - def test_run_task(self, mock_job, mock_pvc_create, mock_pvc_delete, mock_logger): - """ - - """ - run_task(self.data, taskmaster.args.filer_name, taskmaster.args.filer_version) - - def test_localKubeConfig(self): - """ - - """ - parser = newParser() - args = parser.parse_args(['json', '--localKubeConfig']) - self.assertEqual(args - , Namespace(debug=False, file=None, filer_version='v0.1.9', json='json', namespace='default', - poll_interval=5, state_file='/tmp/.teskstate' - , localKubeConfig=True - , pull_policy_always=False - , filer_name='eu.gcr.io/tes-wes/filer' - , pod_timeout=240 - ) - ) - -if __name__ == "__main__": - #import sys;sys.argv = ['', 'Test.testName'] - unittest.main() \ No newline at end of file diff --git a/tests/test_unit/test_services/FilerClassTest.py b/tests/test_unit/test_services/FilerClassTest.py new file mode 100644 index 00000000..9ba1f351 --- /dev/null +++ 
b/tests/test_unit/test_services/FilerClassTest.py @@ -0,0 +1,138 @@ +import os +import unittest +from unittest.mock import patch + +from tesk.services import path +from tesk.services.filer_class import Filer +from tesk.services.utils import pprint + + +@patch('tesk.services.path.HOST_BASE_PATH', '/home/tfga/workspace/cwl-tes') +@patch('tesk.services.path.CONTAINER_BASE_PATH', '/transfer') +@patch('tesk.services.path.TRANSFER_PVC_NAME', 'transfer-pvc') +@patch.dict( + os.environ, + { + 'AWS_SHARED_CREDENTIALS_FILE': '/aws/credentials', + 'AWS_CONFIG_FILE': '/aws/config', + }, +) +class FilerClassTest_env(unittest.TestCase): + def test_env_vars(self): + f = Filer('name', {'a': 1}) + f.set_backoffLimit(10) + + pprint(f.spec) + + self.assertEqual( + f.getEnv(), + [ + {'name': 'JSON_INPUT', 'value': '{"a": 1}'}, + {'name': 'HOST_BASE_PATH', 'value': '/home/tfga/workspace/cwl-tes'}, + {'name': 'CONTAINER_BASE_PATH', 'value': '/transfer'}, + {'name': 'AWS_CONFIG_FILE', 'value': '/aws/config'}, + {'name': 'AWS_SHARED_CREDENTIALS_FILE', 'value': '/aws/credentials'}, + ], + ) + self.assertEqual(f.spec['spec']['backoffLimit'], 10) + + def test_mounts(self): + """ + kind: Pod + apiVersion: v1 + metadata: + name: tfga-pod + spec: + containers: + - name: tfga-container + image: eu.gcr.io/tes-wes/filer:testing + volumeMounts: + - mountPath: /transfer + name: transfer-volume + volumes: + - name: transfer-volume + hostPath: + path: /transferAtNode + # persistentVolumeClaim: + # claimName: task-pv-claim + """ + + f = Filer('name', {'a': 1}) + + pprint(f.spec) + + pprint(f.getVolumeMounts()) + + self.assertEqual( + f.getVolumeMounts(), + [ + { + 'name': 'transfer-volume', + 'mountPath': path.CONTAINER_BASE_PATH, + }, + {'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True}, + ], + ) + + self.assertEqual( + f.getVolumes(), + [ + { + 'name': 'transfer-volume', + 'persistentVolumeClaim': {'claimName': 'transfer-pvc'}, + }, + { + 'name': 's3-conf', + 'secret': { + 'secretName': 'aws-secret', + 'items': [ + {'key': 'credentials', 'path': 'credentials'}, + {'key': 'config', 'path': 'config'}, + ], + 'optional': True, + }, + }, + ], + ) + + +class FilerClassTest_no_env(unittest.TestCase): + def test_mounts_file_disabled(self): + f = Filer('name', {'a': 1}) + + pprint(f.spec) + + pprint(f.getVolumeMounts()) + + self.assertEqual( + f.getVolumeMounts(), + [{'mountPath': '/aws', 'name': 's3-conf', 'readOnly': True}], + ) + self.assertEqual( + f.getVolumes(), + [ + { + 'name': 's3-conf', + 'secret': { + 'secretName': 'aws-secret', + 'items': [ + {'key': 'credentials', 'path': 'credentials'}, + {'key': 'config', 'path': 'config'}, + ], + 'optional': True, + }, + } + ], + ) + + def test_image_pull_policy(self): + f = Filer('name', {'a': 1}) + self.assertEqual(f.getImagePullPolicy(), 'IfNotPresent') + + f = Filer('name', {'a': 1}, pullPolicyAlways=True) + self.assertEqual(f.getImagePullPolicy(), 'Always') + + +if __name__ == '__main__': + # import sys;sys.argv = ['', 'Test.testName'] + unittest.main() diff --git a/tests/test_unit/test_services/TaskMasterTest.py b/tests/test_unit/test_services/TaskMasterTest.py new file mode 100644 index 00000000..b8c93a65 --- /dev/null +++ b/tests/test_unit/test_services/TaskMasterTest.py @@ -0,0 +1,93 @@ +import json +import logging +import unittest +from argparse import Namespace +from unittest.mock import patch + +from tesk.services.taskmaster import newLogger, newParser, run_task + + +def 
pvcCreateMock(self): + print('[mock] Creating PVC...') + + +def pvcDeleteMock(self): + print('[mock] Deleting PVC...') + + +def jobRunToCompletionMock(job, b, c): + print(f"[mock] Creating job '{job.name}'...") + return 'Complete' + + +class ParserTest(unittest.TestCase): + def test_defaults(self): + parser = newParser() + + args = parser.parse_args(['json']) + + print(args) + + self.assertEqual( + args, + Namespace( + debug=False, + file=None, + filer_version='v0.1.9', + json='json', + namespace='default', + poll_interval=5, + state_file='/tmp/.teskstate', + localKubeConfig=False, + pull_policy_always=False, + ), + ) + + def test_localKubeConfig(self): + parser = newParser() + + args = parser.parse_args(['json', '--localKubeConfig']) + + print(args) + + self.assertEqual( + args, + Namespace( + debug=False, + file=None, + filer_version='v0.1.9', + json='json', + namespace='default', + poll_interval=5, + state_file='/tmp/.teskstate', + localKubeConfig=True, + pull_policy_always=False, + ), + ) + + def test_pullPolicyAlways(self): + parser = newParser() + + self.assertEqual(parser.parse_args(['json']).pull_policy_always, False) + self.assertEqual( + parser.parse_args(['json', '--pull-policy-always']).pull_policy_always, True + ) + + @patch( + 'tesk.services.taskmaster.args', + Namespace(debug=True, namespace='default', pull_policy_always=True), + ) + @patch('tesk.services.taskmaster.logger', newLogger(logging.DEBUG)) + @patch('tesk.services.taskmaster.PVC.create', pvcCreateMock) + @patch('tesk.services.taskmaster.PVC.delete', pvcDeleteMock) + @patch('tesk.services.taskmaster.Job.run_to_completion', jobRunToCompletionMock) + def test_run_task(self): + with open('tests/resources/inputFile.json') as fh: + data = json.load(fh) + + run_task(data, 'filer_version') + + +if __name__ == '__main__': + # import sys;sys.argv = ['', 'Test.testName'] + unittest.main() diff --git a/tests/test_unit/test_services/assertThrows.py b/tests/test_unit/test_services/assertThrows.py new file mode 100644 index 00000000..7c9a2cc8 --- /dev/null +++ b/tests/test_unit/test_services/assertThrows.py @@ -0,0 +1,7 @@ +class AssertThrowsMixin: + def assertThrows(self, func, exceptionClass, errorMessage=None): + with self.assertRaises(exceptionClass) as cm: + func() + + if errorMessage: + self.assertEqual(str(cm.exception), errorMessage) diff --git a/tests/resources/copyDirTest/src/3.txt b/tests/test_unit/test_services/resources/copyDirTest/dst1/3.txt similarity index 100% rename from tests/resources/copyDirTest/src/3.txt rename to tests/test_unit/test_services/resources/copyDirTest/dst1/3.txt diff --git a/tests/resources/copyDirTest/src/a/1.txt b/tests/test_unit/test_services/resources/copyDirTest/dst1/a/1.txt similarity index 100% rename from tests/resources/copyDirTest/src/a/1.txt rename to tests/test_unit/test_services/resources/copyDirTest/dst1/a/1.txt diff --git a/tests/resources/copyDirTest/src/a/2.txt b/tests/test_unit/test_services/resources/copyDirTest/dst1/a/2.txt similarity index 100% rename from tests/resources/copyDirTest/src/a/2.txt rename to tests/test_unit/test_services/resources/copyDirTest/dst1/a/2.txt diff --git a/src/tesk_core/__init__.py b/tests/test_unit/test_services/resources/copyDirTest/dst2/3.txt similarity index 100% rename from src/tesk_core/__init__.py rename to tests/test_unit/test_services/resources/copyDirTest/dst2/3.txt diff --git a/tests/test_unit/test_services/resources/copyDirTest/dst2/a/1.txt b/tests/test_unit/test_services/resources/copyDirTest/dst2/a/1.txt new file mode 100644 index 
00000000..e69de29b diff --git a/tests/test_unit/test_services/resources/copyDirTest/dst2/a/2.txt b/tests/test_unit/test_services/resources/copyDirTest/dst2/a/2.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_unit/test_services/resources/copyDirTest/src/3.txt b/tests/test_unit/test_services/resources/copyDirTest/src/3.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_unit/test_services/resources/copyDirTest/src/a/1.txt b/tests/test_unit/test_services/resources/copyDirTest/src/a/1.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_unit/test_services/resources/copyDirTest/src/a/2.txt b/tests/test_unit/test_services/resources/copyDirTest/src/a/2.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/resources/inputFile.json b/tests/test_unit/test_services/resources/inputFile.json similarity index 100% rename from tests/resources/inputFile.json rename to tests/test_unit/test_services/resources/inputFile.json diff --git a/tests/resources/test_config b/tests/test_unit/test_services/resources/test_config similarity index 100% rename from tests/resources/test_config rename to tests/test_unit/test_services/resources/test_config diff --git a/tests/test_unit/test_services/test_filer.py b/tests/test_unit/test_services/test_filer.py new file mode 100644 index 00000000..729a2591 --- /dev/null +++ b/tests/test_unit/test_services/test_filer.py @@ -0,0 +1,260 @@ +import logging +import os +import unittest +from io import StringIO +from unittest.mock import patch + +from fs.opener import open_fs + +from tesk.services.exceptions import ( + FileProtocolDisabled, + InvalidHostPath, + UnknownProtocol, +) +from tesk.services.filer import ( + FileTransput, + FTPTransput, + HTTPTransput, + copyDir, + ftp_check_directory, + getPath, + logConfig, + newTransput, + process_file, + subfolders_in, +) +from tesk.services.filer_s3 import S3Transput +from tesk.services.path import containerPath +from tests.test_unit.test_services.assertThrows import AssertThrowsMixin + + +def getTree(rootDir): + strio = StringIO() + with open_fs(rootDir) as dst1_fs: + dst1_fs.tree(file=strio) + treeTxt = strio.getvalue() + strio.close() + return treeTxt + + +def stripLines(txt): + return '\n'.join([line.strip() for line in txt.splitlines()[1:]]) + + +@patch('tesk.services.path.HOST_BASE_PATH', '/home/tfga/workspace/cwl-tes') +@patch('tesk.services.path.CONTAINER_BASE_PATH', '/transfer') +class FilerTest(unittest.TestCase, AssertThrowsMixin): + @classmethod + def setUpClass(cls): + logConfig(logging.DEBUG) # Doesn't work... 
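+ + # Note: the tests below rely on the patched path mapping above (illustration + # only): with HOST_BASE_PATH patched to '/home/tfga/workspace/cwl-tes' and + # CONTAINER_BASE_PATH patched to '/transfer', process_file() is expected to + # rewrite 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5' into the + # container-side path '/transfer/tmphrtip1o8/md5'.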
+ + @patch('tesk.services.filer.copyDir') + @patch('tesk.services.filer.shutil.copy') + def test_download_file(self, copyMock, copyDirMock): + filedata = { + 'url': 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5', + 'path': '/var/lib/cwl/stgda974802-fa81-4f0b-8fe4-341d5655af4b/md5', + 'type': 'FILE', # File = 'FILE' + # Directory = 'DIRECTORY' + 'name': 'md5', + 'description': 'cwl_input:md5', + } + + process_file('inputs', filedata) + + copyDirMock.assert_not_called() + + copyMock.assert_called_once_with( + '/transfer/tmphrtip1o8/md5', + '/var/lib/cwl/stgda974802-fa81-4f0b-' '8fe4-341d5655af4b/md5', + ) + + @patch('tesk.services.filer.copyDir') + @patch('tesk.services.filer.shutil.copy') + def test_download_dir(self, copyMock, copyDirMock): + filedata = { + 'url': 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/', + 'path': '/TclSZU', + 'type': 'DIRECTORY', + 'name': 'workdir', + } + + process_file('inputs', filedata) + + copyMock.assert_not_called() + + copyDirMock.assert_called_once_with('/transfer/tmphrtip1o8', '/TclSZU') + + @patch('tesk.services.filer.copyDir') + @patch('tesk.services.filer.shutil.copy') + def test_upload_dir(self, copyMock, copyDirMock): + filedata = { + 'url': 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/', + 'path': '/TclSZU', + 'type': 'DIRECTORY', + 'name': 'workdir', + } + + process_file('outputs', filedata) + + copyMock.assert_not_called() + + copyDirMock.assert_called_once_with('/TclSZU', '/transfer/tmphrtip1o8') + + @patch('tesk.services.filer.copyDir') + @patch('tesk.services.filer.copyFile') + def test_upload_file(self, copyFileMock, copyDirMock): + filedata = { + 'url': 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5', + 'path': '/TclSZU/md5', + 'type': 'FILE', + 'name': 'stdout', + } + + process_file('outputs', filedata) + + copyDirMock.assert_not_called() + + copyFileMock.assert_called_once_with('/TclSZU/md5', '/transfer/tmphrtip1o8/md5') + + @patch('tesk.services.filer.copyDir') + @patch('tesk.services.filer.copyFile') + def test_upload_file_glob(self, copyFileMock, copyDirMock): + filedata = { + 'url': 'file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5*', + 'path': '/TclSZU/md5*', + 'type': 'FILE', + 'name': 'stdout', + } + + process_file('outputs', filedata) + + copyDirMock.assert_not_called() + + copyFileMock.assert_called_once_with( + '/TclSZU/md5*', '/transfer/tmphrtip1o8/md5*' + ) + + def test_copyDir(self): + def rmDir(d): + os.system(f'rm -r {d}') + + baseDir = 'tests/test_unit/test_services/resources/copyDirTest/' + src = os.path.join(baseDir, 'src') + dst1 = os.path.join(baseDir, 'dst1') + dst2 = os.path.join(baseDir, 'dst2') + + rmDir(dst1) + rmDir(dst2) + + self.assertTrue(os.path.exists(src)) # src should exist + self.assertFalse(os.path.exists(dst1)) # dst1 shouldn't + self.assertFalse(os.path.exists(dst2)) # dst2 shouldn't + + # Copying to existing dst --------------------------------------------- + # Let's create dst1 + os.mkdir(dst1) + self.assertTrue(os.path.exists(dst1)) # Now dst1 should exist + + # Let's try to copy + copyDir(src, dst1) + + self.assertEqual( + getTree(dst1), + stripLines(""" + |-- a + | |-- 1.txt + | `-- 2.txt + `-- 3.txt + """), + ) + + # Copying to non-existing dst ----------------------------------------- + self.assertFalse(os.path.exists(dst2)) # dst2 should not exist + + # Let's try to copy + copyDir(src, dst2) + + self.assertEqual( + getTree(dst2), + stripLines(""" + |-- a + | |-- 1.txt + | `-- 2.txt + `-- 3.txt + """), + ) + + def test_getPath(self): + self.assertEqual( + 
getPath('file:///home/tfga/workspace/cwl-tes/tmphrtip1o8/md5'), + '/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5', + ) + + def test_getPathNoScheme(self): + self.assertEqual( + getPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5'), + '/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5', + ) + + self.assertEqual( + containerPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5'), + '/transfer/tmphrtip1o8/md5', + ) + + def test_containerPath(self): + self.assertEqual( + containerPath('/home/tfga/workspace/cwl-tes/tmphrtip1o8/md5'), + '/transfer/tmphrtip1o8/md5', + ) + + # What happens if 'path' is not a descendant of HOST_BASE_PATH? + self.assertThrows( + lambda: containerPath('/someOtherFolder'), + InvalidHostPath, + "'/someOtherFolder' is not a descendant of " + "'HOST_BASE_PATH' (/home/tfga/workspace/cwl-tes)", + ) + + def test_newTransput(self): + self.assertEqual(newTransput('ftp', 'test.com'), FTPTransput) + self.assertEqual(newTransput('http', 'test.com'), HTTPTransput) + self.assertEqual(newTransput('https', 'test.com'), HTTPTransput) + self.assertEqual(newTransput('file', '/home/tfga/workspace/'), FileTransput) + self.assertEqual(newTransput('s3', '/home/tfga/workspace/'), S3Transput) + self.assertEqual(newTransput('http', 's3.aws.com'), HTTPTransput) + + self.assertThrows( + lambda: newTransput('svn', 'example.com'), + UnknownProtocol, + "Unknown protocol: 'svn'", + ) + + @patch('ftplib.FTP') + def test_ftp_check_directory(self, conn): + """Ensure that when the path provided is an existing directory, the + return value is 0.""" + path = os.path.curdir + self.assertEqual(ftp_check_directory(conn, path), 0) + + def test_subfolders_in(self): + """Ensure that all the subfolders of a path are properly returned.""" + path = '/this/is/a/path' + subfldrs = ['/this', '/this/is', '/this/is/a', '/this/is/a/path'] + self.assertEqual(subfolders_in(path), subfldrs) + + +class FilerTest_no_env(unittest.TestCase, AssertThrowsMixin): + def test_newTransput_file_disabled(self): + self.assertThrows( + lambda: newTransput('file', '/home/user/test'), + FileProtocolDisabled, + "'file:' protocol disabled\n" + 'To enable it, both HOST_BASE_PATH and CONTAINER_BASE_PATH' + ' environment variables must be defined.', + ) + + +if __name__ == '__main__': + # import sys;sys.argv = ['', 'Test.testName'] + unittest.main() diff --git a/tests/test_unit/test_services/test_filer_ftp_pytest.py b/tests/test_unit/test_services/test_filer_ftp_pytest.py new file mode 100755 index 00000000..c13a88de --- /dev/null +++ b/tests/test_unit/test_services/test_filer_ftp_pytest.py @@ -0,0 +1,213 @@ +"""Tests for 'filer.py' FTP functionalities using 'pytest'.""" + +import ftplib +import os +from unittest import mock + +from tesk.services.filer import ( + FTPTransput, + Type, + ftp_check_directory, + ftp_download_file, + ftp_login, + ftp_make_dirs, + ftp_upload_file, +) + + +def test_ftp_login(mocker): + """Ensure ftp_login detects ftp credentials and properly calls + ftplib.FTP.login.""" + + conn = mocker.patch('ftplib.FTP') + mock_login = mocker.patch('ftplib.FTP.login') + with mock.patch.dict( + 'os.environ', + { + 'TESK_FTP_USERNAME': 'test', + 'TESK_FTP_PASSWORD': 'test_pass', + }, + ): + ftp_login(conn, None, None) + mock_login.assert_called_with('test', 'test_pass') + + +def test_ftp_upload_file_error(mocker, caplog): + """Ensure that upon upload error, ftp_upload_file behaves correctly.""" + + conn = mocker.patch('ftplib.FTP') + mocker.patch('ftplib.FTP.storbinary', side_effect=ftplib.error_reply) + assert ftp_upload_file(conn, 
__file__, '/home/tesk/test_copy.py') == 1 + assert 'Unable to upload file' in caplog.text + + +def test_ftp_download_file_error(mocker, caplog): + """Ensure that upon download error, ftp_download_file behaves correctly.""" + + conn = mocker.patch('ftplib.FTP') + mocker.patch('ftplib.FTP.retrbinary', side_effect=ftplib.error_perm) + with mock.patch('builtins.open', mock.mock_open(), create=False): + assert ftp_download_file(conn, 'test_filer_ftp_pytest.py', 'test_copy.py') == 1 + assert 'Unable to download file' in caplog.text + + +def test_ftp_download_file_success(mocker, caplog): + """Ensure that upon successful download, the local destination file has + been created.""" + + conn = mocker.patch('ftplib.FTP') + mock_retrbin = mocker.patch('ftplib.FTP.retrbinary') + with mock.patch('builtins.open', mock.mock_open(), create=False) as m: + assert ftp_download_file(conn, 'test_filer_ftp_pytest.py', 'test_copy.py') == 0 + + mock_retrbin.assert_called_with('RETR ' + 'test_filer_ftp_pytest.py', mock.ANY) + + m.assert_called_with('test_copy.py', 'w+b') + + # Since we want to avoid file creation in testing and we're using + # 'create=False', we cannot check whether a file exists or not (but + # it's not really necessary since we can assert that the necessary + # functions have been invoked.) + # assert os.path.exists('test_copy.py') + + +def test_ftp_upload_dir(mocker, fs, ftpserver): + """Check whether the upload of a directory through FTP completes + successfully.""" + + # Fake local nested directories with files + fs.create_dir('dir1') + fs.create_dir('dir1/dir2') + fs.create_file('dir1/file1', contents='this is random') + fs.create_file('dir1/dir2/file2', contents='not really') + fs.create_file('dir1/dir2/file4.txt', contents='took me a while') + + login_dict = ftpserver.get_login_data() + + conn = ftplib.FTP() + + mocker.patch( + 'ftplib.FTP.connect', + side_effect=conn.connect(host=login_dict['host'], port=login_dict['port']), + ) + mocker.patch( + 'ftplib.FTP.login', + side_effect=conn.login(login_dict['user'], login_dict['passwd']), + ) + mocker.patch('ftplib.FTP.pwd', side_effect=conn.pwd) + mocker.patch('ftplib.FTP.cwd', side_effect=conn.cwd) + mocker.patch('ftplib.FTP.mkd', side_effect=conn.mkd) + mock_storbinary = mocker.patch('ftplib.FTP.storbinary') + + ftp_obj = FTPTransput( + 'dir1', 'ftp://' + login_dict['host'] + '/dir1', Type.Directory, ftp_conn=conn + ) + + ftp_obj.upload_dir() + + # We use mock.ANY since the 2nd argument of the 'ftplib.FTP.storbinary' is + # a file object and we can't have the same between the original and the + # mock calls + assert sorted(mock_storbinary.mock_calls) == sorted( + [ + mock.call('STOR /' + '/dir1/file1', mock.ANY), + mock.call('STOR /' + '/dir1/dir2/file2', mock.ANY), + mock.call('STOR /' + '/dir1/dir2/file4.txt', mock.ANY), + ] + ) + + +def test_ftp_download_dir(mocker, tmpdir, tmp_path, ftpserver): + """Check whether the download of a directory through FTP completes + successfully.""" + + # Temporary nested directories with files + file1 = tmpdir.mkdir('dir1').join('file1') + file1.write('this is random') + file2 = tmpdir.mkdir('dir1/dir2').join('file2') + file2.write('not really') + file3 = tmpdir.join('dir1/dir2/file3') + file3.write('took me a while') + + # Temporary folder for download + tmpdir.mkdir('downloads') + + # Populate the server with the above files to later download + ftpserver.put_files({'src': f'{str(tmp_path)}/dir1/file1', 'dest': 'remote1/file1'}) + ftpserver.put_files( + { + 'src': f'{str(tmp_path)}/dir1/dir2/file2', + 
'dest': 'remote1/remote2/file2', + } + ) + ftpserver.put_files( + { + 'src': f'{str(tmp_path)}/dir1/dir2/file3', + 'dest': 'remote1/remote2/file3', + } + ) + + login_dict = ftpserver.get_login_data() + + conn = ftplib.FTP() + conn.connect(host=login_dict['host'], port=login_dict['port']) + conn.login(login_dict['user'], login_dict['passwd']) + + mock_retrbinary = mocker.patch('ftplib.FTP.retrbinary', side_effect=conn.retrbinary) + + ftp_obj = FTPTransput( + f'{str(tmp_path)}downloads', + 'ftp://' + login_dict['host'], + Type.Directory, + ftp_conn=conn, + ) + + ftp_obj.download_dir() + + # We use mock.ANY since the 2nd argument of 'ftplib.FTP.retrbinary' is + # a per-file write callback and we can't have the same between the original + # and the mock calls + assert sorted(mock_retrbinary.mock_calls) == sorted( + [ + mock.call('RETR ' + '/remote1/file1', mock.ANY), + mock.call('RETR ' + '/remote1/remote2/file2', mock.ANY), + mock.call('RETR ' + '/remote1/remote2/file3', mock.ANY), + ] + ) + + assert os.path.exists(f'{str(tmp_path)}downloads/remote1/file1') + assert os.path.exists(f'{str(tmp_path)}downloads/remote1/remote2/file2') + assert os.path.exists(f'{str(tmp_path)}downloads/remote1/remote2/file3') + + +def test_ftp_check_directory_error(mocker, caplog): + """Ensure ftp_check_directory_error creates the proper error log + message in case of error.""" + + conn = mocker.patch('ftplib.FTP') + mocker.patch('ftplib.FTP.cwd', side_effect=ftplib.error_reply) + assert ftp_check_directory(conn, '/folder/file') == 1 + assert 'Could not check if path' in caplog.text + + +def test_ftp_make_dirs(mocker): + """In case of existing directory, exit with 0.""" + + conn = mocker.patch('ftplib.FTP') + assert ftp_make_dirs(conn, os.curdir) == 0 + + +def test_ftp_make_dirs_error(mocker, ftpserver, caplog): + """Ensure in case of 'ftplib.error_reply', both the return value + and the error message are correct.""" + + login_dict = ftpserver.get_login_data() + + conn = ftplib.FTP() + conn.connect(host=login_dict['host'], port=login_dict['port']) + conn.login(login_dict['user'], login_dict['passwd']) + + mocker.patch('ftplib.FTP.cwd', side_effect=ftplib.error_reply) + + assert ftp_make_dirs(conn, 'dir1') == 1 + assert 'Unable to create directory' in caplog.text diff --git a/tests/test_unit/test_services/test_filer_general_pytest.py b/tests/test_unit/test_services/test_filer_general_pytest.py new file mode 100755 index 00000000..6a6c2002 --- /dev/null +++ b/tests/test_unit/test_services/test_filer_general_pytest.py @@ -0,0 +1,62 @@ +"""Tests for 'filer.py' general purpose functionalities using 'pytest'.""" + +# Note: In tests such as 'test_process_file_with_scheme' or +# 'test_copyContent_dir', only the outer function of each unit under testing is +# checked, since mocking a function apparently affects its output. Maybe +# there's a way to bypass that issue and test deeper down the call tree. + +import pytest + +from tesk.services.filer import FileProtocolDisabled, copyContent, process_file + + +def test_process_file_no_scheme(caplog): + """Ensure that when process_file is called without a scheme and no + 'HOST_BASE_PATH', 'CONTAINER_BASE_PATH' environment variables + set, the appropriate error is raised.""" + + filedata = {'url': 'www.foo.bar'} + + with pytest.raises(FileProtocolDisabled): + process_file('upload', filedata) + + +def test_process_file_with_scheme(mocker): + """Ensure expected behaviour when 'process_file' is called with scheme. 
+ In this test example, scheme is 'http', filedata:type is 'FILE' and + ttype is 'inputs'.""" + + filedata = { + 'url': 'http://www.foo.bar', + 'path': '.', + 'type': 'FILE', + } + mock_new_Trans = mocker.patch('tesk.services.filer.newTransput') + process_file('inputs', filedata) + + mock_new_Trans.assert_called_once_with('http', 'www.foo.bar') + + +def test_process_file_from_content(tmpdir, tmp_path): + """Ensure 'process_file' behaves correctly when the file contents + should be drawn from the filedata content field.""" + + # test_file = tmpdir.join('testfile') + filedata = { + 'path': f'{str(tmp_path)}/testfile', + 'content': 'This is some test content', + } + process_file('inputs', filedata) + + file_path = f'{str(tmp_path)}/testfile' + with open(file_path) as file: + assert file.read() == filedata['content'] + + +def test_copyContent_dir(mocker): + """Ensure that 'os.listdir' is called when 'copyContent' is called.""" + + mock_os_listdir = mocker.patch('os.listdir') + copyContent('.', '/test_dst') + + mock_os_listdir.assert_called_once_with('.') diff --git a/tests/test_unit/test_services/test_filer_http_pytest.py b/tests/test_unit/test_services/test_filer_http_pytest.py new file mode 100755 index 00000000..b4c7c5db --- /dev/null +++ b/tests/test_unit/test_services/test_filer_http_pytest.py @@ -0,0 +1,115 @@ +"""Tests for 'filer.py' HTTP functionalities using 'pytest'.""" + +from unittest import mock + +from requests import Response + +from tesk.services.filer import HTTPTransput, Type + +PATH_DOWN = 'test_download_file.txt' +PATH_UP = 'tests/test_unit/test_services/test_filer_http_pytest.py' +SUCCESS = 200 +FAIL = 300 +URL = 'http://www.foo.bar' +FTYPE = 'FILE' + +resp = Response() +resp._content = b'{ "foo" : "bar" }' + + +def test_download_file(mocker): + """Ensure a file gets properly downloaded.""" + + resp.status_code = SUCCESS + http_obj = HTTPTransput(PATH_DOWN, URL, FTYPE) + mocker.patch('requests.get', return_value=resp) + + with mock.patch( + 'builtins.open', mock.mock_open(read_data=resp._content), create=False + ): + assert http_obj.download_file() == 0 + with open(PATH_DOWN, 'rb') as file: + assert file.read() == resp._content + + +def test_download_file_error(mocker, caplog): + """Ensure download error returns the correct value and log message.""" + + resp.status_code = FAIL + http_obj = HTTPTransput(PATH_DOWN, URL, FTYPE) + mocker.patch('requests.get', return_value=resp) + + assert http_obj.download_file() == 1 + assert f'Got status code: {FAIL}' in caplog.text + + +def test_upload_file(mocker): + """Ensure a file gets properly uploaded.""" + + resp.status_code = SUCCESS + http_obj = HTTPTransput(PATH_UP, URL, FTYPE) + mocker.patch('requests.put', return_value=resp) + + assert http_obj.upload_file() == 0 + + +def test_upload_file_error(mocker, caplog): + """Ensure upload error returns the correct value and log message.""" + + resp.status_code = FAIL + http_obj = HTTPTransput(PATH_UP, URL, FTYPE) + mocker.patch('requests.put', return_value=resp) + + assert http_obj.upload_file() == 1 + assert f'Got status code: {FAIL}' in caplog.text + + +def test_upload_dir(mocker, fs): + """Ensure that each file inside nested directories gets successfully + uploaded.""" + + # Tele2 Speedtest Service, free upload/download test server + endpoint = 'http://speedtest.tele2.net/upload.php' + resp.status_code = 200 + + fs.create_dir('dir1') + fs.create_dir('dir1/dir2') + fs.create_file('dir1/file1', contents='this is random') + fs.create_file('dir1/dir2/file2', contents='not really') + 
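# (pyfakefs 'fs' fixture assumed) The directories and files created here + # exist only in an in-memory fake filesystem; the PUT calls asserted below + # mirror this tree without touching the real disk. + 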
fs.create_file('dir1/dir2/file4.txt', contents='took me a while') + + mock_put = mocker.patch('requests.put', return_value=resp) + + http_obj = HTTPTransput('dir1', f'{endpoint}/dir1', Type.Directory) + + assert http_obj.upload_dir() == 0 + + # We employ the 'list.sorted' trick to ignore calls order because the + # 'assert_has_calls' method would not work in this setting + assert sorted(mock_put.mock_calls) == sorted( + [ + mock.call(f'{endpoint}/dir1/dir2/file2', data='not really', timeout='100'), + mock.call( + f'{endpoint}/dir1/dir2/file4.txt', data='took me a while', timeout='100' + ), + mock.call(f'{endpoint}/dir1/file1', data='this is random', timeout='100'), + ] + ) + + +def test_upload_dir_error(mocker, fs): + """Ensure 'upload_dir' error returns the correct value.""" + + fs.create_dir('dir2') + + # Tele2 Speedtest Service, free upload/download test server + endpoint1 = 'http://speedtest.tele2.net/upload.php' + + # Non-existent endpoint + endpoint2 = 'http://somerandomendpoint.fail' + + http_obj1 = HTTPTransput('dir1', f'{endpoint1}/dir1', Type.Directory) + + http_obj2 = HTTPTransput('dir2', f'{endpoint2}/dir1', Type.Directory) + + assert http_obj1.upload_dir() == 1 + assert http_obj2.upload_dir() == 1 diff --git a/tests/test_unit/test_services/test_job.py b/tests/test_unit/test_services/test_job.py new file mode 100644 index 00000000..66e62e30 --- /dev/null +++ b/tests/test_unit/test_services/test_job.py @@ -0,0 +1,437 @@ +import datetime +import json +import os +import unittest +from argparse import Namespace +from datetime import timezone +from unittest.mock import patch + +from dateutil.tz import tzutc +from kubernetes.client.rest import ApiException + +from tesk.services import taskmaster +from tesk.services.job import Job + +START_TIME = datetime.datetime.now(timezone.utc) + + +class MockObject: + def __init__(self, dictionary): + for k, v in dictionary.items(): + self.__dict__[k] = MockObject(v) if isinstance(v, dict) else v + + +def read_namespaced_job_error(name, namespace): + return_value = { + 'active': 1, + 'completion_time': None, + 'conditions': None, + 'failed': None, + 'start_time': START_TIME - datetime.timedelta(minutes=10), + 'succeeded': None, + } + return MockObject({'status': return_value}) + + +def read_namespaced_job_pending(diff_time=5): + return_value = { + 'active': 1, + 'completion_time': None, + 'conditions': None, + 'failed': None, + 'start_time': START_TIME - datetime.timedelta(minutes=diff_time), + 'succeeded': None, + } + return MockObject({'status': return_value}) + + +def read_namespaced_job_success(name, namespace): + return_value = { + 'active': None, + 'completion_time': datetime.datetime(2020, 7, 20, 5, 12, 42, tzinfo=tzutc()), + 'conditions': [ + MockObject( + { + 'last_probe_time': datetime.datetime( + 2020, 7, 20, 5, 12, 42, tzinfo=tzutc() + ), + 'last_transition_time': datetime.datetime( + 2020, 7, 20, 5, 12, 42, tzinfo=tzutc() + ), + 'message': None, + 'reason': None, + 'status': 'True', + 'type': 'Complete', + } + ) + ], + 'failed': None, + 'start_time': datetime.datetime(2020, 7, 20, 5, 12, 35, tzinfo=tzutc()), + 'succeeded': 1, + } + return MockObject({'status': return_value}) + + +def read_namespaced_job_running(name, namespace): + """ + If the `conditions` value is `None`, it's assumed that + the pod is running. 
Hence the value of `conditions` is + kept as `None` + """ + return_value = { + 'active': None, + 'completion_time': None, + 'conditions': None, + 'failed': None, + 'start_time': datetime.datetime(2020, 7, 20, 5, 12, 35, tzinfo=tzutc()), + 'succeeded': None, + } + return MockObject({'status': return_value}) + + +def list_namespaced_pod_error_ImagePullBackOff(diff_time=10): + return_value = { + 'status': { + 'conditions': [], + 'container_statuses': [ + MockObject( + { + 'container_id': None, + 'image': 'ubuntu_mock_test_image', + 'image_id': '', + 'last_state': { + 'running': None, + 'terminated': None, + 'waiting': None, + }, + 'name': 'task-1000-ex-00', + 'ready': False, + 'restart_count': 0, + 'state': { + 'running': None, + 'terminated': None, + 'waiting': { + 'message': 'Back-off ' + 'pulling ' + 'image ' + 'ubuntu_mock_test_image', + 'reason': 'ImagePullBackOff', + }, + }, + } + ) + ], + 'host_ip': '192.168.99.100', + 'init_container_statuses': None, + 'message': None, + 'nominated_node_name': None, + 'phase': 'Pending', + 'pod_ip': '172.17.0.5', + 'qos_class': 'BestEffort', + 'reason': None, + 'start_time': START_TIME - datetime.timedelta(minutes=diff_time), + } + } + return MockObject({'items': [MockObject(return_value)]}) + + +def list_namespaced_pod_pending_unknown_error(diff_time=5): + return_value = { + 'status': { + 'conditions': [], + 'container_statuses': [ + MockObject( + { + 'container_id': None, + 'image': 'ubuntu_mock_test_image', + 'image_id': '', + 'last_state': { + 'running': None, + 'terminated': None, + 'waiting': None, + }, + 'name': 'task-1000-ex-00', + 'ready': False, + 'restart_count': 0, + 'state': { + 'running': None, + 'terminated': None, + 'waiting': { + 'message': 'Unknown error', + 'reason': 'Unknown', + }, + }, + } + ) + ], + 'host_ip': '192.168.99.100', + 'init_container_statuses': None, + 'message': None, + 'nominated_node_name': None, + 'phase': 'Pending', + 'pod_ip': '172.17.0.5', + 'qos_class': 'BestEffort', + 'reason': None, + 'start_time': START_TIME - datetime.timedelta(minutes=diff_time), + } + } + return MockObject({'items': [MockObject(return_value)]}) + + +class JobTestCase(unittest.TestCase): + def setUp(self): + """ + Initialising + """ + file_path = os.path.join(os.path.dirname(__file__), 'resources/inputFile.json') + with open(file_path) as file: + self.data = json.load(file) + taskmaster.args = Namespace( + debug=False, + file=None, + filer_version='v0.1.9', + json='json', + namespace='default', + poll_interval=5, + state_file='/tmp/.teskstate', + localKubeConfig=False, + pull_policy_always=False, + filer_name='eu.gcr.io/tes-wes/filer', + pod_timeout=240, + ) + + def test_job(self): + """ + Testing if Job object is getting created successfully + """ + job = Job({'metadata': {'name': 'test'}}) + self.assertEqual(job.name, 'task-job') + self.assertEqual(job.namespace, 'default') + + @patch('kubernetes.client.BatchV1Api.create_namespaced_job') + @patch( + 'tesk.services.job.Job.get_status', + side_effect=[ + ('Running', True), + ('Running', True), + ('Running', True), + ('Running', True), + ('Running', True), + ('Complete', True), + ], + ) + def test_run_to_completion_success( + self, mock_get_status, mock_create_namespaced_job + ): + """ + Checking if the Job run completes successfully + """ + for executor in self.data['executors']: + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status = job.run_to_completion( + 1, taskmaster.check_cancelled, taskmaster.args.pod_timeout + ) + 
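# The mocked get_status above yields ('Running', True) five times and then + # ('Complete', True); run_to_completion is expected to keep polling until + # the non-'Running' value ends the loop. + 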
self.assertEqual(status, 'Complete') + + @patch('tesk.services.job.Job.delete') + @patch('tesk.services.taskmaster.check_cancelled', return_value=True) + @patch('kubernetes.client.BatchV1Api.create_namespaced_job') + @patch('tesk.services.job.Job.get_status', side_effect=[('Running', True)]) + def test_run_to_completion_cancelled( + self, + mock_get_status, + mock_create_namespaced_job, + mock_check_cancelled, + mock_job_delete, + ): + """ + Checking if the Job is cancelled + """ + for executor in self.data['executors']: + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status = job.run_to_completion( + taskmaster.args.poll_interval, + taskmaster.check_cancelled, + taskmaster.args.pod_timeout, + ) + self.assertEqual(status, 'Cancelled') + + @patch('tesk.services.taskmaster.check_cancelled', return_value=False) + @patch( + 'kubernetes.client.BatchV1Api.create_namespaced_job', + side_effect=ApiException(status=409, reason='conflict'), + ) + @patch('tesk.services.job.Job.get_status', side_effect=[('Complete', True)]) + @patch( + 'kubernetes.client.BatchV1Api.read_namespaced_job', + side_effect=read_namespaced_job_running, + ) + def test_run_to_completion_check_conflict_exception( + self, + mock_get_status, + mock_read_namespaced_job, + mock_check_cancelled, + mock_create_namespaced_job, + ): + """ + Checking if the Job status is complete when an ApiException of 409 is raised + """ + for executor in self.data['executors']: + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status = job.run_to_completion( + taskmaster.args.poll_interval, + taskmaster.check_cancelled, + taskmaster.args.pod_timeout, + ) + self.assertEqual(status, 'Complete') + + @patch( + 'kubernetes.client.BatchV1Api.create_namespaced_job', + side_effect=ApiException(status=500, reason='Random Exception'), + ) + def test_run_to_completion_check_other_K8_exception( + self, mock_create_namespaced_job + ): + """ + Checking if an exception is raised when the ApiException status is other + than 409 + """ + for executor in self.data['executors']: + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + with self.assertRaises(ApiException): + job.run_to_completion( + taskmaster.args.poll_interval, + taskmaster.check_cancelled, + taskmaster.args.pod_timeout, + ) + + @patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + @patch( + 'kubernetes.client.BatchV1Api.read_namespaced_job', + side_effect=read_namespaced_job_error, + ) + @patch('tesk.services.job.Job.delete') + @patch('tesk.services.taskmaster.check_cancelled', return_value=False) + @patch('kubernetes.client.BatchV1Api.create_namespaced_job') + def test_run_to_completion_error( # noqa: PLR0913 + self, + mock_create_namespaced_job, + mock_check_cancelled, + mock_job_delete, + mock_read_namespaced_job, + mock_list_namespaced_pod, + ): + """ + Testing if the job state is 'error' when the status of the pod is in pending + state and reason is ImagePullBackOff + """ + mock_list_namespaced_pod.return_value = ( + list_namespaced_pod_error_ImagePullBackOff(10) + ) + for executor in self.data['executors']: + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status = job.run_to_completion(1, taskmaster.check_cancelled, 120) + self.assertEqual(status, 'Error') + + @patch( + 'kubernetes.client.BatchV1Api.read_namespaced_job', + side_effect=read_namespaced_job_error, + ) + 
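# Stacked @patch decorators are applied bottom-up, so the decorator closest + # to the function supplies the first mock argument after 'self'. + 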
@patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + def test_get_job_status_ImagePullBackOff_error( + self, mock_list_namespaced_pod, mock_read_namespaced_job + ): + """ + Checking whether the job state is 'error' when the pod failed to start and + the reason for the pod failure is ImagePullBackOff + """ + mock_list_namespaced_pod.return_value = ( + list_namespaced_pod_error_ImagePullBackOff() + ) + executor = self.data['executors'][0] + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + job.timeout = 50 + status, all_pods_running = job.get_status(False) + self.assertEqual(status, 'Error') + + @patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + @patch( + 'kubernetes.client.BatchV1Api.read_namespaced_job', + side_effect=read_namespaced_job_success, + ) + def test_get_status_success( + self, mock_read_namespaced_job, mock_list_namespaced_pod + ): + """ + Checking if job status is complete + """ + executor = self.data['executors'][0] + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status, all_pods_running = job.get_status(False) + self.assertEqual(status, 'Complete') + + @patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + @patch( + 'kubernetes.client.BatchV1Api.read_namespaced_job', + side_effect=read_namespaced_job_running, + ) + def test_get_status_running( + self, mock_read_namespaced_job, mock_list_namespaced_pod + ): + """ + Checking if the job is in running state in an ideal situation + """ + executor = self.data['executors'][0] + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status, all_pods_running = job.get_status(False) + self.assertEqual(status, 'Running') + + @patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + @patch('kubernetes.client.BatchV1Api.read_namespaced_job') + def test_get_job_status_for_failed_pod( + self, mock_read_namespaced_job, mock_list_namespaced_pod + ): + """ + Checking if the job status is 'running' when the pod failed to start with a + reason other than ImagePullBackOff. + """ + mock_list_namespaced_pod.return_value = ( + list_namespaced_pod_pending_unknown_error() + ) + mock_read_namespaced_job.return_value = read_namespaced_job_pending() + executor = self.data['executors'][0] + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status, all_pods_running = job.get_status(False) + self.assertEqual(status, 'Running') + + @patch('kubernetes.client.CoreV1Api.list_namespaced_pod') + @patch('kubernetes.client.BatchV1Api.read_namespaced_job') + def test_get_job_status_for_wrong_image( + self, mock_read_namespaced_job, mock_list_namespaced_pod + ): + """ + Assuming the image name is wrong, the testcase will check if the job status + returned from the method is "running" during the default pod timeout. 
+ """ + mock_list_namespaced_pod.return_value = ( + list_namespaced_pod_error_ImagePullBackOff(2) + ) + mock_read_namespaced_job.return_value = read_namespaced_job_pending(2) + executor = self.data['executors'][0] + jobname = executor['metadata']['name'] + job = Job(executor, jobname, taskmaster.args.namespace) + status, all_pods_running = job.get_status(False) + self.assertEqual(status, 'Running') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_unit/test_services/test_s3_filer.py b/tests/test_unit/test_services/test_s3_filer.py new file mode 100644 index 00000000..ed34b0ee --- /dev/null +++ b/tests/test_unit/test_services/test_s3_filer.py @@ -0,0 +1,231 @@ +import os +from unittest.mock import mock_open, patch + +import boto3 +import pytest + +# from tesk.services.extract_endpoint import extract_endpoint +from moto import mock_aws + +from tesk.services.filer_s3 import S3Transput + + +@pytest.fixture() +def moto_boto(): + with mock_aws(): + boto3.client('s3', endpoint_url='http://s3.amazonaws.com') + client = boto3.resource('s3', endpoint_url='http://s3.amazonaws.com') + client.create_bucket( + Bucket='tesk', + CreateBucketConfiguration={ + 'LocationConstraint': str(boto3.session.Session().region_name), + }, + ) + client.Bucket('tesk').put_object(Bucket='tesk', Key='folder/file.txt', Body='') + client.Bucket('tesk').put_object( + Bucket='tesk', Key='folder1/folder2/file.txt', Body='' + ) + yield + + +@pytest.mark.parametrize( + 'path, url, ftype,expected', + [ + ( + '/home/user/filer_test/file.txt', + 's3://tesk/folder/file.txt', + 'FILE', + ('tesk', 'folder/file.txt'), + ), + ( + '/home/user/filer_test/file.txt', + 's3://tesk/folder1/folder2', + 'DIRECTORY', + ('tesk', 'folder1/folder2'), + ), + ], +) +def test_get_bucket_name_and_file_path(moto_boto, path, url, ftype, expected): + """ + Check if the bucket name and path is extracted correctly for file and folders + """ + trans = S3Transput(path, url, ftype) + assert trans.get_bucket_name_and_file_path() == expected + + +@pytest.mark.parametrize( + 'path, url, ftype,expected', + [ + ('/home/user/filer_test/file.txt', 's3://tesk/folder/file.txt', 'FILE', 0), + ('/home/user/filer_test/file.txt', 's3://mybucket/folder/file.txt', 'FILE', 1), + ('/home/user/filer_test/', 's3://tesk/folder1/folder2', 'DIRECTORY', 0), + ('/home/user/filer_test/', 's3://mybucket/folder1/folder2', 'DIRECTORY', 1), + ], +) +def test_check_if_bucket_exists(moto_boto, path, url, ftype, expected): + """ + Check if the bucket exists + """ + client = boto3.resource('s3', endpoint_url='http://s3.amazonaws.com') + trans = S3Transput(path, url, ftype) + assert trans.check_if_bucket_exists(client) == expected + + +# @patch('tesk.services.filer.os.makedirs') +# @patch('builtins.open') +# @patch('s3transfer.utils.OSUtils.rename_file') +@pytest.mark.parametrize( + 'path, url, ftype,expected', + [ + ('/home/user/filer_test/file.txt', 's3://tesk/folder/file.txt', 'FILE', 0), + ('/home/user/filer_test/file.txt', 's3://tesk/folder/file_new.txt', 'FILE', 1), + ], +) +def test_s3_download_file(moto_boto, path, url, ftype, expected, fs, caplog): # noqa: PLR0913 + """ + Checking for successful/failed file download from Object storage server + """ + with S3Transput(path, url, ftype) as trans: + assert trans.download_file() == expected + if expected: + assert 'Not Found' in caplog.text + else: + assert os.path.exists(path) is True + + +@patch('tesk.services.filer.os.makedirs') +@patch('builtins.open') +@patch('s3transfer.utils.OSUtils.rename_file') +# 
@patch("tesk.services.filer_s3.extract_endpoint", return_value="http://s3.amazonaws.com") +@pytest.mark.parametrize( + 'path, url, ftype,expected', + [ + ('filer_test/', 's3://tesk/folder1/', 'DIRECTORY', 0), + ('filer_test/', 's3://tesk/folder10/folder20', 'DIRECTORY', 1), + ], +) +def test_s3_download_directory( # noqa: PLR0913 + mock_makedirs, mock_open, mock_rename, path, url, ftype, expected, moto_boto, caplog +): + """ + test case to check directory download from Object storage server + """ + with S3Transput(path, url, ftype) as trans: + assert trans.download_dir() == expected + print(mock_rename.mock_calls) + if expected: + assert 'Invalid file path' in caplog.text + else: + """ + s3 object path s3://tesk/folder1/ will contain 'folder2', checking + if the 'folder2' is present in the download folder. + """ + mock_rename.assert_called_once_with('filer_test/folder2', exist_ok=True) + + +@pytest.mark.parametrize( + 'path, url, ftype,expected', + [ + ('/home/user/filer_test/file.txt', 's3://tesk/folder/file.txt', 'FILE', 0), + ('/home/user/filer_test/file_new.txt', 's3://tesk/folder/file.txt', 'FILE', 1), + ], +) +def test_s3_upload_file(moto_boto, path, url, ftype, expected, fs, caplog): # noqa: PLR0913 + """ + Testing successful/failed file upload to object storage server + """ + fs.create_file('/home/user/filer_test/file.txt') + client = boto3.resource('s3', endpoint_url='http://s3.amazonaws.com') + trans = S3Transput(path, url, ftype) + trans.bucket_obj = client.Bucket(trans.bucket) + assert trans.upload_file() == expected + if expected: + assert 'File upload failed for' in caplog.text + else: + """ + Checking if the file was uploaded, if the object is found, + load() method will return None otherwise an exception will be raised. + """ + assert client.Object('tesk', 'folder/file.txt').load() is None + + +# @pytest.mark.parametrize( +# 'path, url, ftype, expected, caplog_text', +# [ +# ('tests', 's3://tesk/folder1/folder2', 'DIRECTORY', 0, None), +# ( +# '/home/user/filer_test_new/', +# 's3://tesk/folder1/folder2', +# 'DIRECTORY', +# 1, +# 'File upload failed for', +# ), +# ], +# ) +# def test_s3_upload_directory(path, url, ftype, expected, caplog_text): +# """ +# Checking for successful and failed Directory upload to object storage server +# """ +# client = boto3.resource('s3', endpoint_url='http://s3.amazonaws.com') +# trans = S3Transput(path, url, ftype) +# trans.bucket_obj = client.Bucket(trans.bucket) +# assert trans.upload_dir() == expected +# if expected: +# assert 'File upload failed for' in caplog_text +# else: +# """ +# Checking if the file was uploaded, if the object is found +# load() method will return None otherwise an exception will be raised. +# """ +# assert client.Object('tesk', 'folder1/folder2/test_filer.py').load() is None + + +def test_upload_directory_for_unknown_file_type(moto_boto, fs, monkeypatch, caplog): + """ + Checking whether an exception is raised when the object type is neither file + or directory If the exception is raised, an error message will be logged. 
+ """ + monkeypatch.setattr(os.path, 'isfile', lambda _: False) + fs.create_file('/home/user/filer_test/text.txt') + url, ftype = 's3://tesk/folder10/folder20', 'DIRECTORY' + path = '/home/user/filer_test/' + trans = S3Transput(path, url, ftype) + client = boto3.resource('s3', endpoint_url='http://s3.amazonaws.com') + trans.bucket_obj = client.Bucket(trans.bucket) + trans.upload_dir() + assert 'Object is neither file or directory' in caplog.text + + +@patch('tesk.services.filer.os.path.exists', return_value=1) +def test_extract_url_from_config_file(mock_path_exists): + """ + Testing extraction of endpoint url from default file location + """ + read_data = '\n'.join( + ['[default]', 'endpoint_url = http://s3-aws-region.amazonaws.com'] + ) + with patch( + 'builtins.open', mock_open(read_data=read_data), create=True + ) as mock_file: + mock_file.return_value.__iter__.return_value = read_data.splitlines() + # assert extract_endpoint() == "http://s3-aws-region.amazonaws.com" + # mock_file.assert_called_once_with("~/.aws/config", encoding=None) + + +@patch.dict(os.environ, {'AWS_CONFIG_FILE': '~/.aws/config'}) +def test_extract_url_from_environ_variable(): + """ + Testing successful extraction of endpoint url read from file path saved + on environment variable + """ + read_data = '\n'.join( + ['[default]', 'endpoint_url = http://s3-aws-region.amazonaws.com'] + ) + with patch( + 'builtins.open', mock_open(read_data=read_data), create=True + ) as mock_file: + mock_file.return_value.__iter__.return_value = read_data.splitlines() + # assert (extract_endpoint() == "http://s3-aws-region.amazonaws.com") + # mock_file.assert_called_once_with(os.environ["AWS_CONFIG_FILE"], + # encoding=None + # ) diff --git a/tests/test_unit/test_services/test_taskmaster.py b/tests/test_unit/test_services/test_taskmaster.py new file mode 100644 index 00000000..7d610535 --- /dev/null +++ b/tests/test_unit/test_services/test_taskmaster.py @@ -0,0 +1,185 @@ +import json +import os +import unittest +from argparse import Namespace +from unittest.mock import patch + +from kubernetes.client.rest import ApiException + +from tesk.services import taskmaster +from tesk.services.filer_class import Filer +from tesk.services.taskmaster import ( + PVC, + append_mount, + dirname, + generate_mounts, + init_pvc, + newParser, + run_executor, + run_task, +) + + +class TaskmasterTest(unittest.TestCase): + def setUp(self): + input_file_path = os.path.join( + os.path.dirname(__file__), 'resources/inputFile.json' + ) + with open(input_file_path) as file: + self.data = json.load(file) + self.task_name = self.data['executors'][0]['metadata']['labels'][ + 'taskmaster-name' + ] + taskmaster.args = Namespace( + debug=False, + file=None, + filer_version='v0.1.9', + json='json', + namespace='default', + poll_interval=5, + state_file='/tmp/.teskstate', + localKubeConfig=False, + pull_policy_always=False, + filer_name='eu.gcr.io/tes-wes/filer', + pod_timeout=240, + ) + self.filer = Filer( + f'{self.task_name}-filer', + self.data, + taskmaster.args.filer_name, + taskmaster.args.filer_version, + taskmaster.args.pull_policy_always, + ) + self.pvc = PVC( + f'{self.task_name}-pvc', + self.data['resources']['disk_gb'], + taskmaster.args.namespace, + ) + + taskmaster.created_jobs = [] + + @patch('tesk.services.taskmaster.PVC.create') + @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='Complete') + @patch('tesk.services.taskmaster.logger') + def test_pvc_creation(self, mock_logger, mock_run_to_compl, mock_pvc_create): + """ + Testing to 
+
+    @patch('tesk.services.taskmaster.PVC.create')
+    @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='Complete')
+    @patch('tesk.services.taskmaster.logger')
+    def test_pvc_creation(self, mock_logger, mock_run_to_compl, mock_pvc_create):
+        """
+        Check that the PVC was created successfully.
+        """
+        self.assertIsInstance(init_pvc(self.data, self.filer), PVC)
+
+    @patch('kubernetes.client.CoreV1Api.read_namespaced_persistent_volume_claim')
+    @patch(
+        'kubernetes.client.CoreV1Api.create_namespaced_persistent_volume_claim',
+        side_effect=ApiException(status=409, reason='conflict'),
+    )
+    def test_create_pvc_check_for_conflict_exception(
+        self, mock_create_namespaced_pvc, mock_read_namespaced_pvc
+    ):
+        """
+        Check that a 409 conflict on PVC creation is handled by reading the
+        existing PVC instead of raising.
+        """
+        self.pvc.create()
+        mock_read_namespaced_pvc.assert_called_once()
+
+    @patch(
+        'kubernetes.client.CoreV1Api.create_namespaced_persistent_volume_claim',
+        side_effect=ApiException(status=500, reason='Random error'),
+    )
+    def test_create_pvc_check_for_other_exceptions(self, mock_create_namespaced_pvc):
+        """
+        Check that non-conflict API errors on PVC creation are re-raised.
+        """
+        with self.assertRaises(ApiException):
+            self.pvc.create()
+
+    @patch('tesk.services.taskmaster.PVC.delete')
+    @patch('tesk.services.taskmaster.PVC.create')
+    @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='error')
+    @patch('tesk.services.taskmaster.logger')
+    def test_pvc_failure(
+        self, mock_logger, mock_run_to_compl, mock_pvc_create, mock_pvc_delete
+    ):
+        """
+        Check that init_pvc raises SystemExit when the filer job does not
+        complete successfully.
+        """
+        self.assertRaises(SystemExit, init_pvc, self.data, self.filer)
+
+    @patch('tesk.services.taskmaster.PVC.delete')
+    @patch('tesk.services.taskmaster.Job.delete')
+    @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='Error')
+    @patch('tesk.services.taskmaster.logger')
+    def test_run_executor_failure(
+        self, mock_logger, mock_run_to_compl, mock_job_delete, mock_pvc_delete
+    ):
+        """
+        Check that run_executor raises SystemExit when the executor job ends
+        in the 'Error' state.
+        """
+        self.assertRaises(
+            SystemExit,
+            run_executor,
+            self.data['executors'][0],
+            taskmaster.args.namespace,
+        )
+
+    @patch('tesk.services.taskmaster.PVC')
+    @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='Complete')
+    @patch('tesk.services.taskmaster.logger')
+    def test_run_executor_complete(self, mock_logger, mock_run_to_compl, mock_pvc):
+        """
+        Check that run_executor returns None when the executor job completes.
+        """
+        self.assertEqual(
+            run_executor(
+                self.data['executors'][0], taskmaster.args.namespace, mock_pvc
+            ),
+            None,
+        )
+
+    @patch('tesk.services.taskmaster.logger')
+    def test_generate_mount(self, mock_logger):
+        """
+        Check that generate_mounts returns a list of volume mounts.
+        """
+        self.assertIsInstance(generate_mounts(self.data, self.pvc), list)
+
+    @patch('tesk.services.taskmaster.logger')
+    def test_append_mount(self, mock_logger):
+        """
+        Check that append_mount adds the expected volume mount for each input.
+        """
+        volume_mounts = []
+        task_volume_name = 'task-volume'
+        for aninput in self.data['inputs']:
+            dirnm = dirname(aninput)
+            append_mount(volume_mounts, task_volume_name, dirnm, self.pvc)
+        self.assertEqual(
+            volume_mounts,
+            [
+                {
+                    'name': task_volume_name,
+                    'mountPath': '/some/volume',
+                    'subPath': 'dir0',
+                }
+            ],
+        )
+
+    @patch('tesk.services.taskmaster.logger')
+    @patch('tesk.services.taskmaster.PVC.create')
+    @patch('tesk.services.taskmaster.PVC.delete')
+    @patch('tesk.services.taskmaster.Job.run_to_completion', return_value='Complete')
+    def test_run_task(
+        self, mock_run_to_compl, mock_pvc_delete, mock_pvc_create, mock_logger
+    ):
+        """
+        Check that run_task runs a whole task to completion.
+        """
+        run_task(self.data, taskmaster.args.filer_name, taskmaster.args.filer_version)
+
+    def test_localKubeConfig(self):
+        """
+        Check that the --localKubeConfig flag is parsed correctly.
+        """
+        parser = newParser()
+        args = parser.parse_args(['json', '--localKubeConfig'])
+        self.assertEqual(
+            args,
+            Namespace(
+                debug=False,
+                file=None,
+                filer_version='v0.1.9',
+                json='json',
+                namespace='default',
+                poll_interval=5,
+                state_file='/tmp/.teskstate',
+                localKubeConfig=True,
+                pull_policy_always=False,
+                filer_name='eu.gcr.io/tes-wes/filer',
+                pod_timeout=240,
+            ),
+        )
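+
+
+# Usage note: the guard below allows running this module directly via
+# unittest; under the project's test runner the same tests are collected by
+# pytest (a sketch, path as added by this patch):
+#
+#     python -m pytest tests/test_unit/test_services/test_taskmaster.py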
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 31f37a68..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,25 +0,0 @@
-[tox]
-envlist =
-    py{38,39,310,311}-unit,
-    py{38,39,310,311}-lint
-skip_missing_interpreters = True
-
-[gh-actions]
-python =
-    3.7: py37
-    3.8: py38
-    3.9: py39
-    3.10: py310
-
-[testenv]
-passenv = CI, TRAVIS, TRAVIS_*
-deps =
-    py{38,39,310,311}: .[test]
-    py{38,39,310,311}-unit: pytest-cov
-    codecov
-    py{38,39,310,311}-lint: pylint
-commands =
-    py{38,39,310,311}-unit: pytest -v --cov-report xml --cov tesk_core {posargs} tests
-    py{38,39,310,311}-unit: codecov
-    py{38,39,310,311}-lint: python -m pylint --exit-zero -d missing-docstring,line-too-long,C tesk_core
-    py{38,39,310,311}-lint: python -m pylint -E tesk_core