diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..d33ab2a2
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+karabo/_version.py export-subst
diff --git a/.github/workflows/build-dev-image.yml b/.github/workflows/build-dev-image.yml
deleted file mode 100644
index 58026190..00000000
--- a/.github/workflows/build-dev-image.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: Build Dev Image
-
-on:
-  workflow_call:
-    inputs:
-      REGISTRY:
-        required: true
-        type: string
-      IMAGE_NAME:
-        required: true
-        type: string
-    outputs:
-      imagetag:
-        description: "Image tag"
-        value: ${{ jobs.Build_Dev_Image.outputs.tag }}
-
-
-jobs:
-  Build_Dev_Image:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-    outputs:
-      tag: ${{ steps.imagetag.outputs.tag }}
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-
-      - name: Log in to the Container registry
-        uses: docker/login-action@v2
-        with:
-          registry: ${{ inputs.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Create image tag
-        id: imagetag
-        shell: bash -l {0}
-        run: |
-          UUID=$(uuidgen)
-          IMAGE_TAG=dev-"${UUID:0:8}"
-          echo "tag=$IMAGE_TAG" >> $GITHUB_OUTPUT
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@v4
-        with:
-          images: ${{ inputs.REGISTRY }}/${{ github.repository_owner }}/${{ inputs.IMAGE_NAME }}
-          tags: |
-            type=raw, value=${{ steps.imagetag.outputs.tag }}
-
-      - name: Build and push Docker image
-        uses: docker/build-push-action@v4
-        with:
-          file: docker/dev/Dockerfile
-          context: .
-          push: true
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
diff --git a/.github/workflows/build-docker-image.yml b/.github/workflows/build-docker-image.yml
new file mode 100644
index 00000000..a7cc27c7
--- /dev/null
+++ b/.github/workflows/build-docker-image.yml
@@ -0,0 +1,121 @@
+name: Build Docker Image
+
+on:
+  workflow_call:
+    inputs:
+      verstag:
+        type: string
+        required: true
+      latest:
+        type: boolean
+        required: false
+        default: false
+
+  workflow_dispatch:
+    inputs:
+      gitrev:
+        type: string
+        required: false
+        default: ""
+        description: "gitrev: commit-hash (full) | branch | tag, if not the current commit."
+      verstag:
+        type: string
+        required: true
+        description: "version: PEP440 version-tag of Karabo. DON'T trigger a build if you don't know what PEP440 is!"
+      latest:
+        type: boolean
+        required: false
+        default: false
+        description: "tag image as 'latest'?"
+      test:
+        type: boolean
+        required: false
+        default: false
+        description: "create env from environment.yaml instead of conda-wheel?"
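+      # Illustrative sketch: a `workflow_dispatch` run with these inputs could be
+      # triggered from the GitHub CLI, e.g.
+      #   gh workflow run build-docker-image.yml --ref main \
+      #     -f verstag=0.21.0 -f latest=true -f test=false
+      # (the verstag value is hypothetical; it must be PEP440-conformant)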
+ +env: + REGISTRY: ghcr.io + IMG_NAME: karabo-pipeline + +jobs: + build-test-and-push-image: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup metadata img-name & img-tag + shell: bash -l {0} + run: | + if [[ ${{ github.event_name }} == "workflow_call" ]]; then + echo "gitrev=$GITHUB_SHA" >> "$GITHUB_ENV" + echo "build=user" >> "$GITHUB_ENV" + elif [[ ${{ github.event_name }} == "workflow_dispatch" ]]; then + if [[ "${{ inputs.gitrev }}" != "" ]]; then + echo "gitrev=${{ inputs.gitrev }}" >> "$GITHUB_ENV" + else + echo "gitrev=$GITHUB_SHA" >> "$GITHUB_ENV" + fi + if [[ "${{ inputs.test }}" == 'true' ]]; then + echo "build=test" >> "$GITHUB_ENV" + else + echo "build=user" >> "$GITHUB_ENV" + fi + else + echo "Invalid github-event!" + exit 1 + fi + echo "latest=${{ inputs.latest }}" >> "$GITHUB_ENV" + echo "version=${{ inputs.verstag }}" >> "$GITHUB_ENV" + REPO_OWNER=${{ github.repository_owner }} + echo "IMG_ADDR=${{ env.REGISTRY }}/${REPO_OWNER@L}/${{ env.IMG_NAME }}" >> "$GITHUB_ENV" + DEV_STR="dev" + if [[ "${{ inputs.verstag }}" == *"$DEV_STR"* ]] && [[ "${{ inputs.latest }}" == 'true' ]]; then + echo "Invalid configuration of workflow-inputs!" + exit 1 + fi + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ github.repository }} + tags: | + type=raw, enable=${{ env.latest }}, value=latest + type=raw, value=${{ env.version }} + + - name: Docker build + shell: bash -l {0} + run: | + docker build \ + --build-arg GIT_REV=${{ env.gitrev }} \ + --build-arg BUILD=${{ env.build }} \ + --build-arg KARABO_VERSION=${{ env.version }} \ + -f Dockerfile \ + -t ${{ env.IMG_ADDR }}:${{ env.version }} \ + . 
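+          # Note: the `${REPO_OWNER@L}` expansion used above lowercases the owner
+          # name, since image references in container registries must be lowercase
+          # while `github.repository_owner` may contain capitals (e.g. `i4Ds`).
+          # A minimal sketch of the behavior:
+          #   REPO_OWNER="i4Ds"
+          #   echo "${REPO_OWNER@L}"  # i4ds (bash >= 5.1)
+          #   echo "${REPO_OWNER,,}"  # i4ds (equivalent spelling, bash >= 4.0)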
+ if [[ ${{ env.latest }} == 'true' ]]; then + docker tag ${{ env.IMG_ADDR }}:${{ env.version }} ${{ env.IMG_ADDR }}:latest + fi + + - name: Test image + run: | # karabo-sitepackage-location used for mpirun instead of --pyargs because --only-mpi is a custom-flag of karabo which lives in the site-packages + docker run --rm ${{ env.IMG_ADDR }}:${{ env.version }} bash -c \ + 'export IS_GITHUB_RUNNER=true RUN_GPU_TESTS=false RUN_NOTEBOOK_TESTS=false; pytest --pyargs karabo.test; SITE_PKGS=$(pip show karabo-pipeline | grep Location | sed "s/.*\(\/opt\/conda.*\).*/\1/"); mpirun -n 2 pytest --only-mpi $SITE_PKGS/karabo/test' + - name: Docker push + shell: bash -l {0} + run: | + docker push --all-tags ${{ env.IMG_ADDR }} \ No newline at end of file diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index c4553e50..6549f8e5 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -20,7 +20,7 @@ jobs: run: | conda env create -n karabo_dev_env -f environment.yaml conda activate karabo_dev_env - pip install -r requirements.txt + pip install ".[dev]" - name: Build Docs shell: bash -l {0} run: | diff --git a/.github/workflows/build-user-image.yml b/.github/workflows/build-user-image.yml deleted file mode 100644 index 97e38260..00000000 --- a/.github/workflows/build-user-image.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: Build User Image - -on: - workflow_run: - workflows: ["Test User Package"] - types: - - completed - - workflow_dispatch: - -env: - REGISTRY: ghcr.io - -jobs: - build-and-push-image: - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} - permissions: - contents: read - packages: write - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - # Aussumes that current repo-tag matches karabo:latest on anaconda.org - - name: Get Previous tag - uses: actions-ecosystem/action-get-latest-tag@v1 - id: get-latest-tag - - - name: Log in to the Container registry - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@v4 - with: - images: ${{ env.REGISTRY }}/${{ github.repository }} - tags: | - type=raw, value=latest - type=pep440, pattern={{version}}, value=${{ steps.get-latest-tag.outputs.tag }} - - - name: Build and push Docker image - uses: docker/build-push-action@v4 - with: - file: docker/user/Dockerfile - context: . - push: true - build-args: KARABO_TAG=${{ steps.get-latest-tag.outputs.tag }} - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} diff --git a/.github/workflows/conda-build.yml b/.github/workflows/conda-build.yml index 0acc197e..7877603d 100644 --- a/.github/workflows/conda-build.yml +++ b/.github/workflows/conda-build.yml @@ -4,37 +4,127 @@ on: release: types: [published] workflow_dispatch: + inputs: + buildNumber: + type: string + required: true + description: "build-nr: anaconda.org build-nr (DON'T trigger build if you don't know how to set it)" + version: + type: string + required: true + description: "version: PEP440 package-version (DON'T trigger build if you don't know what PEP440 is)" + buildDocker: + type: boolean + required: false + default: false + description: "build docker-img if conda-build & tests succeeded?" 
+      latestDocker:
+        type: boolean
+        required: false
+        default: false
+        description: "tag docker-img as latest (if `buildDocker` is enabled)"
+  workflow_call:
+    inputs:
+      buildNumber:
+        type: string
+        required: true
+      version:
+        type: string
+        required: true
+      buildDocker:
+        type: boolean
+        required: false
+        default: false
+      latestDocker:
+        type: boolean
+        required: false
+        default: false
 
 jobs:
   conda-build:
     runs-on: ubuntu-latest
     container: ghcr.io/i4ds/mambabuild-docker:latest
-
+    outputs:
+      karabo-version: ${{ steps.bcs.outputs.karabo_version }}
+      build-docker: ${{ steps.bcs.outputs.build_docker == 'true' }}
+      latest-docker: ${{ steps.bcs.outputs.latest_docker == 'true' }}
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Get Previous tag
         uses: actions-ecosystem/action-get-latest-tag@v1
        id: get-latest-tag
 
-      - name: Install conda build
+      - name: Build Conda
+        id: bcs
         shell: bash -l {0}
-        id: channels
         run: |
-          KARABO_TAG=${{ steps.get-latest-tag.outputs.tag }}
+          DEV_STR="dev"
+          if [[ ${{ github.event_name }} == 'release' ]]
+          then
+            echo "Conda build from release"
+            build="0"
+            KARABO_TAG=${{ steps.get-latest-tag.outputs.tag }}
+            if [[ ${KARABO_TAG:0:1} == "v" ]]; then
+              KARABO_VERSION="${KARABO_TAG:1}"
+            else
+              echo "invalid karabo-tag: has no leading v"
+              exit 1
+            fi
+            BUILD_DOCKER=true
+            LATEST_DOCKER=true
+          elif [[ ${{ github.event_name }} == 'workflow_dispatch' ]] || [[ ${{ github.event_name }} == 'workflow_call' ]]; then
+            echo "Conda build from dispatch | call"
+            build=${{ inputs.buildNumber }}
+            BUILD_DOCKER=${{ inputs.buildDocker }}
+            LATEST_DOCKER=${{ inputs.latestDocker }}
+            KARABO_VERSION=${{ inputs.version }}
+            conda install -c conda-forge python versioneer
+            VERSIONEER_VERSION=$(/opt/conda/bin/python -c 'import versioneer; print(versioneer.get_version())')
+            if [[ "$VERSIONEER_VERSION" == *"+"* ]]; then
+              if [[ "$KARABO_VERSION" != *"$DEV_STR"* ]]; then
+                echo "A dirty commit-version needs to be a PEP440-conformant dev-version"
+                exit 1
+              fi
+            else
+              if [[ "$KARABO_VERSION" != "$VERSIONEER_VERSION" ]]; then
+                echo "Provided version $KARABO_VERSION doesn't match the actual version $VERSIONEER_VERSION"
+                exit 1
+              fi
+            fi
+          else
+            echo "Unspecified github-event occurred!"
+            exit 1
+          fi
+          if [[ "$KARABO_VERSION" == *"$DEV_STR"* ]]; then
+            if [[ $LATEST_DOCKER == 'true' ]]; then
+              echo "A dev-build docker image cannot be tagged as latest!"
+              exit 1
+            fi
+            build="$(($build + 1000))"
+          fi
+
+          export KARABO_VERSION=$KARABO_VERSION build=$build
+          echo "karabo_version=$KARABO_VERSION" >> $GITHUB_OUTPUT
+          echo "build_docker=$BUILD_DOCKER" >> $GITHUB_OUTPUT
+          echo "latest_docker=$LATEST_DOCKER" >> $GITHUB_OUTPUT
+
           conda config --append channels i4ds
           conda config --append channels nvidia/label/cuda-11.7.0
           conda config --append channels conda-forge
-
-      - name: Build Conda
-        shell: bash -l {0}
-        run: |
+
           cd conda
-          KARABO_TAG=${{ steps.get-latest-tag.outputs.tag }}
-          export KARABO_VERSION="${KARABO_TAG:1}"
-          echo "__version__ = \"${KARABO_VERSION}\"" > karabo/version.py
           conda mambabuild .
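+          # Note on the `build + 1000` offset above — a minimal sketch with
+          # illustrative values (presumably this keeps dev builds in a separate,
+          # recognizable build-number range on anaconda.org):
+          #   build=2; KARABO_VERSION="0.21.0.dev3"
+          #   [[ "$KARABO_VERSION" == *"dev"* ]] && build=$((build + 1000))
+          #   echo "$build"  # prints: 1002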
       - name: Publish to Conda
         shell: bash -l {0}
         run: |
           conda activate base
           anaconda -t ${{ secrets.ANACONDA_SECRET }} upload /opt/conda/conda-bld/linux-64/karabo-pipeline-*.tar.bz2 --force
+
+  test-build:
+    needs: conda-build
+    uses: ./.github/workflows/test-user-package.yml
+    with:
+      version: ${{ needs.conda-build.outputs.karabo-version }}
+      buildDocker: ${{ needs.conda-build.outputs.build-docker == 'true' }}
+      latestDocker: ${{ needs.conda-build.outputs.latest-docker == 'true' }}
diff --git a/.github/workflows/dev-workflow.yml b/.github/workflows/dev-workflow.yml
index bae2b17e..bac9e949 100644
--- a/.github/workflows/dev-workflow.yml
+++ b/.github/workflows/dev-workflow.yml
@@ -11,4 +11,4 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
\ No newline at end of file
+        uses: actions/checkout@v4
diff --git a/.github/workflows/purge-image.yml b/.github/workflows/purge-image.yml
deleted file mode 100644
index 15af751d..00000000
--- a/.github/workflows/purge-image.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Purge Image
-
-on:
-  workflow_call:
-    inputs:
-      IMAGE_NAME:
-        required: true
-        type: string
-      IMAGE_TAG:
-        required: true
-        type: string
-
-jobs:
-  Purge_Image:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Purge image
-        uses: bots-house/ghcr-delete-image-action@v1.1.0
-        with:
-          owner: ${{ github.repository_owner }}
-          name: ${{ inputs.IMAGE_NAME }}
-          token: ${{ secrets.GITHUB_TOKEN }}
-          tag: ${{ inputs.IMAGE_TAG }}
diff --git a/.github/workflows/test-user-package.yml b/.github/workflows/test-user-package.yml
index 103afb95..47a90ac0 100644
--- a/.github/workflows/test-user-package.yml
+++ b/.github/workflows/test-user-package.yml
@@ -1,35 +1,75 @@
 name: Test User Package
 
 on:
-  workflow_run:
-    workflows: ["Conda Build"]
-    types:
-      - completed
+  workflow_dispatch:
+    inputs:
+      version:
+        type: string
+        required: true
+        description: "version: PEP440 package-version"
+      buildDocker:
+        type: boolean
+        required: false
+        default: false
+        description: "build docker-img if tests succeeded? DON'T enable if you don't know what PEP440 is!"
+      latestDocker:
+        type: boolean
+        required: false
+        default: false
+        description: "tag docker-img as latest (if `buildDocker` is enabled)"
+  workflow_call:
+    inputs:
+      version:
+        type: string
+        required: true
+      buildDocker:
+        type: boolean
+        required: false
+        default: false
+      latestDocker:
+        type: boolean
+        required: false
+        default: false
 
 jobs:
-  conda-build:
+  test-conda-build:
     runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'success' }}
     steps:
       - name: Install Conda
         uses: conda-incubator/setup-miniconda@v2
-        with:
-          auto-update-conda: true
 
       - name: Set variables, Install Package & Dependencies
         shell: bash -l {0}
         run: |
+          KARABO_VERSION=${{ inputs.version }}
+          DEV_STR="dev"
+          if [[ "$KARABO_VERSION" == *"$DEV_STR"* ]] && [[ ${{ inputs.latestDocker }} == 'true' ]]; then
+            echo "Invalid configuration of workflow-inputs!"
+ exit 1 + fi export IS_GITHUB_RUNNER=true export RUN_NOTEBOOK_TESTS=false conda install -y -n base conda-libmamba-solver conda config --set solver libmamba + conda config --env --set channel_priority true conda create -y -n karabo-env python=3.9 conda activate karabo-env - conda install -y -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline=${{ env.KARABO_VERSION }} + conda install -y -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline=$KARABO_VERSION pip install ipykernel python -m ipykernel install --user --name python3 + - name: Test Package shell: bash -l {0} run: | conda activate karabo-env pytest --pyargs karabo.test + + build-docker: + needs: test-conda-build + if: ${{ inputs.buildDocker }} + uses: ./.github/workflows/build-docker-image.yml + with: + verstag: ${{ inputs.version }} + latest: ${{ inputs.latestDocker }} + + diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 811e2ada..732901a3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,7 +4,7 @@ on: schedule: # Every night at 5am - cron: '0 5 * * *' pull_request: - types: [opened, synchronize, reopened] + types: [opened, synchronize, reopened, ready_for_review] workflow_dispatch: push: branches: @@ -27,8 +27,7 @@ jobs: conda create -y -n test_karabo python=3.9 conda activate test_karabo mamba env update --file environment.yaml - pip install -r requirements.txt - pip install ipykernel + pip install -e ".[dev]" python -m ipykernel install --user --name python3 - name: Test Dev-Tools shell: bash -l {0} @@ -42,17 +41,13 @@ jobs: shell: bash -l {0} run: | conda activate test_karabo - export IS_GITHUB_RUNNER=true - export RUN_GPU_TESTS=false - export RUN_NOTEBOOK_TESTS=true - pip install . --no-deps + export IS_GITHUB_RUNNER=true RUN_GPU_TESTS=false RUN_NOTEBOOK_TESTS=true + mpirun -n 2 pytest --only-mpi pytest --cov=./ --cov-report=xml - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - directory: ./coverage/reports/ + with: + token: ${{ secrets.CODECOV_TOKEN }} env_vars: OS,PYTHON - files: ./coverage1.xml,./coverage2.xml,!./cache fail_ci_if_error: false name: codecov-karabo diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f2990e8c..8832ebc6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + rev: 6.1.0 hooks: - id: flake8 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..beee8d30 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,53 @@ +FROM nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 +# build: user|test, KARABO_VERSION: version to install from anaconda.org in case build=user: `{major}.{minor}.{patch}` (no leading 'v') +ARG GIT_REV="main" BUILD="user" KARABO_VERSION="" +RUN apt-get update && apt-get install -y git gcc gfortran libarchive13 wget curl nano +ENV LD_LIBRARY_PATH="/usr/local/cuda/compat:/usr/local/cuda/lib64" \ + PATH="/opt/conda/bin:${PATH}" \ + IS_DOCKER_CONTAINER="true" +RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-py39_23.5.0-3-Linux-x86_64.sh -O ~/miniconda.sh && \ + /bin/bash ~/miniconda.sh -b -p /opt/conda && \ + conda init && \ + rm ~/miniconda.sh +SHELL ["conda", "run", "-n", "base", "/bin/bash", "-c"] +RUN conda install -n base conda-libmamba-solver && \ + conda config --set solver libmamba && \ + conda create -y -n karabo +# change venv because libmamba solver lives in base and any serious 
environment update could f*** up the linked deps like `libarchive.so` +SHELL ["conda", "run", "-n", "karabo", "/bin/bash", "-c"] +RUN mkdir Karabo-Pipeline && \ + cd Karabo-Pipeline && \ + git init && \ + git remote add origin https://github.com/i4Ds/Karabo-Pipeline.git && \ + git fetch origin ${GIT_REV} && \ + git reset --hard ${GIT_REV} && \ + if [ "$BUILD" = "user" ] ; then \ + conda install -y -c i4ds -c conda-forge -c "nvidia/label/cuda-11.7.1" karabo-pipeline="$KARABO_VERSION"; \ + elif [ "$BUILD" = "test" ] ; then \ + conda env update -f="environment.yaml"; \ + pip install --no-deps "."; \ + else \ + exit 1; \ + fi && \ + mkdir ~/karabo && \ + cp -r "karabo/examples" ~/karabo/examples/ && \ + cd ".." && \ + rm -rf "Karabo-Pipeline/" && \ + pip install jupyterlab ipykernel pytest && \ + python -m ipykernel install --user --name=karabo + +# set bash-env accordingly for interactive and non-interactive shells for docker & singularity +RUN mkdir opt/etc && \ + echo "conda activate karabo" >> ~/.bashrc && \ + cat ~/.bashrc | sed -n '/conda initialize/,/conda activate/p' > /opt/etc/conda_init_script +ENV BASH_ENV=/opt/etc/conda_init_script +RUN echo "source $BASH_ENV" >> /etc/bash.bashrc && \ + echo "source $BASH_ENV" >> /etc/profile + +# link packaged mpich-version with ldconfig to enable mpi-hook (it also links everything else, but shouldn't be an issue) +RUN echo "$CONDA_PREFIX"/lib > /etc/ld.so.conf.d/conda.conf && \ + ldconfig + +# Additional setup +WORKDIR /workspace +ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "karabo"] \ No newline at end of file diff --git a/README.md b/README.md index 3e939e30..17d3d900 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,16 @@ ![Alt text](doc/src/_static/logo.png?raw=true "Karabo") =========== -[![Test Software](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/test.yml/badge.svg)](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/test.yml) -[![Build Docs](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/build-docs.yml/badge.svg)](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/build-docs.yml) -[![Build Conda](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/conda-build.yml/badge.svg)](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/conda-build.yml) -[![Build Docker User Image](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/build-user-image.yml/badge.svg)](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/build-user-image.yml) +| | | +| --- | --- | +| Testing | [![CI - Test](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/test.yml/badge.svg)](https://github.com/i4Ds/Karabo-Pipeline/actions/workflows/test.yml) [![codecov](https://codecov.io/gh/i4Ds/Karabo-Pipeline/graph/badge.svg?token=WU4IC2MOXV)](https://codecov.io/gh/i4Ds/Karabo-Pipeline) | +| Package | [![Conda Latest Release](https://anaconda.org/i4ds/karabo-pipeline/badges/version.svg)](https://anaconda.org/i4ds/karabo-pipeline) [![Conda Downloads](https://anaconda.org/i4ds/karabo-pipeline/badges/downloads.svg)](https://anaconda.org/i4ds/karabo-pipeline) | +| Meta | [![License - BSD 3-Clause](https://anaconda.org/i4ds/karabo-pipeline/badges/license.svg)](https://github.com/i4Ds/Karabo-Pipeline/blob/main/LICENSE) | [Documentation](https://i4ds.github.io/Karabo-Pipeline/) | [Example](karabo/examples/source_detection.ipynb) | [Contributors](CONTRIBUTORS.md) -Karabo is a radio astronomy software distribution for validation and benchmarking of radio telescopes and algorithms. 
It can be used to simulate the behaviour of the [Square Kilometer Array](https://www.skatelescope.org/the-ska-project/). Our goal is to make installation and ramp-up easier for researchers and developers. +Karabo is a radio astronomy software distribution for validation and benchmarking of radio telescopes and algorithms. It can be used to simulate the behaviour of the [Square Kilometer Array](https://www.skatelescope.org/the-ska-project/) or other supported telescopes. Our goal is to make installation and ramp-up easier for researchers and developers. Karabo includes and relies on OSKAR, RASCIL, PyBDSF, [MIGHTEE](https://arxiv.org/abs/2211.05741), [GLEAM](https://www.mwatelescope.org/science/galactic-science/gleam/), Aratmospy, Bluebild, Eidos, Dask, Tools21cm, katbeam plus configuration of 20 well-known telescopes. Karabo can simulate instrument behavior and atmospheric effects, run imaging algorithms, and evaluate results. @@ -20,12 +21,12 @@ You can use Karabo to build your own data processing pipelines by combinding exi Installation ------------ -The software can be installed on Linux, Windows or Windows WSL. +The software can be installed & used on Linux or Windows WSL. Please see our [documentation](https://i4ds.github.io/Karabo-Pipeline/installation_user.html) for the full installation instructions. -We also offer a [Docker](https://i4ds.github.io/Karabo-Pipeline/container.html) version. +We also offer [Docker](https://i4ds.github.io/Karabo-Pipeline/container.html) images. Contribute to Karabo --------------------- diff --git a/codecov.yml b/codecov.yml index b6700aae..d863dd51 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,5 +1,8 @@ coverage: status: project: + default: + target: 0% # sets required coverage for a success + patch: default: target: 0% # sets required coverage for a success \ No newline at end of file diff --git a/conda/build.sh b/conda/build.sh index 281491fa..161aa1e1 100644 --- a/conda/build.sh +++ b/conda/build.sh @@ -1 +1,2 @@ -$PYTHON -m pip install . \ No newline at end of file + +$PYTHON -m pip install --no-deps . 
\ No newline at end of file
diff --git a/conda/conda_build_config.yaml b/conda/conda_build_config.yaml
new file mode 100644
index 00000000..cb6373a5
--- /dev/null
+++ b/conda/conda_build_config.yaml
@@ -0,0 +1,2 @@
+python:
+  - 3.9
diff --git a/conda/meta.yaml b/conda/meta.yaml
index 8638f902..408144f4 100644
--- a/conda/meta.yaml
+++ b/conda/meta.yaml
@@ -8,48 +8,56 @@ source:
   path: ../
 
 build:
-  string: {{ KARABO_VERSION }}
+  number: {{ build }}
+  string: py{{ py }}h{{ PKG_HASH }}_{{ build }}
 
 requirements:
   build:
-    - python=3.9
-    - pip
+    - python
+    - numpy
   host:
-    - python=3.9
+    - python {{ python }}
     - pip
-  run:
-    - python=3.9
-    - aratmospy=1.0.0
+    - numpy
+    - tomli
+    - versioneer
+  run: # constraint-notes: see `environment.yaml`
+    - python {{ python }}
+    - aratmospy =1.0.0=*_0
     - astropy
-    - bdsf
-    - bluebild
+    - bdsf =1.10.2=*_0
+    - bluebild =0.1.0=*_0
    - cuda-cudart
-    - dask=2022.12.1
+    - dask =2022.12.1
+    - dask-mpi
     - distributed
-    - eidos=1.1.0
+    - eidos =1.1.0=*_0
     - healpy
-    - h5py
+    - h5py =*=mpi_mpich*
     - ipython
-    - katbeam=0.1.0
+    - katbeam =0.1.0=*_0
     - libcufft
     - matplotlib
+    - montagepy =6.0.0=*_0
+    - mpi4py
+    - mpich
     - nbformat
     - nbconvert
-    - numpy=1.22
-    - oskarpy=2.8.3
+    - {{ pin_compatible('numpy') }}
+    - oskarpy =2.8.3=*_0
     - pandas
     - psutil
-    - rascil=1.0.0
-    - reproject>=0.9,<=10.0
+    - rascil =1.0.0=*_0
+    - reproject >=0.9,<=10.0
     - requests
-    - scipy=1.10.1
-    - ska-gridder-nifty-cuda=0.3.0
-    - ska-sdp-datamodels=0.1.3
-    - ska-sdp-func-python=0.1.4
-    - tools21cm=2.0.2
-    - xarray
-    - montagepy=6.0.0
-
+    - scipy >=1.10.1
+    - ska-gridder-nifty-cuda =0.3.0=*_0
+    - ska-sdp-datamodels =0.1.3=*_0
+    - ska-sdp-func-python =0.1.4=*_0
+    - tools21cm =2.0.2=*_0
+    - xarray >=2022.10.0
+    # transitive dependencies which we need to reference to get mpi-wheels
+    - conda-forge::fftw =*=mpi_mpich*
 
 test:
diff --git a/doc/src/container.md b/doc/src/container.md
index 73f589e6..a8e6faf1 100644
--- a/doc/src/container.md
+++ b/doc/src/container.md
@@ -2,37 +2,70 @@
 
 ## Docker Images
 
-We provide for the karabo-pipeline [Docker images](https://www.docker.com/resources/what-container/#:~:text=A%20Docker%20container%20image%20is,tools%2C%20system%20libraries%20and%20settings.) which are hosted by the [ghcr.io](https://github.com/features/packages) registry. An overview of all available images is [here](https://github.com/i4ds/Karabo-Pipeline/pkgs/container/karabo-pipeline), if a specific version and not simply `latest` is desired. Starting from `karabo@v0.15.0`, all versions should be available. Provided you have docker, the image can be installed as follows:
+We provide [Docker images](https://www.docker.com/resources/what-container/#:~:text=A%20Docker%20container%20image%20is,tools%2C%20system%20libraries%20and%20settings.) for the Karabo-pipeline, hosted on the [ghcr.io](https://github.com/features/packages) registry. An overview of all available images can be found [here](https://github.com/i4ds/Karabo-Pipeline/pkgs/container/karabo-pipeline) if a specific version rather than simply `latest` is desired. Starting from `karabo@v0.15.0`, all versions should be available. Provided you have Docker, the image can be pulled as follows:
 
 ```shell
 docker pull ghcr.io/i4ds/karabo-pipeline:latest
 ```
 
-Docker images have the advantage that the packages needed for karabo-pipeline are already pre-installed and you can usually run them on other operating systems. In addition, Docker images can easily create singularity containers (see [Singularity Container](#singularity-container)), which are often used in HPC clusters.
+Docker images have the advantage that the packages needed for the Karabo-pipeline are already pre-installed and you can usually run them on other operating systems. So if the dependency resolution of an older Karabo installation is no longer up to date, you don't have to worry with a Docker image, as the installation process has already been performed. In addition, Docker images can easily be transformed into other containers like Singularity or Sarus, which are often used in HPC-clusters.
 
-## Singularity Container
+## Launch a Docker Container
+
+The possibilities Docker offers are far too extensive to describe here; we refer to the official [Docker reference](https://docs.docker.com/reference/) for this. We only show a minimal example of how Docker could be used, so that you can use e.g. a [Jupyter Notebook](https://jupyter.org/) with sample code and an existing Karabo environment.
+
+```shell
+docker run -it -v <host-dir>:<container-dir> -p 8888:8888 ghcr.io/i4ds/karabo-pipeline
+```
+
+This starts the Docker container of the image interactively, where port 8888 is forwarded and an editable directory is mounted. After that, you could do whatever you want. For demonstration purposes, we start the jupyter-server in the container with the following command:
+
+```shell
+jupyter lab --ip 0.0.0.0 --no-browser --port=8888
+```
+
+This will start a server on the same port as forwarded. Then copy the URL given at the bottom, replace `hostname` with `localhost`, and open it in a browser.
+
+## Singularity Containers
+
+**Note:** Currently, building a Singularity container from our docker-registry and running Karabo within it doesn't work properly. This is work in progress. Therefore, the following docs regarding Singularity are not yet relevant.
 
 Singularity containers are often standard on HPC clusters, which do not require special permissions (unlike Docker).
 
-We do not provide ready-made [Singularity containers](https://sylabs.io/). However, they can be easily created from Docker images with the following command (may take a while):
+We do not provide ready-made [Singularity containers](https://sylabs.io/). However, they can be easily created from Docker images with the following command (may take a while). If the module isn't available, you may first have to load it with `module load singularity`:
 
 ```shell
-singularity pull https://ghcr.io/i4ds/karabo-pipeline:latest
+singularity pull docker://ghcr.io/i4ds/karabo-pipeline
 ```
 
-How to use Singularity containers can be seen in the [Singularity documentation](https://docs.sylabs.io/guides/3.1/user-guide/cli.html).
+This creates a `.sif` file which acts as a Singularity image and can be used to launch your application. How to use Singularity containers (e.g. mount directories or enable gpu-support) can be seen in the [Singularity documentation](https://docs.sylabs.io/guides/3.1/user-guide/cli.html). Be aware that Singularity mounts the home-directory by default if you start a container from your home-directory, which may not be desirable (e.g. `conda init` is done through the .bashrc of the image). Be sure to disable this behavior by setting the `--no-home` flag when starting a container.
+
+## Sarus Containers
+
+On CSCS, it is recommended to use [Sarus containers](https://sarus.readthedocs.io/en/stable/index.html) (see the CSCS [Sarus guide](https://user.cscs.ch/tools/containers/sarus/)). Sarus commands are similar to Docker's and Singularity's. It is recommended to create a Sarus image in an interactive SLURM job using `srun --pty bash`.
-## Docker Container
+**Setup**
 
-What is possible with Docker is far too extensive to describe here. We refer to the official [Docker reference](https://docs.docker.com/reference/) for this. We only show here a minimal example of how Docker could be used, so you can use a [Jupyter Notebook](https://jupyter.org/) with sample code and working Karabo environment.
+You should load `daint-gpu` or `daint-mc` before loading the `sarus` modulefile:
 
 ```shell
-docker run -it --rm -p 8888:8888 ghcr.io/i4ds/karabo-pipeline:latest
+module load daint-gpu  # or daint-mc
+module load sarus
 ```
 
-This starts the Docker container of the image interactively, where we have port 8888 forwarded here. After that, we start the jupyter service in the container with the following command:
+Then you can pull a Docker image to a Sarus image as follows:
 
 ```shell
-jupyter lab --ip 0.0.0.0 --no-browser --port=8888 --allow-root
+sarus pull ghcr.io/i4ds/karabo-pipeline
 ```
 
-This will start the server on the same port we forwarded. Then copy the url which is given at the bottom and replace `hostname` with `localhost` and open it in the browser.
\ No newline at end of file
+**MPI (MPICH) Support**
+
+Karabo >= `v0.22.0` supports [MPICH](https://www.mpich.org/)-based MPI processes that enable multi-node workflows on CSCS (or any other system which supports MPICH MPI). Note that on CSCS, MPI runs are launched through SLURM (not through mpirun or mpiexec) by setting the `-n` (total MPI tasks) and `-N` (MPI tasks per node) options when launching a job, so you have to set them according to your task.
+
+```shell
+srun -N2 -n2 -C gpu sarus run --mount=type=bind,source=<path-to-your-repo>,destination=/workspace ghcr.io/i4ds/karabo-pipeline
+```
+
+Here, an MPI application with 2 processes is launched with your repository mounted in the container (/workspace is the default working-directory). Make sure that you know how many processes are reasonable to run, because it can rapidly sum up to a large number of node-hours.
+
+Sarus containers offer a native MPI hook to utilize the MPI of CSCS at optimized performance, which can be enabled by simply adding the `--mpi` flag to the `sarus run` command. There will probably be a warning about the minor version of some libmpi files. However, according to [sarus abi-compatibility](https://sarus.readthedocs.io/en/stable/user/abi_compatibility.html), this shouldn't be an issue.
\ No newline at end of file
diff --git a/doc/src/development.md b/doc/src/development.md
index dfeb422e..33f8e49e 100644
--- a/doc/src/development.md
+++ b/doc/src/development.md
@@ -28,31 +28,23 @@ Then create a local development environment with the provided `environment.yaml`
 conda env create -n <your-env-name> -f environment.yaml
 ```
 
-Then install the development dependencies using `requirements.txt`.
+Then install Karabo as a package along with the corresponding dev-dependencies.
 
 ```shell
 conda activate <your-env-name>
-pip install -r requirements.txt
+pip install -e ".[dev]"
 ```
 
-NOTE: With these commands, only the dependencies but not the current version of karabo will be installed into a conda environment. To tell Python to treat the reposity as a package, run the following (note that using `conda develop` is not recommended, see [this issue](https://github.com/conda/conda-build/issues/1992)):
+Afterwards, activating your dev-tools in your IDE and shell is recommended. You have to set up your IDE of choice yourself.
+For the shell setup, we recommend doing the following in the repo-root:
 
 ```shell
-pip install -e .
-```
-
-(Optional) For your developer experience, the following link might be useful: [Setup Python Interpreter in PyCharm](https://www.jetbrains.com/help/pycharm/conda-support-creating-conda-virtual-environment.html).
-
-You are done! If everything worked as expected, you can start an interactive Python session and test the import:
-
-```shell
-python
->>> import karabo
+pre-commit install
+podmena add local
 ```
 
 ## Formatting
 
-To increase the readability of the code and to better detect potential errors already during development, a number of tools are used. These tools must first be installed in the virtual environment using `pip install -r requirements.txt`. If possible use the versions defined in `requirements.txt`, so that all developers work with the same versions. The configurations of the tools are handled in `setup.cfg`. If changes to the configurations are desired, the team members should agree to this (e.g. via a meeting).
+To increase the readability of the code and to better detect potential errors already during development, a number of tools are used. The configurations of the tools are handled in `setup.cfg` or `pyproject.toml`. If changes to the configurations are desired, the team members should agree to this (e.g. via a meeting).
 
 It is possible that certain tools complain about something that is not easy or even impossible to fix. ONLY then, there are options to ignore certain lines of code or even whole files for the checker. E.g. `# noqa` ignores inline flake8 complaints. But be careful not to accidentally ignore the whole file (e.g. with `# flake8: noqa`). Please refer to the documentation of the respective tool to learn how to ignore the errors.
@@ -156,10 +148,9 @@ So an md file can reference like ``[some file](path/to/some/file)``.
 
 When adding new submodules or modules. You need to update the modules.rst file accordingly and add new files similiar to the karabo.simulation.rst. To enable the automatic generation of the documentation via the python docstrings. There is also the command ```sphinx-apidoc``` from sphinx (our doc engine), that can automate this.
 
-If you want to work this sphinx locally on your machine, for example to use this sphinx-apidoc command. Thus, use the following commands to generate the documentation:
+If you want to work with sphinx locally on your machine, for example to use the sphinx-apidoc command, then, assuming you've installed the dev-dependencies from pyproject.toml, use the following command to generate the documentation:
 
 ```shell
-pip install -r requirements.txt
 make html
 ```
 
@@ -167,20 +158,17 @@
 We use the ` pytest` python package ([pytest docs](https://docs.pytest.org/)), with a few imports from the `unittest` package ([unittest docs](https://docs.python.org/3/library/unittest.html)). To add a new test simply go to the `karabo/test` folder.
 
-Add tests for when you write some sort of new code that you feel like might break.
-
-TIP:
-If you validate your code manually, consider just writing a method in a test class instead of opening a jupyter notebook and writing a new cell or a terminal window where you would execute the code you want to test.
+Add tests for when you write some sort of new code that you feel like might break. Be aware that tests utilize the functionality of the testing-framework and therefore might not behave exactly the same as if you would execute the code just as a plain function.
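+
+To mirror what the CI does (see `test.yml`), a sketch of running the test-suite locally from the repo-root, including the MPI-marked tests (`--only-mpi` is Karabo's custom pytest flag; the process count of 2 is illustrative):
+
+```shell
+pytest karabo/test
+mpirun -n 2 pytest --only-mpi karabo/test
+```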
+The most important file to consider is `conftest.py`, which could impact the other tests.
 
 ## Create a Release
 
-When everything is merged which should be merged, a new Release can be deployed on `conda-forge` as following:
+When everything is merged which should be merged, a new Release can be deployed as follows:
 
 - [Karabo-Pipeline | Releases](https://github.com/i4Ds/Karabo-Pipeline/releases)
 - Click on `Draft a new release`
-- Define a Version by clicking `Choose a tag`. Currently we increment the second number by 1.
-- Update version in `karabo/version.py`
+- Define a Version by clicking `Choose a tag`. We follow PEP440 `{major}.{minor}.{patch}` with a leading `v` at the beginning (see previous versions). Usually we increment the minor version by 1.
 - Check that the `Target` is set to `main`.
 - Describe the release (get inspired by the previous releases).
 - Click `Publish release`.
 - Check on [Karabo-Pipeline | Github Actions](https://github.com/i4Ds/Karabo-Pipeline/actions) that the release is succesful.
 - Check that the new version is on [Anaconda.org | Packages](https://anaconda.org/i4ds/karabo-pipeline)
+- Check on [Karabo-Pipeline | Docker Images](https://github.com/i4ds/Karabo-Pipeline/pkgs/container/karabo-pipeline) that the released image is live.
diff --git a/doc/src/installation_user.md b/doc/src/installation_user.md
index 31cd850e..76f9a015 100644
--- a/doc/src/installation_user.md
+++ b/doc/src/installation_user.md
@@ -4,38 +4,41 @@
 - Linux or Windows with WSL. For macOS we recommend you use [Docker](container.md), starting with version 0.18.1 of the image.
 - 8GB RAM
 - 10GB disk space
-- GPU-acceleration requires proprietary nVidia drivers/CUDA >= 11.7
+- GPU-acceleration requires proprietary nVidia drivers/CUDA >= 11
 
 ## Install Karabo
 
 The following steps will install Karabo and its prerequisites (miniconda):
 
-```
-wget https://repo.anaconda.com/miniconda/Miniconda3-py39_23.5.2-0-Linux-x86_64.sh
-bash Miniconda3-py39_23.5.2-0-Linux-x86_64.sh -b
+```shell
+# install conda & solver
+wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
+bash Miniconda3-latest-Linux-x86_64.sh
 source ~/miniconda3/bin/activate
 conda init bash
-conda install -y -n base conda-libmamba-solver
-conda update -y -n base -c defaults conda
-conda create -y -n karabo-env python=3.9
-conda activate karabo-env
+conda install -n base conda-libmamba-solver
+# setup virtual environment
+conda create -n karabo python=3.9
+conda activate karabo
 conda config --env --set solver libmamba
 conda config --env --set channel_priority true
-conda install -y -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline
-conda clean --all -y
+# install karabo
+conda install -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline
 ```
 
-Karabo releases older than `v0.15.0` are deprecated and therefore we don't guarantee a successful installation.
+Karabo versions older than `v0.15.0` are deprecated and therefore installation will most likely fail. In addition, we do not support Karabo versions older than the latest minor version if dependency resolution or online resources are outdated. Therefore, we strongly recommend using the latest version of Karabo. If an older version of Karabo is required, we strongly recommend using a [container](container.md), as the environment is fixed in a container. However, outdated online resources may still be an issue.
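+
+To verify the installation, a quick smoke test (a sketch; the import doubles as a version check) is:
+
+```shell
+python -c "import karabo; print(karabo.__version__)"
+```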
-## Update to the current Karabo version
+## Update to the latest Karabo version
 
 A Karabo installation can be updated the following way:
+
+Note: Even though we care about not introducing API-breaking changes across different minor releases of Karabo, we don't guarantee it.
+
 ```
-conda update -y -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline
-conda clean --all -y
+conda update -c nvidia/label/cuda-11.7.0 -c i4ds -c conda-forge karabo-pipeline
 ```
 
 ## Additional Notes and Troubleshooting
 
-- If the base environment was updated, *libmamba* might fail to install. In that case, reset conda to version 22 using `conda install --rev 0 --name base` or you can try installing Karabo without *libmamba*. Using *libmamba* is not strictly required, but strongly recommended, because it should make the installation much faster and more reliable.
-- You can install miniconda into a different path, use ```bash Miniconda3-py39_22.11.1-1-Linux-x86_64.sh -b -p YourDesiredPath``` instead
+- Don't install anything into the base environment except libraries which are supposed to live there. If you accidentally install packages there which are not supposed to be there, you might break some functionality of your conda-installation.
+- If you're using a system conda, it might be that you don't have access to a libmamba-solver, because the solver lives in the base environment, which belongs to root. In this case, you can ask your admin to install the solver, try an installation without the libmamba solver, or just install conda into your home directory (which is the recommended solution).
 - If you are using WSL and running a jupyter-notebook fails, you might have to set the path to the cuda libraries as follows:
 
 ```shell
diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile
deleted file mode 100644
index 5854bae4..00000000
--- a/docker/dev/Dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-# LEGACY-FILE, has to be checked before usage
-# Create build container to not have copied filed in real container afterwards
-FROM --platform=amd64 continuumio/miniconda3:4.12.0 as build
-ARG IS_DOCKER_CONTAINER=true
-COPY environment.yaml environment.yaml
-COPY requirements.txt requirements.txt
-
-FROM --platform=amd64 continuumio/miniconda3:4.12.0
-SHELL ["conda", "run", "-n", "base", "/bin/bash", "-c"]
-RUN apt-get update && apt-get install -y curl && apt-get autoclean && rm -rf /var/lib/apt/lists/*
-COPY --from=build environment.yaml environment.yaml
-COPY --from=build requirements.txt requirements.txt
-RUN conda update -y conda && \
-    conda clean --all --yes && \
-    conda install mamba -y -c conda-forge
-RUN mamba env update --file environment.yaml
-RUN pip install -r requirements.txt
-RUN rm environment.yaml requirements.txt
-RUN pip install unittest-xml-reporting
-RUN mkdir /workspace
-WORKDIR /workspace
\ No newline at end of file
diff --git a/docker/user/Dockerfile b/docker/user/Dockerfile
deleted file mode 100644
index 8ab54340..00000000
--- a/docker/user/Dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-# Create build container to not have copied filed in real container afterwards
-FROM nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 as build
-ARG KARABO_TAG
-ARG IS_DOCKER_CONTAINER=true
-RUN apt-get update && apt-get install -y git
-RUN git clone --branch ${KARABO_TAG} --depth=1 https://github.com/i4Ds/Karabo-Pipeline.git
-
-FROM nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04
-ARG KARABO_TAG
-RUN apt-get update && apt-get install -y libarchive13 wget curl nano
-RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-py39_23.5.0-3-Linux-x86_64.sh -O ~/miniconda.sh && \
-    /bin/bash ~/miniconda.sh -b -p /opt/conda
-ENV PATH=/opt/conda/bin:$PATH
-RUN conda init
-SHELL ["conda", "run", "-n", "base", "/bin/bash", "-c"]
-RUN conda update -y conda && \
-    conda install mamba -y -c conda-forge
-RUN mamba install -y -c i4ds -c conda-forge -c "nvidia/label/cuda-11.7.1" karabo-pipeline="${KARABO_TAG:1}"
-RUN pip install jupyterlab ipykernel pytest && \
-    python -m ipykernel install --user --name=karabo && \
-    mkdir /workspace
-WORKDIR /workspace
-COPY --from=build Karabo-Pipeline/ repo/
-RUN cp -r repo/karabo/examples /workspace/examples/ && \
-    rm -rf repo/
diff --git a/environment.yaml b/environment.yaml
index 9bcc36b5..36fa6581 100644
--- a/environment.yaml
+++ b/environment.yaml
@@ -2,37 +2,41 @@ channels:
   - i4ds
   - nvidia/label/cuda-11.7.0
   - conda-forge
-dependencies:
-  - python=3.9
-  - aratmospy=1.0.0
+dependencies: # package-version & build-number of Karabo-Feedstock deps should be fixed (see PR #526)
+  - python =3.9
+  - aratmospy =1.0.0=*_0
   - astropy
-  - bdsf
-  - bluebild
+  - bdsf =1.10.2=*_0
+  - bluebild =0.1.0=*_0
   - cuda-cudart
-  - dask=2022.12.1
+  - dask =2022.12.1
   - dask-mpi
-  - mpi4py
   - distributed
-  - eidos=1.1.0
+  - eidos =1.1.0=*_0
   - healpy
-  - h5py
+  - h5py =*=mpi_mpich*
   - ipython
-  - katbeam=0.1.0
+  - katbeam =0.1.0=*_0
   - libcufft
   - matplotlib
+  - montagepy =6.0.0=*_0
+  - mpi4py
+  - mpich
   - nbformat
   - nbconvert
-  - numpy=1.22
-  - oskarpy=2.8.3
+  - numpy >=1.21, !=1.24.0
+  - oskarpy =2.8.3=*_0
   - pandas
   - psutil
-  - rascil=1.0.0
-  - reproject>=0.9,<=10.0
+  - rascil =1.0.0=*_0
+  - reproject >=0.9,<=10.0
   - requests
-  - scipy=1.10.1
-  - ska-gridder-nifty-cuda=0.3.0
-  - ska-sdp-datamodels=0.1.3
-  - ska-sdp-func-python=0.1.4
-  - tools21cm=2.0.2
-  - xarray
-  - montagepy=6.0.0
+  - scipy >=1.10.1
+  - ska-gridder-nifty-cuda =0.3.0=*_0
+  - ska-sdp-datamodels =0.1.3=*_0
+  - ska-sdp-func-python =0.1.4=*_0
+  - tools21cm =2.0.2=*_0
+  - xarray >=2022.10.0
+  # transitive dependencies which we need to reference to get mpi-wheels
+  # casacore has just no-mpi & open-mpi wheels, but no mpich-wheel
+  - conda-forge::fftw =*=mpi_mpich* # oskarpy(oskar(casacore)), tools21cm, bluebild(finufft) -> from conda-forge to ignore channel-prio & not take our legacy fftw-wheel
\ No newline at end of file
diff --git a/karabo/__init__.py b/karabo/__init__.py
index 5a9ffbbb..e57eb831 100644
--- a/karabo/__init__.py
+++ b/karabo/__init__.py
@@ -1,11 +1,15 @@
-# set shared library if WSL to detect GPU drivers
+"""This file is executed during build-time and when karabo gets imported.
+Hence, you ONLY have deps available here which are available during build-time and
+in karabo. If you don't know what that means, don't touch anything here.
+"""
 import os
 import platform
 import sys
 
-from karabo.version import __version__
+from ._version import get_versions
 
-__version__ = __version__
+__version__ = get_versions()["version"]
+del get_versions
 
 if "WSL" in platform.release() and (
     os.environ.get("LD_LIBRARY_PATH") is None
@@ -22,13 +26,20 @@
 # https://stackoverflow.com/questions/6543847/setting-ld-library-path-from-inside-python
     os.execv(sys.executable, ["python"] + sys.argv)
 
-# Setup dask for slurm
-from karabo.util.dask import prepare_slurm_nodes_for_dask
+if "SLURM_JOB_ID" in os.environ:
+    # This if-statement is an ugly workaround to not import pkgs which are not
+    # available at build/install-time. This is something which happens if you
+    # install the dependencies of Karabo through pip.
+    # Then, `versioneer` determines the current version of Karabo automatically,
+    # which is done through this root-init-file. But because this is happening
+    # at build/install-time, the dependencies of Karabo are not yet available
+    # in the venv, and therefore the installation of the dependencies will fail.
+    from karabo.util.dask import prepare_slurm_nodes_for_dask
 
-prepare_slurm_nodes_for_dask()
+    prepare_slurm_nodes_for_dask()
 
 # set rascil data directory environment variable
 # see https://ska-telescope.gitlab.io/external/rascil/RASCIL_install.html
-from karabo.util.jupyter import set_rascil_data_directory_env  # noqa: E402
+from karabo.util.setup_pkg import set_rascil_data_directory_env  # noqa: E402
 
 set_rascil_data_directory_env()
diff --git a/karabo/_version.py b/karabo/_version.py
new file mode 100644
index 00000000..be52b71a
--- /dev/null
+++ b/karabo/_version.py
@@ -0,0 +1,717 @@
+# This file helps to compute a version number in source trees obtained from
+# git-archive tarball (such as those provided by githubs download-from-tag
+# feature). Distribution tarballs (built by setup.py sdist) and build
+# directories (produced by setup.py build) will contain a much shorter file
+# that just contains the computed version number.
+
+# This file is released into the public domain.
+# Generated by versioneer-0.29
+# https://github.com/python-versioneer/python-versioneer
+# mypy: ignore-errors
+
+"""Git implementation of _version.py."""
+
+import errno
+import functools
+import os
+import re
+import subprocess
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple
+
+
+def get_keywords() -> Dict[str, str]:
+    """Get the keywords needed to look up the version information."""
+    # these strings will be replaced by git during git-archive.
+    # setup.py/versioneer.py will grep for the variable names, so they must
+    # each be defined on a line of their own. _version.py will just call
+    # get_keywords().
+ git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool + + +def get_config() -> VersioneerConfig: + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "None" + cfg.versionfile_source = "karabo/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} + + +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f: Callable) -> Callable: + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + + popen_kwargs: Dict[str, Any] = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + **popen_kwargs, + ) + break + except OSError as e: + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords: Dict[str, str] = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
+ tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs( + tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command +) -> Dict[str, Any]: + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + f"{tag_prefix}[[:digit:]]*", + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces: Dict[str, Any] = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. 
+ branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparsable. Maybe git-describe is misbehaving? + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces: Dict[str, Any]) -> str: + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces: Dict[str, Any]) -> str: + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces: Dict[str, Any]) -> str: + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). 
+
+    Exceptions:
+    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0"
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
+    """Split pep440 version string at the post-release segment.
+
+    Returns the release segments before the post-release and the
+    post-release version number (or None if no post-release segment is present).
+    """
+    vc = str.split(ver, ".post")
+    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces: Dict[str, Any]) -> str:
+    """TAG[.postN.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post0.devDISTANCE
+    """
+    if pieces["closest-tag"]:
+        if pieces["distance"]:
+            # update the post release segment
+            tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+            rendered = tag_version
+            if post_version is not None:
+                rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+            else:
+                rendered += ".post0.dev%d" % (pieces["distance"])
+        else:
+            # no commits, use the tag as the version
+            rendered = pieces["closest-tag"]
+    else:
+        # exception #1
+        rendered = "0.post0.dev%d" % pieces["distance"]
+    return rendered
+
+
+def render_pep440_post(pieces: Dict[str, Any]) -> str:
+    """TAG[.postDISTANCE[.dev0]+gHEX] .
+
+    The ".dev0" means dirty. Note that .dev0 sorts backwards
+    (a dirty tree will appear "older" than the corresponding clean one),
+    but you shouldn't be releasing software with -dirty anyways.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+    return rendered
+
+
+def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
+    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+    The ".dev0" means not master branch.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def render_pep440_old(pieces: Dict[str, Any]) -> str:
+    """TAG[.postDISTANCE[.dev0]] .
+
+    The ".dev0" means dirty.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces: Dict[str, Any]) -> str:
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces: Dict[str, Any]) -> str:
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. HEX[-dirty] (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
+    """Render the given version pieces into the requested style."""
+    if pieces["error"]:
+        return {
+            "version": "unknown",
+            "full-revisionid": pieces.get("long"),
+            "dirty": None,
+            "error": pieces["error"],
+            "date": None,
+        }
+
+    if not style or style == "default":
+        style = "pep440"  # the default
+
+    if style == "pep440":
+        rendered = render_pep440(pieces)
+    elif style == "pep440-branch":
+        rendered = render_pep440_branch(pieces)
+    elif style == "pep440-pre":
+        rendered = render_pep440_pre(pieces)
+    elif style == "pep440-post":
+        rendered = render_pep440_post(pieces)
+    elif style == "pep440-post-branch":
+        rendered = render_pep440_post_branch(pieces)
+    elif style == "pep440-old":
+        rendered = render_pep440_old(pieces)
+    elif style == "git-describe":
+        rendered = render_git_describe(pieces)
+    elif style == "git-describe-long":
+        rendered = render_git_describe_long(pieces)
+    else:
+        raise ValueError("unknown style '%s'" % style)
+
+    return {
+        "version": rendered,
+        "full-revisionid": pieces["long"],
+        "dirty": pieces["dirty"],
+        "error": None,
+        "date": pieces.get("date"),
+    }
+
+
+def get_versions() -> Dict[str, Any]:
+    """Get version information or return default if unable to do so."""
+    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
+    # __file__, we can work backwards from there to the root. Some
+    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
+    # case we can only use expanded keywords.
+
+    cfg = get_config()
+    verbose = cfg.verbose
+
+    try:
+        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
+    except NotThisMethod:
+        pass
+
+    try:
+        root = os.path.realpath(__file__)
+        # versionfile_source is the relative path from the top of the source
+        # tree (where the .git directory might live) to this file. Invert
+        # this to find the root from __file__.
+ for _ in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/karabo/imaging/image.py b/karabo/imaging/image.py index 31f4c36a..9b0162d1 100644 --- a/karabo/imaging/image.py +++ b/karabo/imaging/image.py @@ -501,7 +501,8 @@ def plot_power_spectrum( plt.gca().set_ylabel("Brightness temperature [K]") plt.gca().set_xscale("log") plt.gca().set_yscale("log") - plt.gca().set_ylim(1e-6 * np.max(profile), 2.0 * np.max(profile)) + max_profile = float(np.max(profile)) + plt.gca().set_ylim(1e-6 * max_profile, 2.0 * max_profile) plt.tight_layout() if save_png: diff --git a/karabo/simulation/beam.py b/karabo/simulation/beam.py index 591e019a..98ab344b 100644 --- a/karabo/simulation/beam.py +++ b/karabo/simulation/beam.py @@ -333,7 +333,7 @@ def show_kat_beam( """ plt.imshow( beampixels, - extent=[-beamextent / 2, beamextent / 2, -beamextent / 2, beamextent / 2], + extent=(-beamextent / 2, beamextent / 2, -beamextent / 2, beamextent / 2), ) plt.title("%s pol beam\nfor %s at %dMHz" % (pol, "", freq)) plt.xlabel("deg") @@ -359,7 +359,7 @@ def plot_beam( :return: polar plot """ fig = plt.figure() - ax = fig.add_axes([0.1, 0.1, 0.8, 0.8], polar=True) + ax = fig.add_axes((0.1, 0.1, 0.8, 0.8), polar=True) ax.pcolormesh( phi, theta, absdir ) # TODO (Add check for this) X,Y & data2D must all be same dimensions diff --git a/karabo/simulation/sky_model.py b/karabo/simulation/sky_model.py index 5f323b3a..06e79ce2 100644 --- a/karabo/simulation/sky_model.py +++ b/karabo/simulation/sky_model.py @@ -84,7 +84,7 @@ List[str], List[int], List[float], - NDArray[np.object0], + NDArray[np.object_], NDArray[np.int_], NDArray[np.float_], DataArrayCoordinates[xr.DataArray], @@ -473,7 +473,8 @@ def rechunk_array_based_on_self(self, array: xr.DataArray) -> xr.DataArray: raise KaraboSkyModelError("Rechunking of `sources` None is not allowed.") if self.sources.chunks is not None: chunk_size = max(self.sources.chunks[0][0], 1) - array = array.chunk({self._sources_dim_sources: chunk_size}) + chunks: Dict[str, Any] = {self._sources_dim_sources: chunk_size} + array = array.chunk(chunks=chunks) else: pass return array @@ -636,9 +637,11 @@ def filter_by_radius_euclidean_flat_approximation( distances_sq = np.add(np.square(x), np.square(y)) # Filter sources based on inner and outer radius - filter_mask = (distances_sq >= np.square(inner_radius_deg)) & ( - distances_sq <= np.square(outer_radius_deg) - ) + filter_mask = cast( # distances_sq actually an xr.DataArray because x & y are + xr.DataArray, + (distances_sq >= np.square(inner_radius_deg)) + & (distances_sq <= np.square(outer_radius_deg)), + ).compute() copied_sky.sources = copied_sky.sources[filter_mask] @@ -674,7 +677,7 @@ def filter_by_column( filter_mask = (copied_sky.sources[:, col_idx] >= min_val) & ( copied_sky.sources[:, col_idx] <= max_val ) - filter_mask = self.rechunk_array_based_on_self(filter_mask) + filter_mask = 
self.rechunk_array_based_on_self(filter_mask).compute() # Apply the filter mask and drop the unmatched rows copied_sky.sources = copied_sky.sources.where(filter_mask, drop=True) @@ -1185,9 +1188,11 @@ def get_sky_model_from_h5_to_xarray( data_arrays = [x.compute() for x in data_arrays] sky = xr.concat(data_arrays, dim=XARRAY_DIM_1_DEFAULT) sky = sky.T - sky = sky.chunk( - {XARRAY_DIM_0_DEFAULT: chunksize, XARRAY_DIM_1_DEFAULT: sky.shape[1]} # type: ignore [dict-item] # noqa: E501 - ) + chunks: Dict[str, Any] = { + XARRAY_DIM_0_DEFAULT: chunksize, + XARRAY_DIM_1_DEFAULT: sky.shape[1], + } + sky = sky.chunk(chunks=chunks) return SkyModel(sky, h5_file_connection=f) @staticmethod @@ -1379,13 +1384,12 @@ def get_sky_model_from_fits( data_array.coords[XARRAY_DIM_0_DEFAULT] = source_ids data_arrays.append(data_array) + chunks: Dict[str, Any] = {XARRAY_DIM_0_DEFAULT: chunksize} for freq_dataset in data_arrays: - freq_dataset.chunk({XARRAY_DIM_0_DEFAULT: chunksize}) # type: ignore [dict-item] # noqa: E501 + freq_dataset.chunk(chunks=chunks) result_dataset = ( - xr.concat(data_arrays, dim=XARRAY_DIM_0_DEFAULT) - .chunk({XARRAY_DIM_0_DEFAULT: chunksize}) # type: ignore [dict-item] - .T + xr.concat(data_arrays, dim=XARRAY_DIM_0_DEFAULT).chunk(chunks=chunks).T ) return SkyModel(result_dataset) diff --git a/karabo/sourcedetection/evaluation.py b/karabo/sourcedetection/evaluation.py index 8f00e2e6..b35aafe6 100644 --- a/karabo/sourcedetection/evaluation.py +++ b/karabo/sourcedetection/evaluation.py @@ -187,7 +187,7 @@ def automatic_assignment_of_ground_truth_and_prediction( idx_assigment_pred[distance == np.inf] = -1 # Check if a ground truth point is assigned to more # than one predicted point - pred_multiple_assignments = SourceDetectionEvaluation.__return_multiple_assigned_detected_points( # noqa + pred_multiple_assignments = SourceDetectionEvaluation.__return_multiple_assigned_detected_points( # noqa: E501 idx_assigment_pred ) @@ -329,14 +329,15 @@ def plot_confusion_matrix( filename: Optional[str] = None, ) -> None: conf_matrix = self.get_confusion_matrix() + ax: Axes _, ax = plt.subplots() - ax.matshow(conf_matrix, cmap=plt.cm.Blues, alpha=0.3) + ax.matshow(conf_matrix, cmap=plt.cm.Blues, alpha=0.3) # type: ignore[attr-defined] # noqa: E501 for i in range(conf_matrix.shape[0]): for j in range(conf_matrix.shape[1]): ax.text( x=j, y=i, - s=int(conf_matrix[i, j]), + s=str(conf_matrix[i, j]), va="center", ha="center", size="x-large", diff --git a/karabo/test/conftest.py b/karabo/test/conftest.py index a6081119..3d172dc1 100644 --- a/karabo/test/conftest.py +++ b/karabo/test/conftest.py @@ -1,12 +1,13 @@ """Pytest global fixtures needs to be here!""" import os -from collections.abc import Callable +from collections.abc import Callable, Generator, Iterable from dataclasses import dataclass import matplotlib.pyplot as plt import numpy as np import pytest from numpy.typing import NDArray +from pytest import Config, Item, Parser from karabo.test import data_path from karabo.util.file_handler import FileHandler @@ -14,9 +15,64 @@ NNImageDiffCallable = Callable[[str, str], float] IS_GITHUB_RUNNER = os.environ.get("IS_GITHUB_RUNNER", "false").lower() == "true" +RUN_GPU_TESTS = os.environ.get("RUN_GPU_TESTS", "false").lower() == "true" file_handler_test_dir = os.path.join(os.path.dirname(__file__), "karabo_test") +def pytest_addoption(parser: Parser) -> None: + """Pytest custom argparse hook. + + Add custom argparse options here. + + Pytest argparse-options have to be declared in the root conftest.py. 
+    For some reason, the root conftest.py has to live near the project-root, even if
+    only a single conftest.py exists. However, this prevents running `pytest .` with
+    custom argparse-options from the project-root. Instead, either specify the
+    test-dir explicitly or leave the path out entirely.
+
+    Args:
+        parser: pytest.Parser
+    """
+    parser.addoption(
+        "--only-mpi",
+        action="store_true",
+        default=False,
+        help="run only mpi tests",
+    )
+
+
+def pytest_configure(config: Config) -> None:
+    """Pytest add ini-values.
+
+    Args:
+        config: pytest.Config
+    """
+    config.addinivalue_line("markers", "mpi: mark mpi-tests as mpi")
+
+
+def pytest_collection_modifyitems(config: Config, items: Iterable[Item]) -> None:
+    """Pytest modify-items hook.
+
+    Change pytest-behavior dependent on parsed input.
+
+    See https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
+
+    Args:
+        config: pytest.Config
+        items: iterable of pytest.Item
+    """  # noqa: E501
+    if not config.getoption("--only-mpi"):
+        skipper = pytest.mark.skip(reason="Only run when --only-mpi is given")
+        for item in items:
+            if "mpi" in item.keywords:
+                item.add_marker(skipper)
+    else:
+        skipper = pytest.mark.skip(reason="Don't run when --only-mpi is given")
+        for item in items:
+            if "mpi" not in item.keywords:
+                item.add_marker(skipper)
+
+
 @dataclass
 class TFiles:
     """Read-only repo-artifact paths.
@@ -65,7 +121,7 @@ def tobject() -> TFiles:
 
 
 @pytest.fixture(scope="function", autouse=True)
-def clean_disk():
+def clean_disk() -> Generator[None, None, None]:
     """Automatically clears FileHandler.root after each test.
 
     Needed in some cases where the underlying functions do use FileHanlder
@@ -131,11 +187,11 @@ def sky_data(sky_data_with_ids: NDArray[np.object_]) -> NDArray[np.float64]:
 def normalized_norm_diff() -> NNImageDiffCallable:
     """Compare two images."""
 
-    def _normalized_norm_diff(img_path_1, img_path_2):
+    def _normalized_norm_diff(img_path_1: str, img_path_2: str) -> float:
        img1 = plt.imread(img_path_1)
        img2 = plt.imread(img_path_2)
        assert img1.shape == img2.shape
        # Calculate the error between the two images
-        return np.linalg.norm(img1 - img2) / (img1.shape[0] * img1.shape[1])
+        return float(np.linalg.norm(img1 - img2) / (img1.shape[0] * img1.shape[1]))
 
     return _normalized_norm_diff
diff --git a/karabo/test/test_mpi.py b/karabo/test/test_mpi.py
new file mode 100644
index 00000000..70c24212
--- /dev/null
+++ b/karabo/test/test_mpi.py
@@ -0,0 +1,90 @@
+"""MPI tests according to `https://mpi4py.readthedocs.io/en/stable/tutorial.html`."""
+import numpy as np
+import pytest
+from mpi4py import MPI
+
+
+@pytest.mark.mpi
+def test_broadcast_dict():
+    comm = MPI.COMM_WORLD
+    rank = comm.Get_rank()
+
+    if rank == 0:
+        data = {"key1": [7, 2.72, 2 + 3j], "key2": ("abc", "xyz")}
+    else:
+        data = None
+    data = comm.bcast(data, root=0)
+
+
+@pytest.mark.mpi
+def test_scatter_obj():
+    comm = MPI.COMM_WORLD
+    size = comm.Get_size()
+    rank = comm.Get_rank()
+
+    if rank == 0:
+        data = [(i + 1) ** 2 for i in range(size)]
+    else:
+        data = None
+    data = comm.scatter(data, root=0)
+    assert data == (rank + 1) ** 2
+
+
+@pytest.mark.mpi
+def test_gather_obj():
+    comm = MPI.COMM_WORLD
+    size = comm.Get_size()
+    rank = comm.Get_rank()
+
+    data = (rank + 1) ** 2
+    data = comm.gather(data, root=0)
+    if rank == 0:
+        for i in range(size):
+            assert data[i] == (i + 1) ** 2
+    else:
+        assert data is None
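+
+
+# NOTE: mpi4py's lowercase bcast/scatter/gather (above) communicate generic
+# Python objects via pickle; the uppercase Bcast/Scatter/Gather (below)
+# communicate buffer-like objects such as NumPy arrays without pickling.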
+
+
+@pytest.mark.mpi
+def test_broadcast_nparrays():
+    comm = MPI.COMM_WORLD
+    rank = comm.Get_rank()
+
+    if rank == 0:
+        data = np.arange(100, dtype="i")
+    else:
+        data = np.empty(100, dtype="i")
+    comm.Bcast(data, root=0)
+    for i in range(100):
+        assert data[i] == i
+
+
+@pytest.mark.mpi
+def test_scatter_nparrays():
+    comm = MPI.COMM_WORLD
+    size = comm.Get_size()
+    rank = comm.Get_rank()
+
+    sendbuf = None
+    if rank == 0:
+        sendbuf = np.empty([size, 100], dtype="i")
+        sendbuf.T[:, :] = range(size)
+    recvbuf = np.empty(100, dtype="i")
+    comm.Scatter(sendbuf, recvbuf, root=0)
+    assert np.allclose(recvbuf, rank)
+
+
+@pytest.mark.mpi
+def test_gather_nparrays():
+    comm = MPI.COMM_WORLD
+    size = comm.Get_size()
+    rank = comm.Get_rank()
+
+    sendbuf = np.zeros(100, dtype="i") + rank
+    recvbuf = None
+    if rank == 0:
+        recvbuf = np.empty([size, 100], dtype="i")
+    comm.Gather(sendbuf, recvbuf, root=0)
+    if rank == 0:
+        for i in range(size):
+            assert np.allclose(recvbuf[i, :], i)
diff --git a/karabo/test/test_source_detection.py b/karabo/test/test_source_detection.py
index 6a62d0a4..e904fc16 100644
--- a/karabo/test/test_source_detection.py
+++ b/karabo/test/test_source_detection.py
@@ -20,9 +20,7 @@
     PyBDSFSourceDetectionResultList,
     SourceDetectionResult,
 )
-from karabo.test.conftest import NNImageDiffCallable, TFiles
-
-RUN_GPU_TESTS = os.environ.get("RUN_GPU_TESTS", "false").lower() == "true"
+from karabo.test.conftest import RUN_GPU_TESTS, NNImageDiffCallable, TFiles
 
 
 @pytest.fixture
diff --git a/karabo/test/test_utils.py b/karabo/test/test_utils.py
index 1d1cca35..f6fdebb6 100644
--- a/karabo/test/test_utils.py
+++ b/karabo/test/test_utils.py
@@ -1,33 +1,38 @@
-import os
-
 import pytest
 
+from karabo.test.conftest import RUN_GPU_TESTS
 from karabo.util.gpu_util import get_gpu_memory, is_cuda_available
 from karabo.version import __version__
 
-RUN_GPU_TESTS = os.environ.get("RUN_GPU_TESTS", "false").lower() == "true"
 
+def test_is_cuda_available():
+    assert isinstance(is_cuda_available(), bool)
 
-@pytest.mark.skipif(not RUN_GPU_TESTS, reason="GPU tests are disabled")
-def test_get_gpu_memory():
-    memory = get_gpu_memory()
-    assert isinstance(memory, int)
-    assert memory > 0
 
+CUDA_AVAILABLE = is_cuda_available()
 
-@pytest.mark.skipif(RUN_GPU_TESTS, reason="Does not fail when GPU is available")
+
+@pytest.mark.skipif(
+    CUDA_AVAILABLE,
+    reason="get_gpu_memory throws a RuntimeError only if CUDA is not available",
+)
 def test_gpu_memory_error():
     with pytest.raises(RuntimeError):
         get_gpu_memory()
 
 
-def test_is_cuda_available():
-    assert isinstance(is_cuda_available(), bool)
+@pytest.mark.skipif(
+    not CUDA_AVAILABLE, reason="get_gpu_memory works only if CUDA is available"
+)
+def test_get_gpu_memory():
+    memory = get_gpu_memory()
+    assert isinstance(memory, int)
+    assert memory > 0
 
 
 @pytest.mark.skipif(not RUN_GPU_TESTS, reason="GPU tests are disabled")
 def test_is_cuda_available_true():
-    assert is_cuda_available()
+    assert CUDA_AVAILABLE
 
 
 def test_version():
diff --git a/karabo/util/__init__.py b/karabo/util/__init__.py
index 9196f547..18ce9040 100644
--- a/karabo/util/__init__.py
+++ b/karabo/util/__init__.py
@@ -1,3 +1,3 @@
-from karabo.util.jupyter import set_rascil_data_directory_env
+from karabo.util.setup_pkg import set_rascil_data_directory_env
 
 set_rascil_data_directory_env()
diff --git a/karabo/util/dask.py b/karabo/util/dask.py
index 9517d8e8..09ca500d 100644
--- a/karabo/util/dask.py
+++ b/karabo/util/dask.py
@@ -12,6 +12,8 @@
 import psutil
 from dask import compute, delayed  # type: ignore[attr-defined]
 from dask.distributed import Client, LocalCluster, Nanny, Worker
+from dask_mpi import initialize
+from mpi4py import MPI
 
 from karabo.error import KaraboDaskError
 from karabo.util._types import IntFloat
@@ -102,13 +104,12 @@ def setup() -> None:
 
     @staticmethod
     def get_dask_client() -> Client:
-        # Get IS_DOCKER_CONTAINER variable
-        if os.environ.get("IS_DOCKER_CONTAINER", "false").lower() == "true":
-            from dask.distributed import Client
-            from dask_mpi import initialize
-            from mpi4py import MPI
-
-            initialize(nthreads=DaskHandler.n_threads_per_worker, comm=MPI.COMM_WORLD)
+        if MPI.COMM_WORLD.Get_size() > 1:
+            n_threads_per_worker = DaskHandler.n_threads_per_worker
+            if n_threads_per_worker is None:
+                initialize(comm=MPI.COMM_WORLD)
+            else:
+                initialize(nthreads=n_threads_per_worker, comm=MPI.COMM_WORLD)
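+            # dask-mpi batch mode: initialize() runs the scheduler on MPI
+            # rank 0 and workers on ranks >= 2, while rank 1 continues
+            # executing this script as the client process.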
             DaskHandler.dask_client = Client(processes=DaskHandler.use_proccesses)
         elif DaskHandler.dask_client is None:
             if (
diff --git a/karabo/util/file_handler.py b/karabo/util/file_handler.py
index 4f1c43d5..5d75410b 100644
--- a/karabo/util/file_handler.py
+++ b/karabo/util/file_handler.py
@@ -2,7 +2,9 @@
 
 import glob
 import os
+import random
 import shutil
+import string
 import uuid
 from types import TracebackType
 from typing import Optional, Union
@@ -11,32 +13,85 @@
 from karabo.util.plotting_util import Font
 
 
-def _get_default_root_dir() -> str:
-    karabo_folder = "karabo_folder"
-    scratch = os.environ.get("SCRATCH")
-    if scratch is not None:
-        root_parent = scratch
-    else:
-        root_parent = os.getcwd()
-    root_dir = os.path.join(root_parent, karabo_folder)
-    return os.path.abspath(root_dir)
+def _get_disk_cache_root() -> str:
+    """Gets the root-directory of the disk-cache.
+
+    Defined env-var-dir > scratch-dir > tmp-dir
+
+    Honors TMPDIR and TMP environment variable(s).
+
+    Raises:
+        RuntimeError: If 'TMPDIR' & 'TMP' are set differently which is ambiguous.
+
+    Returns:
+        path of tmpdir
+    """
+    # first guess is just /tmp (low prio)
+    tmpdir = f"{os.path.sep}tmp"
+    # second guess is if scratch is available (mid prio)
+    if (scratch := os.environ.get("SCRATCH")) is not None and os.path.exists(scratch):
+        tmpdir = scratch
+    # third guess is to honor the env-variables mentioned (high prio)
+    env_check: Optional[str] = None  # variable to check previous environment variables
+    environment_varname = ""
+    if (TMPDIR := os.environ.get("TMPDIR")) is not None:
+        tmpdir = os.path.abspath(TMPDIR)
+        env_check = TMPDIR
+        environment_varname = "TMPDIR"
+    if (TMP := os.environ.get("TMP")) is not None:
+        if env_check is not None:
+            if TMP != env_check:
+                raise RuntimeError(
+                    f"Environment variables collision: TMP={TMP} != "
+                    + f"{environment_varname}={env_check} which is ambiguous."
+                )
+        else:
+            tmpdir = os.path.abspath(TMP)
+            env_check = TMP
+            environment_varname = "TMP"
+    return tmpdir
+
+
+def _get_cache_dir() -> str:
+    """Creates a deterministic & user-specific cache-dir-name.
+
+    dir-name: karabo-($USER-)<10-rnd-ascii-letters-and-digits>
+
+    Returns:
+        path of cache-dir
+    """
+    tmpdir = _get_disk_cache_root()
+    delimiter = "-"
+    prefix = "karabo"
+    user = os.environ.get("USER")
+    if user is not None:
+        prefix = delimiter.join((prefix, user))
+    random.seed(prefix)
+    suffix = "".join(random.choices(string.ascii_letters + string.digits, k=10))
+    cache_dir_name = delimiter.join((prefix, suffix))
+    cache_dir = os.path.join(tmpdir, cache_dir_name)
+    return cache_dir
 
 
 class FileHandler:
     """Utility file-handler for unspecified directories.
 
-    Provides directory-management functionality in case no dir-path was specified.
-    `FileHandler.root` is a static root-directory where each subdir is located.
+    Provides cache-management functionality.
+    `FileHandler.root` is a static root-directory where each cache-dir is located.
+    In case you want to extract something specific from the cache, the path is usually
+    printed blue & bold in stdout.
+
     Set `FileHandler.root` to change the directory where files and dirs will be saved.
-    Subdirs are usually {prefix}_{fh_dir_identifier}_{uuid4[:8]} in case `prefix`
-    is defined, otherwise just {fh_dir_identifier}_{uuid4[:8]}.
+    Otherwise, the root resolves to $TMPDIR or $TMP (falling back to $SCRATCH or
+    /tmp) with a `karabo-($USER-)<suffix>` cache-dir appended.
+    Subdirs are usually {prefix}_{fh_dir_identifier}_{uuid4} in case `prefix`
+    is defined, otherwise just {fh_dir_identifier}_{uuid4}.
     This class provides an additional security layer for the removal of subdirs
-    in case a root is specified where other files and directories live.
-    FileHanlder can be used the same way as `tempfile.TemporaryDirectory` using with.
+    in case a root is specified where other files and directories live.
+    FileHandler can be used the same way as `tempfile.TemporaryDirectory` using `with`.
     """
 
-    root: str = _get_default_root_dir()
-    fh_dir_identifier = "fhdir"  # additional security to protect against dir-removal
+    root: str = _get_cache_dir()
+    fh_dir_identifier = "fhdir"  # additional protection against dir-removal
 
     def __init__(
         self,
diff --git a/karabo/util/jupyter.py b/karabo/util/jupyter.py
index c9515aec..659f2c64 100644
--- a/karabo/util/jupyter.py
+++ b/karabo/util/jupyter.py
@@ -1,22 +1,6 @@
-import os
-from distutils.sysconfig import get_python_lib
-
 from IPython.core.getipython import get_ipython
 
 
-def set_rascil_data_directory_env() -> None:
-    """
-    Sets specific environment variables
-    that the jupyter kernel is not loading by default.
-
-    This function is idempotent (running it more than once brings no side effects).
-
-    """
-
-    data_folder = f"{get_python_lib()}/../../../data"
-    os.environ["RASCIL_DATA"] = data_folder
-
-
 def isNotebook() -> bool:
     # based on this.:
     # https://stackoverflow.com/questions/15411967/how-can-i-check-if-code-is-executed-in-the-ipython-notebook
diff --git a/karabo/util/setup_pkg.py b/karabo/util/setup_pkg.py
new file mode 100644
index 00000000..2a208bf5
--- /dev/null
+++ b/karabo/util/setup_pkg.py
@@ -0,0 +1,19 @@
+"""This .py file is ONLY for setup-specific util-functions.
+Thus, ONLY build-time deps are allowed here.
+If you don't know what that means, don't touch anything here.
+"""
+import os
+from sysconfig import get_path
+
+
+def set_rascil_data_directory_env() -> None:
+    """
+    Sets specific environment variables
+    that the jupyter kernel is not loading by default.
+
+    This function is idempotent (running it more than once brings no side effects).
+
+    """
+    lib_dir = os.path.dirname(os.path.dirname(os.path.dirname(get_path("platlib"))))
+    data_folder = os.path.join(lib_dir, "data")
+    os.environ["RASCIL_DATA"] = data_folder
diff --git a/pyproject.toml b/pyproject.toml
index dfb56f3a..932f8a6c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,61 @@
+[project]
+name = "Karabo-Pipeline"
+description = "A data-driven pipeline for Radio Astronomy from i4ds for the SKA Telescope."
+authors = [ + { name = "Simon Felix", email = "simon.felix@fhnw.ch" }, +] +readme = "README.md" +license = {text = "MIT"} +requires-python = ">=3.9" +dynamic = ["version"] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.9", + "Topic :: Software Development :: Libraries :: Python Modules", +] + [build-system] requires = [ - "setuptools>=42", - "wheel" + "setuptools>=56.0", + "wheel", + "versioneer[toml]", ] build-backend = "setuptools.build_meta" + +[tool.setuptools.dynamic] +version = { attr = "karabo.__version__" } + +[tool.versioneer] +VCS = "git" +style = "pep440" +versionfile_source = "karabo/_version.py" +versionfile_build = "karabo/_version.py" +tag_prefix = "v" + [tool.pytest.ini_options] +testpaths = "karabo/test" + +[project.optional-dependencies] + dev = [ + 'black[jupyter]==23.10.0', + 'flake8==6.1.0', + 'ipykernel', + 'isort==5.12.0', + 'pre-commit==3.5.0', + 'pydocstyle==6.3.0', + 'pytest==7.4.2', + 'pytest-cov==4.1.0', + 'mypy==1.6.1', + 'mypy-extensions==1.0.0', + 'myst-parser', + 'nest_asyncio', # for notebook test runs + 'podmena', # commit emojis + 'sphinx', + 'sphinx_rtd_theme', + 'types-requests', # types for mypy + 'versioneer', + ] \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index caf52f2b..00000000 --- a/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ -# dev & test dependencies - -# formatting tools -mypy==1.2.0 -mypy-extensions==1.0.0 -flake8==6.0.0 -isort==5.12.0 -black[jupyter]==23.3.0 -pydocstyle==6.3.0 -pytest==7.3.1 -pytest-cov==4.1.0 -pre-commit==3.2.2 - -# for notebook test runs -nest_asyncio - -# types for mypy -types-requests - -# doc tools -myst-parser -sphinx -sphinx_rtd_theme diff --git a/setup.cfg b/setup.cfg index e6ef73f3..aeaa7471 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,36 +1,3 @@ - -[metadata] -name = Karabo-Pipeline -author = Simon Felix -author_email = simon.felix@fhnw.ch -url = https://github.com/i4Ds/Karabo-Pipeline -description = A data-driven pipeline for Radio Astronomy from i4ds for the SKA Telescope -long_description = file: README.md -long_description_content_type = text/markdown -license = MIT -license_files = LICENSE -platform = any -keywords = {keywords} -classifiers = - Development Status :: 3 - Alpha - Intended Audience :: Developers - License :: OSI Approved :: MIT License - Operating System :: OS Independent - Programming Language :: Python - Programming Language :: Python :: 3.9 - Topic :: Software Development :: Libraries :: Python Modules -project_urls = - Bug Tracker = https://github.com/i4Ds/Karabo-Pipeline/issues - -[options] -zip_safe = false -include_package_data = true -python_requires = >=3.9 -packages = find: -test_suite = tests -setup_requires = - setuptools - [bdist_wheel] universal = true @@ -74,8 +41,8 @@ exclude = .git, .eggs, __pycache__, tests/, docs/, build/, dist/ [mypy] exclude = (?x)( - karabo/test/* - | setup.py + ^.*test_.*\.py$ | + /setup\.py$ ) # mypy-strict configs check_untyped_defs = true @@ -85,9 +52,9 @@ disallow_subclassing_any = true disallow_untyped_calls = false disallow_untyped_decorators = true disallow_untyped_defs = true +extra_checks = true ignore_missing_imports = true implicit_reexport = false -strict_concatenate = true strict_equality = true warn_redundant_casts = true warn_return_any = true @@ -96,6 +63,7 @@ warn_unused_ignores 
= true [coverage:run] branch = False +parallel = true [coverage:report] ; Regexes for lines to exclude from consideration @@ -119,3 +87,4 @@ omit = */.experiments/* */examples/* setup.py + */_version.py diff --git a/setup.py b/setup.py index ee229bb2..51fc6e1c 100644 --- a/setup.py +++ b/setup.py @@ -1,14 +1,7 @@ -import os -import re -from distutils.core import setup +import versioneer +from setuptools import find_packages, setup -with open(os.path.join("karabo", "version.py"), mode="r") as file: - version_txt = file.readline() - -canonical_pattern = r"([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?" # noqa: E501 -karabo_version = re.search(canonical_pattern, version_txt).group() - -# implicitly takes config from setup.cfg setup( - version=karabo_version, + version=versioneer.get_version(), + packages=find_packages(), )
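A quick, illustrative sanity-check of the versioneer wiring above (not part of the patch): with `tag_prefix = "v"` from pyproject.toml, the render helpers in the vendored karabo/_version.py turn git-describe pieces into PEP 440 strings. The tag, sha and date below are made up, and the sketch assumes the patch is applied so that `karabo._version` is importable.

from karabo._version import render

pieces = {
    "closest-tag": "0.21.0",  # tag "v0.21.0" with tag_prefix "v" stripped
    "distance": 3,  # commits since the tag
    "short": "abc1234",  # short hex of HEAD
    "long": "abc1234" + "0" * 33,  # fake full 40-char sha
    "dirty": True,  # uncommitted changes present
    "branch": "feature-x",  # != "master" -> ".dev0" in the *-branch styles
    "error": None,
    "date": "2023-10-24T12:00:00+0200",
}

print(render(pieces, "pep440")["version"])  # 0.21.0+3.gabc1234.dirty
print(render(pieces, "pep440-branch")["version"])  # 0.21.0.dev0+3.gabc1234.dirty
print(render(pieces, "pep440-post")["version"])  # 0.21.0.post3.dev0+gabc1234

The "pep440" style is what `[tool.versioneer] style = "pep440"` selects, so a clean checkout exactly on a tag renders as plain "0.21.0", while any extra commits or a dirty tree produce a local-version suffix as shown.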