Skip to content

Commit

Permalink
improve circleci test time (#227)
Browse files Browse the repository at this point in the history
* test circleci parallelism

* try test-splitting feature

* test custom docker

* add docker to use in circleci testing

* set parallelism to 3

* add readme in docker folder

* Fix voxel tests, add scipy requirement version

* test workflow

* add context in workflow

* no caching available for the current plan

* docker build on machine

* instructions for triggering docker build through circleci

Co-authored-by: Aukerman <[email protected]>
  • Loading branch information
doori and Aukerman authored Sep 2, 2021
1 parent 47692dc commit 76bfc66
Show file tree
Hide file tree
Showing 6 changed files with 155 additions and 13 deletions.
66 changes: 55 additions & 11 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,27 @@ version: 2.1
# kubernetes deploy and integration test
# use-case focused integration tests if code changes frequently

# Note: to edit context and environment variables, see Organization Settings https://app.circleci.com/settings/organization/github/msk-mind/contexts
parameters:
run_workflow_test:
type: boolean
default: true
run_workflow_docker:
type: boolean
default: false

orbs:
codecov: codecov/[email protected]

jobs:
build:
test:
parallelism: 3
docker:
- image: circleci/python:3.6.8 # primary container for the build job
- image: $DOCKER_USERNAME/luna-dev:latest # primary container for the build job

steps:
- checkout
- run: cat /etc/os-release
- run: sudo apt update
- run: sudo apt install openslide-tools python-openslide default-jre
- run: java -version
- run: which java
- run: echo "export PATH=/home/circleci/.local/bin:$PATH" >> $BASH_ENV # some python packages get installed here
Expand All @@ -29,17 +37,53 @@ jobs:
- run: export PYSPARK_PYTHON=`which python`
- run: echo $PATH
- run: python --version
- run: sudo pip install --upgrade pip
- run: pip --version
- run: sudo pip install -r requirements_dev.txt -q
- run: pip list
- run: pip check
#- run: sudo python setup.py install # needed to run the cli unit tests
- run: pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-common pyluna-common/tests --cov-report=xml
- run: pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-radiology pyluna-radiology/tests --cov-report=xml --cov-append
- run: pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-pathology pyluna-pathology/tests --cov-report=xml --cov-append
- run: coverage report -m
- run:
name: Run tests - pyluna-common
command: |
set -e
COMMON_TEST_FILES=$(circleci tests glob "pyluna-common/tests/**/test_*.py" | circleci tests split --split-by=timings)
pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-common $COMMON_TEST_FILES --cov-report=xml
- run:
name: Run tests - pyluna-radiology
command: |
set -e
RADIOLOGY_TEST_FILES=$(circleci tests glob "pyluna-radiology/tests/**/test_*.py" | circleci tests split --split-by=timings)
pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-radiology $RADIOLOGY_TEST_FILES --cov-report=xml --cov-append
- run:
name: Run tests - pyluna-pathology
command: |
set -e
PATHOLOGY_TEST_FILES=$(circleci tests glob "pyluna-pathology/tests/**/test_*.py" | circleci tests split --split-by=timings)
pytest -v --capture=tee-sys --show-capture=all --cov=pyluna-pathology $PATHOLOGY_TEST_FILES --cov-report=xml --cov-append
- store_artifacts:
path: htmlcov
- codecov/upload:
file: coverage.xml

# https://circleci.com/docs/2.0/building-docker-images/
docker_build:
machine: true
steps:
- checkout
- run: docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
- run: docker build -t $DOCKER_USERNAME/luna-dev:latest ./docker/
- run: docker push $DOCKER_USERNAME/luna-dev:latest

workflows:
version: 2
test:
when: << pipeline.parameters.run_workflow_test >>
jobs:
- test:
context:
- MIND-Data-Processing

build:
when: << pipeline.parameters.run_workflow_docker >>
jobs:
- docker_build:
context:
- MIND-Data-Processing
11 changes: 11 additions & 0 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
FROM python:3-buster

RUN apt-get update && \
apt-get install -y openslide-tools python-openslide default-jre

ADD requirements_dev.txt .

RUN pip install --upgrade pip && \
pip install numpy && \
pip install -r requirements_dev.txt -q

24 changes: 24 additions & 0 deletions docker/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Luna Docker

## Content

- **Dockerfile**:
Simple image that installs openslide, jre and python dependencies from `requirements_dev.txt`.
This image is built and pushed to dockerhub, and used in circleci tests.

- **requirements_dev.txt**:
Python dependencies for luna project

## Build and push image to Dockerhub

Building and pushing the docker image to dockerhub can be done via the circleci workflow `run_workflow_docker` (see .circleci/config.yml for more details).
This workflow is off by default. To trigger this workflow using circleci, first create a [circleci personal token](https://circleci.com/docs/2.0/managing-api-tokens/#creating-a-personal-api-token).
Then send the POST request like below, replacing `<your-personal-circleci-api-token>` and `<your-branch>`.

```
curl --request POST \
--url https://circleci.com/api/v2/project/gh/msk-mind/luna/pipeline \
--header 'Circle-Token: <your-personal-circleci-api-token>' \
--header 'content-type: application/json' \
--data '{"branch": "<your-branch>", "parameters" : {"run_workflow_test": false, "run_workflow_docker": true }}'
```
62 changes: 62 additions & 0 deletions docker/requirements_dev.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
# Editable mode. Requirements are picked up from setup.cfg's install_requires directive and matched
# up to versions in the index list above

# https://packaging.python.org/discussions/install-requires-vs-requirements/
# https://caremad.io/posts/2013/07/setup-vs-requirement/
--index-url https://pypi.python.org/simple/

# to override abstract requirements in setup.cfg to specific versions, place specific version entries here

dask
distributed
click
decorator>=4.3,<5.0 # force constrain versions to avoid incompatibility with networkx requirements
filehash
joblib
minio
neo4j
numpy>=1.9.0
pandas>=1.1.0
pyarrow>=3.0.0
pyspark
scikit-image
scipy>=1.7.0
itk
opencv-python
tornado>=6.0.4
PyYAML>=5.4
jsonpath-ng>=1.5.2
yamale>=3.0.4
pyradiomics
flask_restx
requests>=2.25.1
Pillow>=8.1.1
deltalake>=0.2.1
dirhash
log4mongo
medpy
checksumdir
pydicom>=2.1.0
openslide-python
shapely
seaborn
ijson
geojson
orjson
torch
staintools
pytest
pytest-cov
pytest-mock
pytest-runner
testfixtures
requests-mock
mock
wheel>=0.22
pyinstaller>=4.0
poetry
python-semantic-release
m2r2
twine
Sphinx

Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def test_extract_voxels_1(tmp_path):
print (properties)

assert properties['targetShape'] == [280, 280, 11] # Check target shape
assert abs(np.load(str(properties['data']) + '/image_voxels.npy').mean() - -1289.5683001548427) < 1e-8 # Mean within some precision
assert abs(np.load(str(properties['data']) + '/image_voxels.npy').mean() - -1289.3716255245372) < 1e-8 # Mean within some precision
assert np.load(str(properties['data']) + '/label_voxels.npy').sum() == 1139 # ~ number of 1 label voxels


Expand All @@ -103,7 +103,7 @@ def test_extract_voxels_2(tmp_path):

print (properties)
assert properties['targetShape'] == [140, 140, 5] # Check target shape
assert abs(np.load(str(properties['data']) + '/image_voxels.npy').mean() - -1289.2570235496087) < 1e-8 # Mean within some precision
assert abs(np.load(str(properties['data']) + '/image_voxels.npy').mean() - -1289.2387456682986) < 1e-8 # Mean within some precision
assert np.load(str(properties['data']) + '/label_voxels.npy').sum() == 132 # ~ number of 1 label voxels, less due to different resampling


Expand Down
1 change: 1 addition & 0 deletions requirements_dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ numpy>=1.9.0
pandas>=1.1.0
pyarrow>=3.0.0
pyspark
scipy>=1.7.0
scikit-image
itk
opencv-python
Expand Down

0 comments on commit 76bfc66

Please sign in to comment.