Commit
Merge pull request #2 from FCP-INDI/engine/(th)re(e)sources
♻️ Split `ResourcePool` into three classes
sgiavasis authored Aug 9, 2024
2 parents 384f1e7 + 0a53108 commit 532322d
Showing 47 changed files with 4,080 additions and 3,948 deletions.
8 changes: 5 additions & 3 deletions .circleci/main.yml
@@ -68,7 +68,9 @@ commands:
steps:
- run:
name: Getting Sample BIDS Data
command: git clone https://github.com/bids-standard/bids-examples.git
command: |
mkdir -p /home/circleci/project/dev/circleci_data/.pytest_cache/d/bids-examples
git clone https://github.com/bids-standard/bids-examples.git /home/circleci/project/dev/circleci_data/.pytest_cache/d/bids-examples
get-singularity:
parameters:
version:
@@ -156,7 +158,7 @@ commands:
then
TAG=nightly
else
TAG="${CIRCLE_BRANCH//\//_}"
TAG=`echo ${CIRCLE_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
fi
DOCKER_TAG="ghcr.io/${CIRCLE_PROJECT_USERNAME,,}/${CIRCLE_PROJECT_REPONAME,,}:${TAG,,}"
if [[ -n "<< parameters.variant >>" ]]
@@ -172,7 +174,7 @@ commands:
name: Testing Singularity installation
command: |
pip install -r dev/circleci_data/requirements.txt
coverage run -m pytest --junitxml=test-results/junit.xml --continue-on-collection-errors dev/circleci_data/test_install.py
coverage run -m pytest --capture=no --junitxml=test-results/junit.xml --continue-on-collection-errors dev/circleci_data/test_install.py
jobs:
combine-coverage:
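
Across the CI configs, this commit swaps the bash substitution `${BRANCH//\//_}` (which only rewrites slashes) for `sed 's/[^a-zA-Z0-9._]/-/g'`, so a branch name like `engine/(th)re(e)sources` still yields a valid image tag. A minimal Python sketch of the same mapping, purely for illustration (the `re.sub` pattern mirrors the sed expression; `docker_safe_tag` is not a function in the repository):

```python
import re


def docker_safe_tag(branch: str) -> str:
    """Replace every character outside [a-zA-Z0-9._] with '-', like the sed call."""
    return re.sub(r"[^a-zA-Z0-9._]", "-", branch)


# Old behaviour: only '/' was rewritten, so '(' and ')' leaked into the tag.
print("engine/(th)re(e)sources".replace("/", "_"))  # engine_(th)re(e)sources
# New behaviour: all disallowed characters become '-'.
print(docker_safe_tag("engine/(th)re(e)sources"))   # engine--th-re-e-sources
```
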
2 changes: 1 addition & 1 deletion .github/workflows/build_C-PAC.yml
@@ -42,7 +42,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
DOCKERFILE=.github/Dockerfiles/C-PAC.develop$VARIANT-$OS.Dockerfile
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
2 changes: 1 addition & 1 deletion .github/workflows/regression_test_full.yml
@@ -13,7 +13,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
2 changes: 1 addition & 1 deletion .github/workflows/regression_test_lite.yml
@@ -37,7 +37,7 @@ jobs:
run: |
if [[ ! $GITHUB_REF_NAME == 'main' ]] && [[ ! $GITHUB_REF_NAME == 'develop' ]]
then
TAG=${GITHUB_REF_NAME//\//_}
TAG=`echo ${GITHUB_REF_NAME} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_REF_NAME == 'develop' ]]
then
TAG=nightly
6 changes: 3 additions & 3 deletions .github/workflows/smoke_test_participant.yml
@@ -68,7 +68,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
@@ -133,7 +133,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
@@ -192,7 +192,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
1 change: 1 addition & 0 deletions .ruff.toml
@@ -13,6 +13,7 @@ external = ["T20"] # Don't autoremove 'noqa` comments for these rules
"CPAC/utils/sklearn.py" = ["RUF003"]
"CPAC/utils/tests/old_functions.py" = ["C", "D", "E", "EM", "PLW", "RET"]
"CPAC/utils/utils.py" = ["T201"] # until `repickle` is removed
"dev/circleci_data/conftest.py" = ["F401"]
"setup.py" = ["D1"]

[lint.flake8-import-conventions.extend-aliases]
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed

- Moved `pygraphviz` from requirements to `graphviz` optional dependencies group.
- Split `ResourcePool` into three classes: `Resource`, `ResourcePool`, and `StratPool`.

### Fixed

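
The CHANGELOG entry is the heart of the PR: the single `ResourcePool` class becomes `Resource`, `ResourcePool`, and `StratPool` (the engine code itself sits in the unloaded portion of the diff). The call-site changes in the files below suggest the rough shape sketched here; beyond the three class names, `get_data`, and `rpool.wf`, everything in the sketch is an illustrative assumption, not C-PAC's actual implementation:

```python
"""Rough structural sketch only. ``Resource``, ``ResourcePool``, ``StratPool``,
``get_data``, and ``rpool.wf`` appear in the diff; every other field and type
here is an illustrative assumption."""

from dataclasses import dataclass, field
from typing import Any, NamedTuple


class ResourceData(NamedTuple):
    """Node/output pair, usable via tuple unpacking or ``.node`` / ``.out``."""

    node: Any  # a Nipype node (placeholder type)
    out: str   # name of the node output slot


@dataclass
class Resource:
    """A single keyed resource and its metadata/provenance."""

    data: ResourceData
    json: dict = field(default_factory=dict)


@dataclass
class ResourcePool:
    """Subject-level pool; per the new call sites, it also owns the workflow."""

    wf: Any = None    # Nipype workflow, read back by callers as ``rpool.wf``
    cfg: Any = None   # pipeline configuration
    rpool: dict = field(default_factory=dict)  # resource key -> Resource

    def get_data(self, key: str) -> ResourceData:
        """Return the node/output pair for ``key`` (replaces ``node_data``)."""
        return self.rpool[key].data


class StratPool(ResourcePool):
    """Per-strategy view handed to node block functions as ``strat_pool``."""
```
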
2 changes: 1 addition & 1 deletion CPAC/alff/alff.py
@@ -22,7 +22,7 @@

from CPAC.alff.utils import get_opt_string
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.registration.registration import apply_transform
from CPAC.utils.interfaces import Function
from CPAC.utils.utils import check_prov_for_regtool
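
This one-line import move recurs throughout the changed files: `nodeblock` now lives under the new `CPAC.pipeline.engine` package rather than `CPAC.pipeline.nodeblock`. A hedged sketch of a call site after the move; only the import path and the `(wf, cfg, strat_pool, pipe_num, opt)` signature come from the diff, while the decorator arguments and resource keys are illustrative:

```python
# New location introduced by this PR (previously CPAC.pipeline.nodeblock).
from CPAC.pipeline.engine.nodeblock import nodeblock


@nodeblock(
    name="example_block",          # illustrative arguments; the decorator's real
    inputs=["desc-preproc_bold"],  # parameters are not shown in this diff
    outputs=["desc-example_bold"],
)
def example_block(wf, cfg, strat_pool, pipe_num, opt=None):
    """Hypothetical node block: pass an existing resource through unchanged."""
    node, out = strat_pool.get_data("desc-preproc_bold")
    return wf, {"desc-example_bold": (node, out)}
```
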
2 changes: 1 addition & 1 deletion CPAC/anat_preproc/anat_preproc.py
@@ -34,7 +34,7 @@
wb_command,
)
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.utils.interfaces import Function
from CPAC.utils.interfaces.fsl import Merge as fslMerge

32 changes: 32 additions & 0 deletions CPAC/conftest.py
@@ -0,0 +1,32 @@
# Copyright (C) 2024 C-PAC Developers

# This file is part of C-PAC.

# C-PAC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.

# C-PAC is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Global pytest configuration."""

from pathlib import Path

import pytest


@pytest.fixture
def bids_examples(cache: pytest.Cache) -> Path:
"""Get cached example BIDS directories."""
bids_dir = cache.mkdir("bids-examples").absolute()
if not (bids_dir.exists() and list(bids_dir.iterdir())):
from git import Repo

Repo.clone_from("https://github.com/bids-standard/bids-examples.git", bids_dir)
return bids_dir
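
The new `bids_examples` fixture clones bids-examples into pytest's cache directory once and reuses it on later runs, which is also why the CircleCI step above now clones into `.pytest_cache/d/bids-examples`. A hypothetical test consuming the fixture (not part of this diff) could look like:

```python
from pathlib import Path


def test_bids_examples_cached(bids_examples: Path) -> None:
    """Hypothetical consumer: the cached clone should contain example datasets."""
    assert (bids_examples / "ds001" / "dataset_description.json").exists()
```
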
13 changes: 2 additions & 11 deletions CPAC/distortion_correction/distortion_correction.py
@@ -32,7 +32,7 @@
run_fsl_topup,
)
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.utils import function
from CPAC.utils.datasource import match_epi_fmaps
from CPAC.utils.interfaces.function import Function
@@ -438,11 +438,6 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None):
node, out = strat_pool.get_data("pe-direction")
wf.connect(node, out, match_epi_fmaps_node, "bold_pedir")

# interface = {'bold': (match_epi_fmaps_node, 'opposite_pe_epi'),
# 'desc-brain_bold': 'opposite_pe_epi_brain'}
# wf, strat_pool = wrap_block([bold_mask_afni, bold_masking],
# interface, wf, cfg, strat_pool, pipe_num, opt)

func_get_brain_mask = pe.Node(
interface=preprocess.Automask(), name=f"afni_mask_opposite_pe_{pipe_num}"
)
@@ -530,10 +525,6 @@ def distcor_blip_afni_qwarp(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect(node, out, undistort_func_mean, "reference_image")
wf.connect(convert_afni_warp, "ants_warp", undistort_func_mean, "transforms")

# interface = {'desc-preproc_bold': (undistort_func_mean, 'output_image')}
# wf, strat_pool = wrap_block([bold_mask_afni],
# interface, wf, cfg, strat_pool, pipe_num, opt)

remask = pe.Node(
interface=preprocess.Automask(), name=f"afni_remask_boldmask_{pipe_num}"
)
@@ -764,7 +755,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None):
wf.connect(run_topup, "out_jacs", vnum_base, "jac_matrix_list")
wf.connect(run_topup, "out_warps", vnum_base, "warp_field_list")

mean_bold = strat_pool.node_data("sbref")
mean_bold = strat_pool.get_data("sbref")

flirt = pe.Node(interface=fsl.FLIRT(), name="flirt")
flirt.inputs.dof = 6
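
Note the two access styles for the renamed getter in this file and in `func_motion.py` below: tuple unpacking (`node, out = strat_pool.get_data("pe-direction")`) and attribute access on the kept object (`mean_bold = strat_pool.get_data("sbref")`, later `.node`/`.out`). Both are consistent with a namedtuple-like return value; a standalone illustration with placeholder values rather than real Nipype nodes:

```python
from typing import Any, NamedTuple


class ResourceData(NamedTuple):
    """Illustrative stand-in for the engine's node/output pair."""

    node: Any
    out: str


data = ResourceData(node="sbref_gather_node", out="outputspec.sbref")  # placeholders

node, out = data                             # tuple-style, as for "pe-direction"
assert (data.node, data.out) == (node, out)  # attribute-style, as for "sbref"
```
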
23 changes: 16 additions & 7 deletions CPAC/func_preproc/func_ingress.py
@@ -14,12 +14,21 @@

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
from CPAC.utils.datasource import create_func_datasource, ingress_func_metadata
"""Ingress functional data for preprocessing."""

from CPAC.utils.strategy import Strategy


def connect_func_ingress(
workflow, strat_list, c, sub_dict, subject_id, input_creds_path, unique_id=None
workflow,
strat_list: list[Strategy],
c,
sub_dict,
subject_id,
input_creds_path,
unique_id=None,
):
"""Connect functional ingress workflow."""
for num_strat, strat in enumerate(strat_list):
if "func" in sub_dict:
func_paths_dict = sub_dict["func"]
@@ -31,7 +40,9 @@ def connect_func_ingress(
else:
workflow_name = f"func_gather_{unique_id}_{num_strat}"

func_wf = create_func_datasource(func_paths_dict, workflow_name)
func_wf = strat._resource_pool.create_func_datasource(
func_paths_dict, workflow_name
)

func_wf.inputs.inputnode.set(
subject=subject_id,
@@ -47,8 +58,6 @@ def connect_func_ingress(
}
)

(workflow, strat.rpool, diff, blip, fmap_rp_list) = ingress_func_metadata(
workflow, c, strat.rpool, sub_dict, subject_id, input_creds_path, unique_id
)
diff, blip, fmap_rp_list = strat.rpool.ingress_func_metadata()

return (workflow, diff, blip, fmap_rp_list)
return strat.rpool.wf, diff, blip, fmap_rp_list
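
`connect_func_ingress` shows the new calling convention: `create_func_datasource` is reached through the strategy's pool, and `ingress_func_metadata` takes no arguments because the workflow, config, and subject details it used to receive now live on `strat.rpool`; callers read the workflow back via `strat.rpool.wf`. A side-by-side of the call shape, assembled from the changed lines above:

```python
# Before this PR: free functions, with the workflow and pool threaded through:
#     func_wf = create_func_datasource(func_paths_dict, workflow_name)
#     (workflow, strat.rpool, diff, blip, fmap_rp_list) = ingress_func_metadata(
#         workflow, c, strat.rpool, sub_dict, subject_id, input_creds_path, unique_id
#     )
#
# After this PR: methods on the pool, which already holds that state:
#     func_wf = strat._resource_pool.create_func_datasource(func_paths_dict, workflow_name)
#     diff, blip, fmap_rp_list = strat.rpool.ingress_func_metadata()
#     return strat.rpool.wf, diff, blip, fmap_rp_list
```
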
4 changes: 2 additions & 2 deletions CPAC/func_preproc/func_motion.py
@@ -31,7 +31,7 @@
motion_power_statistics,
)
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.pipeline.schema import valid_options
from CPAC.utils.interfaces.function import Function
from CPAC.utils.utils import check_prov_for_motion_tool
@@ -830,7 +830,7 @@ def motion_estimate_filter(wf, cfg, strat_pool, pipe_num, opt=None):
notch.inputs.lowpass_cutoff = opt.get("lowpass_cutoff")
notch.inputs.filter_order = opt.get("filter_order")

movement_parameters = strat_pool.node_data("desc-movementParameters_motion")
movement_parameters = strat_pool.get_data("desc-movementParameters_motion")
wf.connect(
movement_parameters.node, movement_parameters.out, notch, "motion_params"
)
4 changes: 2 additions & 2 deletions CPAC/func_preproc/func_preproc.py
@@ -22,7 +22,7 @@

from CPAC.func_preproc.utils import nullify
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.utils.interfaces import Function
from CPAC.utils.interfaces.ants import (
AI, # niworkflows
@@ -993,7 +993,7 @@ def bold_mask_fsl_afni(wf, cfg, strat_pool, pipe_num, opt=None):
# and this function has been changed.

# CHANGES:
# * Converted from a plain function to a CPAC.pipeline.nodeblock.NodeBlockFunction
# * Converted from a plain function to a CPAC.pipeline.engine.nodeblock.NodeBlockFunction
# * Removed Registration version check
# * Hardcoded Registration parameters instead of loading epi_atlasbased_brainmask.json
# * Uses C-PAC's ``FSL-AFNI-brain-probseg`` template in place of ``templateflow.api.get("MNI152NLin2009cAsym", resolution=1, label="brain", suffix="probseg")``
5 changes: 2 additions & 3 deletions CPAC/func_preproc/tests/test_preproc_connections.py
@@ -36,7 +36,6 @@
)
from CPAC.func_preproc.func_preproc import func_normalize
from CPAC.nuisance.nuisance import choose_nuisance_blocks
from CPAC.pipeline.cpac_pipeline import connect_pipeline
from CPAC.pipeline.engine import ResourcePool
from CPAC.pipeline.nipype_pipeline_engine import Workflow
from CPAC.registration.registration import (
@@ -81,7 +80,7 @@
"from-template_to-T1w_mode-image_desc-linear_xfm",
]

NUM_TESTS = 48 # number of parameterizations to run for many-parameter tests
NUM_TESTS = 8 # number of parameterizations to run for many-parameter tests


def _filter_assertion_message(
@@ -268,7 +267,7 @@ def test_motion_filter_connections(
if not rpool.check_rpool("desc-cleaned_bold"):
pipeline_blocks += choose_nuisance_blocks(c, generate_only)
wf = Workflow(re.sub(r"[\[\]\-\:\_ \'\",]", "", str(rpool)))
connect_pipeline(wf, c, rpool, pipeline_blocks)
rpool.connect_pipeline(wf, c, pipeline_blocks)
# Check that filtering is happening as expected
filter_switch_key = [
"functional_preproc",
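
The same pattern closes out the visible portion of the diff: `connect_pipeline` stops being a free function imported from `cpac_pipeline` and becomes a `ResourcePool` method, so the test drops the import and calls it on `rpool` directly. Assembled from the changed lines above:

```python
# Before: free function, with the pool passed as an argument:
#     from CPAC.pipeline.cpac_pipeline import connect_pipeline
#     connect_pipeline(wf, c, rpool, pipeline_blocks)
#
# After: method on the pool itself:
#     rpool.connect_pipeline(wf, c, pipeline_blocks)
```
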