Skip to content

Commit

Permalink
🔀 Merge develop into bids_ingress
Browse files Browse the repository at this point in the history
  • Loading branch information
shnizzedy committed Aug 15, 2024
2 parents 61d8b6d + 532322d commit 494f0c7
Show file tree
Hide file tree
Showing 74 changed files with 5,138 additions and 4,634 deletions.
8 changes: 5 additions & 3 deletions .circleci/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,9 @@ commands:
steps:
- run:
name: Getting Sample BIDS Data
command: git clone https://github.com/bids-standard/bids-examples.git
command: |
mkdir -p /home/circleci/project/dev/circleci_data/.pytest_cache/d/bids-examples
git clone https://github.com/bids-standard/bids-examples.git /home/circleci/project/dev/circleci_data/.pytest_cache/d/bids-examples
get-singularity:
parameters:
version:
Expand Down Expand Up @@ -156,7 +158,7 @@ commands:
then
TAG=nightly
else
TAG="${CIRCLE_BRANCH//\//_}"
TAG=`echo ${CIRCLE_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
fi
DOCKER_TAG="ghcr.io/${CIRCLE_PROJECT_USERNAME,,}/${CIRCLE_PROJECT_REPONAME,,}:${TAG,,}"
if [[ -n "<< parameters.variant >>" ]]
Expand All @@ -172,7 +174,7 @@ commands:
name: Testing Singularity installation
command: |
pip install -r dev/circleci_data/requirements.txt
coverage run -m pytest --junitxml=test-results/junit.xml --continue-on-collection-errors dev/circleci_data/test_install.py
coverage run -m pytest --capture=no --junitxml=test-results/junit.xml --continue-on-collection-errors dev/circleci_data/test_install.py
jobs:
combine-coverage:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/build_C-PAC.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
DOCKERFILE=.github/Dockerfiles/C-PAC.develop$VARIANT-$OS.Dockerfile
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/regression_test_full.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/regression_test_lite.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ jobs:
run: |
if [[ ! $GITHUB_REF_NAME == 'main' ]] && [[ ! $GITHUB_REF_NAME == 'develop' ]]
then
TAG=${GITHUB_REF_NAME//\//_}
TAG=`echo ${GITHUB_REF_NAME} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_REF_NAME == 'develop' ]]
then
TAG=nightly
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/smoke_test_participant.yml
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
Expand Down Expand Up @@ -133,7 +133,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
Expand Down Expand Up @@ -192,7 +192,7 @@ jobs:
GITHUB_BRANCH=$(echo ${GITHUB_REF} | cut -d '/' -f 3-)
if [[ ! $GITHUB_BRANCH == 'main' ]] && [[ ! $GITHUB_BRANCH == 'develop' ]]
then
TAG=${GITHUB_BRANCH//\//_}
TAG=`echo ${GITHUB_BRANCH} | sed 's/[^a-zA-Z0-9._]/-/g'`
elif [[ $GITHUB_BRANCH == 'develop' ]]
then
TAG=nightly
Expand Down
4 changes: 3 additions & 1 deletion .ruff.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ external = ["T20"] # Don't autoremove `noqa` comments for these rules
[lint.per-file-ignores]
"CPAC/func_preproc/func_preproc.py" = ["E402"]
"CPAC/utils/sklearn.py" = ["RUF003"]
"CPAC/utils/tests/old_functions.py" = ["C", "D", "E", "EM", "PLW", "RET"]
"CPAC/utils/utils.py" = ["T201"] # until `repickle` is removed
"dev/circleci_data/conftest.py" = ["F401"]
"setup.py" = ["D1"]

[lint.flake8-import-conventions.extend-aliases]
Expand All @@ -32,7 +34,7 @@ section-order = ["future", "standard-library", "third-party", "collab", "other-f

[lint.isort.sections]
"collab" = ["nibabel", "nilearn", "nipype", "PyBASC", "pybids", "scipy", "spython"]
"other-first-party" = ["flowdump", "indi_aws", "indi_schedulers", "PyPEER"]
"other-first-party" = ["bids2table", "flowdump", "indi_aws", "indi_schedulers", "PyPEER"]

[lint.pydocstyle]
convention = "numpy"
Expand Down
3 changes: 3 additions & 0 deletions .stubs/bids2table/__init__.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from ._b2t import bids2table

__all__ = ["bids2table"]
50 changes: 50 additions & 0 deletions .stubs/bids2table/_b2t.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Copyright (C) 2024 C-PAC Developers

# This file is part of C-PAC.

# C-PAC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.

# C-PAC is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
"""Specific typing stubs for bids2table."""
from typing import Literal, Optional, overload

from elbow.typing import StrOrPath
from bids2table.table import BIDSTable

# The two overloads below narrow the return type on the literal value of
# `return_table`: True (the default) yields a BIDSTable, False yields None.
@overload
def bids2table(
    root: StrOrPath,
    *,
    with_meta: bool = True,
    persistent: bool = False,
    index_path: Optional[StrOrPath] = None,
    exclude: Optional[list[str]] = None,
    incremental: bool = False,
    overwrite: bool = False,
    workers: Optional[int] = None,
    worker_id: Optional[int] = None,
    return_table: Literal[True] = True,
) -> BIDSTable: ...
@overload
def bids2table(
    root: StrOrPath,
    *,
    with_meta: bool = True,
    persistent: bool = False,
    index_path: Optional[StrOrPath] = None,
    exclude: Optional[list[str]] = None,
    incremental: bool = False,
    overwrite: bool = False,
    workers: Optional[int] = None,
    worker_id: Optional[int] = None,
    return_table: Literal[False],
) -> None: ...
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed

- Moved `pygraphviz` from requirements to `graphviz` optional dependencies group.
- Split `ResourcePool` into three classes: `Resource`, `ResourcePool`, and `StratPool`.

### Fixed

Expand Down
8 changes: 3 additions & 5 deletions CPAC/_entrypoints/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from warnings import simplefilter

import yaml
from bids2table import bids2table

from CPAC import __version__, license_notice
from CPAC.pipeline import AVAILABLE_PIPELINE_CONFIGS
Expand All @@ -37,7 +38,6 @@
from CPAC.utils.bids_utils import (
cl_strip_brackets,
create_cpac_data_config,
load_cpac_data_config,
load_yaml_config,
sub_list_filter_by_labels,
)
Expand All @@ -51,8 +51,6 @@
from CPAC.utils.monitoring import failed_to_start, FMLOGGER, log_nodes_cb, WFLOGGER
from CPAC.utils.utils import update_nested_dict

from bids2table import bids2table

simplefilter(action="ignore", category=FutureWarning)
DEFAULT_TMP_DIR = "/tmp"

Expand Down Expand Up @@ -786,9 +784,9 @@ def run_main():

try:
# fillna
bids_table['ses'] = bids_table['ses'].fillna('None')
bids_table["ses"] = bids_table["ses"].fillna("None")
grouped_tab = bids_table.groupby(["sub", "ses"])
except Exception as e:
except Exception as e: # TODO: raise exception
WFLOGGER.warning("Could not create bids table: %s", e)
print("Could not create bids table: %s", e)
sys.exit(1)
Expand Down
20 changes: 18 additions & 2 deletions CPAC/alff/alff.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,30 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2012-2024 C-PAC Developers

# This file is part of C-PAC.

# C-PAC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.

# C-PAC is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
import os

from nipype.interfaces.afni import preprocess
import nipype.interfaces.utility as util

from CPAC.alff.utils import get_opt_string
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.registration.registration import apply_transform
from CPAC.utils.interfaces import Function
from CPAC.utils.utils import check_prov_for_regtool


Expand Down Expand Up @@ -177,7 +193,7 @@ def create_alff(wf_name="alff_workflow"):
wf.connect(input_node, "rest_res", bandpass, "in_file")

get_option_string = pe.Node(
util.Function(
Function(
input_names=["mask"],
output_names=["option_string"],
function=get_opt_string,
Expand Down
3 changes: 3 additions & 0 deletions CPAC/alff/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@

from pathlib import Path

from CPAC.utils.interfaces.function import Function


@Function.sig_imports(["from pathlib import Path"])
def get_opt_string(mask: Path | str) -> str:
"""
Return option string for 3dTstat.
Expand Down
32 changes: 16 additions & 16 deletions CPAC/anat_preproc/anat_preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
# from copy import deepcopy
import os

from nipype.interfaces import afni, ants, freesurfer, fsl
Expand All @@ -35,7 +34,8 @@
wb_command,
)
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.pipeline.nodeblock import nodeblock
from CPAC.pipeline.engine.nodeblock import nodeblock
from CPAC.utils.interfaces import Function
from CPAC.utils.interfaces.fsl import Merge as fslMerge


Expand Down Expand Up @@ -138,7 +138,7 @@ def acpc_alignment(

aff_to_rig_imports = ["import os", "from numpy import *"]
aff_to_rig = pe.Node(
util.Function(
Function(
input_names=["in_xfm", "out_name"],
output_names=["out_mat"],
function=fsl_aff_to_rigid,
Expand Down Expand Up @@ -319,7 +319,7 @@ def T1wmulT2w_brain_norm_s_string(sigma, in_file):
return "-s %f -div %s" % (sigma, in_file)

T1wmulT2w_brain_norm_s_string = pe.Node(
util.Function(
Function(
input_names=["sigma", "in_file"],
output_names=["out_str"],
function=T1wmulT2w_brain_norm_s_string,
Expand Down Expand Up @@ -378,7 +378,7 @@ def form_lower_string(mean, std):
return "-thr %s -bin -ero -mul 255" % (lower)

form_lower_string = pe.Node(
util.Function(
Function(
input_names=["mean", "std"],
output_names=["out_str"],
function=form_lower_string,
Expand Down Expand Up @@ -444,7 +444,7 @@ def file_to_a_list(infile_1, infile_2):
return [infile_1, infile_2]

file_to_a_list = pe.Node(
util.Function(
Function(
input_names=["infile_1", "infile_2"],
output_names=["out_list"],
function=file_to_a_list,
Expand Down Expand Up @@ -544,7 +544,7 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
)

skullstrip_args = pe.Node(
util.Function(
Function(
input_names=[
"spat_norm",
"spat_norm_dxyz",
Expand Down Expand Up @@ -762,7 +762,7 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
anat_robustfov.inputs.output_type = "NIFTI_GZ"

anat_pad_RobustFOV_cropped = pe.Node(
util.Function(
Function(
input_names=["cropped_image_path", "target_image_path"],
output_names=["padded_image_path"],
function=pad,
Expand Down Expand Up @@ -902,7 +902,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
from CPAC.unet.function import predict_volumes

unet_mask = pe.Node(
util.Function(
Function(
input_names=["model_path", "cimg_in"],
output_names=["out_path"],
function=predict_volumes,
Expand Down Expand Up @@ -1083,7 +1083,7 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt):

# convert brain mask file from .mgz to .nii.gz
fs_brain_mask_to_nifti = pe.Node(
util.Function(
Function(
input_names=["in_file"], output_names=["out_file"], function=mri_convert
),
name=f"fs_brainmask_to_nifti_{pipe_num}",
Expand Down Expand Up @@ -1119,7 +1119,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156
"""
wmparc_to_nifti = pe.Node(
util.Function(
Function(
input_names=["in_file", "reslice_like", "args"],
output_names=["out_file"],
function=mri_convert,
Expand All @@ -1130,7 +1130,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
# Register wmparc file if ingressing FreeSurfer data
if strat_pool.check_rpool("pipeline-fs_xfm"):
wmparc_to_native = pe.Node(
util.Function(
Function(
input_names=["source_file", "target_file", "xfm", "out_file"],
output_names=["transformed_file"],
function=normalize_wmparc,
Expand Down Expand Up @@ -1168,7 +1168,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt):
wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file")

wb_command_fill_holes = pe.Node(
util.Function(
Function(
input_names=["in_file"], output_names=["out_file"], function=wb_command
),
name=f"wb_command_fill_holes_{pipe_num}",
Expand Down Expand Up @@ -1206,7 +1206,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):

# mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz
convert_fs_brainmask_to_nifti = pe.Node(
util.Function(
Function(
input_names=["in_file"], output_names=["out_file"], function=mri_convert
),
name=f"convert_fs_brainmask_to_nifti_{node_id}",
Expand All @@ -1217,7 +1217,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt):

# mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz
convert_fs_T1_to_nifti = pe.Node(
util.Function(
Function(
input_names=["in_file"], output_names=["out_file"], function=mri_convert
),
name=f"convert_fs_T1_to_nifti_{node_id}",
Expand Down Expand Up @@ -2888,7 +2888,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None):

# fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz
normalize_head = pe.Node(
util.Function(
Function(
input_names=["in_file", "number", "out_file_suffix"],
output_names=["out_file"],
function=fslmaths_command,
Expand Down
Loading

0 comments on commit 494f0c7

Please sign in to comment.