Skip to content

Commit

Permalink
Merge branch 'master' of github.com:karanphil/scilpy into b1_correction
Browse files Browse the repository at this point in the history
  • Loading branch information
karp2601 authored and karp2601 committed Feb 20, 2024
2 parents bd9cfa6 + 1c4b850 commit d0ccd1b
Show file tree
Hide file tree
Showing 30 changed files with 1,154 additions and 243 deletions.
14 changes: 10 additions & 4 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
[run]
branch = True
concurrency = multiprocessing
data_file = .test_reports/.coverage
source =
scilpy/
scripts/
data_file = .coverage
source_pkgs =
scilpy
scripts
relative_files = True
omit =
scripts/tests/*.py
scilpy/tests/**/*.py
Expand All @@ -15,6 +16,11 @@ omit =

[report]
skip_empty = True
skip_covered = True

[html]
title = Scilpy Coverage Report
directory = .test_reports/coverage.html

[xml]
output = .test_reports/coverage.xml
11 changes: 5 additions & 6 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,10 @@ env:
jobs:
test:
runs-on: scilus-runners
if: github.repository == 'scilus/scilpy'
steps:
- name: Checkout repository for merge
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4

- name: Fetch python version from repository
id: python-selector
Expand Down Expand Up @@ -55,16 +56,14 @@ jobs:
pytest --cov-report term-missing:skip-covered
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unittests
name: scilpy-unittests-${{ github.run_id }}
verbose: true
directory: .test_reports/
fail_ci_if_error: true
root_dir: $GITHUB_WORKSPACE/scilpy/
plugin: pycoverage

- name: Upload test reports and coverage to artifacts
uses: actions/[email protected]
Expand Down
2 changes: 1 addition & 1 deletion .python-version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
3.10
3.10
7 changes: 3 additions & 4 deletions pytest.ini
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,7 @@ junit_logging = out-err

addopts =
--html=.test_reports/pytest.html
--cov-report=html:.test_reports/coverage.html
--junit-xml=.test_reports/junit.xml
--cov-report=xml:.test_reports/coverage.xml
--cov=scilpy/
--cov=scripts/
--cov
--cov-report html
--cov-report xml
87 changes: 87 additions & 0 deletions scilpy/dwi/operations.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
import logging
import math
import pprint

import numpy as np

from scilpy.gradients.bvec_bval_tools import identify_shells, \
round_bvals_to_shell, DEFAULT_B0_THRESHOLD


def apply_bias_field(dwi_data, bias_field_data, mask_data):
"""
Expand Down Expand Up @@ -131,3 +137,84 @@ def compute_dwi_attenuation(dwi_weights: np.ndarray, b0: np.ndarray):
dwi_attenuation[np.logical_not(np.isfinite(dwi_attenuation))] = 0.

return dwi_attenuation


def detect_volume_outliers(data, bvecs, bvals, std_scale, verbose,
                           b0_thr=DEFAULT_B0_THRESHOLD):
    """
    Detect outlier volumes in a DWI dataset and print a per-shell report.

    For each non-b0 shell, every direction is compared with its closest
    angular neighbours: the mean angle to those neighbours and the mean
    voxel-wise correlation of the corresponding volumes are computed.
    Directions whose mean angle or mean correlation falls more than
    ``std_scale`` standard deviations below the shell average are reported
    as possible outliers. All results are printed to stdout; nothing is
    returned.

    Parameters
    ----------
    data: np.ndarray
        The dwi data (the last axis indexes volumes/directions).
    bvecs: np.ndarray
        The bvecs, one gradient vector per volume.
    bvals: np.array
        The b-values vector.
    std_scale: float
        How many standard deviations below the mean are required for a
        direction to be considered an outlier.
    verbose: bool
        If True, also pretty-print the raw per-direction results for each
        shell.
    b0_thr: float
        Value below which b-values are considered as b0.
    """
    results_dict = {}
    # Snap b-values to their shell value so exact equality (bvals == bval)
    # can be used below to select each shell's volumes.
    shells_to_extract = identify_shells(bvals, b0_thr, sort=True)[0]
    bvals = round_bvals_to_shell(bvals, shells_to_extract)
    for bval in shells_to_extract[shells_to_extract > b0_thr]:
        shell_idx = np.where(bvals == bval)[0]
        shell = bvecs[shell_idx]
        # One row per direction: [volume index (4D), avg neighbour angle,
        # avg neighbour correlation]. Rows stay at -1 for directions skipped
        # below. NOTE(review): those -1 rows are still included in the
        # per-shell averages computed later — confirm this is intended.
        results_dict[bval] = np.ones((len(shell), 3)) * -1
        for i, vec in enumerate(shell):
            # Skip null gradient vectors (e.g. b0 placeholders in the shell).
            if np.linalg.norm(vec) < 0.001:
                continue

            # Angle (degrees) between this direction and every direction of
            # the shell; the dot product is clipped so arccos cannot NaN.
            dot_product = np.clip(np.tensordot(shell, vec, axes=1), -1, 1)
            angle = np.arccos(dot_product) * 180 / math.pi
            angle[np.isnan(angle)] = 0
            # Indices whose first 5 slots hold the 5 smallest angles
            # (self included, since angle[i] == 0), then drop self.
            # NOTE(review): np.argpartition does not sort within the
            # partition, so idx[:3] are 3 of the 4 nearest neighbours (not
            # necessarily the 3 closest), and it raises if the shell has
            # fewer than 5 directions — confirm acceptable.
            idx = np.argpartition(angle, 4).tolist()
            idx.remove(i)

            # Mean angle to the selected neighbours, and mean correlation of
            # this volume's data with those neighbours' volumes
            # (corr[0, 1:] = correlation of volume i with each neighbour).
            avg_angle = np.average(angle[idx[:3]])
            corr = np.corrcoef([data[..., shell_idx[i]].ravel(),
                                data[..., shell_idx[idx[0]]].ravel(),
                                data[..., shell_idx[idx[1]]].ravel(),
                                data[..., shell_idx[idx[2]]].ravel()])
            results_dict[bval][i] = [shell_idx[i], avg_angle,
                                     np.average(corr[0, 1:])]

    # Second pass: per-shell statistics and outlier report. A direction is
    # flagged when its value lies more than std_scale STD below the shell
    # mean (for angle or for correlation).
    for key in results_dict.keys():
        avg_angle = np.round(np.average(results_dict[key][:, 1]), 4)
        std_angle = np.round(np.std(results_dict[key][:, 1]), 4)

        avg_corr = np.round(np.average(results_dict[key][:, 2]), 4)
        std_corr = np.round(np.std(results_dict[key][:, 2]), 4)

        outliers_angle = np.argwhere(
            results_dict[key][:, 1] < avg_angle - (std_scale * std_angle))
        outliers_corr = np.argwhere(
            results_dict[key][:, 2] < avg_corr - (std_scale * std_corr))

        print('Results for shell {} with {} directions:'
              .format(key, len(results_dict[key])))
        print('AVG and STD of angles: {} +/- {}'
              .format(avg_angle, std_angle))
        print('AVG and STD of correlations: {} +/- {}'
              .format(avg_corr, std_corr))

        if len(outliers_angle) or len(outliers_corr):
            print('Possible outliers ({} STD below or above average):'
                  .format(std_scale))
            print('Outliers based on angle [position (4D), value]')
            for i in outliers_angle:
                # Columns [0:2] = volume index and angle value.
                print(results_dict[key][i, :][0][0:2])
            print('Outliers based on correlation [position (4D), value]')
            for i in outliers_corr:
                # Columns [0::2] = volume index and correlation value.
                print(results_dict[key][i, :][0][0::2])
        else:
            print('No outliers detected.')

        if verbose:
            print('Shell with b-value {}'.format(key))
            pprint.pprint(results_dict[key])
            print()
Empty file added scilpy/dwi/tests/__init__.py
Empty file.
13 changes: 13 additions & 0 deletions scilpy/dwi/tests/test_operations.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# -*- coding: utf-8 -*-


def test_apply_bias_field():
    """Placeholder: test for apply_bias_field not yet implemented."""


def test_compute_dwi_attenuation():
    """Placeholder: test for compute_dwi_attenuation not yet implemented."""


def test_detect_volume_outliers():
    """Placeholder: test for detect_volume_outliers not yet implemented."""
9 changes: 9 additions & 0 deletions scilpy/dwi/tests/test_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
# -*- coding: utf-8 -*-


def test_extract_dwi_shell():
    """Placeholder: test for extract_dwi_shell not yet implemented."""


def test_extract_b0():
    """Placeholder: test for extract_b0 not yet implemented."""
Loading

0 comments on commit d0ccd1b

Please sign in to comment.