Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Unit tests connectivity #1102

Draft
wants to merge 17 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions scilpy/connectivity/connectivity.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ def compute_triu_connectivity_from_labels(tractogram, data_labels,
return matrix, ordered_labels, start_labels, end_labels


def load_node_nifti(directory, in_label, out_label, ref_img):
def _load_node_nifti(directory, in_label, out_label, ref_img):
in_filename = os.path.join(directory,
'{}_{}.nii.gz'.format(in_label, out_label))

Expand Down Expand Up @@ -255,8 +255,8 @@ def compute_connectivity_matrices_from_hdf5(
measures_to_return['streamline_count'] = len(streamlines)

if similarity_directory is not None:
density_sim = load_node_nifti(similarity_directory,
in_label, out_label, labels_img)
density_sim = _load_node_nifti(similarity_directory,
in_label, out_label, labels_img)
if density_sim is None:
ba_vox = 0
else:
Expand Down
34 changes: 34 additions & 0 deletions scilpy/connectivity/tests/test_connectivity.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
import numpy as np

from scilpy.connectivity.connectivity import \
compute_triu_connectivity_from_labels


def test_compute_triu_connectivity_from_labels():
    """Streamline endpoints must be binned into the upper-triangular matrix."""
    label_map = np.asarray([[3, 4, 5, 6],
                            [7, 8, 9, 10]])

    # Streamline A goes from label 4 to label 6.
    # Streamlines B and C both go from label 9 to label 7
    # (C's endpoints are off-center in their voxels: vox space, corner origin).
    streamlines = [
        np.asarray([[0, 1],
                    [5, 6],
                    [0, 3]]),
        np.asarray([[1, 2],
                    [1, 0]]),
        np.asarray([[1.1, 2.2],
                    [1.9, 0.5]]),
    ]

    matrix, _, _, _ = compute_triu_connectivity_from_labels(
        streamlines, label_map)

    # 8 distinct labels (3..10) -> 8x8 matrix.
    assert np.array_equal(matrix.shape, [8, 8])

    expected = np.zeros((8, 8))
    expected[1, 3] = 1  # labels (4, 6): streamline A
    expected[4, 6] = 2  # labels (7, 9): streamlines B and C
    assert np.array_equal(matrix, expected)


def test_compute_connectivity_matrices_from_hdf5():
    # TODO: not implemented yet; requires an hdf5 test fixture
    # (decomposed-bundles file) to call the function under test.
    pass


29 changes: 0 additions & 29 deletions scilpy/connectivity/tests/test_connectivity_tools.py

This file was deleted.

44 changes: 44 additions & 0 deletions scilpy/connectivity/tests/test_matrix_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
import numpy as np

from scilpy.connectivity.matrix_tools import apply_reordering


def test_compute_olo():
    # TODO: not implemented yet.
    pass


def test_apply_olo():
    # TODO: not implemented yet.
    pass


def test_apply_reordering():
    """apply_reordering permutes rows with the first ordering, then columns
    with the second."""
    conn_matrix = np.asarray([[1, 2, 3, 4],
                              [5, 6, 7, 8],
                              [9, 10, 11, 12],
                              [13, 14, 15, 16]])
    output = apply_reordering(conn_matrix, [[0, 1, 3, 2],
                                            [1, 2, 3, 0]])
    # Row ordering [0, 1, 3, 2] first swaps rows 2 and 3:
    #   [[1, 2, 3, 4],
    #    [5, 6, 7, 8],
    #    [13, 14, 15, 16],
    #    [9, 10, 11, 12]]
    # Column ordering [1, 2, 3, 0] then rotates the columns left.
    # (The original test assigned the intermediate matrix to expected_out and
    # immediately overwrote it -- dead code; it is kept as a comment instead.)
    expected_out = np.asarray([[2, 3, 4, 1],
                               [6, 7, 8, 5],
                               [14, 15, 16, 13],
                               [10, 11, 12, 9]])
    assert np.array_equal(output, expected_out)


def test_evaluate_graph_measures():
    # TODO: not implemented yet.
    pass


def test_normalize_matrix_from_values():
    # TODO: not implemented yet.
    pass


def test_normalize_matrix_from_parcel():
    # TODO: not implemented yet.
    pass
59 changes: 59 additions & 0 deletions scilpy/stats/matrix_stats.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
import itertools
import logging

import bct
Expand All @@ -7,6 +8,8 @@
from scipy.stats import t as stats_t
from statsmodels.stats.multitest import multipletests

from scilpy.tractanalysis.reproducibility_measures import compute_dice_voxel


def _ttest_stat_only(x, y, tail):
t = np.mean(x) - np.mean(y)
Expand Down Expand Up @@ -180,3 +183,59 @@ def omega_sigma(matrix):
(path_length / path_length_rand)

return float(omega), float(sigma)


def pairwise_agreement(matrices, ref_matrix=None, normalize=False):
    """
    Compute similarity measures for each pair of matrices. Alternatively, you
    can compare all matrices to a single reference, ref_matrix.

    Parameters
    ----------
    matrices: list[np.ndarray]
        Input matrices. Not modified (copies are taken internally).
    ref_matrix: Optional[np.ndarray]
        Optional reference matrix.
    normalize: bool
        If true, will normalize all matrices from zero to one.

    Returns
    -------
    output_measures_dict: dict
        A dict with list of values for each pair of matrices:
        {
            'RMSE': root-mean-square error
            'correlation': correlation
            'w_dice_voxels': weighted dice, agreement of the values.
            'dice_voxels': agreement of the binarized matrices
        }
    """
    def _prepare_matrix(tmp_mat):
        # Removing the min now simplifies computations.
        # NOTE: uses `tmp_mat - min` (not `-=`) so the caller's array is
        # never mutated in place.
        tmp_mat = tmp_mat - np.min(tmp_mat)
        if normalize:
            # assumes the matrix is not all-zeros; max == 0 would divide by 0
            return tmp_mat / np.max(tmp_mat)
        return tmp_mat

    matrices = [_prepare_matrix(m) for m in matrices]

    if ref_matrix is not None:
        # Compare every matrix against the single reference.
        ref_matrix = _prepare_matrix(ref_matrix)
        pairs = list(itertools.product(matrices, [ref_matrix]))
    else:
        # All unordered pairs of input matrices.
        pairs = list(itertools.combinations(matrices, r=2))

    output_measures_dict = {'RMSE': [], 'correlation': [],
                            'w_dice_voxels': [], 'dice_voxels': []}

    for m1, m2 in pairs:
        rmse = np.sqrt(np.mean((m1 - m2) ** 2))
        output_measures_dict['RMSE'].append(rmse)
        corrcoef = np.corrcoef(m1.ravel(), m2.ravel())
        output_measures_dict['correlation'].append(corrcoef[0][1])
        dice, w_dice = compute_dice_voxel(m1, m2)
        output_measures_dict['dice_voxels'].append(dice)
        output_measures_dict['w_dice_voxels'].append(w_dice)

    return output_measures_dict
31 changes: 21 additions & 10 deletions scripts/scil_connectivity_compare_populations.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,31 @@
# -*- coding: utf-8 -*-

"""
Performs a network-based statistical comparison for populations g1 and g2. The
output is a matrix of the same size as the input connectivity matrices, with
p-values at each edge.
All input matrices must have the same shape (NxN). For paired t-test, both
groups must have the same number of observations.
Performs a statistical comparison between connectivity matrices for populations
g1 and g2, using a t-test.

The inputs are any connectivity matrix, that can be obtained with
scil_connectivity_compute_matrices.py, used separately on the two populations.
All input matrices must have the same shape (NxN).

The output is a matrix of the same size as the input connectivity matrices,
with p-values at each connection (edge).

For example, if you have streamline count weighted matrices for a MCI and a
control group and you want to investiguate differences in their connectomes:
control group, and you want to investigate differences in their connectomes:
>>> scil_connectivity_compare_populations.py pval.npy
--g1 MCI/*_sc.npy --g2 CTL/*_sc.npy

Options:

--filtering_mask will apply the given binary mask to all input
matrices before performing the statistical comparison. This reduces the number
of statistical tests, which is useful when using --fdr or --bonferroni.

--paired will use a paired t-test. Then both groups must have the same number
of observations (subjects). They must be listed in the right order using --g1
and --g2.

Formerly: scil_compare_connectivity.py
"""

Expand Down Expand Up @@ -53,9 +63,11 @@ def _build_arg_parser():
help='Output matrix (.npy) containing the edges p-value.')

p.add_argument('--in_g1', nargs='+', required=True,
help='List of matrices for the first population (.npy).')
help='List of matrices for each subject in the first '
'population (.npy).\n')
p.add_argument('--in_g2', nargs='+', required=True,
help='List of matrices for the second population (.npy).')
help='List of matrices for each subject in the second '
'population (.npy).')
p.add_argument('--tail', choices=['left', 'right', 'both'], default='both',
help='Enables specification of an alternative hypothesis:\n'
'left: mean of g1 < mean of g2,\n'
Expand Down Expand Up @@ -94,8 +106,7 @@ def main():
args = parser.parse_args()
logging.getLogger().setLevel(logging.getLevelName(args.verbose))

assert_inputs_exist(parser, args.in_g1+args.in_g2,
args.filtering_mask)
assert_inputs_exist(parser, args.in_g1+args.in_g2, args.filtering_mask)
assert_outputs_exist(parser, args, args.out_pval_matrix)

if args.filtering_mask:
Expand Down
11 changes: 11 additions & 0 deletions scripts/scil_connectivity_compute_matrices.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@

- Streamline count.
- Length: mean streamline length (mm).
Note that this matrix, as well as the volume-weighted one, can be used to
normalize a streamline count matrix in scil_connectivity_normalize.
- Volume-weighted: Volume of the bundle.
- Similarity: mean density.
Uses pre-computed density maps, which can be obtained with
Expand All @@ -54,6 +56,15 @@
- Mean DPS: Mean values in the data_per_streamline of each streamline in the
bundles.

What next?
==========
See our other scripts to help you achieve your goals:
- Normalize a streamline-count matrix based on other matrices using
scil_connectivity_normalize.
- Compute a t-test between two groups of subjects using
scil_connectivity_compare_populations.
- See all our scripts starting with scil_connectivity_ for more ideas!

Formerly: scil_compute_connectivity.py
"""

Expand Down
Loading
Loading