
Commit

Merge pull request #1239 from flatironinstitute/dev-cnmfparams_json
CNMFParams overhaul
pgunn authored Feb 27, 2024
2 parents cb6ed96 + bfc69dc commit 70d14d3
Showing 7 changed files with 445 additions and 290 deletions.
6 changes: 2 additions & 4 deletions caiman/motion_correction.py
@@ -126,9 +126,8 @@ def __init__(self, fname, min_mov=None, dview=None, max_shifts=(6, 6), niter_rig
splits_els':list
for parallelization split the movies in num_splits chunks across time
num_splits_to_process_els: list,
if none all the splits are processed and the movie is saved otherwise at each iteration
num_splits_to_process_els are considered
num_splits_to_process_els: UNUSED
Legacy parameter, does not do anything
upsample_factor_grid:int,
upsample factor of shifts per patches to avoid smearing when merging patches
@@ -187,7 +186,6 @@ def __init__(self, fname, min_mov=None, dview=None, max_shifts=(6, 6), niter_rig
self.strides = strides
self.overlaps = overlaps
self.splits_els = splits_els
self.num_splits_to_process_els = num_splits_to_process_els
self.upsample_factor_grid = upsample_factor_grid
self.max_deviation_rigid = max_deviation_rigid
self.shifts_opencv = bool(shifts_opencv)
517 changes: 304 additions & 213 deletions caiman/source_extraction/cnmf/params.py

Large diffs are not rendered by default.
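
Although the params.py diff itself is not rendered, the updated test and demo scripts below show the shape of the overhaul: CNMFParams now takes a params_dict keyed by parameter group ('data', 'init', 'merging', 'motion', 'online', 'patch', 'preprocess', 'quality', 'spatial', 'temporal') instead of a flat dictionary. The sketch below is illustrative only: the file name, numeric values, and the particular keys shown are hypothetical (anything omitted falls back to CNMFParams defaults), while the constructor call and get_group() usage come straight from the tests in this commit.

# Minimal sketch of the nested CNMFParams format introduced by this PR.
# Values are hypothetical; group names follow the updated tests/demos below.
from caiman.motion_correction import MotionCorrect
from caiman.source_extraction import cnmf

params_dict = {
    'data': {
        'fnames': ['example_movie.tif'],   # hypothetical input movie
        'fr': 30,                          # imaging rate (Hz)
        'decay_time': 0.4,                 # indicator decay time (s)
    },
    'init': {
        'K': 4,                            # components per patch
        'gSig': [4, 4],                    # expected half-size of neurons (pixels)
        'nb': 2,                           # number of global background components
    },
    'motion': {
        'pw_rigid': False,                 # rigid-only motion correction
        'max_shifts': (6, 6),              # maximum allowed rigid shift (pixels)
    },
    'patch': {
        'rf': 16,                          # half-size of patches
        'stride': 8,                       # overlap between patches (pixels)
    },
}

opts = cnmf.params.CNMFParams(params_dict=params_dict)

# A whole group can be pulled out at once, e.g. to drive motion correction:
mc = MotionCorrect(params_dict['data']['fnames'], dview=None, **opts.get_group('motion'))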

76 changes: 48 additions & 28 deletions caiman/tests/comparison_humans.py
@@ -313,33 +313,53 @@
c, dview, n_processes = setup_cluster(
backend=backend_patch, n_processes=n_processes, single_thread=False)
# %%
params_dict = {'fnames': [fname_new],
'fr': params_movie['fr'],
'decay_time': params_movie['decay_time'],
'rf': params_movie['rf'],
'stride': params_movie['stride_cnmf'],
'K': params_movie['K'],
'gSig': params_movie['gSig'],
'merge_thr': params_movie['merge_thresh'],
'p': global_params['p'],
'nb': global_params['gnb'],
'only_init': global_params['only_init_patch'],
'dview': dview,
'method_deconvolution': 'oasis',
'border_pix': params_movie['crop_pix'],
'low_rank_background': global_params['low_rank_background'],
'rolling_sum': True,
'nb_patch': 1,
'check_nan': check_nan,
'block_size_temp': block_size,
'block_size_spat': block_size,
'num_blocks_per_run_spat': num_blocks_per_run,
'num_blocks_per_run_temp': num_blocks_per_run,
'merge_parallel': True,
'n_pixels_per_process': n_pixels_per_process,
'ssub': global_params['ssub'],
'tsub': global_params['tsub'],
'thr_method': 'nrg'
params_dict = {
'data': {
'decay_time': params_movie['decay_time'],
'fnames': [fname_new],
'fr': params_movie['fr'],
},
'init': {
'gSig': params_movie['gSig'],
'K': params_movie['K'],
'nb': global_params['gnb'],
'rolling_sum': True,
'ssub': global_params['ssub'],
'tsub': global_params['tsub'],

},
'merging': {
'merge_parallel': True,
'merge_thr': params_movie['merge_thresh'],
},
'patch': {
'border_pix': params_movie['crop_pix'],
'low_rank_background': global_params['low_rank_background'],
'nb_patch': 1,
'only_init': global_params['only_init_patch'],
'rf': params_movie['rf'],
'stride': params_movie['stride_cnmf'],

},
'preprocess': {
'check_nan': check_nan,
'n_pixels_per_process': n_pixels_per_process,
'p': global_params['p'],
},
'spatial': {
'block_size_spat': block_size,
'nb': global_params['gnb'],
'num_blocks_per_run_spat': num_blocks_per_run,
'n_pixels_per_process': n_pixels_per_process,
'thr_method': 'nrg'
},
'temporal': {
'block_size_temp': block_size,
'method_deconvolution': 'oasis',
'nb': global_params['gnb'],
'num_blocks_per_run_temp': num_blocks_per_run,
'p': global_params['p'],
},
}

init_method = global_params['init_method']
@@ -415,7 +435,7 @@
f=ld['f_gt'], R=ld['YrA_gt'], dims=(ld['d1'], ld['d2']))

min_size_neuro = 3 * 2 * np.pi
max_size_neuro = (2 * params_dict['gSig'][0]) ** 2 * np.pi
max_size_neuro = (2 * params_dict['init']['gSig'][0]) ** 2 * np.pi
gt_estimate.threshold_spatial_components(maxthr=0.2, dview=dview)
nrn_size = gt_estimate.remove_small_large_neurons(min_size_neuro, max_size_neuro)
nrn_dup = gt_estimate.remove_duplicates(predictions=None, r_values=None, dist_thr=0.1, min_dist=10,
67 changes: 43 additions & 24 deletions caiman/tests/comparison_humans_online.py
@@ -185,30 +185,49 @@

# %%
params_dict = {
'fnames': fls,
'fr': fr,
'decay_time': decay_time,
'gSig': gSig,
'p': global_params['p'],
'min_SNR': global_params['min_SNR'],
'rval_thr': global_params['rval_thr'],
'ds_factor': ds_factor,
'nb': gnb,
'motion_correct': global_params['mot_corr'],
'init_batch': init_batch,
'init_method': 'bare',
'normalize': True,
'expected_comps': expected_comps,
'dist_shape_update': True,
'K': K,
'epochs': epochs,
'show_movie': False,
'min_num_trial': global_params['min_num_trial'],
'use_peak_max': True,
'thresh_CNN_noisy': global_params['thresh_CNN_noisy'],
'sniper_mode': global_params['sniper_mode'],
'use_dense': False,
'update_freq': global_params['update_freq']
'data': {
'decay_time': decay_time,
'fnames': fls,
'fr': fr,
},
'init': {
'gSig': gSig,
'K': K,
'nb': gnb,
},
'online': {
'dist_shape_update': True,
'ds_factor': ds_factor,
'epochs': epochs,
'expected_comps': expected_comps,
'init_batch': init_batch,
'init_method': 'bare',
'min_num_trial': global_params['min_num_trial'],
'min_SNR': global_params['min_SNR'],
'motion_correct': global_params['mot_corr'],
'normalize': True,
'rval_thr': global_params['rval_thr'],
'show_movie': False,
'sniper_mode': global_params['sniper_mode'],
'thresh_CNN_noisy': global_params['thresh_CNN_noisy'],
'update_freq': global_params['update_freq'],
'use_dense': False,
'use_peak_max': True,
},
'preprocess': {
'p': global_params['p'],
},
'quality': {
'min_SNR': global_params['min_SNR'],
'rval_thr': global_params['rval_thr'],
},
'spatial': {
'nb': gnb,
},
'temporal': {
'nb': gnb,
'p': global_params['p'],
},
}
opts = cnmf.params.CNMFParams(params_dict=params_dict)

12 changes: 8 additions & 4 deletions caiman/tests/test_motion_correction.py
@@ -128,10 +128,14 @@ def _test_motion_correct_rigid(D):
Y, C, S, A, centers, dims, shifts = gen_data(D)
fname = 'testMovie.tif'
cm.movie(Y).save(fname)
params_dict = {'max_shifts': (4, 4), # maximum allowed rigid shifts (in pixels)
'pw_rigid': False, # flag for performing non-rigid motion correction
'border_nan': True,
'is3D': D == 3}
params_dict = {
'motion': {
'border_nan': True,
'is3D': D == 3,
'max_shifts': (4, 4), # maximum allowed rigid shifts (in pixels)
'pw_rigid': False, # flag for performing non-rigid motion correction
}
}
opts = cm.source_extraction.cnmf.params.CNMFParams(params_dict=params_dict)
mc = MotionCorrect(fname, dview=None, **opts.get_group('motion'))
mc.motion_correct(save_movie=True)
55 changes: 39 additions & 16 deletions caiman/tests/test_onacid.py
@@ -1,4 +1,6 @@
#!/usr/bin/env python

import code
import numpy.testing as npt
import os
from caiman.source_extraction import cnmf
@@ -25,25 +27,46 @@ def demo():
K = 4 # max number of components in each patch

params_dict = {
'fr': fr,
'fnames': fname,
'decay_time': decay_time,
'gSig': gSig,
'p': p,
'motion_correct': False,
'min_SNR': min_SNR,
'nb': gnb,
'init_batch': init_batch,
'init_method': init_method,
'rf': patch_size // 2,
'stride': stride,
'sniper_mode': True,
'thresh_CNN_noisy': thresh_CNN_noisy,
'K': K
'data': {
'decay_time': decay_time,
'fr': fr,
'fnames': fname
},
'init': {
'K': K,
'gSig': gSig,
'nb': gnb
},
'online': {
'init_batch': init_batch,
'init_method': init_method,
'min_SNR': min_SNR,
'motion_correct': False,
'sniper_mode': True,
'thresh_CNN_noisy': thresh_CNN_noisy
},
'patch':{
'rf': patch_size // 2,
'stride': stride
},
'preprocess': {
'p': p
},
'quality': {
'min_SNR': min_SNR # FIXME duplicated between online.min_SNR and quality.min_SNR
},
'spatial': {
'nb': gnb # FIXME duplicated between init.nb and spatial.nb and temporal.nb
},
'temporal': {
'nb': gnb, # FIXME duplicated between init.nb and spatial.nb and temporal.nb
'p': p, # FIXME duplicated between preprocess.p and temporal.p
},
}
opts = cnmf.params.CNMFParams(params_dict=params_dict)
cnm = cnmf.online_cnmf.OnACID(params=opts)
cnm.fit_online()
#code.interact(local=dict(globals(), **locals()) )
cnm.save('test_online.hdf5')
cnm2 = cnmf.online_cnmf.load_OnlineCNMF('test_online.hdf5')
npt.assert_allclose(cnm.estimates.A.sum(), cnm2.estimates.A.sum())
@@ -52,4 +75,4 @@ def demo():

def test_onacid():
demo()
pass

2 changes: 1 addition & 1 deletion caiman/utils/utils.py
@@ -649,7 +649,7 @@ def get_caiman_version() -> tuple[str, str]:
# 'GITW') git rev-parse if caiman is built from "pip install -e ." and we are working
# out of the checkout directory (the user may have since updated without reinstall)
# 'RELF') A release file left in the process to cut a release. Should have a single line
# in it whick looks like "Version:1.4"
# in it which looks like "Version:1.4"
# 'FILE') The date of some frequently changing files, which act as a very rough
# approximation when no other methods are possible
#
