From 2202528951d6b67d68925e97fbdfcb5694c77914 Mon Sep 17 00:00:00 2001
From: Pat Gunn
Date: Tue, 21 Nov 2023 10:24:54 -0500
Subject: [PATCH] (remove some small docstring references to slurm)

---
 caiman/source_extraction/cnmf/map_reduce.py | 2 +-
 caiman/source_extraction/cnmf/temporal.py   | 2 --
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/caiman/source_extraction/cnmf/map_reduce.py b/caiman/source_extraction/cnmf/map_reduce.py
index 5a629a028..8812f6a05 100644
--- a/caiman/source_extraction/cnmf/map_reduce.py
+++ b/caiman/source_extraction/cnmf/map_reduce.py
@@ -41,7 +41,7 @@ def cnmf_patches(args_in):
         number of global background components
 
     backend: string
-        'ipyparallel' or 'single_thread' or SLURM
+        'ipyparallel' or 'single_thread'
 
     n_processes: int
         number of cores to be used (should be less than the number of cores started with ipyparallel)
diff --git a/caiman/source_extraction/cnmf/temporal.py b/caiman/source_extraction/cnmf/temporal.py
index 70f64aba9..69db93307 100644
--- a/caiman/source_extraction/cnmf/temporal.py
+++ b/caiman/source_extraction/cnmf/temporal.py
@@ -113,7 +113,6 @@ def update_temporal_components(Y, A, b, Cin, fin, bl=None, c1=None, g=None, sn=N
         ipyparallel, parallelization using the ipyparallel cluster.
         You should start the cluster (install ipyparallel and then type
         ipcluster -n 6, where 6 is the number of processes).
-        SLURM: using SLURM scheduler
 
     memory_efficient: Bool
         whether or not to optimize for memory usage (longer running times). necessary with very large datasets
@@ -287,7 +286,6 @@ def update_iteration(parrllcomp, len_parrllcomp, nb, C, S, bl, nr,
         ipyparallel, parallelization using the ipyparallel cluster.
         You should start the cluster (install ipyparallel and then type
         ipcluster -n 6, where 6 is the number of processes).
-        SLURM: using SLURM scheduler
 
     memory_efficient: Bool
         whether or not to optimize for memory usage (longer running times). necessary with very large datasets
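
Note (not part of the patch): the 'ipyparallel' backend these docstrings keep follows
the workflow described above: start a cluster from a shell, connect a client, and map
work across the engines. A minimal sketch of that workflow is below; the `square`
helper and the engine count are illustrative, not taken from the caiman codebase, and
current ipyparallel versions spell the startup command `ipcluster start -n 6` rather
than the `ipcluster -n 6` shown in the docstring.

# Minimal sketch of the documented ipyparallel workflow (illustrative only;
# assumes ipyparallel is installed and a cluster was started from a shell
# with: ipcluster start -n 6).
import ipyparallel as ipp

client = ipp.Client()          # connect to the running cluster
view = client.direct_view()    # a view spanning all engines

def square(x):                 # hypothetical stand-in for per-patch work
    return x * x

# map_sync distributes the calls across the engines and gathers the results,
# the same map/reduce pattern map_reduce.py applies to movie patches
print(view.map_sync(square, range(8)))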