From 5b48417a5582904b9b41adeb10a883c344ef0baf Mon Sep 17 00:00:00 2001
From: Samuel Garcia
Date: Mon, 4 Mar 2024 21:23:38 +0100
Subject: [PATCH] Fixes after Pierre removed many feature methods

---
 .../clustering/position_and_features.py       | 10 ++++++----
 .../sortingcomponents/features_from_peaks.py  |  1 +
 .../tests/test_features_from_peaks.py         |  6 ++----
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
index c4a999ae92..805a1572fd 100644
--- a/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
+++ b/src/spikeinterface/sortingcomponents/clustering/position_and_features.py
@@ -47,6 +47,8 @@ class PositionAndFeaturesClustering:
 
     @classmethod
     def main_function(cls, recording, peaks, params):
+        from sklearn.preprocessing import QuantileTransformer
+
         assert HAVE_HDBSCAN, "twisted clustering needs hdbscan to be installed"
 
         if "n_jobs" in params["job_kwargs"]:
@@ -68,22 +70,22 @@ def main_function(cls, recording, peaks, params):
 
         position_method = d["peak_localization_kwargs"]["method"]
 
-        features_list = [position_method, "ptp", "energy"]
+        features_list = [position_method, "ptp",]
         features_params = {
             position_method: {"radius_um": params["radius_um"]},
             "ptp": {"all_channels": False, "radius_um": params["radius_um"]},
-            "energy": {"radius_um": params["radius_um"]},
         }
 
         features_data = compute_features_from_peaks(
             recording, peaks, features_list, features_params, ms_before=1, ms_after=1, **params["job_kwargs"]
         )
 
-        hdbscan_data = np.zeros((len(peaks), 4), dtype=np.float32)
+        hdbscan_data = np.zeros((len(peaks), 3), dtype=np.float32)
         hdbscan_data[:, 0] = features_data[0]["x"]
         hdbscan_data[:, 1] = features_data[0]["y"]
         hdbscan_data[:, 2] = features_data[1]
-        hdbscan_data[:, 3] = features_data[2]
+
+        preprocessing = QuantileTransformer(output_distribution="uniform")
 
         hdbscan_data = preprocessing.fit_transform(hdbscan_data)
 
diff --git a/src/spikeinterface/sortingcomponents/features_from_peaks.py b/src/spikeinterface/sortingcomponents/features_from_peaks.py
index 8770dec6f9..40f89068f9 100644
--- a/src/spikeinterface/sortingcomponents/features_from_peaks.py
+++ b/src/spikeinterface/sortingcomponents/features_from_peaks.py
@@ -215,4 +215,5 @@ def compute(self, traces, peaks, waveforms):
     "amplitude": AmplitudeFeature,
     "ptp": PeakToPeakFeature,
     "random_projections": RandomProjectionsFeature,
+    "center_of_mass": LocalizeCenterOfMass,
 }
diff --git a/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py b/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py
index 896c4e1e1e..160ba3cb36 100644
--- a/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py
+++ b/src/spikeinterface/sortingcomponents/tests/test_features_from_peaks.py
@@ -26,12 +26,11 @@ def test_features_from_peaks():
         **job_kwargs,
     )
 
-    feature_list = ["amplitude", "ptp", "center_of_mass", "energy"]
+    feature_list = ["amplitude", "ptp", "center_of_mass",]
     feature_params = {
         "amplitude": {"all_channels": False, "peak_sign": "neg"},
         "ptp": {"all_channels": False},
         "center_of_mass": {"radius_um": 120.0},
-        "energy": {"radius_um": 160.0},
     }
 
     features = compute_features_from_peaks(recording, peaks, feature_list, feature_params=feature_params, **job_kwargs)
@@ -45,14 +44,13 @@ def test_features_from_peaks():
 
     # split feature variable
     job_kwargs["n_jobs"] = 2
-    amplitude, ptp, com, energy = compute_features_from_peaks(
+    amplitude, ptp, com, = compute_features_from_peaks(
         recording, peaks, feature_list, feature_params=feature_params, **job_kwargs
    )
     assert amplitude.ndim == 1  # because all_channels=False
     assert ptp.ndim == 1  # because all_channels=False
     assert com.ndim == 1
     assert "x" in com.dtype.fields
-    assert energy.ndim == 1
 
     # amplitude and peak to peak with multi channels
     d = {"all_channels": True}
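
For reference, a minimal usage sketch of the reduced feature pipeline this patch leaves in place: only the localization and "ptp" features are computed, stacked, and rescaled with scikit-learn's QuantileTransformer before clustering. It assumes a `recording` and its detected `peaks` already exist, and the `radius_um` value is illustrative rather than taken from the patch.

import numpy as np
from sklearn.preprocessing import QuantileTransformer

from spikeinterface.sortingcomponents.features_from_peaks import compute_features_from_peaks

# "recording" and "peaks" are assumed to exist already (e.g. from detect_peaks);
# radius_um below is an illustrative value, not one prescribed by the patch.
features_list = ["center_of_mass", "ptp"]
features_params = {
    "center_of_mass": {"radius_um": 100.0},
    "ptp": {"all_channels": False, "radius_um": 100.0},
}
com, ptp = compute_features_from_peaks(
    recording, peaks, features_list, features_params, ms_before=1, ms_after=1
)

# Stack x/y position and peak-to-peak amplitude, then map each column onto a
# uniform distribution so no single feature dominates the HDBSCAN clustering.
hdbscan_data = np.zeros((len(peaks), 3), dtype=np.float32)
hdbscan_data[:, 0] = com["x"]
hdbscan_data[:, 1] = com["y"]
hdbscan_data[:, 2] = ptp
hdbscan_data = QuantileTransformer(output_distribution="uniform").fit_transform(hdbscan_data)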