Merge pull request #81 from LorenFrankLab/non_ptp_epoch_restriction
Non ptp epoch restriction
edeno authored Feb 3, 2024
2 parents 8a907f4 + 5cc0485 commit a52dfa1
Showing 19 changed files with 76 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .vscode/settings.json
@@ -3,7 +3,7 @@
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": true
"source.organizeImports": "explicit"
},
},
"isort.args": [
2 changes: 1 addition & 1 deletion README.md
@@ -51,7 +51,7 @@ Developers should install from source.

These files need to be named in the following format `{date}_{animal}_{epoch}_{tag}.{extension}` where date is in the `YYYYMMDD` format, epoch is an integer with zero padding (e.g. `02` and not `2`), and tag can be any handy short descriptor. For example, `20230622_randy_02_r1.rec` is the recording file for animal randy, second epoch, run 1 (r1) for June 22, 2023.

(*Note: By default, Trodes saves video-related files (`.h264`, `videoPositionTracking`, `cameraHWSync`) in a slightly different format: `{date}_{animal}_{epoch}_{tag}.{camera number}.{extension}`. This is accepted by this conversion package and used to match cameras to position tracking in epochs with multiple cameras*)
(_Note: By default, Trodes saves video-related files (`.h264`, `videoPositionTracking`, `cameraHWSync`) in a slightly different format: `{date}_{animal}_{epoch}_{tag}.{camera number}.{extension}`. This is accepted by this conversion package and used to match cameras to position tracking in epochs with multiple cameras_)

2. Create a metadata yaml file for each recording session. We **HIGHLY** recommend using the [NWB YAML Creator](https://lorenfranklab.github.io/rec_to_nwb_yaml_creator/) to create the metadata yaml file to ensure compatibility and correct format. You can also see this [example metadata yaml file](src/trodes_to_nwb/tests/test_data/20230622_sample_metadata.yml).

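For readers skimming the README hunk above, the naming convention is mechanical enough to check in code. Below is a minimal sketch of a filename parser; it is a hypothetical helper written for illustration, not a function shipped by `trodes_to_nwb`.

```python
import re
from pathlib import Path

# {date}_{animal}_{epoch}_{tag}.{extension}, with the optional camera number
# that Trodes inserts for video-related files.
FILENAME_PATTERN = re.compile(
    r"(?P<date>\d{8})_(?P<animal>\w+?)_(?P<epoch>\d{2})_(?P<tag>\w+?)"
    r"(?:\.(?P<camera>\d+))?\.(?P<extension>\w+)$"
)


def parse_trodes_filename(path: Path) -> dict:
    """Split a Trodes-style file name into date/animal/epoch/tag fields."""
    match = FILENAME_PATTERN.match(path.name)
    if match is None:
        raise ValueError(f"{path.name} does not follow the expected naming scheme")
    return match.groupdict()


print(parse_trodes_filename(Path("20230622_randy_02_r1.rec")))
print(parse_trodes_filename(Path("20230622_randy_02_r1.1.videoPositionTracking")))
```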
2 changes: 1 addition & 1 deletion environment.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
- franklab
dependencies:
- jsonschema<4.21.0
- numpy
- scipy
- pandas
@@ -15,7 +16,6 @@ dependencies:
- pytest-cov
- pytest-mock
- pyyaml
- jsonschema
- ffmpeg
- pip
- pip:
1 change: 1 addition & 0 deletions pyproject.toml
@@ -31,6 +31,7 @@ dependencies = [
"neo",
"dask[complete]",
"ffmpeg",
"jsonschema<4.21.0",
]
dynamic = ["version"]

2 changes: 2 additions & 0 deletions src/trodes_to_nwb/convert_ephys.py
@@ -100,6 +100,8 @@ def __init__(
j = 0
previous_multiplex_state = None
iterator_loc = len(iterator_size) - i - 1
# calculate systime regression on full epoch, parameters stored and inherited by partial iterators
self.neo_io[iterator_loc].get_regressed_systime(0, None)
while j < size:
sub_iterators.append(
SpikeGadgetsRawIOPartial(
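The added call primes the system-time regression on the full-epoch reader before the partial iterators are constructed, so every partial iterator inherits the same fit instead of re-fitting on its own short slice. A rough sketch of that compute-once, inherit-everywhere pattern (illustrative class names, not the package's API):

```python
class FullReader:
    """Computes an expensive fit once over the whole epoch and caches the parameters."""

    def __init__(self, x, y):
        self.x, self.y = x, y
        self.fit_params = {}  # empty until the first fit

    def fit(self):
        if not self.fit_params:
            # stand-in for the real regression over the full epoch
            slope = (self.y[-1] - self.y[0]) / (self.x[-1] - self.x[0])
            intercept = self.y[0] - slope * self.x[0]
            self.fit_params = {"slope": slope, "intercept": intercept}
        return self.fit_params


class PartialReader:
    """Covers only a slice of the data but inherits the full-epoch fit."""

    def __init__(self, full_reader, start, stop):
        self.x = full_reader.x[start:stop]
        self.fit_params = full_reader.fit_params  # inherited, never re-fit


full = FullReader(x=[0, 1, 2, 3], y=[10.0, 12.0, 14.0, 16.0])
full.fit()  # analogous to calling get_regressed_systime(0, None) up front
part = PartialReader(full, start=1, stop=3)
print(part.fit_params)  # {'slope': 2.0, 'intercept': 10.0}
```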
1 change: 1 addition & 0 deletions src/trodes_to_nwb/convert_intervals.py
@@ -4,6 +4,7 @@
import numpy as np
import pandas as pd
from pynwb import NWBFile, TimeSeries

from trodes_to_nwb.convert_ephys import RecFileDataChunkIterator
from trodes_to_nwb.spike_gadgets_raw_io import SpikeGadgetsRawIO

40 changes: 37 additions & 3 deletions src/trodes_to_nwb/convert_position.py
@@ -1,5 +1,5 @@
import logging
import datetime
import logging
import os
import re
import subprocess
@@ -490,7 +490,32 @@ def get_position_timestamps(
dio_camera_timestamps=None | np.ndarray,
sample_count=None | np.ndarray,
ptp_enabled: bool = True,
epoch_interval: list[float] | None = None,
):
"""Get the timestamps for a position data file. Includes protocol;s for both ptp and non-ptp data.
Parameters
----------
position_timestamps_filepath : Path
path to the position timestamps file
position_tracking_filepath : _type_, optional
path to the position tracking file, by default None | Path
rec_dci_timestamps : _type_, optional
system clock times from the rec file used for non-ptp data, by default None | np.ndarray
dio_camera_timestamps : _type_, optional
Timestamps of the dio camera ticks used for non-ptp data, by default None | np.ndarray
sample_count : _type_, optional
trodes timestamps from the rec file used for non-ptp data, by default None | np.ndarray
ptp_enabled : bool, optional
whether ptp was enabled for position tracking, by default True
epoch_interval : list[float] | None, optional
the time interval for the epoch used for non-ptp data, by default None
Returns
-------
np.ndarray
timestamps for the position data
"""
logger = logging.getLogger("convert")

# Get video timestamps
@@ -594,10 +619,18 @@ def get_position_timestamps(

frame_count = np.asarray(video_timestamps.HWframeCount)

is_valid_camera_time = np.isin(video_timestamps.index, sample_count)
epoch_start_ind = np.digitize(epoch_interval[0], rec_dci_timestamps)
epoch_end_ind = np.digitize(epoch_interval[1], rec_dci_timestamps)
is_valid_camera_time = np.isin(
video_timestamps.index, sample_count[epoch_start_ind:epoch_end_ind]
)

camera_systime = rec_dci_timestamps[
wrapped_digitize(video_timestamps.index[is_valid_camera_time], sample_count)
wrapped_digitize(
video_timestamps.index[is_valid_camera_time],
sample_count[epoch_start_ind:epoch_end_ind],
)
+ epoch_start_ind
]
(
dio_camera_timestamps,
@@ -808,6 +841,7 @@ def add_position(
rec_dci_timestamps=rec_dci_timestamps,
dio_camera_timestamps=dio_camera_timestamps_epoch,
sample_count=sample_count,
epoch_interval=[epoch_start, epoch_end] if not ptp_enabled else None,
)

# TODO: Doesn't handle multiple cameras currently
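These hunks carry the change the PR is named for: when PTP is disabled, camera frames are matched against the Trodes sample counts of the current epoch only, by locating the epoch's start and end in the rec file's system-clock timestamps. A small self-contained sketch of that indexing follows, using toy arrays; `np.searchsorted` stands in for the module's `wrapped_digitize`, which also handles counter rollover.

```python
import numpy as np

# Toy stand-ins for the rec-file arrays used in the diff:
#   rec_dci_timestamps - system-clock time (s) of every Trodes sample
#   sample_count       - Trodes sample counters aligned with those times
#   frame_counts       - HWframeCount values for one epoch's video frames
rec_dci_timestamps = np.array([0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0])
sample_count = np.array([10, 20, 30, 40, 50, 60, 70])
frame_counts = np.array([30, 40, 55])
epoch_interval = [0.9, 2.1]  # epoch start/end in system-clock seconds

# Restrict the rec-file samples to this epoch before matching camera frames.
epoch_start_ind = np.digitize(epoch_interval[0], rec_dci_timestamps)
epoch_end_ind = np.digitize(epoch_interval[1], rec_dci_timestamps)
epoch_samples = sample_count[epoch_start_ind:epoch_end_ind]

# Keep only frames whose counter appears among the epoch's samples, then map
# each kept frame back to a system-clock time (offset by the epoch start).
is_valid_camera_time = np.isin(frame_counts, epoch_samples)
match_ind = np.searchsorted(epoch_samples, frame_counts[is_valid_camera_time])
camera_systime = rec_dci_timestamps[match_ind + epoch_start_ind]
print(camera_systime)  # [1.  1.5] -> frames 30 and 40; frame 55 has no match
```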
2 changes: 1 addition & 1 deletion src/trodes_to_nwb/convert_yaml.py
@@ -12,10 +12,10 @@
AssociatedFiles,
CameraDevice,
DataAcqDevice,
NwbElectrodeGroup,
Probe,
Shank,
ShanksElectrode,
NwbElectrodeGroup,
)
from pynwb import NWBFile
from pynwb.file import ProcessingModule, Subject
21 changes: 17 additions & 4 deletions src/trodes_to_nwb/spike_gadgets_raw_io.py
@@ -361,6 +361,9 @@ def _parse_header(self):
# initialize interpolate index as none so can check if it has been set in a trodes timestamps call
self.interpolate_index = None

# initialize systime parameters as empty dict so can check if they have been set in a get_regressed_systime call
self.regressed_systime_parameters = {}

self._generate_minimal_annotations()
# info from GlobalConfiguration in xml are copied to block and seg annotations
bl_ann = self.raw_annotations["blocks"][0]
@@ -707,13 +710,22 @@ def get_digitalsignal(self, stream_id, channel_id):
@functools.lru_cache(maxsize=1)
def get_regressed_systime(self, i_start, i_stop=None):
NANOSECONDS_PER_SECOND = 1e9
# get values
# get trodes timestamp values
trodestime = self.get_analogsignal_timestamps(i_start, i_stop)
systime_seconds = self.get_sys_clock(i_start, i_stop)
# Convert
trodestime_index = np.asarray(trodestime, dtype=np.float64)
# regress
slope, intercept, _, _, _ = linregress(trodestime_index, systime_seconds)
if not self.regressed_systime_parameters:
# get raw systime values
systime_seconds = self.get_sys_clock(i_start, i_stop)
# regress
slope, intercept, _, _, _ = linregress(trodestime_index, systime_seconds)
self.regressed_systime_parameters = {
"slope": slope,
"intercept": intercept,
}
else:
slope = self.regressed_systime_parameters["slope"]
intercept = self.regressed_systime_parameters["intercept"]
adjusted_timestamps = intercept + slope * trodestime_index
return (adjusted_timestamps) / NANOSECONDS_PER_SECOND

@@ -857,6 +869,7 @@ def __init__(
self._mask_streams = full_io._mask_streams
self.selected_streams = full_io.selected_streams
self._generate_minimal_annotations()
self.regressed_systime_parameters = full_io.regressed_systime_parameters

# crop key information to range of interest
header_size = None
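For context, `get_regressed_systime` maps Trodes sample counters onto the nanosecond system clock with a single linear fit and rescales to seconds; the new dictionary caches the slope and intercept so the partial IO objects constructed above reuse them. A standalone sketch with synthetic data (the sampling rate and clock values are invented for the example, not real rec-file contents):

```python
import numpy as np
from scipy.stats import linregress

NANOSECONDS_PER_SECOND = 1e9
SAMPLING_RATE_HZ = 30_000  # assumed Trodes sampling rate for this toy example

# Synthetic stand-ins: Trodes counters and a system clock in nanoseconds.
trodestime = np.arange(0, SAMPLING_RATE_HZ, dtype=np.float64)
systime_ns = 1.7e18 + trodestime * (NANOSECONDS_PER_SECOND / SAMPLING_RATE_HZ)

# Fit once on the full epoch and keep the parameters, as the diff now does,
# instead of regressing separately on each short chunk.
slope, intercept, _, _, _ = linregress(trodestime, systime_ns)
regressed_systime_parameters = {"slope": slope, "intercept": intercept}

# Any later chunk of Trodes timestamps can be converted without re-fitting.
chunk = trodestime[1_000:1_010]
adjusted_timestamps = (
    regressed_systime_parameters["intercept"]
    + regressed_systime_parameters["slope"] * chunk
)
print(adjusted_timestamps / NANOSECONDS_PER_SECOND)  # epoch times in seconds
```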
1 change: 1 addition & 0 deletions src/trodes_to_nwb/tests/test_convert.py
@@ -4,6 +4,7 @@

import numpy as np
from pynwb import NWBHDF5IO

from trodes_to_nwb.convert import create_nwbs, get_included_probe_metadata_paths
from trodes_to_nwb.data_scanner import get_file_info
from trodes_to_nwb.tests.utils import data_path
4 changes: 2 additions & 2 deletions src/trodes_to_nwb/tests/test_convert_dios.py
@@ -2,12 +2,12 @@

import numpy as np
import pynwb

from trodes_to_nwb import convert_yaml
from trodes_to_nwb.convert_dios import add_dios
from trodes_to_nwb.tests.test_convert_rec_header import default_test_xml_tree
from trodes_to_nwb.tests.utils import data_path

from trodes_to_nwb import convert_yaml


def test_add_dios_single_rec():
# load metadata yml and make nwb file
4 changes: 2 additions & 2 deletions src/trodes_to_nwb/tests/test_convert_ephys.py
@@ -3,12 +3,12 @@

import numpy as np
import pynwb

from trodes_to_nwb import convert_rec_header, convert_yaml
from trodes_to_nwb.convert_ephys import add_raw_ephys
from trodes_to_nwb.tests.test_convert_rec_header import default_test_xml_tree
from trodes_to_nwb.tests.utils import data_path

from trodes_to_nwb import convert_rec_header, convert_yaml

MICROVOLTS_PER_VOLT = 1e6


1 change: 1 addition & 0 deletions src/trodes_to_nwb/tests/test_convert_intervals.py
@@ -2,6 +2,7 @@

import numpy as np
from pynwb import NWBHDF5IO

from trodes_to_nwb.convert_ephys import RecFileDataChunkIterator
from trodes_to_nwb.convert_intervals import add_epochs, add_sample_count
from trodes_to_nwb.convert_yaml import initialize_nwb, load_metadata
4 changes: 2 additions & 2 deletions src/trodes_to_nwb/tests/test_convert_position.py
@@ -5,6 +5,8 @@
import pandas as pd
import pytest
from pynwb import NWBHDF5IO, TimeSeries

from trodes_to_nwb import convert, convert_rec_header, convert_yaml
from trodes_to_nwb.convert_dios import add_dios
from trodes_to_nwb.convert_ephys import RecFileDataChunkIterator
from trodes_to_nwb.convert_intervals import add_epochs, add_sample_count
@@ -27,8 +29,6 @@
from trodes_to_nwb.data_scanner import get_file_info
from trodes_to_nwb.tests.utils import data_path

from trodes_to_nwb import convert, convert_rec_header, convert_yaml


def test_wrapped_digitize():
x = np.array([4, 5, 6, 0, 1, 2])
2 changes: 1 addition & 1 deletion src/trodes_to_nwb/tests/test_convert_rec_header.py
@@ -3,9 +3,9 @@

import pytest
from ndx_franklab_novela import HeaderDevice
from trodes_to_nwb.tests.utils import data_path

from trodes_to_nwb import convert, convert_rec_header, convert_yaml
from trodes_to_nwb.tests.utils import data_path


def default_test_xml_tree() -> ElementTree:
src/trodes_to_nwb/tests/test_data/test_metadata_dict_samples.py
@@ -1,6 +1,7 @@
import pytest
import copy

import pytest

basic_data = {
"experimenter_name": ["michael jackson"],
"lab": "Loren Frank Lab",
1 change: 0 additions & 1 deletion src/trodes_to_nwb/tests/test_metadata_validation.py
@@ -3,7 +3,6 @@
from unittest.mock import MagicMock, patch

from trodes_to_nwb.metadata_validation import _get_nwb_json_schema_path, validate

from trodes_to_nwb.tests.test_data import test_metadata_dict_samples


1 change: 1 addition & 0 deletions src/trodes_to_nwb/tests/test_spikegadgets_io.py
@@ -1,4 +1,5 @@
import numpy as np

from trodes_to_nwb.spike_gadgets_raw_io import InsertedMemmap, SpikeGadgetsRawIO
from trodes_to_nwb.tests.utils import data_path

4 changes: 2 additions & 2 deletions src/trodes_to_nwb/tests/utils.py
@@ -1,8 +1,8 @@
"""Set the path to the bulk test data dir and copies the yaml/config files there"""

import os
from pathlib import Path
import shutil

from pathlib import Path

yaml_path = Path(__file__).resolve().parent / "test_data"

