Skip to content

Commit

Permalink
Remove deprecations and add docstring to `BaseImagingExtractorInterfa…
Browse files Browse the repository at this point in the history
…ce` (#1126)
  • Loading branch information
h-mayorquin authored Nov 11, 2024
1 parent cbf68d4 commit 9448f95
Show file tree
Hide file tree
Showing 9 changed files with 28 additions and 133 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# Upcoming

## Deprecations
* Completely removed compression settings from most places. [PR #1126](https://github.com/catalystneuro/neuroconv/pull/1126)

## Bug Fixes

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import json
import re
import warnings
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional, Union
Expand Down Expand Up @@ -210,8 +209,6 @@ def add_to_nwbfile(
self,
nwbfile: NWBFile,
metadata: Optional[dict] = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None, # TODO: remove completely after 10/1/2024
):
"""
Parameters
Expand All @@ -223,17 +220,6 @@ def add_to_nwbfile(
"""
import pandas as pd

# TODO: remove completely after 10/1/2024
if compression is not None or compression_opts is not None:
warnings.warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

fictrac_data_df = pd.read_csv(self.file_path, sep=",", header=None, names=self.columns_in_dat_file)

# Get the timestamps
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@ def add_to_nwbfile(
starting_time: Optional[float] = None,
write_as: Literal["raw", "lfp", "processed"] = "lfp",
write_electrical_series: bool = True,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None,
iterator_type: str = "v2",
iterator_opts: Optional[dict] = None,
):
Expand All @@ -38,8 +36,6 @@ def add_to_nwbfile(
starting_time=starting_time,
write_as=write_as,
write_electrical_series=write_electrical_series,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Original file line number Diff line number Diff line change
Expand Up @@ -308,8 +308,6 @@ def add_to_nwbfile(
starting_time: Optional[float] = None,
write_as: Literal["raw", "lfp", "processed"] = "raw",
write_electrical_series: bool = True,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
always_write_timestamps: bool = False,
Expand Down Expand Up @@ -388,8 +386,6 @@ def add_to_nwbfile(
write_as=write_as,
write_electrical_series=write_electrical_series,
es_key=self.es_key,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
always_write_timestamps=always_write_timestamps,
Expand Down
45 changes: 25 additions & 20 deletions src/neuroconv/datainterfaces/ophys/baseimagingextractorinterface.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
"""Author: Ben Dichter."""

import warnings
from typing import Literal, Optional

import numpy as np
Expand Down Expand Up @@ -46,17 +45,9 @@ def __init__(
self.photon_series_type = photon_series_type

def get_metadata_schema(
self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
self,
) -> dict:

if photon_series_type is not None:
warnings.warn(
"The 'photon_series_type' argument is deprecated and will be removed in a future version. "
"Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
DeprecationWarning,
stacklevel=2,
)
self.photon_series_type = photon_series_type
metadata_schema = super().get_metadata_schema()

metadata_schema["required"] = ["Ophys"]
Expand Down Expand Up @@ -100,18 +91,9 @@ def get_metadata_schema(
return metadata_schema

def get_metadata(
self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None
self,
) -> DeepDict:

if photon_series_type is not None:
warnings.warn(
"The 'photon_series_type' argument is deprecated and will be removed in a future version. "
"Please set 'photon_series_type' during the initialization of the BaseImagingExtractorInterface instance.",
DeprecationWarning,
stacklevel=2,
)
self.photon_series_type = photon_series_type

from ...tools.roiextractors import get_nwb_imaging_metadata

metadata = super().get_metadata()
Expand Down Expand Up @@ -147,6 +129,29 @@ def add_to_nwbfile(
stub_test: bool = False,
stub_frames: int = 100,
):
"""
Add imaging data to the NWB file
Parameters
----------
nwbfile : NWBFile
The NWB file where the imaging data will be added.
metadata : dict, optional
Metadata for the NWBFile, by default None.
photon_series_type : {"TwoPhotonSeries", "OnePhotonSeries"}, optional
The type of photon series to be added, by default "TwoPhotonSeries".
photon_series_index : int, optional
The index of the photon series in the provided imaging data, by default 0.
parent_container : {"acquisition", "processing/ophys"}, optional
Specifies the parent container to which the photon series should be added, either as part of "acquisition" or
under the "processing/ophys" module, by default "acquisition".
stub_test : bool, optional
If True, only writes a small subset of frames for testing purposes, by default False.
stub_frames : int, optional
The number of frames to write when stub_test is True. Will use min(stub_frames, total_frames) to avoid
exceeding available frames, by default 100.
"""

from ...tools.roiextractors import add_imaging_to_nwbfile

if stub_test:
Expand Down
36 changes: 0 additions & 36 deletions src/neuroconv/tools/neo/neo.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,6 @@ def add_icephys_recordings(
icephys_experiment_type: str = "voltage_clamp",
stimulus_type: str = "not described",
skip_electrodes: tuple[int] = (),
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
):
"""
Add icephys recordings (stimulus/response pairs) to nwbfile object.
Expand All @@ -230,16 +229,6 @@ def add_icephys_recordings(
skip_electrodes : tuple, default: ()
Electrode IDs to skip.
"""
# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

n_segments = get_number_of_segments(neo_reader, block=0)

Expand Down Expand Up @@ -380,7 +369,6 @@ def add_neo_to_nwb(
neo_reader,
nwbfile: pynwb.NWBFile,
metadata: dict = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
icephys_experiment_type: str = "voltage_clamp",
stimulus_type: Optional[str] = None,
skip_electrodes: tuple[int] = (),
Expand Down Expand Up @@ -409,15 +397,6 @@ def add_neo_to_nwb(
assert isinstance(nwbfile, pynwb.NWBFile), "'nwbfile' should be of type pynwb.NWBFile"

# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

add_device_from_metadata(nwbfile=nwbfile, modality="Icephys", metadata=metadata)

Expand All @@ -443,7 +422,6 @@ def write_neo_to_nwb(
overwrite: bool = False,
nwbfile=None,
metadata: dict = None,
compression: Optional[str] = None, # TODO: remove completely after 10/1/2024
icephys_experiment_type: Optional[str] = None,
stimulus_type: Optional[str] = None,
skip_electrodes: Optional[tuple] = (),
Expand Down Expand Up @@ -499,9 +477,6 @@ def write_neo_to_nwb(
Note that data intended to be added to the electrodes table of the NWBFile should be set as channel
properties in the RecordingExtractor object.
compression: str (optional, defaults to "gzip")
Type of compression to use. Valid types are "gzip" and "lzf".
Set to None to disable all compression.
icephys_experiment_type: str (optional)
Type of Icephys experiment. Allowed types are: 'voltage_clamp', 'current_clamp' and 'izero'.
If no value is passed, 'voltage_clamp' is used as default.
Expand All @@ -518,17 +493,6 @@ def write_neo_to_nwb(

assert save_path is None or nwbfile is None, "Either pass a save_path location, or nwbfile object, but not both!"

# TODO: remove completely after 10/1/2024
if compression is not None:
warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

if metadata is None:
metadata = get_nwb_metadata(neo_reader=neo_reader)

Expand Down
38 changes: 0 additions & 38 deletions src/neuroconv/tools/spikeinterface/spikeinterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -749,8 +749,6 @@ def add_electrical_series(
write_as: Literal["raw", "processed", "lfp"] = "raw",
es_key: str = None,
write_scaled: bool = False,
compression: Optional[str] = None,
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
):
Expand All @@ -772,8 +770,6 @@ def add_electrical_series(
write_as=write_as,
es_key=es_key,
write_scaled=write_scaled,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Expand Down Expand Up @@ -810,8 +806,6 @@ def add_electrical_series_to_nwbfile(
write_as: Literal["raw", "processed", "lfp"] = "raw",
es_key: str = None,
write_scaled: bool = False,
compression: Optional[str] = None,
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
always_write_timestamps: bool = False,
Expand Down Expand Up @@ -847,7 +841,6 @@ def add_electrical_series_to_nwbfile(
write_scaled : bool, default: False
If True, writes the traces in uV with the right conversion.
If False , the data is stored as it is and the right conversions factors are added to the nwbfile.
Only applies to compression="gzip". Controls the level of the GZIP.
iterator_type: {"v2", None}, default: 'v2'
The type of DataChunkIterator to use.
'v1' is the original DataChunkIterator of the hdmf data_utils.
Expand All @@ -868,16 +861,6 @@ def add_electrical_series_to_nwbfile(
Missing keys in an element of metadata['Ecephys']['ElectrodeGroup'] will be auto-populated with defaults
whenever possible.
"""
# TODO: remove completely after 10/1/2024
if compression is not None or compression_opts is not None:
warnings.warn(
message=(
"Specifying compression methods and their options at the level of tool functions has been deprecated. "
"Please use the `configure_backend` tool function for this purpose."
),
category=DeprecationWarning,
stacklevel=2,
)

assert write_as in [
"raw",
Expand Down Expand Up @@ -1042,8 +1025,6 @@ def add_recording(
es_key: Optional[str] = None,
write_electrical_series: bool = True,
write_scaled: bool = False,
compression: Optional[str] = "gzip",
compression_opts: Optional[int] = None,
iterator_type: str = "v2",
iterator_opts: Optional[dict] = None,
):
Expand All @@ -1065,8 +1046,6 @@ def add_recording(
es_key=es_key,
write_electrical_series=write_electrical_series,
write_scaled=write_scaled,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Expand All @@ -1081,8 +1060,6 @@ def add_recording_to_nwbfile(
es_key: Optional[str] = None,
write_electrical_series: bool = True,
write_scaled: bool = False,
compression: Optional[str] = "gzip",
compression_opts: Optional[int] = None,
iterator_type: str = "v2",
iterator_opts: Optional[dict] = None,
always_write_timestamps: bool = False,
Expand Down Expand Up @@ -1163,8 +1140,6 @@ def add_recording_to_nwbfile(
write_as=write_as,
es_key=es_key,
write_scaled=write_scaled,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
always_write_timestamps=always_write_timestamps,
Expand All @@ -1183,8 +1158,6 @@ def write_recording(
es_key: Optional[str] = None,
write_electrical_series: bool = True,
write_scaled: bool = False,
compression: Optional[str] = "gzip",
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
):
Expand All @@ -1209,8 +1182,6 @@ def write_recording(
es_key=es_key,
write_electrical_series=write_electrical_series,
write_scaled=write_scaled,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Expand All @@ -1228,8 +1199,6 @@ def write_recording_to_nwbfile(
es_key: Optional[str] = None,
write_electrical_series: bool = True,
write_scaled: bool = False,
compression: Optional[str] = "gzip",
compression_opts: Optional[int] = None,
iterator_type: Optional[str] = "v2",
iterator_opts: Optional[dict] = None,
) -> pynwb.NWBFile:
Expand Down Expand Up @@ -1303,11 +1272,6 @@ def write_recording_to_nwbfile(
and electrodes are written to NWB.
write_scaled: bool, default: True
If True, writes the scaled traces (return_scaled=True)
compression: {None, 'gzip', 'lzp'}, default: 'gzip'
Type of compression to use. Set to None to disable all compression.
To use the `configure_backend` function, you should set this to None.
compression_opts: int, optional, default: 4
Only applies to compression="gzip". Controls the level of the GZIP.
iterator_type: {"v2", "v1", None}
The type of DataChunkIterator to use.
'v1' is the original DataChunkIterator of the hdmf data_utils.
Expand Down Expand Up @@ -1348,8 +1312,6 @@ def write_recording_to_nwbfile(
es_key=es_key,
write_electrical_series=write_electrical_series,
write_scaled=write_scaled,
compression=compression,
compression_opts=compression_opts,
iterator_type=iterator_type,
iterator_opts=iterator_opts,
)
Expand Down
4 changes: 2 additions & 2 deletions src/neuroconv/tools/testing/mock_interfaces.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,9 +265,9 @@ def __init__(
self.verbose = verbose
self.photon_series_type = photon_series_type

def get_metadata(self, photon_series_type: Optional[Literal["OnePhotonSeries", "TwoPhotonSeries"]] = None) -> dict:
def get_metadata(self) -> dict:
session_start_time = datetime.now().astimezone()
metadata = super().get_metadata(photon_series_type=photon_series_type)
metadata = super().get_metadata()
metadata["NWBFile"]["session_start_time"] = session_start_time
return metadata

Expand Down
Loading

0 comments on commit 9448f95

Please sign in to comment.