Add epochs to week long session #20

Merged
merged 6 commits into from
Dec 2, 2024
@@ -18,6 +18,39 @@
from neuroconv.utils import DeepDict, dict_deep_update


def get_miniscope_folder_path(folder_path: Union[str, Path]):
"""
Retrieve the path to the Miniscope folder within the given session folder based on metadata.

Parameters:
-----------
folder_path : Union[str, Path]
Path to the main session folder, which should contain a "metaData.json" file with information about the Miniscope.

Returns:
--------
Optional[Path]
Path to the Miniscope folder, formatted to replace any spaces in the Miniscope name with underscores. Returns `None` if the
specified folder is not a directory or if the metadata JSON is missing or misconfigured.

Raises:
-------
AssertionError
If the "metaData.json" file is not found in the given folder path.
"""
folder_path = Path(folder_path)
if folder_path.is_dir():
general_metadata_json = folder_path / "metaData.json"
assert general_metadata_json.exists(), f"General metadata json not found in {folder_path}"
with open(general_metadata_json) as f:
general_metadata = json.load(f)
miniscope_name = general_metadata["miniscopes"][0]
return folder_path / miniscope_name.replace(" ", "_")
else:
print(f"No Miniscope data found at {folder_path}")
return None


def get_recording_start_time(file_path: Union[str, Path]):
"""
Retrieve the recording start time from metadata in the specified folder.
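As a rough usage sketch of the get_miniscope_folder_path helper above, assuming it is importable from interfaces.miniscope_imaging_interface (the import path used by the converter scripts below) and that metaData.json lists device names under a "miniscopes" key, which is what the function reads:

# Minimal sketch: exercise get_miniscope_folder_path against a mock session folder.
import json
import tempfile
from pathlib import Path

from interfaces.miniscope_imaging_interface import get_miniscope_folder_path

with tempfile.TemporaryDirectory() as tmp:
    session_folder = Path(tmp)
    # Hypothetical metadata contents; the helper reads the first entry of "miniscopes".
    (session_folder / "metaData.json").write_text(json.dumps({"miniscopes": ["My Miniscope"]}))
    miniscope_folder = get_miniscope_folder_path(session_folder)
    print(miniscope_folder)  # <tmp>/My_Miniscope -- spaces replaced with underscores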
2 changes: 1 addition & 1 deletion src/cai_lab_to_nwb/zaki_2024/utils/__init__.py
@@ -1 +1 @@
from .cell_registration import add_cell_registration, get_global_ids_from_csv
from .edf_slicing import get_session_slicing_time_range, get_session_run_time
63 changes: 63 additions & 0 deletions src/cai_lab_to_nwb/zaki_2024/utils/edf_slicing.py
@@ -0,0 +1,63 @@
from pathlib import Path
from typing import Union
from datetime import timedelta

from src.cai_lab_to_nwb.zaki_2024.interfaces.miniscope_imaging_interface import (
get_miniscope_timestamps,
get_recording_start_time,
)


def get_session_slicing_time_range(miniscope_metadata_json: Union[str, Path], timestamps_file_path: Union[str, Path]):
"""
Calculate the time range for EDF slicing based on session start time and Miniscope timestamps.

Parameters:
-----------
miniscope_metadata_json : Union[str, Path]
Path to the metadata.json file produced by Miniscope output.

timestamps_file_path : Union[str, Path]
Path to the Miniscope timeStamps.csv file.

Returns:
--------
Tuple[datetime, datetime]
A tuple containing the start and stop timestamps (as datetime objects) for the EDF slicing period. The start timestamp
corresponds to the session's start time adjusted by the first Miniscope timestamp, and the stop timestamp is the session's
start time adjusted by the last Miniscope timestamp.

"""
miniscope_metadata_json = Path(miniscope_metadata_json)
timestamps_file_path = Path(timestamps_file_path)
if miniscope_metadata_json.is_file() and timestamps_file_path.is_file():

session_start_time = get_recording_start_time(file_path=miniscope_metadata_json)
miniscope_timestamps = get_miniscope_timestamps(file_path=timestamps_file_path)

start_datetime_timestamp = session_start_time + timedelta(seconds=miniscope_timestamps[0])
stop_datetime_timestamp = session_start_time + timedelta(seconds=miniscope_timestamps[-1])

return start_datetime_timestamp, stop_datetime_timestamp
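A rough sketch of how the returned datetimes could be mapped onto the week-long EDF recording as second offsets, mirroring the offset computation in the week-session converter below; the file paths are placeholders, and Raw.crop is shown only as one possible way to apply the range:

# Sketch: turn the (start, stop) datetimes into second offsets on the EDF recording.
from mne.io import read_raw_edf

from utils import get_session_slicing_time_range  # import path as used in the converters

start_dt, stop_dt = get_session_slicing_time_range(
    miniscope_metadata_json="path/to/metaData.json",  # placeholder paths
    timestamps_file_path="path/to/timeStamps.csv",
)

raw = read_raw_edf(input_fname="path/to/recording.edf", verbose=False)
edf_start = raw.info["meas_date"].replace(tzinfo=None)  # meas_date is timezone-aware

start_s = (start_dt - edf_start).total_seconds()
stop_s = (stop_dt - edf_start).total_seconds()
sliced = raw.crop(tmin=start_s, tmax=stop_s)  # one possible way to slice the recording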


def get_session_run_time(txt_file_path: Union[str, Path]):
import re

try:
with open(txt_file_path, "r") as file:
text = file.read()
except FileNotFoundError:
print(f"File not found at {txt_file_path}")
exit()
# Extract the Run Time line
run_time_line = re.search(r"Run Time\s*:\s*([\d:.]+)", text)
if run_time_line:
run_time_str = run_time_line.group(1)

# Convert Run Time to seconds
h, m, s = map(float, run_time_str.split(":"))
duration = h * 3600 + m * 60 + s
return duration
else:
print("Run Time information not found.")
75 changes: 5 additions & 70 deletions src/cai_lab_to_nwb/zaki_2024/zaki_2024_convert_session.py
@@ -4,79 +4,14 @@

from pathlib import Path
from typing import Union
from datetime import datetime, timedelta
from datetime import datetime
import pandas as pd
import json

from neuroconv.utils import load_dict_from_file, dict_deep_update

from zaki_2024_nwbconverter import Zaki2024NWBConverter
from interfaces.miniscope_imaging_interface import get_miniscope_timestamps, get_recording_start_time


def get_miniscope_folder_path(folder_path: Union[str, Path]):
"""
Retrieve the path to the Miniscope folder within the given session folder based on metadata.

Parameters:
-----------
folder_path : Union[str, Path]
Path to the main session folder, which should contain a "metaData.json" file with information about the Miniscope.

Returns:
--------
Optional[Path]
Path to the Miniscope folder, formatted to replace any spaces in the Miniscope name with underscores. Returns `None` if the
specified folder is not a directory or if the metadata JSON is missing or misconfigured.

Raises:
-------
AssertionError
If the "metaData.json" file is not found in the given folder path.
"""
folder_path = Path(folder_path)
if folder_path.is_dir():
general_metadata_json = folder_path / "metaData.json"
assert general_metadata_json.exists(), f"General metadata json not found in {folder_path}"
with open(general_metadata_json) as f:
general_metadata = json.load(f)
miniscope_name = general_metadata["miniscopes"][0]
return folder_path / miniscope_name.replace(" ", "_")
else:
print(f"No Miniscope data found at {folder_path}")
return None


def get_edf_slicing_time_range(miniscope_metadata_json: Union[str, Path], timestamps_file_path: Union[str, Path]):
"""
Calculate the time range for EDF slicing based on session start time and Miniscope timestamps.

Parameters:
-----------
miniscope_metadata_json : Union[str, Path]
Path to the metadata.json file produced by Miniscope output.

timestamps_file_path : Union[str, Path]
Path to the Miniscope timeStamps.csv file.

Returns:
--------
Tuple[datetime, datetime]
A tuple containing the start and stop timestamps (as datetime objects) for the EDF slicing period. The start timestamp
corresponds to the session's start time adjusted by the first Miniscope timestamp, and the stop timestamp is the session's
start time adjusted by the last Miniscope timestamp.

"""
miniscope_metadata_json = Path(miniscope_metadata_json)
timestamps_file_path = Path(timestamps_file_path)
if miniscope_metadata_json.is_file() and timestamps_file_path.is_file():

session_start_time = get_recording_start_time(file_path=miniscope_metadata_json)
miniscope_timestamps = get_miniscope_timestamps(file_path=timestamps_file_path)

start_datetime_timestamp = session_start_time + timedelta(seconds=miniscope_timestamps[0])
stop_datetime_timestamp = session_start_time + timedelta(seconds=miniscope_timestamps[-1])

return start_datetime_timestamp, stop_datetime_timestamp
from utils import get_session_slicing_time_range
from interfaces.miniscope_imaging_interface import get_miniscope_folder_path


def session_to_nwb(
@@ -182,7 +117,7 @@ def session_to_nwb(
assert miniscope_metadata_json.exists(), f"General metadata json not found in {folder_path}"
timestamps_file_path = miniscope_folder_path / "timeStamps.csv"
assert timestamps_file_path.exists(), f"Miniscope timestamps file not found in {miniscope_folder_path}"
start_datetime_timestamp, stop_datetime_timestamp = get_edf_slicing_time_range(
start_datetime_timestamp, stop_datetime_timestamp = get_session_slicing_time_range(
miniscope_metadata_json=miniscope_metadata_json, timestamps_file_path=timestamps_file_path
)
source_data.update(
75 changes: 63 additions & 12 deletions src/cai_lab_to_nwb/zaki_2024/zaki_2024_convert_week_session.py
@@ -3,11 +3,17 @@
import time
from natsort import natsorted
from pathlib import Path
import warnings
from typing import Union
import re
import pandas as pd
from datetime import datetime
from mne.io import read_raw_edf

from neuroconv.utils import load_dict_from_file, dict_deep_update
from neuroconv.tools.nwb_helpers import configure_and_write_nwbfile

from utils import get_session_slicing_time_range, get_session_run_time
from interfaces.miniscope_imaging_interface import get_miniscope_folder_path
from zaki_2024_nwbconverter import Zaki2024NWBConverter


@@ -18,7 +24,6 @@ def session_to_nwb(
stub_test: bool = False,
verbose: bool = True,
):

if verbose:
print(f"Converting week-long session")
start = time.time()
@@ -66,27 +71,73 @@
conversion_options.update(dict(CellRegistration=dict(stub_test=stub_test, subject_id=subject_id)))

converter = Zaki2024NWBConverter(source_data=source_data)

# Add datetime to conversion
metadata = converter.get_metadata()
# Update default metadata with the editable in the corresponding yaml file
editable_metadata_path = Path(__file__).parent / "zaki_2024_metadata.yaml"
editable_metadata = load_dict_from_file(editable_metadata_path)
metadata = dict_deep_update(metadata, editable_metadata)

from mne.io import read_raw_edf
metadata["Subject"]["subject_id"] = subject_id

edf_reader = read_raw_edf(input_fname=edf_file_paths[0], verbose=verbose)
session_start_time = edf_reader.info["meas_date"]

metadata["NWBFile"]["session_start_time"] = session_start_time

# Update default metadata with the editable in the corresponding yaml file
editable_metadata_path = Path(__file__).parent / "zaki_2024_metadata.yaml"
editable_metadata = load_dict_from_file(editable_metadata_path)
metadata = dict_deep_update(metadata, editable_metadata)
nwbfile = converter.create_nwbfile(metadata=metadata, conversion_options=conversion_options)

metadata["Subject"]["subject_id"] = subject_id
# Add epochs table to store time range of conditioning and offline sessions
sessions_summary_file = data_dir_path / f"Ca_EEG_Experiment/{subject_id}/{subject_id}_SessionTimes.csv"
sessions_summary_df = pd.read_csv(sessions_summary_file)

# Add columns to TimeIntervals
nwbfile.add_epoch_column(name="session_ids", description="ID of the session")

for task, date_str, time_str in zip(
sessions_summary_df["Session"], sessions_summary_df["Date"], sessions_summary_df["Time"]
):
session_id = subject_id + "_" + task
if "Offline" in session_id:
offline_day = session_id.split("Session")[0]
experiment_dir_path = (
data_dir_path / "Ca_EEG_Experiment" / subject_id / (subject_id + "_Offline") / offline_day
)
else:
experiment_dir_path = (
data_dir_path / "Ca_EEG_Experiment" / subject_id / (subject_id + "_Sessions") / session_id
)
try:
folder_path = experiment_dir_path / date_str / time_str
miniscope_folder_path = get_miniscope_folder_path(folder_path)
miniscope_metadata_json = folder_path / "metaData.json"
assert miniscope_metadata_json.exists(), f"General metadata json not found in {folder_path}"
timestamps_file_path = miniscope_folder_path / "timeStamps.csv"
assert timestamps_file_path.exists(), f"Miniscope timestamps file not found in {miniscope_folder_path}"

start_datetime_timestamp, stop_datetime_timestamp = get_session_slicing_time_range(
miniscope_metadata_json=miniscope_metadata_json, timestamps_file_path=timestamps_file_path
)

start_time = (start_datetime_timestamp - session_start_time.replace(tzinfo=None)).total_seconds()
stop_time = (stop_datetime_timestamp - session_start_time.replace(tzinfo=None)).total_seconds()

# Some sessions may not have imaging data, so we extract the run time from the session notes (.txt file)
# and use the date string and time string to retrieve the start datetime of the session
except:
datetime_str = date_str + " " + time_str
start_datetime_timestamp = datetime.strptime(datetime_str, "%Y_%m_%d %H_%M_%S")

txt_file_path = experiment_dir_path / f"{session_id}.txt"
session_run_time = get_session_run_time(txt_file_path=txt_file_path)

start_time = (start_datetime_timestamp - session_start_time.replace(tzinfo=None)).total_seconds()
stop_time = start_time + session_run_time

nwbfile.add_epoch(start_time=start_time, stop_time=stop_time, session_ids=session_id)

# Run conversion
converter.run_conversion(
metadata=metadata, nwbfile_path=nwbfile_path, conversion_options=conversion_options, overwrite=True
)
configure_and_write_nwbfile(nwbfile=nwbfile, backend="hdf5", output_filepath=nwbfile_path)

if verbose:
stop_time = time.time()
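For context on the epochs pattern added above, a standalone pynwb sketch with placeholder NWBFile fields and a made-up session id; the converter itself fills these values from the subject's SessionTimes.csv summary and the EDF start time:

# Standalone sketch of the add_epoch_column / add_epoch pattern (placeholder values only).
from datetime import datetime, timezone
from pynwb import NWBFile

nwbfile = NWBFile(
    session_description="example week-long session",  # placeholders, not the converter's metadata
    identifier="example-identifier",
    session_start_time=datetime(2024, 1, 1, tzinfo=timezone.utc),
)

# The custom column must be declared before epochs that fill it are added.
nwbfile.add_epoch_column(name="session_ids", description="ID of the session")
nwbfile.add_epoch(start_time=0.0, stop_time=300.0, session_ids="example_session_id")

print(nwbfile.epochs.to_dataframe())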