Skip to content

Commit

Permalink
Track Sources for outputs with BIDS URIs (PennLINC#966)
Browse files Browse the repository at this point in the history
  • Loading branch information
tsalo authored Oct 20, 2023
1 parent 8ffb4b8 commit c6ef7b4
Show file tree
Hide file tree
Showing 19 changed files with 2,010 additions and 552 deletions.
40 changes: 12 additions & 28 deletions docs/outputs.rst
Original file line number Diff line number Diff line change
Expand Up @@ -165,21 +165,15 @@ Denoised or residual BOLD data
xcp_d/
sub-<label>/[ses-<label>/]
func/
# Nifti
# NIfTI
<source_entities>_space-<label>_desc-denoised_bold.nii.gz
<source_entities>_space-<label>_desc-denoised_bold.json
<source_entities>_space-<label>_desc-denoisedSmoothed_bold.nii.gz
<source_entities>_space-<label>_desc-denoisedSmoothed_bold.json
<source_entities>_space-<label>_desc-interpolated_bold.nii.gz
<source_entities>_space-<label>_desc-interpolated_bold.json
# Cifti
# CIFTI
<source_entities>_space-fsLR_den-91k_desc-denoised_bold.dtseries.nii
<source_entities>_space-fsLR_den-91k_desc-denoised_bold.json
<source_entities>_space-fsLR_den-91k_desc-denoisedSmoothed_bold.dtseries.nii
<source_entities>_space-fsLR_den-91k_desc-denoisedSmoothed_bold.json
<source_entities>_space-fsLR_den-91k_desc-interpolated_bold.dtseries.nii
<source_entities>_space-fsLR_den-91k_desc-interpolated_bold.json
.. important::

Expand Down Expand Up @@ -212,21 +206,21 @@ This includes the atlases used to extract the timeseries.
.. code-block::
xcp_d/
# Nifti
# NIfTI
space-<label>_atlas-<label>_dseg.nii.gz
# Cifti
# CIFTI
space-<label>_atlas-<label>_dseg.dlabel.nii
sub-<label>/[ses-<label>/]
func/
# Nifti
# NIfTI
<source_entities>_space-<label>_atlas-<label>_coverage.tsv
<source_entities>_space-<label>_atlas-<label>_timeseries.tsv
<source_entities>_space-<label>_atlas-<label>_measure-pearsoncorrelation_conmat.tsv
<source_entities>_space-<label>_atlas-<label>_measure-pearsoncorrelation_desc-<INT>volumes_conmat.tsv
# Cifti
# CIFTI
<source_entities>_space-fsLR_atlas-<label>_den-91k_coverage.tsv
<source_entities>_space-fsLR_atlas-<label>_den-91k_coverage.pscalar.nii
<source_entities>_space-fsLR_atlas-<label>_den-91k_timeseries.tsv
Expand Down Expand Up @@ -259,14 +253,14 @@ data.
xcp_d/
sub-<label>/[ses-<label>/]
func/
# Nifti
# NIfTI
<source_entities>_space-<label>_reho.nii.gz
<source_entities>_space-<label>_alff.nii.gz
<source_entities>_space-<label>_desc-smooth_alff.nii.gz
<source_entities>_space-<label>_atlas-<atlas>_alff.tsv
<source_entities>_space-<label>_atlas-<atlas>_reho.tsv
# Cifti
# CIFTI
<source_entities>_space-fsLR_den-91k_reho.dscalar.nii
<source_entities>_space-fsLR_den-91k_alff.dscalar.nii
<source_entities>_space-fsLR_den-91k_desc-smooth_alff.dscalar.nii
Expand All @@ -284,21 +278,15 @@ Other outputs include quality control, framewise displacement, and confounds fil
sub-<label>/[ses-<label>/]
func/
# Nifti
<source_entities>_space-<label>_desc-linc_qc.csv
<source_entities>[_desc-filtered]_motion.tsv
<source_entities>[_desc-filtered]_motion.json
<source_entities>_outliers.tsv
<source_entities>_outliers.json
<source_entities>_design.tsv
# Cifti
# NIfTI
<source_entities>_space-<label>_desc-linc_qc.csv
# CIFTI
<source_entities>_space-fsLR_desc-linc_qc.csv
<source_entities>[_desc-filtered]_motion.tsv
<source_entities>[_desc-filtered]_motion.json
<source_entities>_outliers.tsv
<source_entities>_outliers.json
<source_entities>_design.tsv
``[_desc-filtered]_motion.tsv`` is a tab-delimited file with seven columns:
one for each of the six filtered motion parameters, as well as "framewise_displacement".
Expand Down Expand Up @@ -327,10 +315,6 @@ to 1mm FD in 0.01 steps.
xcp_d/
sub-<label>/[ses-<label>/]
func/
# Nifti
<source_entities>_desc-dcan_qc.hdf5
# Cifti
<source_entities>_desc-dcan_qc.hdf5
These files have the following keys:
Expand Down
273 changes: 252 additions & 21 deletions xcp_d/tests/data/test_ds001419_cifti_outputs.txt

Large diffs are not rendered by default.

234 changes: 209 additions & 25 deletions xcp_d/tests/data/test_ds001419_nifti_outputs.txt

Large diffs are not rendered by default.

138 changes: 125 additions & 13 deletions xcp_d/tests/data/test_fmriprep_without_freesurfer_outputs.txt

Large diffs are not rendered by default.

92 changes: 79 additions & 13 deletions xcp_d/tests/data/test_nibabies_outputs.txt

Large diffs are not rendered by default.

121 changes: 110 additions & 11 deletions xcp_d/tests/data/test_pnc_cifti_outputs.txt

Large diffs are not rendered by default.

119 changes: 109 additions & 10 deletions xcp_d/tests/data/test_pnc_cifti_t2wonly_outputs.txt

Large diffs are not rendered by default.

146 changes: 129 additions & 17 deletions xcp_d/tests/data/test_pnc_nifti_outputs.txt

Large diffs are not rendered by default.

89 changes: 84 additions & 5 deletions xcp_d/tests/test_utils_bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,17 +147,22 @@ def test_write_dataset_description(datasets, tmp_path_factory, caplog):

# It will work when we give it a real fmri_dir.
fmri_dir = datasets["ds001419"]
xbids.write_dataset_description(fmri_dir, tmpdir)
xbids.write_dataset_description(fmri_dir, tmpdir, custom_confounds_folder="/fake/path4")
assert os.path.isfile(dset_description)

# Now overwrite the description.
xbids.write_dataset_description(fmri_dir, tmpdir)
assert os.path.isfile(dset_description)

# Now change the version and re-run the function.
with open(dset_description, "r") as fo:
desc = json.load(fo)

assert "'preprocessed' is already a dataset link" not in caplog.text
assert "'xcp_d' is already a dataset link" not in caplog.text
assert "'custom_confounds' is already a dataset link" not in caplog.text
xbids.write_dataset_description(tmpdir, tmpdir, custom_confounds_folder="/fake/path4")
assert "'preprocessed' is already a dataset link" in caplog.text
assert "'xcp_d' is already a dataset link" in caplog.text
assert "'custom_confounds' is already a dataset link" in caplog.text

# Now change the version and re-run the function.
desc["GeneratedBy"][0]["Version"] = "0.0.1"
with open(dset_description, "w") as fo:
json.dump(desc, fo, indent=4)
Expand Down Expand Up @@ -294,3 +299,77 @@ def test_group_across_runs():
"/path/sub-01_task-rest_dir-LR_run-2_bold.nii.gz",
"/path/sub-01_task-rest_dir-RL_run-2_bold.nii.gz",
]


def test_make_uri():
    """Test _make_uri's happy path and its out-of-dataset error."""
    bold_path = "/path/to/dset/sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file under the dataset root becomes a BIDS URI relative to that root.
    result = xbids._make_uri(bold_path, dataset_name="test", dataset_path="/path/to/dset")
    assert result == "bids:test:sub-01/func/sub-01_task-rest_bold.nii.gz"

    # A file that does not live under the dataset root must raise.
    with pytest.raises(ValueError, match="does not start with"):
        xbids._make_uri(bold_path, dataset_name="test", dataset_path="/another/path/haha")


def test_make_xcpd_uri():
    """Test _make_xcpd_uri with a single path and with a list of paths.

    Fixes a bug where the return value of the list-input call was discarded,
    so the second assertion only re-checked the result of the first call and
    the list-input code path was never actually verified.
    """
    out_file = "/path/to/dset/xcp_d/sub-01/func/sub-01_task-rest_bold.nii.gz"

    # Scalar input: result is still wrapped in a list.
    uri = xbids._make_xcpd_uri(out_file, output_dir="/path/to/dset")
    assert uri == ["bids:xcp_d:sub-01/func/sub-01_task-rest_bold.nii.gz"]

    # List input: must be assigned so the assertion checks THIS call's output.
    uri = xbids._make_xcpd_uri([out_file], output_dir="/path/to/dset")
    assert uri == ["bids:xcp_d:sub-01/func/sub-01_task-rest_bold.nii.gz"]


def test_make_xcpd_uri_lol():
    """Test _make_xcpd_uri_lol (list-of-lists input: transpose, then convert to URIs)."""
    # Outer lists are grouped by run; inner lists are grouped by subject.
    run_major = [
        [
            "/path/to/dset/xcp_d/sub-01/func/sub-01_task-rest_run-1_bold.nii.gz",
            "/path/to/dset/xcp_d/sub-02/func/sub-01_task-rest_run-1_bold.nii.gz",
            "/path/to/dset/xcp_d/sub-03/func/sub-01_task-rest_run-1_bold.nii.gz",
        ],
        [
            "/path/to/dset/xcp_d/sub-01/func/sub-01_task-rest_run-2_bold.nii.gz",
            "/path/to/dset/xcp_d/sub-02/func/sub-01_task-rest_run-2_bold.nii.gz",
            "/path/to/dset/xcp_d/sub-03/func/sub-01_task-rest_run-2_bold.nii.gz",
        ],
    ]

    # Output should be transposed (grouped by subject) and use xcp_d BIDS URIs.
    expected = [
        [
            "bids:xcp_d:sub-01/func/sub-01_task-rest_run-1_bold.nii.gz",
            "bids:xcp_d:sub-01/func/sub-01_task-rest_run-2_bold.nii.gz",
        ],
        [
            "bids:xcp_d:sub-02/func/sub-01_task-rest_run-1_bold.nii.gz",
            "bids:xcp_d:sub-02/func/sub-01_task-rest_run-2_bold.nii.gz",
        ],
        [
            "bids:xcp_d:sub-03/func/sub-01_task-rest_run-1_bold.nii.gz",
            "bids:xcp_d:sub-03/func/sub-01_task-rest_run-2_bold.nii.gz",
        ],
    ]
    assert xbids._make_xcpd_uri_lol(run_major, output_dir="/path/to/dset/") == expected


def test_make_preproc_uri():
    """Test _make_preproc_uri with a single path and with a list of paths.

    Fixes a bug where the return value of the list-input call was discarded,
    so the second assertion only re-checked the result of the first call and
    the list-input code path was never actually verified.
    """
    out_file = "/path/to/dset/sub-01/func/sub-01_task-rest_bold.nii.gz"

    # Scalar input: result is still wrapped in a list.
    uri = xbids._make_preproc_uri(out_file, fmri_dir="/path/to/dset")
    assert uri == ["bids:preprocessed:sub-01/func/sub-01_task-rest_bold.nii.gz"]

    # List input: must be assigned so the assertion checks THIS call's output.
    uri = xbids._make_preproc_uri([out_file], fmri_dir="/path/to/dset")
    assert uri == ["bids:preprocessed:sub-01/func/sub-01_task-rest_bold.nii.gz"]


def test_make_custom_uri():
    """Test _make_custom_uri with a single path and with a list of paths.

    Fixes a bug where the return value of the list-input call was discarded,
    so the second assertion only re-checked the result of the first call and
    the list-input code path was never actually verified.
    """
    out_file = "/path/to/dset/sub-01_task-rest_bold.nii.gz"

    # Scalar input: result is still wrapped in a list.
    uri = xbids._make_custom_uri(out_file)
    assert uri == ["bids:custom_confounds:sub-01_task-rest_bold.nii.gz"]

    # List input: must be assigned so the assertion checks THIS call's output.
    uri = xbids._make_custom_uri([out_file])
    assert uri == ["bids:custom_confounds:sub-01_task-rest_bold.nii.gz"]
85 changes: 85 additions & 0 deletions xcp_d/tests/test_utils_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -378,3 +378,88 @@ def test_select_first():

lst = "abc"
assert utils._select_first(lst) == "a"


def test_listify():
    """Test _listify: wrap scalars and strings in a list; pass lists and tuples through."""
    # (input, expected-output) pairs kept side by side for easy comparison.
    cases = [
        (1, [1]),                                  # bare scalar -> wrapped
        ((1,), (1,)),                              # tuple -> unchanged
        ("a", ["a"]),                              # string -> wrapped (not iterated)
        (["a"], ["a"]),                            # list -> unchanged
        (["a", ["b", "c"]], ["a", ["b", "c"]]),    # nested list -> unchanged
        (("a", "b"), ("a", "b")),                  # tuple -> unchanged
    ]
    for value, expected in cases:
        assert utils._listify(value) == expected


def test_make_dictionary():
    """Test _make_dictionary's merging behavior for the Sources field."""
    # List + list: values are concatenated and the input dict is left untouched.
    base = {"Sources": ["a"]}
    merged = utils._make_dictionary(base, Sources=["b"])
    assert base["Sources"] == ["a"]
    assert merged["Sources"] == ["a", "b"]

    # No pre-existing Sources key: the keyword value is used as-is.
    base = {"Test": "a"}
    merged = utils._make_dictionary(base, Sources=["b"])
    assert merged["Sources"] == ["b"]

    # No pre-existing Sources key, scalar keyword value: also used as-is.
    base = {"Test": ["a"]}
    merged = utils._make_dictionary(base, Sources="b")
    assert merged["Sources"] == "b"

    # Scalar + list: scalar is promoted and prepended; input dict untouched.
    base = {"Sources": "a"}
    merged = utils._make_dictionary(base, Sources=["b"])
    assert base["Sources"] == "a"
    assert merged["Sources"] == ["a", "b"]

    # List + scalar: scalar is appended; input dict untouched.
    base = {"Sources": ["a"]}
    merged = utils._make_dictionary(base, Sources="b")
    assert base["Sources"] == ["a"]
    assert merged["Sources"] == ["a", "b"]

    # No metadata at all: a fresh dict is built from the keyword arguments.
    merged = utils._make_dictionary(metadata=None, Sources=["b"])
    assert merged["Sources"] == ["b"]


def test_transpose_lol():
    """Test _transpose_lol (transpose a list of lists)."""
    cases = [
        # Rectangular input: straightforward transpose.
        (
            [["a", "b", "c"], [1, 2, 3]],
            [["a", 1], ["b", 2], ["c", 3]],
        ),
        # Ragged input: the extra "d" is dropped (zip-style truncation
        # to the shortest row).
        (
            [["a", "b", "c", "d"], [1, 2, 3]],
            [["a", 1], ["b", 2], ["c", 3]],
        ),
    ]
    for lol, expected in cases:
        assert utils._transpose_lol(lol) == expected
2 changes: 1 addition & 1 deletion xcp_d/tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ def reorder_expected_outputs():
with open(expected_output_file, "r") as fo:
file_contents = fo.readlines()

file_contents = sorted(file_contents)
file_contents = sorted(list(set(file_contents)))

with open(expected_output_file, "w") as fo:
fo.writelines(file_contents)
Loading

0 comments on commit c6ef7b4

Please sign in to comment.