From 4482442761257f043343fafa65e5328fddc76975 Mon Sep 17 00:00:00 2001 From: Taylor Salo Date: Mon, 5 Aug 2024 12:24:28 -0400 Subject: [PATCH] Restructure XCP-D to better match fMRIPrep (#1225) --- docs/workflows.rst | 32 +- xcp_d/tests/test_workflows_anatomical.py | 8 +- xcp_d/tests/test_workflows_connectivity.py | 4 +- ...tingstate.py => test_workflows_metrics.py} | 14 +- ...csummary.py => test_workflows_plotting.py} | 6 +- xcp_d/workflows/__init__.py | 20 +- xcp_d/workflows/anatomical/__init__.py | 11 + xcp_d/workflows/anatomical/outputs.py | 116 +++ xcp_d/workflows/anatomical/parcellation.py | 155 ++++ xcp_d/workflows/anatomical/plotting.py | 367 +++++++++ .../{anatomical.py => anatomical/surface.py} | 273 +------ xcp_d/workflows/anatomical/volume.py | 261 ++++++ xcp_d/workflows/base.py | 19 +- xcp_d/workflows/bold/__init__.py | 23 + xcp_d/workflows/{ => bold}/cifti.py | 17 +- xcp_d/workflows/{ => bold}/concatenation.py | 2 +- xcp_d/workflows/{ => bold}/connectivity.py | 525 +----------- .../{restingstate.py => bold/metrics.py} | 3 +- xcp_d/workflows/{bold.py => bold/nifti.py} | 19 +- xcp_d/workflows/{ => bold}/outputs.py | 108 +-- xcp_d/workflows/bold/plotting.py | 698 ++++++++++++++++ xcp_d/workflows/{ => bold}/postprocessing.py | 1 + xcp_d/workflows/execsummary.py | 764 ------------------ xcp_d/workflows/parcellation.py | 392 +++++++++ xcp_d/workflows/plotting.py | 587 ++++---------- 25 files changed, 2257 insertions(+), 2168 deletions(-) rename xcp_d/tests/{test_workflows_restingstate.py => test_workflows_metrics.py} (96%) rename xcp_d/tests/{test_workflows_execsummary.py => test_workflows_plotting.py} (87%) create mode 100644 xcp_d/workflows/anatomical/__init__.py create mode 100644 xcp_d/workflows/anatomical/outputs.py create mode 100644 xcp_d/workflows/anatomical/parcellation.py create mode 100644 xcp_d/workflows/anatomical/plotting.py rename xcp_d/workflows/{anatomical.py => anatomical/surface.py} (78%) create mode 100644 
xcp_d/workflows/anatomical/volume.py create mode 100644 xcp_d/workflows/bold/__init__.py rename xcp_d/workflows/{ => bold}/cifti.py (97%) rename xcp_d/workflows/{ => bold}/concatenation.py (99%) rename xcp_d/workflows/{ => bold}/connectivity.py (55%) rename xcp_d/workflows/{restingstate.py => bold/metrics.py} (99%) rename xcp_d/workflows/{bold.py => bold/nifti.py} (96%) rename xcp_d/workflows/{ => bold}/outputs.py (91%) create mode 100644 xcp_d/workflows/bold/plotting.py rename xcp_d/workflows/{ => bold}/postprocessing.py (99%) delete mode 100644 xcp_d/workflows/execsummary.py create mode 100644 xcp_d/workflows/parcellation.py diff --git a/docs/workflows.rst b/docs/workflows.rst index bff718ceb..9c7425adf 100644 --- a/docs/workflows.rst +++ b/docs/workflows.rst @@ -245,7 +245,7 @@ See :ref:`usage_inputs` for information on input dataset structures. Anatomical processing ===================== -:func:`~xcp_d.workflows.anatomical.init_postprocess_anat_wf` +:func:`~xcp_d.workflows.anatomical.volume.init_postprocess_anat_wf` XCP-D performs minimal postprocessing on anatomical derivatives from the preprocessing pipeline. This includes applying existing transforms to preprocessed T1w and T2w volumes, @@ -255,7 +255,7 @@ while retaining the original resolution. Surface normalization --------------------- -:func:`~xcp_d.workflows.anatomical.init_warp_surfaces_to_template_wf` +:func:`~xcp_d.workflows.anatomical.surface.init_warp_surfaces_to_template_wf` If the ``--warp-surfaces-native2std`` flag is used, then fsnative surface files from the preprocessing derivatives will be warped to fsLR-32k space. 
@@ -267,7 +267,7 @@ then fsnative surface files from the preprocessing derivatives will be warped to Identification of high-motion outlier volumes ============================================= -:func:`~xcp_d.workflows.postprocessing.init_prepare_confounds_wf`, +:func:`~xcp_d.workflows.bold.postprocessing.init_prepare_confounds_wf`, :class:`~xcp_d.interfaces.censoring.GenerateConfounds` XCP-D uses framewise displacement to identify high-motion outlier volumes. @@ -282,7 +282,7 @@ The threshold used to identify outlier volumes can be set with the ``--fd-thresh Motion parameter filtering [OPTIONAL] ------------------------------------- -:func:`~xcp_d.workflows.postprocessing.init_prepare_confounds_wf`, +:func:`~xcp_d.workflows.bold.postprocessing.init_prepare_confounds_wf`, :class:`~xcp_d.interfaces.censoring.GenerateConfounds`, :func:`~xcp_d.utils.confounds.load_motion` @@ -336,7 +336,7 @@ per :footcite:t:`gratton2020removal`. Framewise displacement calculation and thresholding --------------------------------------------------- -:func:`~xcp_d.workflows.postprocessing.init_prepare_confounds_wf`, +:func:`~xcp_d.workflows.bold.postprocessing.init_prepare_confounds_wf`, :class:`~xcp_d.interfaces.censoring.GenerateConfounds`, :func:`~xcp_d.utils.modified_data.compute_fd` @@ -357,7 +357,7 @@ These volumes will later be removed from the denoised data. 
Confound regressor selection ============================ -:func:`~xcp_d.workflows.postprocessing.init_prepare_confounds_wf`, +:func:`~xcp_d.workflows.bold.postprocessing.init_prepare_confounds_wf`, :func:`~xcp_d.interfaces.censoring.GenerateConfounds` The confound regressor configurations in the table below are implemented in XCP-D, @@ -533,7 +533,7 @@ For more information about confound regressor selection, please refer to :footci Dummy scan removal [OPTIONAL] ============================= -:func:`~xcp_d.workflows.postprocessing.init_prepare_confounds_wf`, +:func:`~xcp_d.workflows.bold.postprocessing.init_prepare_confounds_wf`, :class:`~xcp_d.interfaces.censoring.RemoveDummyVolumes` XCP-D allows the first *N* volumes to be removed before processing. @@ -548,7 +548,7 @@ or they may rely on the preprocessing pipeline's estimated non-steady-state volu Despiking [OPTIONAL] ==================== -:func:`~xcp_d.workflows.postprocessing.init_despike_wf` +:func:`~xcp_d.workflows.bold.postprocessing.init_despike_wf` Despiking is a process in which large spikes in the BOLD times series are truncated. Despiking reduces/limits the amplitude or magnitude of the large spikes but preserves those @@ -697,7 +697,7 @@ These include regional homogeneity (ReHo) and amplitude of low-frequency fluctua ALFF ---- -:func:`~xcp_d.workflows.restingstate.init_alff_wf` +:func:`~xcp_d.workflows.bold.metrics.init_alff_wf` Amplitude of low-frequency fluctuation (ALFF) is a measure that ostensibly localizes spontaneous neural activity in resting-state BOLD data. 
@@ -730,8 +730,8 @@ Smoothed ALFF derivatives will also be generated if the ``--smoothing`` flag is ReHo ---- -:func:`~xcp_d.workflows.restingstate.init_reho_nifti_wf`, -:func:`~xcp_d.workflows.restingstate.init_reho_cifti_wf` +:func:`~xcp_d.workflows.bold.metrics.init_reho_nifti_wf`, +:func:`~xcp_d.workflows.bold.metrics.init_reho_cifti_wf` Regional Homogeneity (ReHo) is a measure of local temporal uniformity in the BOLD signal computed at each voxel of the processed image. Greater ReHo values correspond to greater synchrony among BOLD activity patterns measured in a local neighborhood of voxels, with neighborhood size determined by a user-specified radius of voxels. @@ -744,8 +744,8 @@ For subcortical voxels in the CIFTIs, 3dReho is used with the same parameters th Parcellation and functional connectivity estimation [OPTIONAL] ============================================================== -:func:`~xcp_d.workflows.connectivity.init_functional_connectivity_nifti_wf`, -:func:`~xcp_d.workflows.connectivity.init_functional_connectivity_cifti_wf` +:func:`~xcp_d.workflows.bold.connectivity.init_functional_connectivity_nifti_wf`, +:func:`~xcp_d.workflows.bold.connectivity.init_functional_connectivity_cifti_wf` If the user chooses, the ``denoised BOLD`` is fed into a functional connectivity workflow, @@ -790,7 +790,7 @@ which improves reproducibility. Smoothing [OPTIONAL] ==================== -:func:`~xcp_d.workflows.postprocessing.init_resd_smoothing_wf` +:func:`~xcp_d.workflows.bold.postprocessing.init_resd_smoothing_wf` The ``denoised BOLD`` may optionally be smoothed with a Gaussian kernel. This smoothing kernel is set with the ``--smoothing`` parameter. @@ -798,7 +798,7 @@ This smoothing kernel is set with the ``--smoothing`` parameter. 
Concatenation of functional derivatives [OPTIONAL] ================================================== -:func:`~xcp_d.workflows.concatenation.init_concatenate_data_wf` +:func:`~xcp_d.workflows.bold.concatenation.init_concatenate_data_wf` If the ``--combine-runs`` flag is included, then BOLD runs will be grouped by task and concatenated. Several concatenated derivatives will be generated, including the ``denoised BOLD``, @@ -816,7 +816,7 @@ the ``denoised, interpolated BOLD``, the temporal mask, and the filtered motion Quality control =============== -:func:`~xcp_d.workflows.plotting.init_qc_report_wf` +:func:`~xcp_d.workflows.bold.plotting.init_qc_report_wf` The quality control (QC) in ``XCP-D`` estimates the quality of BOLD data before and after regression and also estimates BOLD-T1w coregistration and BOLD-Template normalization diff --git a/xcp_d/tests/test_workflows_anatomical.py b/xcp_d/tests/test_workflows_anatomical.py index 41869d394..23eacc431 100644 --- a/xcp_d/tests/test_workflows_anatomical.py +++ b/xcp_d/tests/test_workflows_anatomical.py @@ -1,4 +1,4 @@ -"""Tests for the xcp_d.workflows.anatomical module.""" +"""Tests for the xcp_d.workflows.anatomical.surface module.""" import os import shutil @@ -66,7 +66,7 @@ def test_warp_surfaces_to_template_wf( """ tmpdir = tmp_path_factory.mktemp("test_warp_surfaces_to_template_wf") - wf = anatomical.init_warp_surfaces_to_template_wf( + wf = anatomical.surface.init_warp_surfaces_to_template_wf( output_dir=tmpdir, software="FreeSurfer", omp_nthreads=1, @@ -95,7 +95,7 @@ def test_warp_surfaces_to_template_wf( def test_postprocess_anat_wf(ds001419_data, tmp_path_factory): - """Test xcp_d.workflows.anatomical.init_postprocess_anat_wf.""" + """Test xcp_d.workflows.anatomical.volume.init_postprocess_anat_wf.""" tmpdir = tmp_path_factory.mktemp("test_postprocess_anat_wf") anat_to_template_xfm = ds001419_data["anat_to_template_xfm"] @@ -110,7 +110,7 @@ def test_postprocess_anat_wf(ds001419_data, tmp_path_factory): 
config.nipype.omp_nthreads = 1 config.nipype.mem_gb = 0.1 - wf = anatomical.init_postprocess_anat_wf( + wf = anatomical.volume.init_postprocess_anat_wf( t1w_available=True, t2w_available=True, target_space="MNI152NLin2009cAsym", diff --git a/xcp_d/tests/test_workflows_connectivity.py b/xcp_d/tests/test_workflows_connectivity.py index c16918666..4f27ad775 100644 --- a/xcp_d/tests/test_workflows_connectivity.py +++ b/xcp_d/tests/test_workflows_connectivity.py @@ -17,11 +17,11 @@ from xcp_d.utils.bids import _get_tr from xcp_d.utils.utils import _create_mem_gb, get_std2bold_xfms from xcp_d.utils.write_save import read_ndata, write_ndata -from xcp_d.workflows.connectivity import ( +from xcp_d.workflows.bold.connectivity import ( init_functional_connectivity_cifti_wf, init_functional_connectivity_nifti_wf, - init_load_atlases_wf, ) +from xcp_d.workflows.parcellation import init_load_atlases_wf np.set_printoptions(threshold=sys.maxsize) diff --git a/xcp_d/tests/test_workflows_restingstate.py b/xcp_d/tests/test_workflows_metrics.py similarity index 96% rename from xcp_d/tests/test_workflows_restingstate.py rename to xcp_d/tests/test_workflows_metrics.py index f1779a729..b7b9c8c62 100644 --- a/xcp_d/tests/test_workflows_restingstate.py +++ b/xcp_d/tests/test_workflows_metrics.py @@ -12,7 +12,7 @@ from xcp_d.utils.bids import _get_tr from xcp_d.utils.utils import _create_mem_gb from xcp_d.utils.write_save import read_ndata, write_ndata -from xcp_d.workflows import restingstate +from xcp_d.workflows.bold import metrics def test_nifti_alff(ds001419_data, tmp_path_factory): @@ -41,7 +41,7 @@ def test_nifti_alff(ds001419_data, tmp_path_factory): config.workflow.smoothing = 6 config.nipype.omp_nthreads = 2 - alff_wf = restingstate.init_alff_wf( + alff_wf = metrics.init_alff_wf( name_source=bold_file, TR=TR, mem_gb=mem_gbx, @@ -84,7 +84,7 @@ def test_nifti_alff(ds001419_data, tmp_path_factory): # the amplitude in low frequencies for a voxel tempdir = 
tmp_path_factory.mktemp("test_nifti_alff_02") - alff_wf = restingstate.init_alff_wf( + alff_wf = metrics.init_alff_wf( name_source=bold_file, TR=TR, mem_gb=mem_gbx, @@ -128,7 +128,7 @@ def test_cifti_alff(ds001419_data, tmp_path_factory): config.workflow.smoothing = 6 config.nipype.omp_nthreads = 2 - alff_wf = restingstate.init_alff_wf( + alff_wf = metrics.init_alff_wf( name_source=bold_file, TR=TR, mem_gb=mem_gbx, @@ -216,7 +216,7 @@ def test_nifti_reho(ds001419_data, tmp_path_factory): config.execution.xcp_d_dir = tempdir config.nipype.omp_nthreads = 2 - reho_wf = restingstate.init_reho_nifti_wf(name_source=bold_file, mem_gb=mem_gbx) + reho_wf = metrics.init_reho_nifti_wf(name_source=bold_file, mem_gb=mem_gbx) reho_wf.inputs.inputnode.bold_mask = bold_mask reho_wf.base_dir = tempdir reho_wf.inputs.inputnode.denoised_bold = bold_file @@ -272,7 +272,7 @@ def test_cifti_reho(ds001419_data, tmp_path_factory): config.execution.xcp_d_dir = tempdir config.nipype.omp_nthreads = 2 - reho_wf = restingstate.init_reho_cifti_wf( + reho_wf = metrics.init_reho_cifti_wf( name_source=source_file, mem_gb=mem_gbx, name="orig_reho_wf", @@ -296,7 +296,7 @@ def test_cifti_reho(ds001419_data, tmp_path_factory): # Run ReHo again assert os.path.isfile(noisy_bold_file) - reho_wf = restingstate.init_reho_cifti_wf( + reho_wf = metrics.init_reho_cifti_wf( name_source=source_file, mem_gb=mem_gbx, name="noisy_reho_wf", diff --git a/xcp_d/tests/test_workflows_execsummary.py b/xcp_d/tests/test_workflows_plotting.py similarity index 87% rename from xcp_d/tests/test_workflows_execsummary.py rename to xcp_d/tests/test_workflows_plotting.py index ba0a6321e..a196ccbd4 100644 --- a/xcp_d/tests/test_workflows_execsummary.py +++ b/xcp_d/tests/test_workflows_plotting.py @@ -1,11 +1,11 @@ -"""Test xcp_d.workflows.execsummary.""" +"""Test xcp_d.workflows.plotting.""" import os from nilearn import image from xcp_d.tests.utils import get_nodes -from xcp_d.workflows import execsummary +from xcp_d.workflows 
import plotting def test_init_plot_custom_slices_wf(ds001419_data, tmp_path_factory): @@ -17,7 +17,7 @@ def test_init_plot_custom_slices_wf(ds001419_data, tmp_path_factory): img_3d = image.index_img(nifti_file, 5) img_3d.to_filename(nifti_3d) - wf = execsummary.init_plot_custom_slices_wf( + wf = plotting.init_plot_custom_slices_wf( output_dir=tmpdir, desc="SubcorticalOnAtlas", name="plot_custom_slices_wf", diff --git a/xcp_d/workflows/__init__.py b/xcp_d/workflows/__init__.py index 736acc534..087c5c629 100644 --- a/xcp_d/workflows/__init__.py +++ b/xcp_d/workflows/__init__.py @@ -2,28 +2,12 @@ # vi: set ft=python sts=4 ts=4 sw=4 et """Nipype workflows for xcp_d.""" -from xcp_d.workflows import ( - anatomical, - base, - bold, - cifti, - connectivity, - execsummary, - outputs, - plotting, - postprocessing, - restingstate, -) +from xcp_d.workflows import anatomical, base, bold, parcellation, plotting __all__ = [ "anatomical", "base", "bold", - "cifti", - "connectivity", - "execsummary", - "outputs", + "parcellation", "plotting", - "postprocessing", - "restingstate", ] diff --git a/xcp_d/workflows/anatomical/__init__.py b/xcp_d/workflows/anatomical/__init__.py new file mode 100644 index 000000000..9732db3fc --- /dev/null +++ b/xcp_d/workflows/anatomical/__init__.py @@ -0,0 +1,11 @@ +"""Workflow for anatomical postprocessing.""" + +from xcp_d.workflows.anatomical import outputs, parcellation, plotting, surface, volume + +__all__ = [ + "outputs", + "parcellation", + "plotting", + "surface", + "volume", +] diff --git a/xcp_d/workflows/anatomical/outputs.py b/xcp_d/workflows/anatomical/outputs.py new file mode 100644 index 000000000..4fbccaa9d --- /dev/null +++ b/xcp_d/workflows/anatomical/outputs.py @@ -0,0 +1,116 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Workflows for collecting and saving anatomical outputs.""" + +from nipype.interfaces import utility as niu +from nipype.pipeline import 
engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow + +from xcp_d import config +from xcp_d.interfaces.bids import DerivativesDataSink +from xcp_d.interfaces.utils import FilterUndefined +from xcp_d.utils.doc import fill_doc + + +@fill_doc +def init_copy_inputs_to_outputs_wf(name="copy_inputs_to_outputs_wf"): + """Copy files from the preprocessing derivatives to the output folder, with no modifications. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.outputs import init_copy_inputs_to_outputs_wf + + with mock_config(): + wf = init_copy_inputs_to_outputs_wf() + + Parameters + ---------- + %(name)s + Default is "copy_inputs_to_outputs_wf". + + Inputs + ------ + lh_pial_surf + rh_pial_surf + lh_wm_surf + rh_wm_surf + sulcal_depth + sulcal_curv + cortical_thickness + cortical_thickness_corr + myelin + myelin_smoothed + """ + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "lh_pial_surf", + "rh_pial_surf", + "lh_wm_surf", + "rh_wm_surf", + "sulcal_depth", + "sulcal_curv", + "cortical_thickness", + "cortical_thickness_corr", + "myelin", + "myelin_smoothed", + ], + ), + name="inputnode", + ) + + # Place the surfaces in a single node. 
+ collect_files = pe.Node( + niu.Merge(10), + name="collect_files", + ) + workflow.connect([ + (inputnode, collect_files, [ + # fsLR-space surface mesh files + ("lh_pial_surf", "in1"), + ("rh_pial_surf", "in2"), + ("lh_wm_surf", "in3"), + ("rh_wm_surf", "in4"), + # fsLR-space surface shape files + ("sulcal_depth", "in5"), + ("sulcal_curv", "in6"), + ("cortical_thickness", "in7"), + ("cortical_thickness_corr", "in8"), + ("myelin", "in9"), + ("myelin_smoothed", "in10"), + ]), + ]) # fmt:skip + + filter_out_undefined = pe.Node( + FilterUndefined(), + name="filter_out_undefined", + ) + workflow.connect([(collect_files, filter_out_undefined, [("out", "inlist")])]) + + ds_copied_outputs = pe.MapNode( + DerivativesDataSink( + base_directory=output_dir, + check_hdr=False, + ), + name="ds_copied_outputs", + run_without_submitting=True, + mem_gb=1, + iterfield=["in_file", "source_file"], + ) + workflow.connect([ + (filter_out_undefined, ds_copied_outputs, [ + ("outlist", "in_file"), + ("outlist", "source_file"), + ]), + ]) # fmt:skip + + return workflow diff --git a/xcp_d/workflows/anatomical/parcellation.py b/xcp_d/workflows/anatomical/parcellation.py new file mode 100644 index 000000000..ddb4b2d72 --- /dev/null +++ b/xcp_d/workflows/anatomical/parcellation.py @@ -0,0 +1,155 @@ +"""Workflows for parcellating anatomical data.""" + +from nipype import Function, logging +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow + +from xcp_d import config +from xcp_d.interfaces.bids import DerivativesDataSink +from xcp_d.utils.atlas import get_atlas_cifti, select_atlases +from xcp_d.utils.doc import fill_doc +from xcp_d.workflows.parcellation import init_parcellate_cifti_wf + +LOGGER = logging.getLogger("nipype.workflow") + + +@fill_doc +def init_parcellate_surfaces_wf(files_to_parcellate, name="parcellate_surfaces_wf"): + """Parcellate surface files and write them out to the 
output directory. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.connectivity import init_parcellate_surfaces_wf + + with mock_config(): + wf = init_parcellate_surfaces_wf( + files_to_parcellate=["sulcal_depth", "sulcal_curv", "cortical_thickness"], + name="parcellate_surfaces_wf", + ) + + Parameters + ---------- + files_to_parcellate : :obj:`list` of :obj:`str` + List of surface file types to parcellate + (e.g., "sulcal_depth", "sulcal_curv", "cortical_thickness"). + %(name)s + + Inputs + ------ + sulcal_depth + sulcal_curv + cortical_thickness + cortical_thickness_corr + myelin + myelin_smoothed + """ + from xcp_d.interfaces.workbench import CiftiCreateDenseFromTemplate + + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + atlases = config.execution.atlases + + SURF_DESCS = { + "sulcal_depth": "sulc", + "sulcal_curv": "curv", + "cortical_thickness": "thickness", + "cortical_thickness_corr": "thicknessCorrected", + "myelin": "myelin", + "myelin_smoothed": "myelinSmoothed", + } + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "sulcal_depth", + "sulcal_curv", + "cortical_thickness", + "cortical_thickness_corr", + "myelin", + "myelin_smoothed", + ], + ), + name="inputnode", + ) + + selected_atlases = select_atlases(atlases=atlases, subset="cortical") + + if not selected_atlases: + LOGGER.warning( + "No cortical atlases have been selected, so surface metrics will not be parcellated." + ) + # If no cortical atlases are selected, inputnode could go unconnected, so add explicitly. 
+ workflow.add_nodes([inputnode]) + + return workflow + + # Get CIFTI atlases via load_data + atlas_file_grabber = pe.MapNode( + Function( + input_names=["atlas"], + output_names=["atlas_file", "atlas_labels_file", "atlas_metadata_file"], + function=get_atlas_cifti, + ), + name="atlas_file_grabber", + iterfield=["atlas"], + ) + atlas_file_grabber.inputs.atlas = selected_atlases + + for file_to_parcellate in files_to_parcellate: + resample_atlas_to_surface = pe.MapNode( + CiftiCreateDenseFromTemplate(out_file="resampled_atlas.dlabel.nii"), + name=f"resample_atlas_to_{file_to_parcellate}", + iterfield=["label"], + ) + workflow.connect([ + (inputnode, resample_atlas_to_surface, [(file_to_parcellate, "template_cifti")]), + (atlas_file_grabber, resample_atlas_to_surface, [("atlas_file", "label")]), + ]) # fmt:skip + + parcellate_surface_wf = init_parcellate_cifti_wf( + mem_gb={"resampled": 2}, + compute_mask=True, + name=f"parcellate_{file_to_parcellate}_wf", + ) + workflow.connect([ + (inputnode, parcellate_surface_wf, [(file_to_parcellate, "inputnode.in_file")]), + (atlas_file_grabber, parcellate_surface_wf, [ + ("atlas_labels_file", "inputnode.atlas_labels_files"), + ]), + (resample_atlas_to_surface, parcellate_surface_wf, [ + ("out_file", "inputnode.atlas_files"), + ]), + ]) # fmt:skip + + # Write out the parcellated files + ds_parcellated_surface = pe.MapNode( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["hemi", "desc", "den", "res"], + desc=SURF_DESCS[file_to_parcellate], + statistic="mean", + suffix="morph", + extension=".tsv", + ), + name=f"ds_parcellated_{file_to_parcellate}", + run_without_submitting=True, + mem_gb=1, + iterfield=["segmentation", "in_file"], + ) + ds_parcellated_surface.inputs.segmentation = selected_atlases + + workflow.connect([ + (inputnode, ds_parcellated_surface, [(file_to_parcellate, "source_file")]), + (parcellate_surface_wf, ds_parcellated_surface, [ + ("outputnode.parcellated_tsv", "in_file"), + ]), + ]) # 
fmt:skip + + return workflow diff --git a/xcp_d/workflows/anatomical/plotting.py b/xcp_d/workflows/anatomical/plotting.py new file mode 100644 index 000000000..e181a2466 --- /dev/null +++ b/xcp_d/workflows/anatomical/plotting.py @@ -0,0 +1,367 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Workflows for generating plots from anatomical data.""" + +from nipype import Function, logging +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow + +from xcp_d import config +from xcp_d.data import load as load_data +from xcp_d.interfaces.bids import DerivativesDataSink +from xcp_d.interfaces.nilearn import ResampleToImage +from xcp_d.interfaces.workbench import ShowScene +from xcp_d.utils.doc import fill_doc +from xcp_d.utils.execsummary import ( + get_n_frames, + get_png_image_names, + make_mosaic, + modify_brainsprite_scene_template, + modify_pngs_scene_template, +) +from xcp_d.workflows.plotting import init_plot_overlay_wf + +LOGGER = logging.getLogger("nipype.workflow") + + +@fill_doc +def init_brainsprite_figures_wf(t1w_available, t2w_available, name="brainsprite_figures_wf"): + """Create mosaic and PNG files for executive summary brainsprite. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.anatomical.plotting import init_brainsprite_figures_wf + + with mock_config(): + wf = init_brainsprite_figures_wf( + t1w_available=True, + t2w_available=True, + name="brainsprite_figures_wf", + ) + + Parameters + ---------- + t1w_available : bool + True if a T1w image is available. + t2w_available : bool + True if a T2w image is available. + %(name)s + Default is "init_brainsprite_figures_wf". + + Inputs + ------ + t1w + Path to T1w image. Optional. 
Should only be defined if ``t1w_available`` is True. + t2w + Path to T2w image. Optional. Should only be defined if ``t2w_available`` is True. + lh_wm_surf + rh_wm_surf + lh_pial_surf + rh_pial_surf + """ + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + omp_nthreads = config.nipype.omp_nthreads + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "t1w", + "t2w", + "lh_wm_surf", + "rh_wm_surf", + "lh_pial_surf", + "rh_pial_surf", + ], + ), + name="inputnode", + ) + + # Load template scene file + brainsprite_scene_template = str( + load_data("executive_summary_scenes/brainsprite_template.scene.gz") + ) + pngs_scene_template = str(load_data("executive_summary_scenes/pngs_template.scene.gz")) + + if t1w_available and t2w_available: + image_types = ["T1", "T2"] + elif t2w_available: + image_types = ["T2"] + else: + image_types = ["T1"] + + for image_type in image_types: + inputnode_anat_name = f"{image_type.lower()}w" + # Create frame-wise PNGs + get_number_of_frames = pe.Node( + Function( + function=get_n_frames, + input_names=["anat_file"], + output_names=["frame_numbers"], + ), + name=f"get_number_of_frames_{image_type}", + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + omp_nthreads=omp_nthreads, + ) + workflow.connect([ + (inputnode, get_number_of_frames, [(inputnode_anat_name, "anat_file")]), + ]) # fmt:skip + + # Modify template scene file with file paths + modify_brainsprite_template_scene = pe.MapNode( + Function( + function=modify_brainsprite_scene_template, + input_names=[ + "slice_number", + "anat_file", + "rh_pial_surf", + "lh_pial_surf", + "rh_wm_surf", + "lh_wm_surf", + "scene_template", + ], + output_names=["out_file"], + ), + name=f"modify_brainsprite_template_scene_{image_type}", + iterfield=["slice_number"], + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + omp_nthreads=omp_nthreads, + ) + modify_brainsprite_template_scene.inputs.scene_template = brainsprite_scene_template + workflow.connect([ + (inputnode, 
modify_brainsprite_template_scene, [ + (inputnode_anat_name, "anat_file"), + ("lh_wm_surf", "lh_wm_surf"), + ("rh_wm_surf", "rh_wm_surf"), + ("lh_pial_surf", "lh_pial_surf"), + ("rh_pial_surf", "rh_pial_surf"), + ]), + (get_number_of_frames, modify_brainsprite_template_scene, [ + ("frame_numbers", "slice_number"), + ]), + ]) # fmt:skip + + create_framewise_pngs = pe.MapNode( + ShowScene( + scene_name_or_number=1, + image_width=900, + image_height=800, + ), + name=f"create_framewise_pngs_{image_type}", + iterfield=["scene_file"], + mem_gb=1, + omp_nthreads=omp_nthreads, + ) + workflow.connect([ + (modify_brainsprite_template_scene, create_framewise_pngs, [ + ("out_file", "scene_file"), + ]), + ]) # fmt:skip + + # Make mosaic + make_mosaic_node = pe.Node( + Function( + function=make_mosaic, + input_names=["png_files"], + output_names=["mosaic_file"], + ), + name=f"make_mosaic_{image_type}", + mem_gb=1, + omp_nthreads=omp_nthreads, + ) + + workflow.connect([(create_framewise_pngs, make_mosaic_node, [("out_file", "png_files")])]) + + ds_mosaic_file = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["desc"], + desc="mosaic", + datatype="figures", + suffix=f"{image_type}w", + ), + name=f"ds_mosaic_file_{image_type}", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_mosaic_file, [(inputnode_anat_name, "source_file")]), + (make_mosaic_node, ds_mosaic_file, [("mosaic_file", "in_file")]), + ]) # fmt:skip + + # Start working on the selected PNG images for the button + modify_pngs_template_scene = pe.Node( + Function( + function=modify_pngs_scene_template, + input_names=[ + "anat_file", + "rh_pial_surf", + "lh_pial_surf", + "rh_wm_surf", + "lh_wm_surf", + "scene_template", + ], + output_names=["out_file"], + ), + name=f"modify_pngs_template_scene_{image_type}", + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + omp_nthreads=omp_nthreads, + ) + modify_pngs_template_scene.inputs.scene_template = pngs_scene_template + 
workflow.connect([ + (inputnode, modify_pngs_template_scene, [ + (inputnode_anat_name, "anat_file"), + ("lh_wm_surf", "lh_wm_surf"), + ("rh_wm_surf", "rh_wm_surf"), + ("lh_pial_surf", "lh_pial_surf"), + ("rh_pial_surf", "rh_pial_surf"), + ]) + ]) # fmt:skip + + # Create specific PNGs for button + get_png_scene_names = pe.Node( + Function( + function=get_png_image_names, + output_names=["scene_index", "scene_descriptions"], + ), + name=f"get_png_scene_names_{image_type}", + ) + + create_scenewise_pngs = pe.MapNode( + ShowScene(image_width=900, image_height=800), + name=f"create_scenewise_pngs_{image_type}", + iterfield=["scene_name_or_number"], + mem_gb=1, + omp_nthreads=omp_nthreads, + ) + workflow.connect([ + (modify_pngs_template_scene, create_scenewise_pngs, [("out_file", "scene_file")]), + (get_png_scene_names, create_scenewise_pngs, [ + ("scene_index", "scene_name_or_number"), + ]), + ]) # fmt:skip + + ds_scenewise_pngs = pe.MapNode( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["desc"], + datatype="figures", + suffix=f"{image_type}w", + ), + name=f"ds_scenewise_pngs_{image_type}", + run_without_submitting=False, + iterfield=["desc", "in_file"], + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + workflow.connect([ + (inputnode, ds_scenewise_pngs, [(inputnode_anat_name, "source_file")]), + (get_png_scene_names, ds_scenewise_pngs, [("scene_descriptions", "desc")]), + (create_scenewise_pngs, ds_scenewise_pngs, [("out_file", "in_file")]), + ]) # fmt:skip + + return workflow + + +@fill_doc +def init_execsummary_anatomical_plots_wf( + t1w_available, + t2w_available, + name="execsummary_anatomical_plots_wf", +): + """Generate the anatomical figures for an executive summary. + + Workflow Graph + .. 
workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.anatomical.plotting import init_execsummary_anatomical_plots_wf + + with mock_config(): + wf = init_execsummary_anatomical_plots_wf( + t1w_available=True, + t2w_available=True, + ) + + Parameters + ---------- + t1w_available : bool + Generally True. + t2w_available : bool + Generally False. + %(name)s + + Inputs + ------ + t1w + T1w image, after warping to standard space. + t2w + T2w image, after warping to standard space. + template + """ + workflow = Workflow(name=name) + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "t1w", + "t2w", + "template", + ], + ), + name="inputnode", + ) + + # Start plotting the overlay figures + # Atlas in T1w/T2w, T1w/T2w in Atlas + anatomicals = (["t1w"] if t1w_available else []) + (["t2w"] if t2w_available else []) + for anat in anatomicals: + # Resample anatomical to match resolution of template data + resample_anat = pe.Node( + ResampleToImage(), + name=f"resample_{anat}", + mem_gb=1, + ) + workflow.connect([ + (inputnode, resample_anat, [ + (anat, "in_file"), + ("template", "target_file"), + ]), + ]) # fmt:skip + + plot_anat_on_atlas_wf = init_plot_overlay_wf( + desc="AnatOnAtlas", + name=f"plot_{anat}_on_atlas_wf", + ) + workflow.connect([ + (inputnode, plot_anat_on_atlas_wf, [ + ("template", "inputnode.underlay_file"), + (anat, "inputnode.name_source"), + ]), + (resample_anat, plot_anat_on_atlas_wf, [("out_file", "inputnode.overlay_file")]), + ]) # fmt:skip + + plot_atlas_on_anat_wf = init_plot_overlay_wf( + desc="AtlasOnAnat", + name=f"plot_atlas_on_{anat}_wf", + ) + workflow.connect([ + (inputnode, plot_atlas_on_anat_wf, [ + ("template", "inputnode.overlay_file"), + (anat, "inputnode.name_source"), + ]), + (resample_anat, plot_atlas_on_anat_wf, [("out_file", "inputnode.underlay_file")]), + ]) # fmt:skip + + # TODO: Add subcortical overlay images as well. + # 1. 
Binarize atlas. + + return workflow diff --git a/xcp_d/workflows/anatomical.py b/xcp_d/workflows/anatomical/surface.py similarity index 78% rename from xcp_d/workflows/anatomical.py rename to xcp_d/workflows/anatomical/surface.py index 04e081295..2386d9cf3 100644 --- a/xcp_d/workflows/anatomical.py +++ b/xcp_d/workflows/anatomical/surface.py @@ -1,20 +1,16 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Anatomical post-processing workflows.""" +"""Workflows for processing surface anatomical files.""" + from nipype import logging from nipype.interfaces import utility as niu from nipype.interfaces.ants import CompositeTransformUtil # MB from nipype.interfaces.freesurfer import MRIsConvert from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow -from templateflow.api import get as get_template from xcp_d import config -from xcp_d.interfaces.ants import ( - ApplyTransforms, - CompositeInvTransformUtil, - ConvertTransformFile, -) +from xcp_d.interfaces.ants import CompositeInvTransformUtil, ConvertTransformFile from xcp_d.interfaces.bids import CollectRegistrationFiles, DerivativesDataSink from xcp_d.interfaces.c3 import C3d # TM from xcp_d.interfaces.nilearn import BinaryMath, Merge @@ -29,261 +25,12 @@ SurfaceSphereProjectUnproject, ) from xcp_d.utils.doc import fill_doc -from xcp_d.utils.utils import list_to_str -from xcp_d.workflows.execsummary import ( - init_brainsprite_figures_wf, - init_execsummary_anatomical_plots_wf, -) -from xcp_d.workflows.outputs import init_copy_inputs_to_outputs_wf +from xcp_d.workflows.anatomical.outputs import init_copy_inputs_to_outputs_wf +from xcp_d.workflows.anatomical.plotting import init_brainsprite_figures_wf LOGGER = logging.getLogger("nipype.workflow") -@fill_doc -def init_postprocess_anat_wf( - t1w_available, - t2w_available, - target_space, - name="postprocess_anat_wf", -): - """Copy T1w, 
segmentation, and, optionally, T2w to the derivative directory. - - If necessary, this workflow will also warp the images to standard space. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.anatomical import init_postprocess_anat_wf - - with mock_config(): - wf = init_postprocess_anat_wf( - t1w_available=True, - t2w_available=True, - target_space="MNI152NLin6Asym", - name="postprocess_anat_wf", - ) - - Parameters - ---------- - t1w_available : bool - True if a preprocessed T1w is available, False if not. - t2w_available : bool - True if a preprocessed T2w is available, False if not. - target_space : :obj:`str` - Target NIFTI template for T1w. - %(name)s - Default is "postprocess_anat_wf". - - Inputs - ------ - t1w : :obj:`str` - Path to the preprocessed T1w file. - This file may be in standard space or native T1w space. - t2w : :obj:`str` or None - Path to the preprocessed T2w file. - This file may be in standard space or native T1w space. - anat_dseg : :obj:`str` - Path to the T1w segmentation file. - %(anat_to_template_xfm)s - We need to use MNI152NLin6Asym for the template. - template : :obj:`str` - The target template. - - Outputs - ------- - t1w : :obj:`str` - Path to the preprocessed T1w file in standard space. - t2w : :obj:`str` or None - Path to the preprocessed T2w file in standard space. - """ - workflow = Workflow(name=name) - output_dir = config.execution.xcp_d_dir - input_type = config.workflow.input_type - omp_nthreads = config.nipype.omp_nthreads - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "t1w", - "t2w", - "anat_dseg", - "anat_to_template_xfm", - "template", - ], - ), - name="inputnode", - ) - - outputnode = pe.Node( - niu.IdentityInterface(fields=["t1w", "t2w"]), - name="outputnode", - ) - - # Split cohort out of the space for MNIInfant templates. 
- cohort = None - if "+" in target_space: - target_space, cohort = target_space.split("+") - - template_file = str( - get_template(template=target_space, cohort=cohort, resolution=1, desc=None, suffix="T1w") - ) - inputnode.inputs.template = template_file - - ds_anat_dseg_std = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - space=target_space, - cohort=cohort, - extension=".nii.gz", - ), - name="ds_anat_dseg_std", - run_without_submitting=False, - ) - - workflow.connect([(inputnode, ds_anat_dseg_std, [("anat_dseg", "source_file")])]) - - if t1w_available: - ds_t1w_std = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - space=target_space, - cohort=cohort, - extension=".nii.gz", - ), - name="ds_t1w_std", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_t1w_std, [("t1w", "source_file")]), - (ds_t1w_std, outputnode, [("out_file", "t1w")]), - ]) # fmt:skip - - if t2w_available: - ds_t2w_std = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - space=target_space, - cohort=cohort, - extension=".nii.gz", - ), - name="ds_t2w_std", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_t2w_std, [("t2w", "source_file")]), - (ds_t2w_std, outputnode, [("out_file", "t2w")]), - ]) # fmt:skip - - if input_type in ("dcan", "hcp", "ukb"): - # Assume that the T1w, T1w segmentation, and T2w files are in standard space, - # but don't have the "space" entity, for the "dcan" and "hcp" derivatives. - # This is a bug, and the converted filenames are inaccurate, so we have this - # workaround in place. 
- workflow.connect([(inputnode, ds_anat_dseg_std, [("anat_dseg", "in_file")])]) - - if t1w_available: - workflow.connect([(inputnode, ds_t1w_std, [("t1w", "in_file")])]) - - if t2w_available: - workflow.connect([(inputnode, ds_t2w_std, [("t2w", "in_file")])]) - - else: - out = ( - ["T1w"] if t1w_available else [] + ["T2w"] if t2w_available else [] + ["segmentation"] - ) - workflow.__desc__ = f"""\ -Native-space {list_to_str(out)} images were transformed to {target_space} space at 1 mm3 -resolution. -""" - warp_anat_dseg_to_template = pe.Node( - ApplyTransforms( - num_threads=2, - interpolation="GenericLabel", - input_image_type=3, - dimension=3, - ), - name="warp_anat_dseg_to_template", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, warp_anat_dseg_to_template, [ - ("anat_dseg", "input_image"), - ("anat_to_template_xfm", "transforms"), - ("template", "reference_image"), - ]), - (warp_anat_dseg_to_template, ds_anat_dseg_std, [("output_image", "in_file")]), - ]) # fmt:skip - - if t1w_available: - # Warp the native T1w-space T1w, T1w segmentation, and T2w files to standard space. 
- warp_t1w_to_template = pe.Node( - ApplyTransforms( - num_threads=2, - interpolation="LanczosWindowedSinc", - input_image_type=3, - dimension=3, - ), - name="warp_t1w_to_template", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, warp_t1w_to_template, [ - ("t1w", "input_image"), - ("anat_to_template_xfm", "transforms"), - ("template", "reference_image"), - ]), - (warp_t1w_to_template, ds_t1w_std, [("output_image", "in_file")]), - ]) # fmt:skip - - if t2w_available: - warp_t2w_to_template = pe.Node( - ApplyTransforms( - num_threads=2, - interpolation="LanczosWindowedSinc", - input_image_type=3, - dimension=3, - ), - name="warp_t2w_to_template", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, warp_t2w_to_template, [ - ("t2w", "input_image"), - ("anat_to_template_xfm", "transforms"), - ("template", "reference_image"), - ]), - (warp_t2w_to_template, ds_t2w_std, [("output_image", "in_file")]), - ]) # fmt:skip - - if config.workflow.abcc_qc: - execsummary_anatomical_plots_wf = init_execsummary_anatomical_plots_wf( - t1w_available=t1w_available, - t2w_available=t2w_available, - ) - workflow.connect([ - (inputnode, execsummary_anatomical_plots_wf, [("template", "inputnode.template")]), - ]) # fmt:skip - - if t1w_available: - workflow.connect([ - (ds_t1w_std, execsummary_anatomical_plots_wf, [("out_file", "inputnode.t1w")]), - ]) # fmt:skip - - if t2w_available: - workflow.connect([ - (ds_t2w_std, execsummary_anatomical_plots_wf, [("out_file", "inputnode.t2w")]), - ]) # fmt:skip - - return workflow - - @fill_doc def init_postprocess_surfaces_wf( mesh_available, @@ -321,7 +68,7 @@ def init_postprocess_surfaces_wf( from xcp_d.tests.tests import mock_config from xcp_d import config - from xcp_d.workflows.anatomical import init_postprocess_surfaces_wf + from xcp_d.workflows.anatomical.surface import init_postprocess_surfaces_wf with mock_config(): wf = init_postprocess_surfaces_wf( @@ -561,7 +308,7 @@ def 
init_warp_surfaces_to_template_wf( :graph2use: orig :simple_form: yes - from xcp_d.workflows.anatomical import init_warp_surfaces_to_template_wf + from xcp_d.workflows.anatomical.surface import init_warp_surfaces_to_template_wf wf = init_warp_surfaces_to_template_wf( output_dir=".", @@ -741,7 +488,7 @@ def init_generate_hcp_surfaces_wf(name="generate_hcp_surfaces_wf"): from xcp_d.tests.tests import mock_config from xcp_d import config - from xcp_d.workflows.anatomical import init_generate_hcp_surfaces_wf + from xcp_d.workflows.anatomical.surface import init_generate_hcp_surfaces_wf with mock_config(): wf = init_generate_hcp_surfaces_wf(name="generate_hcp_surfaces_wf") @@ -877,7 +624,7 @@ def init_ants_xfm_to_fsl_wf(mem_gb, omp_nthreads, name="ants_xfm_to_fsl_wf"): :graph2use: orig :simple_form: yes - from xcp_d.workflows.anatomical import init_ants_xfm_to_fsl_wf + from xcp_d.workflows.anatomical.surface import init_ants_xfm_to_fsl_wf wf = init_ants_xfm_to_fsl_wf( mem_gb=0.1, @@ -1133,7 +880,7 @@ def init_warp_one_hemisphere_wf( :graph2use: orig :simple_form: yes - from xcp_d.workflows.anatomical import init_warp_one_hemisphere_wf + from xcp_d.workflows.anatomical.surface import init_warp_one_hemisphere_wf wf = init_warp_one_hemisphere_wf( hemisphere="L", diff --git a/xcp_d/workflows/anatomical/volume.py b/xcp_d/workflows/anatomical/volume.py new file mode 100644 index 000000000..e8d76572c --- /dev/null +++ b/xcp_d/workflows/anatomical/volume.py @@ -0,0 +1,261 @@ +"""Workflows for processing volumetric anatomical data.""" + +from nipype import logging +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow +from templateflow.api import get as get_template + +from xcp_d import config +from xcp_d.interfaces.ants import ApplyTransforms +from xcp_d.interfaces.bids import DerivativesDataSink +from xcp_d.utils.doc import fill_doc +from xcp_d.utils.utils import 
list_to_str +from xcp_d.workflows.anatomical.plotting import init_execsummary_anatomical_plots_wf + +LOGGER = logging.getLogger("nipype.workflow") + + +@fill_doc +def init_postprocess_anat_wf( + t1w_available, + t2w_available, + target_space, + name="postprocess_anat_wf", +): + """Copy T1w, segmentation, and, optionally, T2w to the derivative directory. + + If necessary, this workflow will also warp the images to standard space. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.anatomical.volume import init_postprocess_anat_wf + + with mock_config(): + wf = init_postprocess_anat_wf( + t1w_available=True, + t2w_available=True, + target_space="MNI152NLin6Asym", + name="postprocess_anat_wf", + ) + + Parameters + ---------- + t1w_available : bool + True if a preprocessed T1w is available, False if not. + t2w_available : bool + True if a preprocessed T2w is available, False if not. + target_space : :obj:`str` + Target NIFTI template for T1w. + %(name)s + Default is "postprocess_anat_wf". + + Inputs + ------ + t1w : :obj:`str` + Path to the preprocessed T1w file. + This file may be in standard space or native T1w space. + t2w : :obj:`str` or None + Path to the preprocessed T2w file. + This file may be in standard space or native T1w space. + anat_dseg : :obj:`str` + Path to the T1w segmentation file. + %(anat_to_template_xfm)s + We need to use MNI152NLin6Asym for the template. + template : :obj:`str` + The target template. + + Outputs + ------- + t1w : :obj:`str` + Path to the preprocessed T1w file in standard space. + t2w : :obj:`str` or None + Path to the preprocessed T2w file in standard space. 
+ """ + workflow = Workflow(name=name) + output_dir = config.execution.xcp_d_dir + input_type = config.workflow.input_type + omp_nthreads = config.nipype.omp_nthreads + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "t1w", + "t2w", + "anat_dseg", + "anat_to_template_xfm", + "template", + ], + ), + name="inputnode", + ) + + outputnode = pe.Node( + niu.IdentityInterface(fields=["t1w", "t2w"]), + name="outputnode", + ) + + # Split cohort out of the space for MNIInfant templates. + cohort = None + if "+" in target_space: + target_space, cohort = target_space.split("+") + + template_file = str( + get_template(template=target_space, cohort=cohort, resolution=1, desc=None, suffix="T1w") + ) + inputnode.inputs.template = template_file + + ds_anat_dseg_std = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + space=target_space, + cohort=cohort, + extension=".nii.gz", + ), + name="ds_anat_dseg_std", + run_without_submitting=False, + ) + + workflow.connect([(inputnode, ds_anat_dseg_std, [("anat_dseg", "source_file")])]) + + if t1w_available: + ds_t1w_std = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + space=target_space, + cohort=cohort, + extension=".nii.gz", + ), + name="ds_t1w_std", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_t1w_std, [("t1w", "source_file")]), + (ds_t1w_std, outputnode, [("out_file", "t1w")]), + ]) # fmt:skip + + if t2w_available: + ds_t2w_std = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + space=target_space, + cohort=cohort, + extension=".nii.gz", + ), + name="ds_t2w_std", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_t2w_std, [("t2w", "source_file")]), + (ds_t2w_std, outputnode, [("out_file", "t2w")]), + ]) # fmt:skip + + if input_type in ("dcan", "hcp", "ukb"): + # Assume that the T1w, T1w segmentation, and T2w files are in standard space, + # but don't have the "space" entity, for the "dcan" and "hcp" derivatives. 
+ # This is a bug, and the converted filenames are inaccurate, so we have this + # workaround in place. + workflow.connect([(inputnode, ds_anat_dseg_std, [("anat_dseg", "in_file")])]) + + if t1w_available: + workflow.connect([(inputnode, ds_t1w_std, [("t1w", "in_file")])]) + + if t2w_available: + workflow.connect([(inputnode, ds_t2w_std, [("t2w", "in_file")])]) + + else: + out = ( + (["T1w"] if t1w_available else []) + (["T2w"] if t2w_available else []) + ["segmentation"] + ) + workflow.__desc__ = f"""\ +Native-space {list_to_str(out)} images were transformed to {target_space} space at 1 mm3 +resolution. +""" + warp_anat_dseg_to_template = pe.Node( + ApplyTransforms( + num_threads=2, + interpolation="GenericLabel", + input_image_type=3, + dimension=3, + ), + name="warp_anat_dseg_to_template", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, warp_anat_dseg_to_template, [ + ("anat_dseg", "input_image"), + ("anat_to_template_xfm", "transforms"), + ("template", "reference_image"), + ]), + (warp_anat_dseg_to_template, ds_anat_dseg_std, [("output_image", "in_file")]), + ]) # fmt:skip + + if t1w_available: + # Warp the native T1w-space T1w file to standard space. 
+ warp_t1w_to_template = pe.Node( + ApplyTransforms( + num_threads=2, + interpolation="LanczosWindowedSinc", + input_image_type=3, + dimension=3, + ), + name="warp_t1w_to_template", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, warp_t1w_to_template, [ + ("t1w", "input_image"), + ("anat_to_template_xfm", "transforms"), + ("template", "reference_image"), + ]), + (warp_t1w_to_template, ds_t1w_std, [("output_image", "in_file")]), + ]) # fmt:skip + + if t2w_available: + warp_t2w_to_template = pe.Node( + ApplyTransforms( + num_threads=2, + interpolation="LanczosWindowedSinc", + input_image_type=3, + dimension=3, + ), + name="warp_t2w_to_template", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, warp_t2w_to_template, [ + ("t2w", "input_image"), + ("anat_to_template_xfm", "transforms"), + ("template", "reference_image"), + ]), + (warp_t2w_to_template, ds_t2w_std, [("output_image", "in_file")]), + ]) # fmt:skip + + if config.workflow.abcc_qc: + execsummary_anatomical_plots_wf = init_execsummary_anatomical_plots_wf( + t1w_available=t1w_available, + t2w_available=t2w_available, + ) + workflow.connect([ + (inputnode, execsummary_anatomical_plots_wf, [("template", "inputnode.template")]), + ]) # fmt:skip + + if t1w_available: + workflow.connect([ + (ds_t1w_std, execsummary_anatomical_plots_wf, [("out_file", "inputnode.t1w")]), + ]) # fmt:skip + + if t2w_available: + workflow.connect([ + (ds_t2w_std, execsummary_anatomical_plots_wf, [("out_file", "inputnode.t2w")]), + ]) # fmt:skip + + return workflow diff --git a/xcp_d/workflows/base.py b/xcp_d/workflows/base.py index 4df526b6b..5156a52da 100644 --- a/xcp_d/workflows/base.py +++ b/xcp_d/workflows/base.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The primary workflows for xcp_d.""" + import os import sys from copy import deepcopy @@ -36,17 +37,13 @@ from xcp_d.utils.doc import 
fill_doc from xcp_d.utils.modified_data import calculate_exact_scans, flag_bad_run from xcp_d.utils.utils import estimate_brain_radius -from xcp_d.workflows.anatomical import ( - init_postprocess_anat_wf, - init_postprocess_surfaces_wf, -) -from xcp_d.workflows.bold import init_postprocess_nifti_wf -from xcp_d.workflows.cifti import init_postprocess_cifti_wf -from xcp_d.workflows.concatenation import init_concatenate_data_wf -from xcp_d.workflows.connectivity import ( - init_load_atlases_wf, - init_parcellate_surfaces_wf, -) +from xcp_d.workflows.anatomical.parcellation import init_parcellate_surfaces_wf +from xcp_d.workflows.anatomical.surface import init_postprocess_surfaces_wf +from xcp_d.workflows.anatomical.volume import init_postprocess_anat_wf +from xcp_d.workflows.bold.cifti import init_postprocess_cifti_wf +from xcp_d.workflows.bold.concatenation import init_concatenate_data_wf +from xcp_d.workflows.bold.nifti import init_postprocess_nifti_wf +from xcp_d.workflows.parcellation import init_load_atlases_wf LOGGER = logging.getLogger("nipype.workflow") diff --git a/xcp_d/workflows/bold/__init__.py b/xcp_d/workflows/bold/__init__.py new file mode 100644 index 000000000..f202068cf --- /dev/null +++ b/xcp_d/workflows/bold/__init__.py @@ -0,0 +1,23 @@ +"""Workflows for processing BOLD data.""" + +from xcp_d.workflows.bold import ( + cifti, + concatenation, + connectivity, + metrics, + nifti, + outputs, + plotting, + postprocessing, +) + +__all__ = [ + "cifti", + "concatenation", + "connectivity", + "metrics", + "nifti", + "outputs", + "plotting", + "postprocessing", +] diff --git a/xcp_d/workflows/cifti.py b/xcp_d/workflows/bold/cifti.py similarity index 97% rename from xcp_d/workflows/cifti.py rename to xcp_d/workflows/bold/cifti.py index fdac8186b..e1f460c08 100644 --- a/xcp_d/workflows/cifti.py +++ b/xcp_d/workflows/bold/cifti.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: 
"""Workflows for post-processing CIFTI-format BOLD data.""" + from nipype import logging from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe @@ -12,16 +13,18 @@ from xcp_d.utils.confounds import get_custom_confounds from xcp_d.utils.doc import fill_doc from xcp_d.utils.utils import _create_mem_gb -from xcp_d.workflows.connectivity import init_functional_connectivity_cifti_wf -from xcp_d.workflows.execsummary import init_execsummary_functional_plots_wf -from xcp_d.workflows.outputs import init_postproc_derivatives_wf -from xcp_d.workflows.plotting import init_qc_report_wf -from xcp_d.workflows.postprocessing import ( +from xcp_d.workflows.bold.connectivity import init_functional_connectivity_cifti_wf +from xcp_d.workflows.bold.metrics import init_alff_wf, init_reho_cifti_wf +from xcp_d.workflows.bold.outputs import init_postproc_derivatives_wf +from xcp_d.workflows.bold.plotting import ( + init_execsummary_functional_plots_wf, + init_qc_report_wf, +) +from xcp_d.workflows.bold.postprocessing import ( init_denoise_bold_wf, init_despike_wf, init_prepare_confounds_wf, ) -from xcp_d.workflows.restingstate import init_alff_wf, init_reho_cifti_wf LOGGER = logging.getLogger("nipype.workflow") @@ -49,7 +52,7 @@ def init_postprocess_cifti_wf( from xcp_d.tests.tests import mock_config from xcp_d import config from xcp_d.utils.bids import collect_data, collect_run_data - from xcp_d.workflows.cifti import init_postprocess_cifti_wf + from xcp_d.workflows.bold.cifti import init_postprocess_cifti_wf with mock_config(): bold_file = str( diff --git a/xcp_d/workflows/concatenation.py b/xcp_d/workflows/bold/concatenation.py similarity index 99% rename from xcp_d/workflows/concatenation.py rename to xcp_d/workflows/bold/concatenation.py index dbeefc13b..f5db4a2fe 100644 --- a/xcp_d/workflows/concatenation.py +++ b/xcp_d/workflows/bold/concatenation.py @@ -15,7 +15,7 @@ from xcp_d.utils.bids import _make_xcpd_uri, _make_xcpd_uri_lol from xcp_d.utils.doc 
import fill_doc from xcp_d.utils.utils import _make_dictionary, _select_first -from xcp_d.workflows.plotting import init_qc_report_wf +from xcp_d.workflows.bold.plotting import init_qc_report_wf @fill_doc diff --git a/xcp_d/workflows/connectivity.py b/xcp_d/workflows/bold/connectivity.py similarity index 55% rename from xcp_d/workflows/connectivity.py rename to xcp_d/workflows/bold/connectivity.py index 1cf912f31..e5a071bcb 100644 --- a/xcp_d/workflows/connectivity.py +++ b/xcp_d/workflows/bold/connectivity.py @@ -1,327 +1,22 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Workflows for extracting time series and computing functional connectivity.""" -from nipype import Function, logging + +from nipype import logging from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow from xcp_d import config -from xcp_d.interfaces.ants import ApplyTransforms from xcp_d.interfaces.bids import DerivativesDataSink -from xcp_d.interfaces.nilearn import IndexImage -from xcp_d.utils.atlas import get_atlas_cifti, get_atlas_nifti, select_atlases +from xcp_d.utils.atlas import select_atlases from xcp_d.utils.boilerplate import describe_atlases from xcp_d.utils.doc import fill_doc -from xcp_d.utils.utils import get_std2bold_xfms +from xcp_d.workflows.parcellation import init_parcellate_cifti_wf LOGGER = logging.getLogger("nipype.workflow") -@fill_doc -def init_load_atlases_wf(name="load_atlases_wf"): - """Load atlases and warp them to the same space as the BOLD file. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.connectivity import init_load_atlases_wf - - with mock_config(): - wf = init_load_atlases_wf() - - Parameters - ---------- - %(name)s - Default is "load_atlases_wf". 
- - Inputs - ------ - %(name_source)s - bold_file - - Outputs - ------- - atlas_files - atlas_labels_files - """ - from xcp_d.interfaces.bids import CopyAtlas - - workflow = Workflow(name=name) - atlases = config.execution.atlases - output_dir = config.execution.xcp_d_dir - file_format = config.workflow.file_format - omp_nthreads = config.nipype.omp_nthreads - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "name_source", - "bold_file", - ], - ), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "atlas_files", - "atlas_labels_files", - ], - ), - name="outputnode", - ) - - # get atlases via load_data - atlas_file_grabber = pe.MapNode( - Function( - input_names=["atlas"], - output_names=["atlas_file", "atlas_labels_file", "atlas_metadata_file"], - function=get_atlas_cifti if file_format == "cifti" else get_atlas_nifti, - ), - name="atlas_file_grabber", - iterfield=["atlas"], - ) - atlas_file_grabber.inputs.atlas = atlases - - atlas_buffer = pe.Node(niu.IdentityInterface(fields=["atlas_file"]), name="atlas_buffer") - - if file_format == "nifti": - get_transforms_to_bold_space = pe.Node( - Function( - input_names=["bold_file"], - output_names=["transformfile"], - function=get_std2bold_xfms, - ), - name="get_transforms_to_bold_space", - ) - - workflow.connect([ - (inputnode, get_transforms_to_bold_space, [("name_source", "bold_file")]), - ]) # fmt:skip - - # ApplyTransforms needs a 3D image for the reference image. 
- grab_first_volume = pe.Node( - IndexImage(index=0), - name="grab_first_volume", - ) - - workflow.connect([(inputnode, grab_first_volume, [("bold_file", "in_file")])]) - - # Using the generated transforms, apply them to get everything in the correct MNI form - warp_atlases_to_bold_space = pe.MapNode( - ApplyTransforms( - interpolation="GenericLabel", - input_image_type=3, - dimension=3, - ), - name="warp_atlases_to_bold_space", - iterfield=["input_image"], - mem_gb=2, - n_procs=omp_nthreads, - ) - - workflow.connect([ - (grab_first_volume, warp_atlases_to_bold_space, [("out_file", "reference_image")]), - (atlas_file_grabber, warp_atlases_to_bold_space, [("atlas_file", "input_image")]), - (get_transforms_to_bold_space, warp_atlases_to_bold_space, [ - ("transformfile", "transforms"), - ]), - (warp_atlases_to_bold_space, atlas_buffer, [("output_image", "atlas_file")]), - ]) # fmt:skip - - else: - workflow.connect([(atlas_file_grabber, atlas_buffer, [("atlas_file", "atlas_file")])]) - - ds_atlas = pe.MapNode( - CopyAtlas(output_dir=output_dir), - name="ds_atlas", - iterfield=["in_file", "atlas"], - run_without_submitting=True, - ) - ds_atlas.inputs.atlas = atlases - - workflow.connect([ - (inputnode, ds_atlas, [("name_source", "name_source")]), - (atlas_buffer, ds_atlas, [("atlas_file", "in_file")]), - (ds_atlas, outputnode, [("out_file", "atlas_files")]), - ]) # fmt:skip - - ds_atlas_labels_file = pe.MapNode( - CopyAtlas(output_dir=output_dir), - name="ds_atlas_labels_file", - iterfield=["in_file", "atlas"], - run_without_submitting=True, - ) - ds_atlas_labels_file.inputs.atlas = atlases - - workflow.connect([ - (inputnode, ds_atlas_labels_file, [("name_source", "name_source")]), - (atlas_file_grabber, ds_atlas_labels_file, [("atlas_labels_file", "in_file")]), - (ds_atlas_labels_file, outputnode, [("out_file", "atlas_labels_files")]), - ]) # fmt:skip - - ds_atlas_metadata = pe.MapNode( - CopyAtlas(output_dir=output_dir), - name="ds_atlas_metadata", - 
iterfield=["in_file", "atlas"], - run_without_submitting=True, - ) - ds_atlas_metadata.inputs.atlas = atlases - - workflow.connect([ - (inputnode, ds_atlas_metadata, [("name_source", "name_source")]), - (atlas_file_grabber, ds_atlas_metadata, [("atlas_metadata_file", "in_file")]), - ]) # fmt:skip - - return workflow - - -@fill_doc -def init_parcellate_surfaces_wf(files_to_parcellate, name="parcellate_surfaces_wf"): - """Parcellate surface files and write them out to the output directory. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.connectivity import init_parcellate_surfaces_wf - - with mock_config(): - wf = init_parcellate_surfaces_wf( - files_to_parcellate=["sulcal_depth", "sulcal_curv", "cortical_thickness"], - name="parcellate_surfaces_wf", - ) - - Parameters - ---------- - files_to_parcellate : :obj:`list` of :obj:`str` - List of surface file types to parcellate - (e.g., "sulcal_depth", "sulcal_curv", "cortical_thickness"). 
- %(name)s - - Inputs - ------ - sulcal_depth - sulcal_curv - cortical_thickness - cortical_thickness_corr - myelin - myelin_smoothed - """ - from xcp_d.interfaces.workbench import CiftiCreateDenseFromTemplate - - workflow = Workflow(name=name) - - output_dir = config.execution.xcp_d_dir - atlases = config.execution.atlases - - SURF_DESCS = { - "sulcal_depth": "sulc", - "sulcal_curv": "curv", - "cortical_thickness": "thickness", - "cortical_thickness_corr": "thicknessCorrected", - "myelin": "myelin", - "myelin_smoothed": "myelinSmoothed", - } - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "sulcal_depth", - "sulcal_curv", - "cortical_thickness", - "cortical_thickness_corr", - "myelin", - "myelin_smoothed", - ], - ), - name="inputnode", - ) - - selected_atlases = select_atlases(atlases=atlases, subset="cortical") - - if not selected_atlases: - LOGGER.warning( - "No cortical atlases have been selected, so surface metrics will not be parcellated." - ) - # If no cortical atlases are selected, inputnode could go unconnected, so add explicitly. 
- workflow.add_nodes([inputnode]) - - return workflow - - # Get CIFTI atlases via load_data - atlas_file_grabber = pe.MapNode( - Function( - input_names=["atlas"], - output_names=["atlas_file", "atlas_labels_file", "atlas_metadata_file"], - function=get_atlas_cifti, - ), - name="atlas_file_grabber", - iterfield=["atlas"], - ) - atlas_file_grabber.inputs.atlas = selected_atlases - - for file_to_parcellate in files_to_parcellate: - resample_atlas_to_surface = pe.MapNode( - CiftiCreateDenseFromTemplate(out_file="resampled_atlas.dlabel.nii"), - name=f"resample_atlas_to_{file_to_parcellate}", - iterfield=["label"], - ) - workflow.connect([ - (inputnode, resample_atlas_to_surface, [(file_to_parcellate, "template_cifti")]), - (atlas_file_grabber, resample_atlas_to_surface, [("atlas_file", "label")]), - ]) # fmt:skip - - parcellate_surface_wf = init_parcellate_cifti_wf( - mem_gb={"resampled": 2}, - compute_mask=True, - name=f"parcellate_{file_to_parcellate}_wf", - ) - workflow.connect([ - (inputnode, parcellate_surface_wf, [(file_to_parcellate, "inputnode.in_file")]), - (atlas_file_grabber, parcellate_surface_wf, [ - ("atlas_labels_file", "inputnode.atlas_labels_files"), - ]), - (resample_atlas_to_surface, parcellate_surface_wf, [ - ("out_file", "inputnode.atlas_files"), - ]), - ]) # fmt:skip - - # Write out the parcellated files - ds_parcellated_surface = pe.MapNode( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["hemi", "desc", "den", "res"], - desc=SURF_DESCS[file_to_parcellate], - statistic="mean", - suffix="morph", - extension=".tsv", - ), - name=f"ds_parcellated_{file_to_parcellate}", - run_without_submitting=True, - mem_gb=1, - iterfield=["segmentation", "in_file"], - ) - ds_parcellated_surface.inputs.segmentation = selected_atlases - - workflow.connect([ - (inputnode, ds_parcellated_surface, [(file_to_parcellate, "source_file")]), - (parcellate_surface_wf, ds_parcellated_surface, [ - ("outputnode.parcellated_tsv", "in_file"), - ]), - ]) # 
fmt:skip - - return workflow - - @fill_doc def init_functional_connectivity_nifti_wf(mem_gb, name="connectivity_wf"): """Extract BOLD time series and compute functional connectivity. @@ -937,215 +632,3 @@ def init_functional_connectivity_cifti_wf(mem_gb, exact_scans, name="connectivit ]) # fmt:skip return workflow - - -def init_parcellate_cifti_wf( - mem_gb, - compute_mask=True, - name="parcellate_cifti_wf", -): - """Parcellate a CIFTI file using a set of atlases. - - Part of the parcellation includes applying vertex-wise and node-wise masks. - - Vertex-wise masks are typically calculated from the full BOLD run, - wherein any vertex that has a time series of all zeros or NaNs is excluded. - Additionally, if *any* volumes in a vertex's time series are NaNs, - that vertex will be excluded. - - The node-wise mask is determined based on the vertex-wise mask and the workflow's - coverage threshold. - Any nodes in the atlas with less than the coverage threshold's % of vertices retained by the - vertex-wise mask will have that node's time series set to NaNs. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.connectivity import init_parcellate_cifti_wf - - with mock_config(): - wf = init_parcellate_cifti_wf(mem_gb={"resampled": 2}) - - Parameters - ---------- - mem_gb : :obj:`dict` - Dictionary of memory allocations. - compute_mask : :obj:`bool` - Whether to compute a vertex-wise mask for the CIFTI file. - When processing full BOLD runs, this should be True. - When processing truncated BOLD runs or scalar maps, this should be False, - and the vertex-wise mask should be provided via the inputnode.. - Default is True. - name : :obj:`str` - Workflow name. - Default is "parcellate_cifti_wf". - - Inputs - ------ - in_file - CIFTI file to parcellate. - atlas_files - List of CIFTI atlas files. - atlas_labels_files - List of TSV atlas labels files. 
- vertexwise_coverage - Vertex-wise coverage mask. - Only used if `compute_mask` is False. - coverage_cifti - Coverage CIFTI files. One for each atlas. - Only used if `compute_mask` is False. - - Outputs - ------- - parcellated_cifti - Parcellated CIFTI files. One for each atlas. - parcellated_tsv - Parcellated TSV files. One for each atlas. - vertexwise_coverage - Vertex-wise coverage mask. Only output if `compute_mask` is True. - coverage_cifti - Coverage CIFTI files. One for each atlas. Only output if `compute_mask` is True. - coverage_tsv - Coverage TSV files. One for each atlas. Only output if `compute_mask` is True. - """ - from xcp_d import config - from xcp_d.interfaces.connectivity import CiftiMask, CiftiToTSV, CiftiVertexMask - from xcp_d.interfaces.workbench import CiftiMath, CiftiParcellateWorkbench - - workflow = Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_file", - "atlas_files", - "atlas_labels_files", - "vertexwise_coverage", - "coverage_cifti", - ], - ), - name="inputnode", - ) - - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "parcellated_cifti", - "parcellated_tsv", - "vertexwise_coverage", - "coverage_cifti", - "coverage_tsv", - ], - ), - name="outputnode", - ) - - # Replace vertices with all zeros with NaNs using Python. - coverage_buffer = pe.Node( - niu.IdentityInterface(fields=["vertexwise_coverage", "coverage_cifti"]), - name="coverage_buffer", - ) - if compute_mask: - # Write out a vertex-wise binary coverage map using Python. 
- vertexwise_coverage = pe.Node( - CiftiVertexMask(), - name="vertexwise_coverage", - ) - workflow.connect([ - (inputnode, vertexwise_coverage, [("in_file", "in_file")]), - (vertexwise_coverage, coverage_buffer, [("mask_file", "vertexwise_coverage")]), - (vertexwise_coverage, outputnode, [("mask_file", "vertexwise_coverage")]), - ]) # fmt:skip - - parcellate_coverage = pe.MapNode( - CiftiParcellateWorkbench( - direction="COLUMN", - only_numeric=True, - out_file="parcellated_atlas.pscalar.nii", - ), - name="parcellate_coverage", - iterfield=["atlas_label"], - ) - workflow.connect([ - (inputnode, parcellate_coverage, [("atlas_files", "atlas_label")]), - (vertexwise_coverage, parcellate_coverage, [("mask_file", "in_file")]), - (parcellate_coverage, coverage_buffer, [("out_file", "coverage_cifti")]), - (parcellate_coverage, outputnode, [("out_file", "coverage_cifti")]), - ]) # fmt:skip - - coverage_to_tsv = pe.MapNode( - CiftiToTSV(), - name="coverage_to_tsv", - iterfield=["in_file", "atlas_labels"], - ) - workflow.connect([ - (inputnode, coverage_to_tsv, [("atlas_labels_files", "atlas_labels")]), - (parcellate_coverage, coverage_to_tsv, [("out_file", "in_file")]), - (coverage_to_tsv, outputnode, [("out_file", "coverage_tsv")]), - ]) # fmt:skip - else: - workflow.connect([ - (inputnode, coverage_buffer, [ - ("vertexwise_coverage", "vertexwise_coverage"), - ("coverage_cifti", "coverage_cifti"), - ]), - ]) # fmt:skip - - # Parcellate the data file using the vertex-wise coverage. 
- parcellate_data = pe.MapNode( - CiftiParcellateWorkbench( - direction="COLUMN", - only_numeric=True, - out_file=f"parcellated_data.{'ptseries' if compute_mask else 'pscalar'}.nii", - ), - name="parcellate_data", - iterfield=["atlas_label"], - mem_gb=mem_gb["resampled"], - ) - workflow.connect([ - (inputnode, parcellate_data, [ - ("in_file", "in_file"), - ("atlas_files", "atlas_label"), - ]), - (coverage_buffer, parcellate_data, [("vertexwise_coverage", "cifti_weights")]), - ]) # fmt:skip - - # Threshold node coverage values based on coverage threshold. - threshold_coverage = pe.MapNode( - CiftiMath(expression=f"data > {config.workflow.min_coverage}"), - name="threshold_coverage", - iterfield=["data"], - mem_gb=mem_gb["resampled"], - ) - workflow.connect([(coverage_buffer, threshold_coverage, [("coverage_cifti", "data")])]) - - # Mask out uncovered nodes from parcellated denoised data - mask_parcellated_data = pe.MapNode( - CiftiMask(), - name="mask_parcellated_data", - iterfield=["in_file", "mask"], - mem_gb=mem_gb["resampled"], - ) - workflow.connect([ - (parcellate_data, mask_parcellated_data, [("out_file", "in_file")]), - (threshold_coverage, mask_parcellated_data, [("out_file", "mask")]), - (mask_parcellated_data, outputnode, [("out_file", "parcellated_cifti")]), - ]) # fmt:skip - - # Convert the parcellated CIFTI to a TSV file - cifti_to_tsv = pe.MapNode( - CiftiToTSV(), - name="cifti_to_tsv", - iterfield=["in_file", "atlas_labels"], - ) - workflow.connect([ - (inputnode, cifti_to_tsv, [("atlas_labels_files", "atlas_labels")]), - (mask_parcellated_data, cifti_to_tsv, [("out_file", "in_file")]), - (cifti_to_tsv, outputnode, [("out_file", "parcellated_tsv")]), - ]) # fmt:skip - - return workflow diff --git a/xcp_d/workflows/restingstate.py b/xcp_d/workflows/bold/metrics.py similarity index 99% rename from xcp_d/workflows/restingstate.py rename to xcp_d/workflows/bold/metrics.py index 5e6db8ebb..2b6191469 100644 --- a/xcp_d/workflows/restingstate.py +++ 
b/xcp_d/workflows/bold/metrics.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Workflows for calculating resting state-specific metrics.""" +"""Workflows for calculating BOLD metrics (ALFF and ReHo).""" + from nipype.interfaces import utility as niu from nipype.interfaces.workbench.cifti import CiftiSmooth from nipype.pipeline import engine as pe diff --git a/xcp_d/workflows/bold.py b/xcp_d/workflows/bold/nifti.py similarity index 96% rename from xcp_d/workflows/bold.py rename to xcp_d/workflows/bold/nifti.py index 82cc38fa8..b9cd0dbcf 100644 --- a/xcp_d/workflows/bold.py +++ b/xcp_d/workflows/bold/nifti.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Workflows for post-processing the BOLD data.""" +"""Workflows for post-processing NIfTI-format BOLD data.""" + from nipype import logging from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe @@ -12,16 +13,18 @@ from xcp_d.utils.confounds import get_custom_confounds from xcp_d.utils.doc import fill_doc from xcp_d.utils.utils import _create_mem_gb -from xcp_d.workflows.connectivity import init_functional_connectivity_nifti_wf -from xcp_d.workflows.execsummary import init_execsummary_functional_plots_wf -from xcp_d.workflows.outputs import init_postproc_derivatives_wf -from xcp_d.workflows.plotting import init_qc_report_wf -from xcp_d.workflows.postprocessing import ( +from xcp_d.workflows.bold.connectivity import init_functional_connectivity_nifti_wf +from xcp_d.workflows.bold.metrics import init_alff_wf, init_reho_nifti_wf +from xcp_d.workflows.bold.outputs import init_postproc_derivatives_wf +from xcp_d.workflows.bold.plotting import ( + init_execsummary_functional_plots_wf, + init_qc_report_wf, +) +from xcp_d.workflows.bold.postprocessing import ( init_denoise_bold_wf, init_despike_wf, 
init_prepare_confounds_wf, ) -from xcp_d.workflows.restingstate import init_alff_wf, init_reho_nifti_wf LOGGER = logging.getLogger("nipype.workflow") @@ -49,7 +52,7 @@ def init_postprocess_nifti_wf( from xcp_d.tests.tests import mock_config from xcp_d import config from xcp_d.utils.bids import collect_data, collect_run_data - from xcp_d.workflows.bold import init_postprocess_nifti_wf + from xcp_d.workflows.bold.nifti import init_postprocess_nifti_wf with mock_config(): bold_file = str( diff --git a/xcp_d/workflows/outputs.py b/xcp_d/workflows/bold/outputs.py similarity index 91% rename from xcp_d/workflows/outputs.py rename to xcp_d/workflows/bold/outputs.py index 57c9879a3..34fd033fd 100644 --- a/xcp_d/workflows/outputs.py +++ b/xcp_d/workflows/bold/outputs.py @@ -1,13 +1,13 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Workflows for collecting and saving xcp_d outputs.""" +"""Workflows for collecting and saving functional outputs.""" + from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow from xcp_d import config from xcp_d.interfaces.bids import DerivativesDataSink -from xcp_d.interfaces.utils import FilterUndefined from xcp_d.utils.bids import ( _make_atlas_uri, _make_custom_uri, @@ -19,110 +19,6 @@ from xcp_d.utils.utils import _make_dictionary -@fill_doc -def init_copy_inputs_to_outputs_wf(name="copy_inputs_to_outputs_wf"): - """Copy files from the preprocessing derivatives to the output folder, with no modifications. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.outputs import init_copy_inputs_to_outputs_wf - - with mock_config(): - wf = init_copy_inputs_to_outputs_wf() - - Parameters - ---------- - %(name)s - Default is "copy_inputs_to_outputs_wf". 
- - Inputs - ------ - lh_pial_surf - rh_pial_surf - lh_wm_surf - rh_wm_surf - sulcal_depth - sulcal_curv - cortical_thickness - cortical_thickness_corr - myelin - myelin_smoothed - """ - workflow = Workflow(name=name) - - output_dir = config.execution.xcp_d_dir - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "lh_pial_surf", - "rh_pial_surf", - "lh_wm_surf", - "rh_wm_surf", - "sulcal_depth", - "sulcal_curv", - "cortical_thickness", - "cortical_thickness_corr", - "myelin", - "myelin_smoothed", - ], - ), - name="inputnode", - ) - - # Place the surfaces in a single node. - collect_files = pe.Node( - niu.Merge(10), - name="collect_files", - ) - workflow.connect([ - (inputnode, collect_files, [ - # fsLR-space surface mesh files - ("lh_pial_surf", "in1"), - ("rh_pial_surf", "in2"), - ("lh_wm_surf", "in3"), - ("rh_wm_surf", "in4"), - # fsLR-space surface shape files - ("sulcal_depth", "in5"), - ("sulcal_curv", "in6"), - ("cortical_thickness", "in7"), - ("cortical_thickness_corr", "in8"), - ("myelin", "in9"), - ("myelin_smoothed", "in10"), - ]), - ]) # fmt:skip - - filter_out_undefined = pe.Node( - FilterUndefined(), - name="filter_out_undefined", - ) - workflow.connect([(collect_files, filter_out_undefined, [("out", "inlist")])]) - - ds_copied_outputs = pe.MapNode( - DerivativesDataSink( - base_directory=output_dir, - check_hdr=False, - ), - name="ds_copied_outputs", - run_without_submitting=True, - mem_gb=1, - iterfield=["in_file", "source_file"], - ) - workflow.connect([ - (filter_out_undefined, ds_copied_outputs, [ - ("outlist", "in_file"), - ("outlist", "source_file"), - ]), - ]) # fmt:skip - - return workflow - - @fill_doc def init_postproc_derivatives_wf( name_source, diff --git a/xcp_d/workflows/bold/plotting.py b/xcp_d/workflows/bold/plotting.py new file mode 100644 index 000000000..52a68fdf7 --- /dev/null +++ b/xcp_d/workflows/bold/plotting.py @@ -0,0 +1,698 @@ +"""Workflows for generating plots from functional data.""" + +import fnmatch +import os 
+ +from nipype import Function, logging +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow +from templateflow.api import get as get_template + +from xcp_d import config +from xcp_d.interfaces.ants import ApplyTransforms +from xcp_d.interfaces.bids import DerivativesDataSink +from xcp_d.interfaces.nilearn import BinaryMath, ResampleToImage +from xcp_d.interfaces.plotting import AnatomicalPlot, QCPlots, QCPlotsES +from xcp_d.interfaces.report import FunctionalSummary +from xcp_d.interfaces.utils import ABCCQC, LINCQC +from xcp_d.utils.doc import fill_doc +from xcp_d.utils.utils import get_bold2std_and_t1w_xfms, get_std2bold_xfms +from xcp_d.workflows.plotting import init_plot_overlay_wf + +LOGGER = logging.getLogger("nipype.workflow") + + +@fill_doc +def init_qc_report_wf( + TR, + head_radius, + name="qc_report_wf", +): + """Generate quality control figures and a QC file. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.bold.plotting import init_qc_report_wf + + with mock_config(): + wf = init_qc_report_wf( + TR=0.5, + head_radius=50, + name="qc_report_wf", + ) + + Parameters + ---------- + %(TR)s + %(head_radius)s + %(name)s + Default is "qc_report_wf". + + Inputs + ------ + %(name_source)s + preprocessed_bold + The preprocessed BOLD file, after dummy scan removal. + Used for carpet plots. + %(denoised_interpolated_bold)s + Used for DCAN carpet plots. + Only used if abcc_qc is True. + %(censored_denoised_bold)s + Used for LINC carpet plots. + %(boldref)s + Only used with non-CIFTI data. + bold_mask + Path to the BOLD run's brain mask in the same space as ``preprocessed_bold``. + Only used with non-CIFTI data. + anat_brainmask + Path to the anatomical brain mask in the same standard space as ``bold_mask``. + Only used with non-CIFTI data. 
+ %(template_to_anat_xfm)s + Only used with non-CIFTI data. + %(dummy_scans)s + %(fmriprep_confounds_file)s + %(temporal_mask)s + %(filtered_motion)s + + Outputs + ------- + qc_file + """ + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + omp_nthreads = config.nipype.omp_nthreads + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "name_source", + "preprocessed_bold", + "denoised_interpolated_bold", + "censored_denoised_bold", + "dummy_scans", + "fmriprep_confounds_file", + "filtered_motion", + "temporal_mask", + "run_index", # will only be set for concatenated data + # nifti-only inputs + "bold_mask", + "anat", # T1w/T2w image in anatomical space + "anat_brainmask", + "boldref", + "template_to_anat_xfm", + ], + ), + name="inputnode", + ) + + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "qc_file", + ], + ), + name="outputnode", + ) + + nlin2009casym_brain_mask = str( + get_template( + "MNI152NLin2009cAsym", + resolution=2, + desc="brain", + suffix="mask", + extension=[".nii", ".nii.gz"], + ) + ) + + if config.workflow.file_format == "nifti": + # We need the BOLD mask in T1w and standard spaces for QC metric calculation. + # This is only possible for nifti inputs. 
+ get_native2space_transforms = pe.Node( + Function( + input_names=["bold_file", "template_to_anat_xfm"], + output_names=[ + "bold_to_std_xfms", + "bold_to_std_xfms_invert", + "bold_to_t1w_xfms", + "bold_to_t1w_xfms_invert", + ], + function=get_bold2std_and_t1w_xfms, + ), + name="get_native2space_transforms", + ) + + workflow.connect([ + (inputnode, get_native2space_transforms, [ + ("name_source", "bold_file"), + ("template_to_anat_xfm", "template_to_anat_xfm"), + ]), + ]) # fmt:skip + + warp_boldmask_to_t1w = pe.Node( + ApplyTransforms( + dimension=3, + interpolation="NearestNeighbor", + ), + name="warp_boldmask_to_t1w", + n_procs=omp_nthreads, + mem_gb=1, + ) + workflow.connect([ + (inputnode, warp_boldmask_to_t1w, [ + ("bold_mask", "input_image"), + ("anat", "reference_image"), + ]), + (get_native2space_transforms, warp_boldmask_to_t1w, [ + ("bold_to_t1w_xfms", "transforms"), + ("bold_to_t1w_xfms_invert", "invert_transform_flags"), + ]), + ]) # fmt:skip + + warp_boldmask_to_mni = pe.Node( + ApplyTransforms( + dimension=3, + reference_image=nlin2009casym_brain_mask, + interpolation="NearestNeighbor", + ), + name="warp_boldmask_to_mni", + n_procs=omp_nthreads, + mem_gb=1, + ) + workflow.connect([ + (inputnode, warp_boldmask_to_mni, [("bold_mask", "input_image")]), + (get_native2space_transforms, warp_boldmask_to_mni, [ + ("bold_to_std_xfms", "transforms"), + ("bold_to_std_xfms_invert", "invert_transform_flags"), + ]), + ]) # fmt:skip + + # Warp the standard-space anatomical brain mask to the anatomical space + warp_anatmask_to_t1w = pe.Node( + ApplyTransforms( + dimension=3, + interpolation="NearestNeighbor", + ), + name="warp_anatmask_to_t1w", + n_procs=omp_nthreads, + mem_gb=1, + ) + workflow.connect([ + (inputnode, warp_anatmask_to_t1w, [ + ("bold_mask", "input_image"), + ("anat", "reference_image"), + ]), + (get_native2space_transforms, warp_anatmask_to_t1w, [ + ("bold_to_t1w_xfms", "transforms"), + ("bold_to_t1w_xfms_invert", "invert_transform_flags"), + ]), 
+ ]) # fmt:skip + + # NIFTI files require a tissue-type segmentation in the same space as the BOLD data. + # Get the set of transforms from MNI152NLin6Asym (the dseg) to the BOLD space. + # Given that xcp-d doesn't process native-space data, this transform will never be used. + get_mni_to_bold_xfms = pe.Node( + Function( + input_names=["bold_file"], + output_names=["transform_list"], + function=get_std2bold_xfms, + ), + name="get_std2native_transform", + ) + workflow.connect([(inputnode, get_mni_to_bold_xfms, [("name_source", "bold_file")])]) + + # Use MNI152NLin2009cAsym tissue-type segmentation file for carpet plots. + dseg_file = str( + get_template( + "MNI152NLin2009cAsym", + resolution=1, + desc="carpet", + suffix="dseg", + extension=[".nii", ".nii.gz"], + ) + ) + + # Get MNI152NLin2009cAsym --> MNI152NLin6Asym xform. + MNI152NLin2009cAsym_to_MNI152NLin6Asym = str( + get_template( + template="MNI152NLin6Asym", + mode="image", + suffix="xfm", + extension=".h5", + **{"from": "MNI152NLin2009cAsym"}, + ), + ) + + # Add the MNI152NLin2009cAsym --> MNI152NLin6Asym xform to the end of the + # BOLD --> MNI152NLin6Asym xform list, because xforms are applied in reverse order. + add_xfm_to_nlin6asym = pe.Node( + niu.Merge(2), + name="add_xfm_to_nlin6asym", + ) + add_xfm_to_nlin6asym.inputs.in2 = MNI152NLin2009cAsym_to_MNI152NLin6Asym + + workflow.connect([ + (get_mni_to_bold_xfms, add_xfm_to_nlin6asym, [("transform_list", "in1")]), + ]) # fmt:skip + + # Transform MNI152NLin2009cAsym dseg file to the same space as the BOLD data. 
+ warp_dseg_to_bold = pe.Node( + ApplyTransforms( + dimension=3, + input_image=dseg_file, + interpolation="GenericLabel", + ), + name="warp_dseg_to_bold", + n_procs=omp_nthreads, + mem_gb=3, + ) + workflow.connect([ + (inputnode, warp_dseg_to_bold, [("boldref", "reference_image")]), + (add_xfm_to_nlin6asym, warp_dseg_to_bold, [("out", "transforms")]), + ]) # fmt:skip + + if config.workflow.linc_qc: + make_linc_qc = pe.Node( + LINCQC( + TR=TR, + head_radius=head_radius, + template_mask=nlin2009casym_brain_mask, + ), + name="make_linc_qc", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, make_linc_qc, [ + ("name_source", "name_source"), + ("preprocessed_bold", "bold_file"), + ("censored_denoised_bold", "cleaned_file"), + ("fmriprep_confounds_file", "fmriprep_confounds_file"), + ("temporal_mask", "temporal_mask"), + ("dummy_scans", "dummy_scans"), + ]), + (make_linc_qc, outputnode, [("qc_file", "qc_file")]), + ]) # fmt:skip + + if config.workflow.file_format == "nifti": + workflow.connect([ + (inputnode, make_linc_qc, [("bold_mask", "bold_mask_inputspace")]), + (warp_boldmask_to_t1w, make_linc_qc, [("output_image", "bold_mask_anatspace")]), + (warp_boldmask_to_mni, make_linc_qc, [("output_image", "bold_mask_stdspace")]), + (warp_anatmask_to_t1w, make_linc_qc, [("output_image", "anat_mask_anatspace")]), + ]) # fmt:skip + else: + make_linc_qc.inputs.bold_mask_inputspace = None + + ds_qc_metadata = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=list(DerivativesDataSink._allowed_entities), + allowed_entities=["desc"], + desc="linc", + suffix="qc", + extension=".json", + ), + name="ds_qc_metadata", + run_without_submitting=True, + ) + workflow.connect([ + (inputnode, ds_qc_metadata, [("name_source", "source_file")]), + (make_linc_qc, ds_qc_metadata, [("qc_metadata", "in_file")]), + ]) # fmt:skip + + make_qc_plots_nipreps = pe.Node( + QCPlots(TR=TR, head_radius=head_radius), + name="make_qc_plots_nipreps", + 
mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, make_qc_plots_nipreps, [ + ("preprocessed_bold", "bold_file"), + ("censored_denoised_bold", "cleaned_file"), + ("fmriprep_confounds_file", "fmriprep_confounds_file"), + ("temporal_mask", "temporal_mask"), + ]), + ]) # fmt:skip + + if config.workflow.file_format == "nifti": + workflow.connect([ + (inputnode, make_qc_plots_nipreps, [("bold_mask", "mask_file")]), + (warp_dseg_to_bold, make_qc_plots_nipreps, [("output_image", "seg_file")]), + ]) # fmt:skip + else: + make_qc_plots_nipreps.inputs.mask_file = None + + ds_preproc_qc_plot_nipreps = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + desc="preprocessing", + datatype="figures", + ), + name="ds_preproc_qc_plot_nipreps", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_preproc_qc_plot_nipreps, [("name_source", "source_file")]), + (make_qc_plots_nipreps, ds_preproc_qc_plot_nipreps, [("raw_qcplot", "in_file")]), + ]) # fmt:skip + + ds_postproc_qc_plot_nipreps = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + desc="postprocessing", + datatype="figures", + ), + name="ds_postproc_qc_plot_nipreps", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_postproc_qc_plot_nipreps, [("name_source", "source_file")]), + (make_qc_plots_nipreps, ds_postproc_qc_plot_nipreps, [("clean_qcplot", "in_file")]), + ]) # fmt:skip + + functional_qc = pe.Node( + FunctionalSummary(TR=TR), + name="qcsummary", + run_without_submitting=False, + mem_gb=2, + ) + workflow.connect([ + (inputnode, functional_qc, [("name_source", "bold_file")]), + (make_linc_qc, functional_qc, [("qc_file", "qc_file")]), + ]) # fmt:skip + + ds_report_qualitycontrol = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + desc="qualitycontrol", + datatype="figures", + ), + name="ds_report_qualitycontrol", + run_without_submitting=False, + ) + workflow.connect([ + (inputnode, ds_report_qualitycontrol, 
[("name_source", "source_file")]), + (functional_qc, ds_report_qualitycontrol, [("out_report", "in_file")]), + ]) # fmt:skip + else: + # Need to explicitly add the outputnode to the workflow, since it's not set otherwise. + workflow.add_nodes([outputnode]) + + if config.workflow.abcc_qc: + make_abcc_qc = pe.Node( + ABCCQC(TR=TR), + name="make_abcc_qc", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([(inputnode, make_abcc_qc, [("filtered_motion", "filtered_motion")])]) + + ds_abcc_qc = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + datatype="func", + desc="abcc", + suffix="qc", + extension="hdf5", + ), + name="ds_abcc_qc", + run_without_submitting=True, + ) + workflow.connect([ + (inputnode, ds_abcc_qc, [("name_source", "source_file")]), + (make_abcc_qc, ds_abcc_qc, [("qc_file", "in_file")]), + ]) # fmt:skip + + # Generate preprocessing and postprocessing carpet plots. + make_qc_plots_es = pe.Node( + QCPlotsES(TR=TR, standardize=config.workflow.params == "none"), + name="make_qc_plots_es", + mem_gb=2, + n_procs=omp_nthreads, + ) + workflow.connect([ + (inputnode, make_qc_plots_es, [ + ("preprocessed_bold", "preprocessed_bold"), + ("denoised_interpolated_bold", "denoised_interpolated_bold"), + ("filtered_motion", "filtered_motion"), + ("temporal_mask", "temporal_mask"), + ("run_index", "run_index"), + ]), + ]) # fmt:skip + + if config.workflow.file_format == "nifti": + workflow.connect([ + (inputnode, make_qc_plots_es, [("bold_mask", "mask")]), + (warp_dseg_to_bold, make_qc_plots_es, [("output_image", "seg_data")]), + ]) # fmt:skip + + ds_preproc_qc_plot_es = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc="preprocESQC", + ), + name="ds_preproc_qc_plot_es", + run_without_submitting=True, + ) + workflow.connect([ + (inputnode, ds_preproc_qc_plot_es, [("name_source", "source_file")]), + (make_qc_plots_es, ds_preproc_qc_plot_es, [("before_process", "in_file")]), + ]) # 
fmt:skip + + ds_postproc_qc_plot_es = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc="postprocESQC", + ), + name="ds_postproc_qc_plot_es", + run_without_submitting=True, + ) + workflow.connect([ + (inputnode, ds_postproc_qc_plot_es, [("name_source", "source_file")]), + (make_qc_plots_es, ds_postproc_qc_plot_es, [("after_process", "in_file")]), + ]) # fmt:skip + + return workflow + + +@fill_doc +def init_execsummary_functional_plots_wf( + preproc_nifti, + t1w_available, + t2w_available, + mem_gb, + name="execsummary_functional_plots_wf", +): + """Generate the functional figures for an executive summary. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.bold.plotting import init_execsummary_functional_plots_wf + + with mock_config(): + wf = init_execsummary_functional_plots_wf( + preproc_nifti=None, + t1w_available=True, + t2w_available=True, + mem_gb={"resampled": 1}, + name="execsummary_functional_plots_wf", + ) + + Parameters + ---------- + preproc_nifti : :obj:`str` or None + BOLD data before post-processing. + A NIFTI file, not a CIFTI. + t1w_available : :obj:`bool` + Generally True. + t2w_available : :obj:`bool` + Generally False. + mem_gb : :obj:`dict` + Memory size in GB. + %(name)s + + Inputs + ------ + preproc_nifti + BOLD data before post-processing. + A NIFTI file, not a CIFTI. + Set from the parameter. + %(boldref)s + t1w + T1w image in a standard space, taken from the output of init_postprocess_anat_wf. + t2w + T2w image in a standard space, taken from the output of init_postprocess_anat_wf. 
+ """ + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + layout = config.execution.layout + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "preproc_nifti", + "boldref", # a nifti boldref + "t1w", + "t2w", # optional + ], + ), + name="inputnode", + ) + if not preproc_nifti: + raise ValueError( + "No preprocessed NIfTI found. Executive summary figures cannot be generated." + ) + + inputnode.inputs.preproc_nifti = preproc_nifti + + # Get bb_registration_file prefix from fmriprep + # TODO: Replace with interfaces. + current_bold_file = os.path.basename(preproc_nifti) + if "_space" in current_bold_file: + bb_register_prefix = current_bold_file.split("_space")[0] + else: + bb_register_prefix = current_bold_file.split("_desc")[0] + + # TODO: Switch to interface + bold_t1w_registration_files = layout.get( + desc=["bbregister", "coreg", "bbr", "flirtbbr", "flirtnobbr"], + extension=".svg", + suffix="bold", + return_type="file", + ) + bold_t1w_registration_files = fnmatch.filter( + bold_t1w_registration_files, + f"*/{bb_register_prefix}*", + ) + if not bold_t1w_registration_files: + LOGGER.warning("No coregistration figure found in preprocessing derivatives.") + else: + bold_t1w_registration_file = bold_t1w_registration_files[0] + + ds_registration_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + in_file=bold_t1w_registration_file, + dismiss_entities=["den"], + datatype="figures", + desc="bbregister", + ), + name="ds_registration_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + + workflow.connect([(inputnode, ds_registration_figure, [("preproc_nifti", "source_file")])]) + + # Calculate the mean bold image + calculate_mean_bold = pe.Node( + BinaryMath(expression="np.mean(img, axis=3)"), + name="calculate_mean_bold", + mem_gb=mem_gb["timeseries"], + ) + workflow.connect([(inputnode, calculate_mean_bold, [("preproc_nifti", "in_file")])]) + + # Plot the mean bold image + 
plot_meanbold = pe.Node(AnatomicalPlot(), name="plot_meanbold") + workflow.connect([(calculate_mean_bold, plot_meanbold, [("out_file", "in_file")])]) + + # Write out the figures. + ds_meanbold_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc="mean", + ), + name="ds_meanbold_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + workflow.connect([ + (inputnode, ds_meanbold_figure, [("preproc_nifti", "source_file")]), + (plot_meanbold, ds_meanbold_figure, [("out_file", "in_file")]), + ]) # fmt:skip + + # Plot the reference bold image + plot_boldref = pe.Node(AnatomicalPlot(), name="plot_boldref") + workflow.connect([(inputnode, plot_boldref, [("boldref", "in_file")])]) + + # Write out the figures. + ds_boldref_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc="boldref", + ), + name="ds_boldref_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + workflow.connect([ + (inputnode, ds_boldref_figure, [("preproc_nifti", "source_file")]), + (plot_boldref, ds_boldref_figure, [("out_file", "in_file")]), + ]) # fmt:skip + + # Start plotting the overlay figures + # T1 in Task, Task in T1, Task in T2, T2 in Task + anatomicals = (["t1w"] if t1w_available else []) + (["t2w"] if t2w_available else []) + for anat in anatomicals: + # Resample BOLD to match resolution of T1w/T2w data + resample_bold_to_anat = pe.Node( + ResampleToImage(), + name=f"resample_bold_to_{anat}", + mem_gb=mem_gb["resampled"], + ) + workflow.connect([ + (inputnode, resample_bold_to_anat, [(anat, "target_file")]), + (calculate_mean_bold, resample_bold_to_anat, [("out_file", "in_file")]), + ]) # fmt:skip + + plot_anat_on_task_wf = init_plot_overlay_wf( + desc=f"{anat[0].upper()}{anat[1:]}OnTask", + name=f"plot_{anat}_on_task_wf", + ) + workflow.connect([ + (inputnode, plot_anat_on_task_wf, [ + 
("preproc_nifti", "inputnode.name_source"), + (anat, "inputnode.overlay_file"), + ]), + (resample_bold_to_anat, plot_anat_on_task_wf, [ + ("out_file", "inputnode.underlay_file"), + ]), + ]) # fmt:skip + + plot_task_on_anat_wf = init_plot_overlay_wf( + desc=f"TaskOn{anat[0].upper()}{anat[1:]}", + name=f"plot_task_on_{anat}_wf", + ) + workflow.connect([ + (inputnode, plot_task_on_anat_wf, [ + ("preproc_nifti", "inputnode.name_source"), + (anat, "inputnode.underlay_file"), + ]), + (resample_bold_to_anat, plot_task_on_anat_wf, [ + ("out_file", "inputnode.overlay_file"), + ]), + ]) # fmt:skip + + return workflow diff --git a/xcp_d/workflows/postprocessing.py b/xcp_d/workflows/bold/postprocessing.py similarity index 99% rename from xcp_d/workflows/postprocessing.py rename to xcp_d/workflows/bold/postprocessing.py index dca36d5e2..e38c46bfc 100644 --- a/xcp_d/workflows/postprocessing.py +++ b/xcp_d/workflows/bold/postprocessing.py @@ -1,6 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Workflows for post-processing BOLD data.""" + from nipype.interfaces import utility as niu from nipype.interfaces.workbench.cifti import CiftiSmooth from nipype.pipeline import engine as pe diff --git a/xcp_d/workflows/execsummary.py b/xcp_d/workflows/execsummary.py deleted file mode 100644 index d96ea39ac..000000000 --- a/xcp_d/workflows/execsummary.py +++ /dev/null @@ -1,764 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Workflows for generating the executive summary.""" -import fnmatch -import os - -from nipype import Function, logging -from nipype.interfaces import fsl -from nipype.interfaces import utility as niu -from nipype.pipeline import engine as pe -from niworkflows.engine.workflows import LiterateWorkflow as Workflow - -from xcp_d import config -from xcp_d.data import load as load_data -from xcp_d.interfaces.bids import 
DerivativesDataSink -from xcp_d.interfaces.execsummary import FormatForBrainSwipes -from xcp_d.interfaces.nilearn import BinaryMath, ResampleToImage -from xcp_d.interfaces.plotting import AnatomicalPlot, PNGAppend -from xcp_d.interfaces.workbench import ShowScene -from xcp_d.utils.doc import fill_doc -from xcp_d.utils.execsummary import ( - get_n_frames, - get_png_image_names, - make_mosaic, - modify_brainsprite_scene_template, - modify_pngs_scene_template, -) - -LOGGER = logging.getLogger("nipype.workflow") - - -@fill_doc -def init_brainsprite_figures_wf(t1w_available, t2w_available, name="brainsprite_figures_wf"): - """Create mosaic and PNG files for executive summary brainsprite. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.execsummary import init_brainsprite_figures_wf - - with mock_config(): - wf = init_brainsprite_figures_wf( - t1w_available=True, - t2w_available=True, - name="brainsprite_figures_wf", - ) - - Parameters - ---------- - t1w_available : bool - True if a T1w image is available. - t2w_available : bool - True if a T2w image is available. - %(name)s - Default is "init_brainsprite_figures_wf". - - Inputs - ------ - t1w - Path to T1w image. Optional. Should only be defined if ``t1w_available`` is True. - t2w - Path to T2w image. Optional. Should only be defined if ``t2w_available`` is True. 
- lh_wm_surf - rh_wm_surf - lh_pial_surf - rh_pial_surf - """ - workflow = Workflow(name=name) - - output_dir = config.execution.xcp_d_dir - omp_nthreads = config.nipype.omp_nthreads - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "t1w", - "t2w", - "lh_wm_surf", - "rh_wm_surf", - "lh_pial_surf", - "rh_pial_surf", - ], - ), - name="inputnode", - ) - - # Load template scene file - brainsprite_scene_template = str( - load_data("executive_summary_scenes/brainsprite_template.scene.gz") - ) - pngs_scene_template = str(load_data("executive_summary_scenes/pngs_template.scene.gz")) - - if t1w_available and t2w_available: - image_types = ["T1", "T2"] - elif t2w_available: - image_types = ["T2"] - else: - image_types = ["T1"] - - for image_type in image_types: - inputnode_anat_name = f"{image_type.lower()}w" - # Create frame-wise PNGs - get_number_of_frames = pe.Node( - Function( - function=get_n_frames, - input_names=["anat_file"], - output_names=["frame_numbers"], - ), - name=f"get_number_of_frames_{image_type}", - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - omp_nthreads=omp_nthreads, - ) - workflow.connect([ - (inputnode, get_number_of_frames, [(inputnode_anat_name, "anat_file")]), - ]) # fmt:skip - - # Modify template scene file with file paths - modify_brainsprite_template_scene = pe.MapNode( - Function( - function=modify_brainsprite_scene_template, - input_names=[ - "slice_number", - "anat_file", - "rh_pial_surf", - "lh_pial_surf", - "rh_wm_surf", - "lh_wm_surf", - "scene_template", - ], - output_names=["out_file"], - ), - name=f"modify_brainsprite_template_scene_{image_type}", - iterfield=["slice_number"], - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - omp_nthreads=omp_nthreads, - ) - modify_brainsprite_template_scene.inputs.scene_template = brainsprite_scene_template - workflow.connect([ - (inputnode, modify_brainsprite_template_scene, [ - (inputnode_anat_name, "anat_file"), - ("lh_wm_surf", "lh_wm_surf"), - ("rh_wm_surf", "rh_wm_surf"), - ("lh_pial_surf", 
"lh_pial_surf"), - ("rh_pial_surf", "rh_pial_surf"), - ]), - (get_number_of_frames, modify_brainsprite_template_scene, [ - ("frame_numbers", "slice_number"), - ]), - ]) # fmt:skip - - create_framewise_pngs = pe.MapNode( - ShowScene( - scene_name_or_number=1, - image_width=900, - image_height=800, - ), - name=f"create_framewise_pngs_{image_type}", - iterfield=["scene_file"], - mem_gb=1, - omp_nthreads=omp_nthreads, - ) - workflow.connect([ - (modify_brainsprite_template_scene, create_framewise_pngs, [ - ("out_file", "scene_file"), - ]), - ]) # fmt:skip - - # Make mosaic - make_mosaic_node = pe.Node( - Function( - function=make_mosaic, - input_names=["png_files"], - output_names=["mosaic_file"], - ), - name=f"make_mosaic_{image_type}", - mem_gb=1, - omp_nthreads=omp_nthreads, - ) - - workflow.connect([(create_framewise_pngs, make_mosaic_node, [("out_file", "png_files")])]) - - ds_mosaic_file = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["desc"], - desc="mosaic", - datatype="figures", - suffix=f"{image_type}w", - ), - name=f"ds_mosaic_file_{image_type}", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_mosaic_file, [(inputnode_anat_name, "source_file")]), - (make_mosaic_node, ds_mosaic_file, [("mosaic_file", "in_file")]), - ]) # fmt:skip - - # Start working on the selected PNG images for the button - modify_pngs_template_scene = pe.Node( - Function( - function=modify_pngs_scene_template, - input_names=[ - "anat_file", - "rh_pial_surf", - "lh_pial_surf", - "rh_wm_surf", - "lh_wm_surf", - "scene_template", - ], - output_names=["out_file"], - ), - name=f"modify_pngs_template_scene_{image_type}", - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - omp_nthreads=omp_nthreads, - ) - modify_pngs_template_scene.inputs.scene_template = pngs_scene_template - workflow.connect([ - (inputnode, modify_pngs_template_scene, [ - (inputnode_anat_name, "anat_file"), - ("lh_wm_surf", "lh_wm_surf"), - ("rh_wm_surf", "rh_wm_surf"), - 
("lh_pial_surf", "lh_pial_surf"), - ("rh_pial_surf", "rh_pial_surf"), - ]) - ]) # fmt:skip - - # Create specific PNGs for button - get_png_scene_names = pe.Node( - Function( - function=get_png_image_names, - output_names=["scene_index", "scene_descriptions"], - ), - name=f"get_png_scene_names_{image_type}", - ) - - create_scenewise_pngs = pe.MapNode( - ShowScene(image_width=900, image_height=800), - name=f"create_scenewise_pngs_{image_type}", - iterfield=["scene_name_or_number"], - mem_gb=1, - omp_nthreads=omp_nthreads, - ) - workflow.connect([ - (modify_pngs_template_scene, create_scenewise_pngs, [("out_file", "scene_file")]), - (get_png_scene_names, create_scenewise_pngs, [ - ("scene_index", "scene_name_or_number"), - ]), - ]) # fmt:skip - - ds_scenewise_pngs = pe.MapNode( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["desc"], - datatype="figures", - suffix=f"{image_type}w", - ), - name=f"ds_scenewise_pngs_{image_type}", - run_without_submitting=False, - iterfield=["desc", "in_file"], - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - workflow.connect([ - (inputnode, ds_scenewise_pngs, [(inputnode_anat_name, "source_file")]), - (get_png_scene_names, ds_scenewise_pngs, [("scene_descriptions", "desc")]), - (create_scenewise_pngs, ds_scenewise_pngs, [("out_file", "in_file")]), - ]) # fmt:skip - - return workflow - - -@fill_doc -def init_execsummary_functional_plots_wf( - preproc_nifti, - t1w_available, - t2w_available, - mem_gb, - name="execsummary_functional_plots_wf", -): - """Generate the functional figures for an executive summary. - - Workflow Graph - .. 
workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.execsummary import init_execsummary_functional_plots_wf - - with mock_config(): - wf = init_execsummary_functional_plots_wf( - preproc_nifti=None, - t1w_available=True, - t2w_available=True, - mem_gb={"resampled": 1}, - name="execsummary_functional_plots_wf", - ) - - Parameters - ---------- - preproc_nifti : :obj:`str` or None - BOLD data before post-processing. - A NIFTI file, not a CIFTI. - t1w_available : :obj:`bool` - Generally True. - t2w_available : :obj:`bool` - Generally False. - mem_gb : :obj:`dict` - Memory size in GB. - %(name)s - - Inputs - ------ - preproc_nifti - BOLD data before post-processing. - A NIFTI file, not a CIFTI. - Set from the parameter. - %(boldref)s - t1w - T1w image in a standard space, taken from the output of init_postprocess_anat_wf. - t2w - T2w image in a standard space, taken from the output of init_postprocess_anat_wf. - """ - workflow = Workflow(name=name) - - output_dir = config.execution.xcp_d_dir - layout = config.execution.layout - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "preproc_nifti", - "boldref", # a nifti boldref - "t1w", - "t2w", # optional - ], - ), - name="inputnode", - ) - if not preproc_nifti: - raise ValueError( - "No preprocessed NIfTI found. Executive summary figures cannot be generated." - ) - - inputnode.inputs.preproc_nifti = preproc_nifti - - # Get bb_registration_file prefix from fmriprep - # TODO: Replace with interfaces. 
- current_bold_file = os.path.basename(preproc_nifti) - if "_space" in current_bold_file: - bb_register_prefix = current_bold_file.split("_space")[0] - else: - bb_register_prefix = current_bold_file.split("_desc")[0] - - # TODO: Switch to interface - bold_t1w_registration_files = layout.get( - desc=["bbregister", "coreg", "bbr", "flirtbbr", "flirtnobbr"], - extension=".svg", - suffix="bold", - return_type="file", - ) - bold_t1w_registration_files = fnmatch.filter( - bold_t1w_registration_files, - f"*/{bb_register_prefix}*", - ) - if not bold_t1w_registration_files: - LOGGER.warning("No coregistration figure found in preprocessing derivatives.") - else: - bold_t1w_registration_file = bold_t1w_registration_files[0] - - ds_registration_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - in_file=bold_t1w_registration_file, - dismiss_entities=["den"], - datatype="figures", - desc="bbregister", - ), - name="ds_registration_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - - workflow.connect([(inputnode, ds_registration_figure, [("preproc_nifti", "source_file")])]) - - # Calculate the mean bold image - calculate_mean_bold = pe.Node( - BinaryMath(expression="np.mean(img, axis=3)"), - name="calculate_mean_bold", - mem_gb=mem_gb["timeseries"], - ) - workflow.connect([(inputnode, calculate_mean_bold, [("preproc_nifti", "in_file")])]) - - # Plot the mean bold image - plot_meanbold = pe.Node(AnatomicalPlot(), name="plot_meanbold") - workflow.connect([(calculate_mean_bold, plot_meanbold, [("out_file", "in_file")])]) - - # Write out the figures. 
- ds_meanbold_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc="mean", - ), - name="ds_meanbold_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - workflow.connect([ - (inputnode, ds_meanbold_figure, [("preproc_nifti", "source_file")]), - (plot_meanbold, ds_meanbold_figure, [("out_file", "in_file")]), - ]) # fmt:skip - - # Plot the reference bold image - plot_boldref = pe.Node(AnatomicalPlot(), name="plot_boldref") - workflow.connect([(inputnode, plot_boldref, [("boldref", "in_file")])]) - - # Write out the figures. - ds_boldref_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc="boldref", - ), - name="ds_boldref_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - workflow.connect([ - (inputnode, ds_boldref_figure, [("preproc_nifti", "source_file")]), - (plot_boldref, ds_boldref_figure, [("out_file", "in_file")]), - ]) # fmt:skip - - # Start plotting the overlay figures - # T1 in Task, Task in T1, Task in T2, T2 in Task - anatomicals = (["t1w"] if t1w_available else []) + (["t2w"] if t2w_available else []) - for anat in anatomicals: - # Resample BOLD to match resolution of T1w/T2w data - resample_bold_to_anat = pe.Node( - ResampleToImage(), - name=f"resample_bold_to_{anat}", - mem_gb=mem_gb["resampled"], - ) - workflow.connect([ - (inputnode, resample_bold_to_anat, [(anat, "target_file")]), - (calculate_mean_bold, resample_bold_to_anat, [("out_file", "in_file")]), - ]) # fmt:skip - - plot_anat_on_task_wf = init_plot_overlay_wf( - desc=f"{anat[0].upper()}{anat[1:]}OnTask", - name=f"plot_{anat}_on_task_wf", - ) - workflow.connect([ - (inputnode, plot_anat_on_task_wf, [ - ("preproc_nifti", "inputnode.name_source"), - (anat, "inputnode.overlay_file"), - ]), - (resample_bold_to_anat, plot_anat_on_task_wf, [ - ("out_file", "inputnode.underlay_file"), - 
]), - ]) # fmt:skip - - plot_task_on_anat_wf = init_plot_overlay_wf( - desc=f"TaskOn{anat[0].upper()}{anat[1:]}", - name=f"plot_task_on_{anat}_wf", - ) - workflow.connect([ - (inputnode, plot_task_on_anat_wf, [ - ("preproc_nifti", "inputnode.name_source"), - (anat, "inputnode.underlay_file"), - ]), - (resample_bold_to_anat, plot_task_on_anat_wf, [ - ("out_file", "inputnode.overlay_file"), - ]), - ]) # fmt:skip - - return workflow - - -@fill_doc -def init_execsummary_anatomical_plots_wf( - t1w_available, - t2w_available, - name="execsummary_anatomical_plots_wf", -): - """Generate the anatomical figures for an executive summary. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.execsummary import init_execsummary_anatomical_plots_wf - - with mock_config(): - wf = init_execsummary_anatomical_plots_wf( - t1w_available=True, - t2w_available=True, - ) - - Parameters - ---------- - t1w_available : bool - Generally True. - t2w_available : bool - Generally False. - %(name)s - - Inputs - ------ - t1w - T1w image, after warping to standard space. - t2w - T2w image, after warping to standard space. 
- template - """ - workflow = Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "t1w", - "t2w", - "template", - ], - ), - name="inputnode", - ) - - # Start plotting the overlay figures - # Atlas in T1w/T2w, T1w/T2w in Atlas - anatomicals = (["t1w"] if t1w_available else []) + (["t2w"] if t2w_available else []) - for anat in anatomicals: - # Resample anatomical to match resolution of template data - resample_anat = pe.Node( - ResampleToImage(), - name=f"resample_{anat}", - mem_gb=1, - ) - workflow.connect([ - (inputnode, resample_anat, [ - (anat, "in_file"), - ("template", "target_file"), - ]), - ]) # fmt:skip - - plot_anat_on_atlas_wf = init_plot_overlay_wf( - desc="AnatOnAtlas", - name=f"plot_{anat}_on_atlas_wf", - ) - workflow.connect([ - (inputnode, plot_anat_on_atlas_wf, [ - ("template", "inputnode.underlay_file"), - (anat, "inputnode.name_source"), - ]), - (resample_anat, plot_anat_on_atlas_wf, [("out_file", "inputnode.overlay_file")]), - ]) # fmt:skip - - plot_atlas_on_anat_wf = init_plot_overlay_wf( - desc="AtlasOnAnat", - name=f"plot_atlas_on_{anat}_wf", - ) - workflow.connect([ - (inputnode, plot_atlas_on_anat_wf, [ - ("template", "inputnode.overlay_file"), - (anat, "inputnode.name_source"), - ]), - (resample_anat, plot_atlas_on_anat_wf, [("out_file", "inputnode.underlay_file")]), - ]) # fmt:skip - - # TODO: Add subcortical overlay images as well. - # 1. Binarize atlas. - - return workflow - - -@fill_doc -def init_plot_custom_slices_wf( - output_dir, - desc, - name="plot_custom_slices_wf", -): - """Plot a custom selection of slices with Slicer. - - This workflow is used to produce subcortical registration plots specifically for - infant data. - - Workflow Graph - .. 
workflow:: - :graph2use: orig - :simple_form: yes - - from xcp_d.workflows.execsummary import init_plot_custom_slices_wf - - wf = init_plot_custom_slices_wf( - output_dir=".", - desc="AtlasOnSubcorticals", - name="plot_custom_slices_wf", - ) - - Parameters - ---------- - %(output_dir)s - desc : :obj:`str` - String to be used as ``desc`` entity in output filename. - %(name)s - Default is "plot_custom_slices_wf". - - Inputs - ------ - underlay_file - overlay_file - name_source - """ - # NOTE: These slices are almost certainly specific to a given MNI template and resolution. - SINGLE_SLICES = ["x", "x", "x", "y", "y", "y", "z", "z", "z"] - SLICE_NUMBERS = [36, 45, 52, 43, 54, 65, 23, 33, 39] - - workflow = Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "underlay_file", - "overlay_file", - "name_source", - ], - ), - name="inputnode", - ) - - # slices/slicer does not do well trying to make the red outline when it - # cannot find the edges, so cannot use the ROI files with some low intensities. 
- binarize_edges = pe.Node( - BinaryMath(expression="img.astype(bool).astype(int)"), - name="binarize_edges", - mem_gb=1, - ) - - workflow.connect([(inputnode, binarize_edges, [("overlay_file", "in_file")])]) - - make_image = pe.MapNode( - fsl.Slicer(show_orientation=True, label_slices=True), - name="make_image", - iterfield=["single_slice", "slice_number"], - mem_gb=1, - ) - make_image.inputs.single_slice = SINGLE_SLICES - make_image.inputs.slice_number = SLICE_NUMBERS - workflow.connect([ - (inputnode, make_image, [("underlay_file", "in_file")]), - (binarize_edges, make_image, [("out_file", "image_edges")]), - ]) # fmt:skip - - combine_images = pe.Node( - PNGAppend(out_file="out.png"), - name="combine_images", - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - - workflow.connect([(make_image, combine_images, [("out_file", "in_files")])]) - - ds_overlay_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc=desc, - extension=".png", - ), - name="ds_overlay_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - workflow.connect([ - (inputnode, ds_overlay_figure, [("name_source", "source_file")]), - (combine_images, ds_overlay_figure, [("out_file", "in_file")]), - ]) # fmt:skip - - return workflow - - -def init_plot_overlay_wf(desc, name="plot_overlay_wf"): - """Use the default slices from slicesdir to make a plot.""" - from xcp_d.interfaces.plotting import SlicesDir - - workflow = Workflow(name=name) - - output_dir = config.execution.xcp_d_dir - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "underlay_file", - "overlay_file", - "name_source", - ], - ), - name="inputnode", - ) - - plot_overlay_figure = pe.Node( - SlicesDir(out_extension=".png"), - name="plot_overlay_figure", - mem_gb=1, - ) - - workflow.connect([ - (inputnode, plot_overlay_figure, [ - ("underlay_file", "in_files"), - ("overlay_file", "outline_image"), - ]), - ]) # fmt:skip - - 
ds_overlay_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc=desc, - extension=".png", - ), - name="ds_overlay_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - - workflow.connect([ - (inputnode, ds_overlay_figure, [("name_source", "source_file")]), - (plot_overlay_figure, ds_overlay_figure, [("out_files", "in_file")]), - ]) # fmt:skip - - reformat_for_brain_swipes = pe.Node(FormatForBrainSwipes(), name="reformat_for_brain_swipes") - workflow.connect([ - (plot_overlay_figure, reformat_for_brain_swipes, [("slicewise_files", "in_files")]), - ]) # fmt:skip - - ds_reformatted_figure = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc=f"{desc}BrainSwipes", - extension=".png", - ), - name="ds_reformatted_figure", - run_without_submitting=True, - mem_gb=config.DEFAULT_MEMORY_MIN_GB, - ) - - workflow.connect([ - (inputnode, ds_reformatted_figure, [("name_source", "source_file")]), - (reformat_for_brain_swipes, ds_reformatted_figure, [("out_file", "in_file")]), - ]) # fmt:skip - - return workflow diff --git a/xcp_d/workflows/parcellation.py b/xcp_d/workflows/parcellation.py new file mode 100644 index 000000000..64b800a52 --- /dev/null +++ b/xcp_d/workflows/parcellation.py @@ -0,0 +1,392 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Workflows for parcellating imaging data.""" + +from nipype import Function, logging +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow + +from xcp_d import config +from xcp_d.interfaces.ants import ApplyTransforms +from xcp_d.interfaces.nilearn import IndexImage +from xcp_d.utils.atlas import get_atlas_cifti, get_atlas_nifti +from xcp_d.utils.doc import fill_doc +from xcp_d.utils.utils 
import get_std2bold_xfms + +LOGGER = logging.getLogger("nipype.workflow") + + +@fill_doc +def init_load_atlases_wf(name="load_atlases_wf"): + """Load atlases and warp them to the same space as the BOLD file. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.parcellation import init_load_atlases_wf + + with mock_config(): + wf = init_load_atlases_wf() + + Parameters + ---------- + %(name)s + Default is "load_atlases_wf". + + Inputs + ------ + %(name_source)s + bold_file + + Outputs + ------- + atlas_files + atlas_labels_files + """ + from xcp_d.interfaces.bids import CopyAtlas + + workflow = Workflow(name=name) + atlases = config.execution.atlases + output_dir = config.execution.xcp_d_dir + file_format = config.workflow.file_format + omp_nthreads = config.nipype.omp_nthreads + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "name_source", + "bold_file", + ], + ), + name="inputnode", + ) + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "atlas_files", + "atlas_labels_files", + ], + ), + name="outputnode", + ) + + # get atlases via load_data + atlas_file_grabber = pe.MapNode( + Function( + input_names=["atlas"], + output_names=["atlas_file", "atlas_labels_file", "atlas_metadata_file"], + function=get_atlas_cifti if file_format == "cifti" else get_atlas_nifti, + ), + name="atlas_file_grabber", + iterfield=["atlas"], + ) + atlas_file_grabber.inputs.atlas = atlases + + atlas_buffer = pe.Node(niu.IdentityInterface(fields=["atlas_file"]), name="atlas_buffer") + + if file_format == "nifti": + get_transforms_to_bold_space = pe.Node( + Function( + input_names=["bold_file"], + output_names=["transformfile"], + function=get_std2bold_xfms, + ), + name="get_transforms_to_bold_space", + ) + + workflow.connect([ + (inputnode, get_transforms_to_bold_space, [("name_source", "bold_file")]), + ]) # fmt:skip + + # ApplyTransforms needs a 3D
image for the reference image. + grab_first_volume = pe.Node( + IndexImage(index=0), + name="grab_first_volume", + ) + + workflow.connect([(inputnode, grab_first_volume, [("bold_file", "in_file")])]) + + # Using the generated transforms, apply them to get everything in the correct MNI form + warp_atlases_to_bold_space = pe.MapNode( + ApplyTransforms( + interpolation="GenericLabel", + input_image_type=3, + dimension=3, + ), + name="warp_atlases_to_bold_space", + iterfield=["input_image"], + mem_gb=2, + n_procs=omp_nthreads, + ) + + workflow.connect([ + (grab_first_volume, warp_atlases_to_bold_space, [("out_file", "reference_image")]), + (atlas_file_grabber, warp_atlases_to_bold_space, [("atlas_file", "input_image")]), + (get_transforms_to_bold_space, warp_atlases_to_bold_space, [ + ("transformfile", "transforms"), + ]), + (warp_atlases_to_bold_space, atlas_buffer, [("output_image", "atlas_file")]), + ]) # fmt:skip + + else: + workflow.connect([(atlas_file_grabber, atlas_buffer, [("atlas_file", "atlas_file")])]) + + ds_atlas = pe.MapNode( + CopyAtlas(output_dir=output_dir), + name="ds_atlas", + iterfield=["in_file", "atlas"], + run_without_submitting=True, + ) + ds_atlas.inputs.atlas = atlases + + workflow.connect([ + (inputnode, ds_atlas, [("name_source", "name_source")]), + (atlas_buffer, ds_atlas, [("atlas_file", "in_file")]), + (ds_atlas, outputnode, [("out_file", "atlas_files")]), + ]) # fmt:skip + + ds_atlas_labels_file = pe.MapNode( + CopyAtlas(output_dir=output_dir), + name="ds_atlas_labels_file", + iterfield=["in_file", "atlas"], + run_without_submitting=True, + ) + ds_atlas_labels_file.inputs.atlas = atlases + + workflow.connect([ + (inputnode, ds_atlas_labels_file, [("name_source", "name_source")]), + (atlas_file_grabber, ds_atlas_labels_file, [("atlas_labels_file", "in_file")]), + (ds_atlas_labels_file, outputnode, [("out_file", "atlas_labels_files")]), + ]) # fmt:skip + + ds_atlas_metadata = pe.MapNode( + CopyAtlas(output_dir=output_dir), + 
name="ds_atlas_metadata", + iterfield=["in_file", "atlas"], + run_without_submitting=True, + ) + ds_atlas_metadata.inputs.atlas = atlases + + workflow.connect([ + (inputnode, ds_atlas_metadata, [("name_source", "name_source")]), + (atlas_file_grabber, ds_atlas_metadata, [("atlas_metadata_file", "in_file")]), + ]) # fmt:skip + + return workflow + + +def init_parcellate_cifti_wf( + mem_gb, + compute_mask=True, + name="parcellate_cifti_wf", +): + """Parcellate a CIFTI file using a set of atlases. + + Part of the parcellation includes applying vertex-wise and node-wise masks. + + Vertex-wise masks are typically calculated from the full BOLD run, + wherein any vertex that has a time series of all zeros or NaNs is excluded. + Additionally, if *any* volumes in a vertex's time series are NaNs, + that vertex will be excluded. + + The node-wise mask is determined based on the vertex-wise mask and the workflow's + coverage threshold. + Any nodes in the atlas with less than the coverage threshold's % of vertices retained by the + vertex-wise mask will have that node's time series set to NaNs. + + Workflow Graph + .. workflow:: + :graph2use: orig + :simple_form: yes + + from xcp_d.tests.tests import mock_config + from xcp_d import config + from xcp_d.workflows.parcellation import init_parcellate_cifti_wf + + with mock_config(): + wf = init_parcellate_cifti_wf(mem_gb={"resampled": 2}) + + Parameters + ---------- + mem_gb : :obj:`dict` + Dictionary of memory allocations. + compute_mask : :obj:`bool` + Whether to compute a vertex-wise mask for the CIFTI file. + When processing full BOLD runs, this should be True. + When processing truncated BOLD runs or scalar maps, this should be False, + and the vertex-wise mask should be provided via the inputnode. + Default is True. + name : :obj:`str` + Workflow name. + Default is "parcellate_cifti_wf". + + Inputs + ------ + in_file + CIFTI file to parcellate. + atlas_files + List of CIFTI atlas files.
+ atlas_labels_files + List of TSV atlas labels files. + vertexwise_coverage + Vertex-wise coverage mask. + Only used if `compute_mask` is False. + coverage_cifti + Coverage CIFTI files. One for each atlas. + Only used if `compute_mask` is False. + + Outputs + ------- + parcellated_cifti + Parcellated CIFTI files. One for each atlas. + parcellated_tsv + Parcellated TSV files. One for each atlas. + vertexwise_coverage + Vertex-wise coverage mask. Only output if `compute_mask` is True. + coverage_cifti + Coverage CIFTI files. One for each atlas. Only output if `compute_mask` is True. + coverage_tsv + Coverage TSV files. One for each atlas. Only output if `compute_mask` is True. + """ + from xcp_d import config + from xcp_d.interfaces.connectivity import CiftiMask, CiftiToTSV, CiftiVertexMask + from xcp_d.interfaces.workbench import CiftiMath, CiftiParcellateWorkbench + + workflow = Workflow(name=name) + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "in_file", + "atlas_files", + "atlas_labels_files", + "vertexwise_coverage", + "coverage_cifti", + ], + ), + name="inputnode", + ) + + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "parcellated_cifti", + "parcellated_tsv", + "vertexwise_coverage", + "coverage_cifti", + "coverage_tsv", + ], + ), + name="outputnode", + ) + + # Replace vertices with all zeros with NaNs using Python. + coverage_buffer = pe.Node( + niu.IdentityInterface(fields=["vertexwise_coverage", "coverage_cifti"]), + name="coverage_buffer", + ) + if compute_mask: + # Write out a vertex-wise binary coverage map using Python. 
+ vertexwise_coverage = pe.Node( + CiftiVertexMask(), + name="vertexwise_coverage", + ) + workflow.connect([ + (inputnode, vertexwise_coverage, [("in_file", "in_file")]), + (vertexwise_coverage, coverage_buffer, [("mask_file", "vertexwise_coverage")]), + (vertexwise_coverage, outputnode, [("mask_file", "vertexwise_coverage")]), + ]) # fmt:skip + + parcellate_coverage = pe.MapNode( + CiftiParcellateWorkbench( + direction="COLUMN", + only_numeric=True, + out_file="parcellated_atlas.pscalar.nii", + ), + name="parcellate_coverage", + iterfield=["atlas_label"], + ) + workflow.connect([ + (inputnode, parcellate_coverage, [("atlas_files", "atlas_label")]), + (vertexwise_coverage, parcellate_coverage, [("mask_file", "in_file")]), + (parcellate_coverage, coverage_buffer, [("out_file", "coverage_cifti")]), + (parcellate_coverage, outputnode, [("out_file", "coverage_cifti")]), + ]) # fmt:skip + + coverage_to_tsv = pe.MapNode( + CiftiToTSV(), + name="coverage_to_tsv", + iterfield=["in_file", "atlas_labels"], + ) + workflow.connect([ + (inputnode, coverage_to_tsv, [("atlas_labels_files", "atlas_labels")]), + (parcellate_coverage, coverage_to_tsv, [("out_file", "in_file")]), + (coverage_to_tsv, outputnode, [("out_file", "coverage_tsv")]), + ]) # fmt:skip + else: + workflow.connect([ + (inputnode, coverage_buffer, [ + ("vertexwise_coverage", "vertexwise_coverage"), + ("coverage_cifti", "coverage_cifti"), + ]), + ]) # fmt:skip + + # Parcellate the data file using the vertex-wise coverage. 
+ parcellate_data = pe.MapNode( + CiftiParcellateWorkbench( + direction="COLUMN", + only_numeric=True, + out_file=f"parcellated_data.{'ptseries' if compute_mask else 'pscalar'}.nii", + ), + name="parcellate_data", + iterfield=["atlas_label"], + mem_gb=mem_gb["resampled"], + ) + workflow.connect([ + (inputnode, parcellate_data, [ + ("in_file", "in_file"), + ("atlas_files", "atlas_label"), + ]), + (coverage_buffer, parcellate_data, [("vertexwise_coverage", "cifti_weights")]), + ]) # fmt:skip + + # Threshold node coverage values based on coverage threshold. + threshold_coverage = pe.MapNode( + CiftiMath(expression=f"data > {config.workflow.min_coverage}"), + name="threshold_coverage", + iterfield=["data"], + mem_gb=mem_gb["resampled"], + ) + workflow.connect([(coverage_buffer, threshold_coverage, [("coverage_cifti", "data")])]) + + # Mask out uncovered nodes from parcellated denoised data + mask_parcellated_data = pe.MapNode( + CiftiMask(), + name="mask_parcellated_data", + iterfield=["in_file", "mask"], + mem_gb=mem_gb["resampled"], + ) + workflow.connect([ + (parcellate_data, mask_parcellated_data, [("out_file", "in_file")]), + (threshold_coverage, mask_parcellated_data, [("out_file", "mask")]), + (mask_parcellated_data, outputnode, [("out_file", "parcellated_cifti")]), + ]) # fmt:skip + + # Convert the parcellated CIFTI to a TSV file + cifti_to_tsv = pe.MapNode( + CiftiToTSV(), + name="cifti_to_tsv", + iterfield=["in_file", "atlas_labels"], + ) + workflow.connect([ + (inputnode, cifti_to_tsv, [("atlas_labels_files", "atlas_labels")]), + (mask_parcellated_data, cifti_to_tsv, [("out_file", "in_file")]), + (cifti_to_tsv, outputnode, [("out_file", "parcellated_tsv")]), + ]) # fmt:skip + + return workflow diff --git a/xcp_d/workflows/plotting.py b/xcp_d/workflows/plotting.py index b4e554e95..e9742fedc 100644 --- a/xcp_d/workflows/plotting.py +++ b/xcp_d/workflows/plotting.py @@ -1,480 +1,195 @@ -"""Plotting workflows.""" +"""Workflows for generating plots from imaging 
data.""" -from nipype import Function +from nipype.interfaces import fsl from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow -from templateflow.api import get as get_template from xcp_d import config -from xcp_d.interfaces.ants import ApplyTransforms from xcp_d.interfaces.bids import DerivativesDataSink -from xcp_d.interfaces.plotting import QCPlots, QCPlotsES -from xcp_d.interfaces.report import FunctionalSummary -from xcp_d.interfaces.utils import ABCCQC, LINCQC +from xcp_d.interfaces.execsummary import FormatForBrainSwipes +from xcp_d.interfaces.nilearn import BinaryMath +from xcp_d.interfaces.plotting import PNGAppend from xcp_d.utils.doc import fill_doc -from xcp_d.utils.utils import get_bold2std_and_t1w_xfms, get_std2bold_xfms + + +def init_plot_overlay_wf(desc, name="plot_overlay_wf"): + """Use the default slices from slicesdir to make a plot.""" + from xcp_d.interfaces.plotting import SlicesDir + + workflow = Workflow(name=name) + + output_dir = config.execution.xcp_d_dir + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "underlay_file", + "overlay_file", + "name_source", + ], + ), + name="inputnode", + ) + + plot_overlay_figure = pe.Node( + SlicesDir(out_extension=".png"), + name="plot_overlay_figure", + mem_gb=1, + ) + + workflow.connect([ + (inputnode, plot_overlay_figure, [ + ("underlay_file", "in_files"), + ("overlay_file", "outline_image"), + ]), + ]) # fmt:skip + + ds_overlay_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc=desc, + extension=".png", + ), + name="ds_overlay_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + + workflow.connect([ + (inputnode, ds_overlay_figure, [("name_source", "source_file")]), + (plot_overlay_figure, ds_overlay_figure, [("out_files", "in_file")]), + ]) # fmt:skip + + reformat_for_brain_swipes 
= pe.Node(FormatForBrainSwipes(), name="reformat_for_brain_swipes") + workflow.connect([ + (plot_overlay_figure, reformat_for_brain_swipes, [("slicewise_files", "in_files")]), + ]) # fmt:skip + + ds_reformatted_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc=f"{desc}BrainSwipes", + extension=".png", + ), + name="ds_reformatted_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + + workflow.connect([ + (inputnode, ds_reformatted_figure, [("name_source", "source_file")]), + (reformat_for_brain_swipes, ds_reformatted_figure, [("out_file", "in_file")]), + ]) # fmt:skip + + return workflow @fill_doc -def init_qc_report_wf( - TR, - head_radius, - name="qc_report_wf", +def init_plot_custom_slices_wf( + output_dir, + desc, + name="plot_custom_slices_wf", ): - """Generate quality control figures and a QC file. + """Plot a custom selection of slices with Slicer. + + This workflow is used to produce subcortical registration plots specifically for + infant data. Workflow Graph .. workflow:: :graph2use: orig :simple_form: yes - from xcp_d.tests.tests import mock_config - from xcp_d import config - from xcp_d.workflows.plotting import init_qc_report_wf + from xcp_d.workflows.plotting import init_plot_custom_slices_wf - with mock_config(): - wf = init_qc_report_wf( - TR=0.5, - head_radius=50, - name="qc_report_wf", - ) + wf = init_plot_custom_slices_wf( + output_dir=".", + desc="AtlasOnSubcorticals", + name="plot_custom_slices_wf", + ) Parameters ---------- - %(TR)s - %(head_radius)s + %(output_dir)s + desc : :obj:`str` + String to be used as ``desc`` entity in output filename. %(name)s - Default is "qc_report_wf". + Default is "plot_custom_slices_wf". Inputs ------ - %(name_source)s - preprocessed_bold - The preprocessed BOLD file, after dummy scan removal. - Used for carpet plots. - %(denoised_interpolated_bold)s - Used for DCAN carpet plots.
- Only used if abcc_qc is True. - %(censored_denoised_bold)s - Used for LINC carpet plots. - %(boldref)s - Only used with non-CIFTI data. - bold_mask - Path to the BOLD run's brain mask in the same space as ``preprocessed_bold``. - Only used with non-CIFTI data. - anat_brainmask - Path to the anatomical brain mask in the same standard space as ``bold_mask``. - Only used with non-CIFTI data. - %(template_to_anat_xfm)s - Only used with non-CIFTI data. - %(dummy_scans)s - %(fmriprep_confounds_file)s - %(temporal_mask)s - %(filtered_motion)s - - Outputs - ------- - qc_file + underlay_file + overlay_file + name_source """ - workflow = Workflow(name=name) + # NOTE: These slices are almost certainly specific to a given MNI template and resolution. + SINGLE_SLICES = ["x", "x", "x", "y", "y", "y", "z", "z", "z"] + SLICE_NUMBERS = [36, 45, 52, 43, 54, 65, 23, 33, 39] - output_dir = config.execution.xcp_d_dir - omp_nthreads = config.nipype.omp_nthreads + workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( fields=[ + "underlay_file", + "overlay_file", "name_source", - "preprocessed_bold", - "denoised_interpolated_bold", - "censored_denoised_bold", - "dummy_scans", - "fmriprep_confounds_file", - "filtered_motion", - "temporal_mask", - "run_index", # will only be set for concatenated data - # nifti-only inputs - "bold_mask", - "anat", # T1w/T2w image in anatomical space - "anat_brainmask", - "boldref", - "template_to_anat_xfm", ], ), name="inputnode", ) - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "qc_file", - ], - ), - name="outputnode", + # slices/slicer does not do well trying to make the red outline when it + # cannot find the edges, so cannot use the ROI files with some low intensities. 
+ binarize_edges = pe.Node( + BinaryMath(expression="img.astype(bool).astype(int)"), + name="binarize_edges", + mem_gb=1, ) - nlin2009casym_brain_mask = str( - get_template( - "MNI152NLin2009cAsym", - resolution=2, - desc="brain", - suffix="mask", - extension=[".nii", ".nii.gz"], - ) + workflow.connect([(inputnode, binarize_edges, [("overlay_file", "in_file")])]) + + make_image = pe.MapNode( + fsl.Slicer(show_orientation=True, label_slices=True), + name="make_image", + iterfield=["single_slice", "slice_number"], + mem_gb=1, + ) + make_image.inputs.single_slice = SINGLE_SLICES + make_image.inputs.slice_number = SLICE_NUMBERS + workflow.connect([ + (inputnode, make_image, [("underlay_file", "in_file")]), + (binarize_edges, make_image, [("out_file", "image_edges")]), + ]) # fmt:skip + + combine_images = pe.Node( + PNGAppend(out_file="out.png"), + name="combine_images", + mem_gb=config.DEFAULT_MEMORY_MIN_GB, ) - if config.workflow.file_format == "nifti": - # We need the BOLD mask in T1w and standard spaces for QC metric calculation. - # This is only possible for nifti inputs. 
- get_native2space_transforms = pe.Node( - Function( - input_names=["bold_file", "template_to_anat_xfm"], - output_names=[ - "bold_to_std_xfms", - "bold_to_std_xfms_invert", - "bold_to_t1w_xfms", - "bold_to_t1w_xfms_invert", - ], - function=get_bold2std_and_t1w_xfms, - ), - name="get_native2space_transforms", - ) - - workflow.connect([ - (inputnode, get_native2space_transforms, [ - ("name_source", "bold_file"), - ("template_to_anat_xfm", "template_to_anat_xfm"), - ]), - ]) # fmt:skip - - warp_boldmask_to_t1w = pe.Node( - ApplyTransforms( - dimension=3, - interpolation="NearestNeighbor", - ), - name="warp_boldmask_to_t1w", - n_procs=omp_nthreads, - mem_gb=1, - ) - workflow.connect([ - (inputnode, warp_boldmask_to_t1w, [ - ("bold_mask", "input_image"), - ("anat", "reference_image"), - ]), - (get_native2space_transforms, warp_boldmask_to_t1w, [ - ("bold_to_t1w_xfms", "transforms"), - ("bold_to_t1w_xfms_invert", "invert_transform_flags"), - ]), - ]) # fmt:skip - - warp_boldmask_to_mni = pe.Node( - ApplyTransforms( - dimension=3, - reference_image=nlin2009casym_brain_mask, - interpolation="NearestNeighbor", - ), - name="warp_boldmask_to_mni", - n_procs=omp_nthreads, - mem_gb=1, - ) - workflow.connect([ - (inputnode, warp_boldmask_to_mni, [("bold_mask", "input_image")]), - (get_native2space_transforms, warp_boldmask_to_mni, [ - ("bold_to_std_xfms", "transforms"), - ("bold_to_std_xfms_invert", "invert_transform_flags"), - ]), - ]) # fmt:skip - - # Warp the standard-space anatomical brain mask to the anatomical space - warp_anatmask_to_t1w = pe.Node( - ApplyTransforms( - dimension=3, - interpolation="NearestNeighbor", - ), - name="warp_anatmask_to_t1w", - n_procs=omp_nthreads, - mem_gb=1, - ) - workflow.connect([ - (inputnode, warp_anatmask_to_t1w, [ - ("bold_mask", "input_image"), - ("anat", "reference_image"), - ]), - (get_native2space_transforms, warp_anatmask_to_t1w, [ - ("bold_to_t1w_xfms", "transforms"), - ("bold_to_t1w_xfms_invert", "invert_transform_flags"), - ]), 
- ]) # fmt:skip - - # NIFTI files require a tissue-type segmentation in the same space as the BOLD data. - # Get the set of transforms from MNI152NLin6Asym (the dseg) to the BOLD space. - # Given that xcp-d doesn't process native-space data, this transform will never be used. - get_mni_to_bold_xfms = pe.Node( - Function( - input_names=["bold_file"], - output_names=["transform_list"], - function=get_std2bold_xfms, - ), - name="get_std2native_transform", - ) - workflow.connect([(inputnode, get_mni_to_bold_xfms, [("name_source", "bold_file")])]) - - # Use MNI152NLin2009cAsym tissue-type segmentation file for carpet plots. - dseg_file = str( - get_template( - "MNI152NLin2009cAsym", - resolution=1, - desc="carpet", - suffix="dseg", - extension=[".nii", ".nii.gz"], - ) - ) - - # Get MNI152NLin2009cAsym --> MNI152NLin6Asym xform. - MNI152NLin2009cAsym_to_MNI152NLin6Asym = str( - get_template( - template="MNI152NLin6Asym", - mode="image", - suffix="xfm", - extension=".h5", - **{"from": "MNI152NLin2009cAsym"}, - ), - ) - - # Add the MNI152NLin2009cAsym --> MNI152NLin6Asym xform to the end of the - # BOLD --> MNI152NLin6Asym xform list, because xforms are applied in reverse order. - add_xfm_to_nlin6asym = pe.Node( - niu.Merge(2), - name="add_xfm_to_nlin6asym", - ) - add_xfm_to_nlin6asym.inputs.in2 = MNI152NLin2009cAsym_to_MNI152NLin6Asym - - workflow.connect([ - (get_mni_to_bold_xfms, add_xfm_to_nlin6asym, [("transform_list", "in1")]), - ]) # fmt:skip - - # Transform MNI152NLin2009cAsym dseg file to the same space as the BOLD data. 
- warp_dseg_to_bold = pe.Node( - ApplyTransforms( - dimension=3, - input_image=dseg_file, - interpolation="GenericLabel", - ), - name="warp_dseg_to_bold", - n_procs=omp_nthreads, - mem_gb=3, - ) - workflow.connect([ - (inputnode, warp_dseg_to_bold, [("boldref", "reference_image")]), - (add_xfm_to_nlin6asym, warp_dseg_to_bold, [("out", "transforms")]), - ]) # fmt:skip - - if config.workflow.linc_qc: - make_linc_qc = pe.Node( - LINCQC( - TR=TR, - head_radius=head_radius, - template_mask=nlin2009casym_brain_mask, - ), - name="make_linc_qc", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, make_linc_qc, [ - ("name_source", "name_source"), - ("preprocessed_bold", "bold_file"), - ("censored_denoised_bold", "cleaned_file"), - ("fmriprep_confounds_file", "fmriprep_confounds_file"), - ("temporal_mask", "temporal_mask"), - ("dummy_scans", "dummy_scans"), - ]), - (make_linc_qc, outputnode, [("qc_file", "qc_file")]), - ]) # fmt:skip - - if config.workflow.file_format == "nifti": - workflow.connect([ - (inputnode, make_linc_qc, [("bold_mask", "bold_mask_inputspace")]), - (warp_boldmask_to_t1w, make_linc_qc, [("output_image", "bold_mask_anatspace")]), - (warp_boldmask_to_mni, make_linc_qc, [("output_image", "bold_mask_stdspace")]), - (warp_anatmask_to_t1w, make_linc_qc, [("output_image", "anat_mask_anatspace")]), - ]) # fmt:skip - else: - make_linc_qc.inputs.bold_mask_inputspace = None - - ds_qc_metadata = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=list(DerivativesDataSink._allowed_entities), - allowed_entities=["desc"], - desc="linc", - suffix="qc", - extension=".json", - ), - name="ds_qc_metadata", - run_without_submitting=True, - ) - workflow.connect([ - (inputnode, ds_qc_metadata, [("name_source", "source_file")]), - (make_linc_qc, ds_qc_metadata, [("qc_metadata", "in_file")]), - ]) # fmt:skip - - make_qc_plots_nipreps = pe.Node( - QCPlots(TR=TR, head_radius=head_radius), - name="make_qc_plots_nipreps", - 
mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, make_qc_plots_nipreps, [ - ("preprocessed_bold", "bold_file"), - ("censored_denoised_bold", "cleaned_file"), - ("fmriprep_confounds_file", "fmriprep_confounds_file"), - ("temporal_mask", "temporal_mask"), - ]), - ]) # fmt:skip - - if config.workflow.file_format == "nifti": - workflow.connect([ - (inputnode, make_qc_plots_nipreps, [("bold_mask", "mask_file")]), - (warp_dseg_to_bold, make_qc_plots_nipreps, [("output_image", "seg_file")]), - ]) # fmt:skip - else: - make_qc_plots_nipreps.inputs.mask_file = None - - ds_preproc_qc_plot_nipreps = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - desc="preprocessing", - datatype="figures", - ), - name="ds_preproc_qc_plot_nipreps", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_preproc_qc_plot_nipreps, [("name_source", "source_file")]), - (make_qc_plots_nipreps, ds_preproc_qc_plot_nipreps, [("raw_qcplot", "in_file")]), - ]) # fmt:skip - - ds_postproc_qc_plot_nipreps = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - desc="postprocessing", - datatype="figures", - ), - name="ds_postproc_qc_plot_nipreps", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_postproc_qc_plot_nipreps, [("name_source", "source_file")]), - (make_qc_plots_nipreps, ds_postproc_qc_plot_nipreps, [("clean_qcplot", "in_file")]), - ]) # fmt:skip - - functional_qc = pe.Node( - FunctionalSummary(TR=TR), - name="qcsummary", - run_without_submitting=False, - mem_gb=2, - ) - workflow.connect([ - (inputnode, functional_qc, [("name_source", "bold_file")]), - (make_linc_qc, functional_qc, [("qc_file", "qc_file")]), - ]) # fmt:skip - - ds_report_qualitycontrol = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - desc="qualitycontrol", - datatype="figures", - ), - name="ds_report_qualitycontrol", - run_without_submitting=False, - ) - workflow.connect([ - (inputnode, ds_report_qualitycontrol, 
[("name_source", "source_file")]), - (functional_qc, ds_report_qualitycontrol, [("out_report", "in_file")]), - ]) # fmt:skip - else: - # Need to explicitly add the outputnode to the workflow, since it's not set otherwise. - workflow.add_nodes([outputnode]) - - if config.workflow.abcc_qc: - make_abcc_qc = pe.Node( - ABCCQC(TR=TR), - name="make_abcc_qc", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([(inputnode, make_abcc_qc, [("filtered_motion", "filtered_motion")])]) - - ds_abcc_qc = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - datatype="func", - desc="abcc", - suffix="qc", - extension="hdf5", - ), - name="ds_abcc_qc", - run_without_submitting=True, - ) - workflow.connect([ - (inputnode, ds_abcc_qc, [("name_source", "source_file")]), - (make_abcc_qc, ds_abcc_qc, [("qc_file", "in_file")]), - ]) # fmt:skip - - # Generate preprocessing and postprocessing carpet plots. - make_qc_plots_es = pe.Node( - QCPlotsES(TR=TR, standardize=config.workflow.params == "none"), - name="make_qc_plots_es", - mem_gb=2, - n_procs=omp_nthreads, - ) - workflow.connect([ - (inputnode, make_qc_plots_es, [ - ("preprocessed_bold", "preprocessed_bold"), - ("denoised_interpolated_bold", "denoised_interpolated_bold"), - ("filtered_motion", "filtered_motion"), - ("temporal_mask", "temporal_mask"), - ("run_index", "run_index"), - ]), - ]) # fmt:skip - - if config.workflow.file_format == "nifti": - workflow.connect([ - (inputnode, make_qc_plots_es, [("bold_mask", "mask")]), - (warp_dseg_to_bold, make_qc_plots_es, [("output_image", "seg_data")]), - ]) # fmt:skip - - ds_preproc_qc_plot_es = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc="preprocESQC", - ), - name="ds_preproc_qc_plot_es", - run_without_submitting=True, - ) - workflow.connect([ - (inputnode, ds_preproc_qc_plot_es, [("name_source", "source_file")]), - (make_qc_plots_es, ds_preproc_qc_plot_es, [("before_process", "in_file")]), - ]) # 
fmt:skip - - ds_postproc_qc_plot_es = pe.Node( - DerivativesDataSink( - base_directory=output_dir, - dismiss_entities=["den"], - datatype="figures", - desc="postprocESQC", - ), - name="ds_postproc_qc_plot_es", - run_without_submitting=True, - ) - workflow.connect([ - (inputnode, ds_postproc_qc_plot_es, [("name_source", "source_file")]), - (make_qc_plots_es, ds_postproc_qc_plot_es, [("after_process", "in_file")]), - ]) # fmt:skip + workflow.connect([(make_image, combine_images, [("out_file", "in_files")])]) + + ds_overlay_figure = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + dismiss_entities=["den"], + datatype="figures", + desc=desc, + extension=".png", + ), + name="ds_overlay_figure", + run_without_submitting=True, + mem_gb=config.DEFAULT_MEMORY_MIN_GB, + ) + workflow.connect([ + (inputnode, ds_overlay_figure, [("name_source", "source_file")]), + (combine_images, ds_overlay_figure, [("out_file", "in_file")]), + ]) # fmt:skip return workflow