From 260d83ec30ab24843a6de67fb905bd9f5e2d79b0 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Mon, 4 Mar 2024 07:37:19 +1100 Subject: [PATCH 01/47] added __all__ attribute to init --- nipype-auto-conv/generate | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nipype-auto-conv/generate b/nipype-auto-conv/generate index c4be575..79f9993 100755 --- a/nipype-auto-conv/generate +++ b/nipype-auto-conv/generate @@ -35,6 +35,7 @@ auto_dir = PKG_ROOT / "pydra" / "tasks" / PKG_NAME / "auto" if auto_dir.exists(): shutil.rmtree(auto_dir) +all_interfaces = [] for fspath in sorted(SPECS_DIR.glob("**/*.yaml")): with open(fspath) as f: spec = yaml.load(f, Loader=yaml.SafeLoader) @@ -57,6 +58,7 @@ for fspath in sorted(SPECS_DIR.glob("**/*.yaml")): ) converter.generate(PKG_ROOT) auto_init += f"from .{module_name} import {converter.task_name}\n" + all_interfaces.append(converter.task_name) with open(PKG_ROOT / "pydra" / "tasks" / PKG_NAME / "auto" / "_version.py", "w") as f: From 5ede57c50363921bf401900b337f3ac03010e116 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 20 Mar 2024 08:45:28 +1100 Subject: [PATCH 02/47] updated actions versions --- .github/workflows/ci-cd.yaml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index cf4ec37..42c8af7 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -21,12 +21,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Revert version to most recent tag on upstream update if: github.event_name == 'repository_dispatch' run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 - name: Install build dependencies run: python -m pip install --upgrade pip - name: Install requirements @@ -50,7 +50,7 @@ jobs: - '--editable git+https://github.com/nipype/pydra.git#egg=pydra' steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Revert version to most recent tag on upstream update if: github.event_name == 'repository_dispatch' run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') @@ -63,7 +63,7 @@ jobs: run: | sed -i '/\/pydra\/tasks\/anatomical\/auto/d' .gitignore - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install build dependencies @@ -90,12 +90,12 @@ jobs: matrix: python-version: ['3.8', '3.11'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Revert version to most recent tag on upstream update if: github.event_name == 'repository_dispatch' run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install build dependencies @@ -149,7 +149,7 @@ jobs: source $FREESURFER_HOME/SetUpFreeSurfer.sh echo $FREESURFER_LICENCE > $FREESURFER_HOME/license.txt export PATH=$FREESURFER_HOME/bin:$PATH - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Revert version to most recent tag on upstream update if: github.event_name == 'repository_dispatch' run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') @@ -162,7 +162,7 @@ jobs: run: | sed -i 
'/\/src\/pydra\/tasks\/anatomical\/auto/d' .gitignore - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install build dependencies @@ -187,12 +187,12 @@ jobs: needs: [devcheck, test] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' - name: Install build tools @@ -219,12 +219,12 @@ jobs: needs: [deploy-fileformats] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' - name: Install build tools @@ -251,7 +251,7 @@ jobs: needs: [deploy-fileformats-extras] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: recursive fetch-depth: 0 @@ -270,7 +270,7 @@ jobs: git commit -am"added auto-generated version to make new tag for package version" git tag ${TAG}post${POST} - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.11' - name: Install build tools From 40d08d3158cb7fcd63773d2e48eeb0bc0174298b Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 17 Apr 2024 09:19:58 +1000 Subject: [PATCH 03/47] refreshed generated package --- .gitignore | 3 +- AUTHORS | 1 + NOTICE | 6 + README.md | 2 - README.rst | 51 +++--- docs/conf.py | 2 +- docs/index.rst | 2 +- nipype-auto-conv/generate | 78 +-------- nipype-auto-conv/requirements.txt | 10 -- .../specs/add_provenance_callables.py | 1 - .../specs/artifact_mask_callables.py | 1 - .../specs/compute_qi2_callables.py | 1 - .../specs/conform_image_callables.py | 1 - .../specs/correct_signal_drift_callables.py | 1 - nipype-auto-conv/specs/dipy_dti_callables.py | 1 - .../specs/ensure_size_callables.py | 1 - .../specs/extract_b0_callables.py | 1 - .../specs/filter_shells_callables.py | 1 - .../specs/functional_qc_callables.py | 1 - .../specs/gather_timeseries_callables.py | 1 - nipype-auto-conv/specs/gcor_callables.py | 1 - nipype-auto-conv/specs/harmonize_callables.py | 1 - .../{ => interfaces}/add_provenance.yaml | 11 +- .../interfaces/add_provenance_callables.py | 13 ++ .../specs/{ => interfaces}/artifact_mask.yaml | 17 +- .../interfaces/artifact_mask_callables.py | 27 +++ .../specs/interfaces/cc_segmentation.yaml | 87 +++++++++ .../interfaces/cc_segmentation_callables.py | 27 +++ .../specs/{ => interfaces}/compute_qi2.yaml | 11 +- .../specs/interfaces/compute_qi2_callables.py | 20 +++ .../specs/interfaces/conform_image.yaml | 130 ++++++++++++++ .../interfaces/conform_image_callables.py | 13 ++ .../correct_signal_drift.yaml | 13 +- .../correct_signal_drift_callables.py | 34 ++++ .../datalad_identity_interface.yaml} | 23 +-- .../datalad_identity_interface_callables.py | 6 + .../interfaces/derivatives_data_sink.yaml | 91 ++++++++++ .../derivatives_data_sink_callables.py | 34 ++++ .../specs/interfaces/diffusion_model.yaml | 99 +++++++++++ .../interfaces/diffusion_model_callables.py | 41 +++++ .../specs/interfaces/diffusion_qc.yaml | 165 ++++++++++++++++++ .../interfaces/diffusion_qc_callables.py | 90 ++++++++++ .../specs/{ => interfaces}/ensure_size.yaml | 7 +- .../specs/interfaces/ensure_size_callables.py | 20 +++ .../extract_orientations.yaml} | 
39 ++--- .../extract_orientations_callables.py | 20 +++ .../specs/{ => interfaces}/filter_shells.yaml | 17 +- .../interfaces/filter_shells_callables.py | 34 ++++ .../specs/{ => interfaces}/functional_qc.yaml | 41 ++++- .../interfaces/functional_qc_callables.py | 90 ++++++++++ .../{ => interfaces}/gather_timeseries.yaml | 7 +- .../interfaces/gather_timeseries_callables.py | 20 +++ .../specs/{ => interfaces}/gcor.yaml | 13 +- .../specs/interfaces/gcor_callables.py | 18 ++ .../specs/{ => interfaces}/harmonize.yaml | 5 +- .../specs/interfaces/harmonize_callables.py | 13 ++ .../specs/interfaces/iqm_file_sink.yaml | 99 +++++++++++ .../interfaces/iqm_file_sink_callables.py | 13 ++ .../{ => interfaces}/number_of_shells.yaml | 11 +- .../interfaces/number_of_shells_callables.py | 55 ++++++ .../piesno.yaml} | 31 ++-- .../specs/interfaces/piesno_callables.py | 20 +++ .../{ => interfaces}/read_dwi_metadata.yaml | 25 ++- .../interfaces/read_dwi_metadata_callables.py | 90 ++++++++++ .../specs/interfaces/rotate_vectors.yaml | 77 ++++++++ .../interfaces/rotate_vectors_callables.py | 20 +++ .../specs/{ => interfaces}/rotation_mask.yaml | 5 +- .../interfaces/rotation_mask_callables.py | 13 ++ .../specs/{ => interfaces}/select_echo.yaml | 9 +- .../specs/interfaces/select_echo_callables.py | 27 +++ .../specs/{ => interfaces}/spikes.yaml | 21 ++- .../specs/interfaces/spikes_callables.py | 27 +++ .../specs/interfaces/spiking_voxels_mask.yaml | 79 +++++++++ .../spiking_voxels_mask_callables.py | 13 ++ .../specs/{ => interfaces}/split_shells.yaml | 7 +- .../interfaces/split_shells_callables.py | 13 ++ .../specs/{ => interfaces}/structural_qc.yaml | 63 +++++-- .../interfaces/structural_qc_callables.py | 132 ++++++++++++++ .../specs/interfaces/synth_strip.yaml | 101 +++++++++++ .../specs/interfaces/synth_strip_callables.py | 151 ++++++++++++++++ .../specs/{ => interfaces}/upload_iq_ms.yaml | 7 +- .../interfaces/upload_iq_ms_callables.py | 13 ++ .../specs/{ => interfaces}/weighted_stat.yaml | 5 +- .../interfaces/weighted_stat_callables.py | 13 ++ .../specs/number_of_shells_callables.py | 1 - nipype-auto-conv/specs/package.yaml | 8 + .../specs/read_dwi_metadata_callables.py | 1 - .../specs/rotation_mask_callables.py | 1 - .../specs/select_echo_callables.py | 1 - nipype-auto-conv/specs/spikes_callables.py | 1 - .../specs/split_shells_callables.py | 1 - .../specs/structural_qc_callables.py | 1 - .../specs/upload_iq_ms_callables.py | 1 - .../specs/weighted_stat_callables.py | 1 - ...c.workflows.anatomical.base.airmsk_wf.yaml | 20 +++ ...lows.anatomical.base.anat_qc_workflow.yaml | 20 +++ ...orkflows.anatomical.base.compute_iqms.yaml | 20 +++ ....workflows.anatomical.base.headmsk_wf.yaml | 20 +++ ...l.base.init_brain_tissue_segmentation.yaml | 20 +++ ...anatomical.base.spatial_normalization.yaml | 20 +++ ...anatomical.output.init_anat_report_wf.yaml | 20 +++ ...workflows.diffusion.base.compute_iqms.yaml | 18 ++ ...flows.diffusion.base.dmri_qc_workflow.yaml | 18 ++ ...orkflows.diffusion.base.epi_mni_align.yaml | 20 +++ ...workflows.diffusion.base.hmc_workflow.yaml | 20 +++ ...s.diffusion.output.init_dwi_report_wf.yaml | 27 +++ ...orkflows.functional.base.compute_iqms.yaml | 18 ++ ...rkflows.functional.base.epi_mni_align.yaml | 20 +++ ...ws.functional.base.fmri_bmsk_workflow.yaml | 20 +++ ...lows.functional.base.fmri_qc_workflow.yaml | 20 +++ .../mriqc.workflows.functional.base.hmc.yaml | 20 +++ ...functional.output.init_func_report_wf.yaml | 20 +++ .../mriqc.workflows.shared.synthstrip_wf.yaml | 20 +++ 
pydra/tasks/mriqc/__init__.py | 37 ---- pydra/tasks/mriqc/latest.py | 3 - pydra/tasks/mriqc/v23_2/__init__.py | 0 pyproject.toml | 1 + related-packages/conftest.py | 37 ---- related-packages/fileformats-extras/LICENSE | 13 -- .../fileformats-extras/README.rst | 29 --- .../extras/medimage_anatomical/__init__.py | 7 - .../fileformats-extras/pyproject.toml | 87 --------- related-packages/fileformats/LICENSE | 13 -- related-packages/fileformats/README.rst | 39 ----- .../medimage_anatomical/__init__.py | 1 - related-packages/fileformats/pyproject.toml | 84 --------- report_progress.py | 31 ++++ tools/increment_tool_version.py | 69 ++++++++ tools/rename_template.py | 46 +++++ tools/requirements.txt | 3 + 130 files changed, 2897 insertions(+), 605 deletions(-) create mode 100644 AUTHORS create mode 100644 NOTICE delete mode 100644 README.md delete mode 100644 nipype-auto-conv/specs/add_provenance_callables.py delete mode 100644 nipype-auto-conv/specs/artifact_mask_callables.py delete mode 100644 nipype-auto-conv/specs/compute_qi2_callables.py delete mode 100644 nipype-auto-conv/specs/conform_image_callables.py delete mode 100644 nipype-auto-conv/specs/correct_signal_drift_callables.py delete mode 100644 nipype-auto-conv/specs/dipy_dti_callables.py delete mode 100644 nipype-auto-conv/specs/ensure_size_callables.py delete mode 100644 nipype-auto-conv/specs/extract_b0_callables.py delete mode 100644 nipype-auto-conv/specs/filter_shells_callables.py delete mode 100644 nipype-auto-conv/specs/functional_qc_callables.py delete mode 100644 nipype-auto-conv/specs/gather_timeseries_callables.py delete mode 100644 nipype-auto-conv/specs/gcor_callables.py delete mode 100644 nipype-auto-conv/specs/harmonize_callables.py rename nipype-auto-conv/specs/{ => interfaces}/add_provenance.yaml (91%) create mode 100644 nipype-auto-conv/specs/interfaces/add_provenance_callables.py rename nipype-auto-conv/specs/{ => interfaces}/artifact_mask.yaml (93%) create mode 100644 nipype-auto-conv/specs/interfaces/artifact_mask_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/cc_segmentation.yaml create mode 100644 nipype-auto-conv/specs/interfaces/cc_segmentation_callables.py rename nipype-auto-conv/specs/{ => interfaces}/compute_qi2.yaml (90%) create mode 100644 nipype-auto-conv/specs/interfaces/compute_qi2_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/conform_image.yaml create mode 100644 nipype-auto-conv/specs/interfaces/conform_image_callables.py rename nipype-auto-conv/specs/{ => interfaces}/correct_signal_drift.yaml (87%) create mode 100644 nipype-auto-conv/specs/interfaces/correct_signal_drift_callables.py rename nipype-auto-conv/specs/{extract_b0.yaml => interfaces/datalad_identity_interface.yaml} (83%) create mode 100644 nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/derivatives_data_sink.yaml create mode 100644 nipype-auto-conv/specs/interfaces/derivatives_data_sink_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/diffusion_model.yaml create mode 100644 nipype-auto-conv/specs/interfaces/diffusion_model_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/diffusion_qc.yaml create mode 100644 nipype-auto-conv/specs/interfaces/diffusion_qc_callables.py rename nipype-auto-conv/specs/{ => interfaces}/ensure_size.yaml (91%) create mode 100644 nipype-auto-conv/specs/interfaces/ensure_size_callables.py rename nipype-auto-conv/specs/{dipy_dti.yaml => 
interfaces/extract_orientations.yaml} (77%) create mode 100644 nipype-auto-conv/specs/interfaces/extract_orientations_callables.py rename nipype-auto-conv/specs/{ => interfaces}/filter_shells.yaml (92%) create mode 100644 nipype-auto-conv/specs/interfaces/filter_shells_callables.py rename nipype-auto-conv/specs/{ => interfaces}/functional_qc.yaml (82%) create mode 100644 nipype-auto-conv/specs/interfaces/functional_qc_callables.py rename nipype-auto-conv/specs/{ => interfaces}/gather_timeseries.yaml (91%) create mode 100644 nipype-auto-conv/specs/interfaces/gather_timeseries_callables.py rename nipype-auto-conv/specs/{ => interfaces}/gcor.yaml (90%) create mode 100644 nipype-auto-conv/specs/interfaces/gcor_callables.py rename nipype-auto-conv/specs/{ => interfaces}/harmonize.yaml (92%) create mode 100644 nipype-auto-conv/specs/interfaces/harmonize_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml create mode 100644 nipype-auto-conv/specs/interfaces/iqm_file_sink_callables.py rename nipype-auto-conv/specs/{ => interfaces}/number_of_shells.yaml (87%) create mode 100644 nipype-auto-conv/specs/interfaces/number_of_shells_callables.py rename nipype-auto-conv/specs/{conform_image.yaml => interfaces/piesno.yaml} (77%) create mode 100644 nipype-auto-conv/specs/interfaces/piesno_callables.py rename nipype-auto-conv/specs/{ => interfaces}/read_dwi_metadata.yaml (84%) create mode 100644 nipype-auto-conv/specs/interfaces/read_dwi_metadata_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/rotate_vectors.yaml create mode 100644 nipype-auto-conv/specs/interfaces/rotate_vectors_callables.py rename nipype-auto-conv/specs/{ => interfaces}/rotation_mask.yaml (91%) create mode 100644 nipype-auto-conv/specs/interfaces/rotation_mask_callables.py rename nipype-auto-conv/specs/{ => interfaces}/select_echo.yaml (88%) create mode 100644 nipype-auto-conv/specs/interfaces/select_echo_callables.py rename nipype-auto-conv/specs/{ => interfaces}/spikes.yaml (91%) create mode 100644 nipype-auto-conv/specs/interfaces/spikes_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/spiking_voxels_mask.yaml create mode 100644 nipype-auto-conv/specs/interfaces/spiking_voxels_mask_callables.py rename nipype-auto-conv/specs/{ => interfaces}/split_shells.yaml (89%) create mode 100644 nipype-auto-conv/specs/interfaces/split_shells_callables.py rename nipype-auto-conv/specs/{ => interfaces}/structural_qc.yaml (79%) create mode 100644 nipype-auto-conv/specs/interfaces/structural_qc_callables.py create mode 100644 nipype-auto-conv/specs/interfaces/synth_strip.yaml create mode 100644 nipype-auto-conv/specs/interfaces/synth_strip_callables.py rename nipype-auto-conv/specs/{ => interfaces}/upload_iq_ms.yaml (89%) create mode 100644 nipype-auto-conv/specs/interfaces/upload_iq_ms_callables.py rename nipype-auto-conv/specs/{ => interfaces}/weighted_stat.yaml (92%) create mode 100644 nipype-auto-conv/specs/interfaces/weighted_stat_callables.py delete mode 100644 nipype-auto-conv/specs/number_of_shells_callables.py create mode 100644 nipype-auto-conv/specs/package.yaml delete mode 100644 nipype-auto-conv/specs/read_dwi_metadata_callables.py delete mode 100644 nipype-auto-conv/specs/rotation_mask_callables.py delete mode 100644 nipype-auto-conv/specs/select_echo_callables.py delete mode 100644 nipype-auto-conv/specs/spikes_callables.py delete mode 100644 nipype-auto-conv/specs/split_shells_callables.py delete mode 100644 nipype-auto-conv/specs/structural_qc_callables.py delete mode 
100644 nipype-auto-conv/specs/upload_iq_ms_callables.py delete mode 100644 nipype-auto-conv/specs/weighted_stat_callables.py create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml delete mode 100644 pydra/tasks/mriqc/__init__.py delete mode 100644 pydra/tasks/mriqc/latest.py delete mode 100644 pydra/tasks/mriqc/v23_2/__init__.py delete mode 100644 related-packages/conftest.py delete mode 100644 related-packages/fileformats-extras/LICENSE delete mode 100644 related-packages/fileformats-extras/README.rst delete mode 100644 related-packages/fileformats-extras/fileformats/extras/medimage_anatomical/__init__.py delete mode 100644 related-packages/fileformats-extras/pyproject.toml delete mode 100644 related-packages/fileformats/LICENSE delete mode 100644 related-packages/fileformats/README.rst delete mode 100644 related-packages/fileformats/fileformats/medimage_anatomical/__init__.py delete mode 100644 related-packages/fileformats/pyproject.toml create mode 100644 report_progress.py create mode 100755 tools/increment_tool_version.py create mode 100755 tools/rename_template.py create mode 100644 tools/requirements.txt diff --git a/.gitignore b/.gitignore index 34bd660..970d75d 100644 --- a/.gitignore +++ b/.gitignore @@ -137,5 +137,4 @@ dmypy.json # Mac garbarge .DS_store -/pydra/tasks/mriqc/auto -/pydra/tasks/mriqc/_version.py +/pydra diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..180b937 --- /dev/null +++ b/AUTHORS @@ -0,0 +1 @@ +# Enter list of names and emails of contributors to this package \ No newline at end of file diff --git a/NOTICE b/NOTICE new file mode 100644 index 
0000000..fd1b4f4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,6 @@ +Pydra-mriqc +Copyright 2024 Pydra Development Team + +The bases for the task interfaces defined in this package were semi-automatically converted +from Nipype interfaces (https://github.com/nipy/nipype) using the Nipype2Pydra tool +(https://github.com/nipype/nipype2pydra). diff --git a/README.md b/README.md deleted file mode 100644 index 7f28f39..0000000 --- a/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# pydra-mriqc -pydra version of nipreps/mriqc diff --git a/README.rst b/README.rst index bd41ece..685dd2a 100644 --- a/README.rst +++ b/README.rst @@ -1,20 +1,20 @@ -=============================== -Pydra task package for anatomical -=============================== - -.. image:: https://github.com/nipype/pydra-anatomical/actions/workflows/pythonpackage.yaml/badge.svg - :target: https://github.com/nipype/pydra-anatomical/actions/workflows/pythonpackage.yaml -.. .. image:: https://codecov.io/gh/nipype/pydra-anatomical/branch/main/graph/badge.svg?token=UIS0OGPST7 -.. :target: https://codecov.io/gh/nipype/pydra-anatomical -.. image:: https://img.shields.io/pypi/pyversions/pydra-anatomical.svg - :target: https://pypi.python.org/pypi/pydra-anatomical/ +============================ +Pydra task package for mriqc +============================ + +.. image:: https://github.com/nipype/pydra-mriqc/actions/workflows/ci-cd.yaml/badge.svg + :target: https://github.com/nipype/pydra-mriqc/actions/workflows/ci-cd.yaml +.. image:: https://codecov.io/gh/nipype/pydra-mriqc/branch/main/graph/badge.svg?token=UIS0OGPST7 + :target: https://codecov.io/gh/nipype/pydra-mriqc +.. image:: https://img.shields.io/pypi/pyversions/pydra-mriqc.svg + :target: https://pypi.python.org/pypi/pydra-mriqc/ :alt: Supported Python versions -.. image:: https://img.shields.io/pypi/v/pydra-anatomical.svg - :target: https://pypi.python.org/pypi/pydra-anatomical/ +.. image:: https://img.shields.io/pypi/v/pydra-mriqc.svg + :target: https://pypi.python.org/pypi/pydra-mriqc/ :alt: Latest Version -This package contains a collection of Pydra task interfaces for the anatomical toolkit. +This package contains a collection of Pydra task interfaces for the mriqc toolkit. The basis of this collection has been formed by the semi-automatic conversion of existing `Nipype `__ interfaces to Pydra using the `Nipype2Pydra `__ tool @@ -23,10 +23,10 @@ existing `Nipype `__ interfaces to Pydra using t Automatically-generated vs manually-curated tasks ------------------------------------------------- -Automatically generated tasks can be found in the `pydra.tasks.anatomical.auto` package. +Automatically generated tasks can be found in the `pydra.tasks.mriqc.auto` package. These packages should be treated with extreme caution as they likely do not pass testing. Generated tasks that have been edited and pass testing are imported into one or more of the -`pydra.tasks.anatomical.v*` packages, corresponding to the version of the anatomical toolkit +`pydra.tasks.mriqc.v*` packages, corresponding to the version of the mriqc toolkit they are designed for. Tests @@ -71,6 +71,14 @@ Contributing to this package Developer installation ~~~~~~~~~~~~~~~~~~~~~~ +Install the `fileformats `__ packages +corresponding to MRIQC-specific file formats + + +.. code-block:: + + $ pip install -e ./related-packages/fileformats[dev] + $ pip install -e ./related-packages/fileformats-extras[dev] Install repo in developer mode from the source directory and install pre-commit to ensure consistent code-style and quality.
@@ -78,7 +86,7 @@ ensure consistent code-style and quality. .. code-block:: $ pip install -e .[test,dev] -$ pre-commit install + $ pre-commit install Next install the requirements for running the auto-conversion script and generate the Pydra task interfaces from their Nipype counterparts @@ -93,7 +101,8 @@ Then run the conversion script to convert Nipype interfaces to Pydra $ nipype-auto-conv/generate -## Methodology +Methodology +~~~~~~~~~~~ The development of this package is expected to have two phases @@ -111,7 +120,7 @@ The auto-converted Pydra tasks are generated from their corresponding Nipype int in combination with "conversion hints" contained in YAML specs located in `nipype-auto-conv/specs/`. The self-documented conversion specs are to be edited by hand in order to help the auto-converter produce valid pydra tasks. -After editing one or more conversion specs the `pydra.tasks.anatomical.auto` package should +After editing one or more conversion specs the `pydra.tasks.mriqc.auto` package should be regenerated by running .. code-block:: @@ -122,15 +131,15 @@ The tests should be run on the auto-generated tasks to see if they are valid .. code-block:: - $ pytest --doctest-modules pydra/tasks/anatomical/auto/tests/test_.py + $ pytest --doctest-modules pydra/tasks/mriqc/auto/tests/test_.py -If the test passes you should then edit the `pydra/tasks/anatomical/v/__init__.py` file +If the test passes you should then edit the `pydra/tasks/mriqc/v/__init__.py` file to import the now valid task interface to signify that it has been validated and is ready for use, e.g. .. code-block:: python - from pydra.tasks.anatomical.auto import + from pydra.tasks.mriqc.auto import Typing and sample test data diff --git a/docs/conf.py b/docs/conf.py index af3cb1d..03a8f65 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,7 +17,7 @@ # -- Project information ----------------------------------------------------- -project = "pydra-anatomical" +project = "pydra-mriqc" copyright = "2020, Xihe Xie" author = "Xihe Xie" diff --git a/docs/index.rst b/docs/index.rst index 86dda89..bd0a60a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,4 +1,4 @@ -Welcome to pydra-anatomical's documentation! +Welcome to pydra-mriqc's documentation! ========================================= ..
toctree:: diff --git a/nipype-auto-conv/generate b/nipype-auto-conv/generate index 79f9993..3e72ae0 100755 --- a/nipype-auto-conv/generate +++ b/nipype-auto-conv/generate @@ -1,75 +1,3 @@ -#!/usr/bin/env python3 -import sys -import os.path -from warnings import warn -from pathlib import Path -import shutil -from importlib import import_module -import yaml -import nipype -import nipype2pydra.utils -from nipype2pydra.task import get_converter - - -SPECS_DIR = Path(__file__).parent / "specs" -PKG_ROOT = Path(__file__).parent.parent -PKG_NAME = "mriqc" - -if ".dev" in nipype.__version__: - raise RuntimeError( - f"Cannot use a development version of Nipype {nipype.__version__}" - ) - -if ".dev" in nipype2pydra.__version__: - warn( - f"using development version of nipype2pydra ({nipype2pydra.__version__}), " - f"development component will be dropped in {PKG_NAME} package version" - ) - -# Insert specs dir into path so we can load callables modules -sys.path.insert(0, str(SPECS_DIR)) - -auto_init = f"# Auto-generated by {__file__}, do not edit as it will be overwritten\n\n" - -auto_dir = PKG_ROOT / "pydra" / "tasks" / PKG_NAME / "auto" -if auto_dir.exists(): - shutil.rmtree(auto_dir) - -all_interfaces = [] -for fspath in sorted(SPECS_DIR.glob("**/*.yaml")): - with open(fspath) as f: - spec = yaml.load(f, Loader=yaml.SafeLoader) - print(f"processing {fspath}") - - rel_pkg_path = str(fspath.parent.relative_to(SPECS_DIR)).replace(os.path.sep, ".") - if rel_pkg_path == ".": - rel_pkg_path = fspath.stem - else: - rel_pkg_path += "." + fspath.stem - - callables = import_module(rel_pkg_path + "_callables") - - module_name = nipype2pydra.utils.to_snake_case(spec["task_name"]) - - converter = get_converter( - output_module=f"pydra.tasks.{PKG_NAME}.auto.{module_name}", - callables_module=callables, # type: ignore - **spec, - ) - converter.generate(PKG_ROOT) - auto_init += f"from .{module_name} import {converter.task_name}\n" - all_interfaces.append(converter.task_name) - - -with open(PKG_ROOT / "pydra" / "tasks" / PKG_NAME / "auto" / "_version.py", "w") as f: - f.write( - f"""# Auto-generated by {__file__}, do not edit as it will be overwritten - -nipype_version = "{nipype.__version__.split('.dev')[0]}" -nipype2pydra_version = "{nipype2pydra.__version__.split('.dev')[0]}" -post_release = (nipype_version + nipype2pydra_version).replace(".", "") -""" - ) - -with open(PKG_ROOT / "pydra" / "tasks" / PKG_NAME / "auto" / "__init__.py", "w") as f: - f.write(auto_init) +#!/usr/bin/env bash +conv_dir=$(dirname $0) +nipype2pydra convert $conv_dir/specs $conv_dir/.. 
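The three-line bash wrapper above hands the whole job to ``nipype2pydra convert``, pointing it at the ``specs`` directory (which, after this patch, also carries ``package.yaml`` and the workflow specs). For orientation, the sketch below shows roughly what converting a single interface spec looked like under the deleted Python driver. It reuses only the API that driver itself called (``get_converter``, ``to_snake_case``, ``converter.generate``); the choice of ``conform_image.yaml`` is just an example, and the newer specs may carry fields (e.g. ``callable_defaults``) that this older code path did not handle.

.. code-block:: python

    import sys
    from importlib import import_module
    from pathlib import Path

    import yaml
    import nipype2pydra.utils
    from nipype2pydra.task import get_converter

    SPECS_DIR = Path("nipype-auto-conv/specs")
    PKG_ROOT = Path(".")

    # the *_callables.py modules live next to the specs and must be importable
    sys.path.insert(0, str(SPECS_DIR / "interfaces"))

    spec_path = SPECS_DIR / "interfaces" / "conform_image.yaml"  # example spec
    with open(spec_path) as f:
        spec = yaml.load(f, Loader=yaml.SafeLoader)

    # e.g. "ConformImage" -> "conform_image"
    module_name = nipype2pydra.utils.to_snake_case(spec["task_name"])
    callables = import_module(spec_path.stem + "_callables")

    converter = get_converter(
        output_module=f"pydra.tasks.mriqc.auto.{module_name}",
        callables_module=callables,
        **spec,
    )
    # writes pydra/tasks/mriqc/auto/conform_image.py under PKG_ROOT
    converter.generate(PKG_ROOT)

The command-line entry point folds this per-spec loop (plus the ``__init__.py`` and ``_version.py`` bookkeeping) into a single invocation, which is why the checked-in ``generate`` script can stay this small.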
diff --git a/nipype-auto-conv/requirements.txt b/nipype-auto-conv/requirements.txt index 71aa364..20a0b10 100644 --- a/nipype-auto-conv/requirements.txt +++ b/nipype-auto-conv/requirements.txt @@ -1,11 +1 @@ -black -attrs>=22.1.0 -nipype -pydra -PyYAML>=6.0 -fileformats >=0.8 -fileformats-medimage >=0.4 -fileformats-datascience >= 0.1 -fileformats-medimage-anatomical -traits nipype2pydra \ No newline at end of file diff --git a/nipype-auto-conv/specs/add_provenance_callables.py b/nipype-auto-conv/specs/add_provenance_callables.py deleted file mode 100644 index 1681d92..0000000 --- a/nipype-auto-conv/specs/add_provenance_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in AddProvenance.yaml""" diff --git a/nipype-auto-conv/specs/artifact_mask_callables.py b/nipype-auto-conv/specs/artifact_mask_callables.py deleted file mode 100644 index 38e48cc..0000000 --- a/nipype-auto-conv/specs/artifact_mask_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in ArtifactMask.yaml""" diff --git a/nipype-auto-conv/specs/compute_qi2_callables.py b/nipype-auto-conv/specs/compute_qi2_callables.py deleted file mode 100644 index 30791b6..0000000 --- a/nipype-auto-conv/specs/compute_qi2_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in ComputeQI2.yaml""" diff --git a/nipype-auto-conv/specs/conform_image_callables.py b/nipype-auto-conv/specs/conform_image_callables.py deleted file mode 100644 index 19d369a..0000000 --- a/nipype-auto-conv/specs/conform_image_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in ConformImage.yaml""" diff --git a/nipype-auto-conv/specs/correct_signal_drift_callables.py b/nipype-auto-conv/specs/correct_signal_drift_callables.py deleted file mode 100644 index 3f8cb9e..0000000 --- a/nipype-auto-conv/specs/correct_signal_drift_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in CorrectSignalDrift.yaml""" diff --git a/nipype-auto-conv/specs/dipy_dti_callables.py b/nipype-auto-conv/specs/dipy_dti_callables.py deleted file mode 100644 index 0682bcc..0000000 --- a/nipype-auto-conv/specs/dipy_dti_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in DipyDTI.yaml""" diff --git a/nipype-auto-conv/specs/ensure_size_callables.py b/nipype-auto-conv/specs/ensure_size_callables.py deleted file mode 100644 index 0289beb..0000000 --- a/nipype-auto-conv/specs/ensure_size_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in EnsureSize.yaml""" diff --git a/nipype-auto-conv/specs/extract_b0_callables.py b/nipype-auto-conv/specs/extract_b0_callables.py deleted file mode 100644 index b07d039..0000000 --- a/nipype-auto-conv/specs/extract_b0_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in ExtractB0.yaml""" diff --git a/nipype-auto-conv/specs/filter_shells_callables.py b/nipype-auto-conv/specs/filter_shells_callables.py deleted file mode 100644 index c567417..0000000 --- a/nipype-auto-conv/specs/filter_shells_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in FilterShells.yaml""" diff --git a/nipype-auto-conv/specs/functional_qc_callables.py b/nipype-auto-conv/specs/functional_qc_callables.py deleted file mode 100644 index e991c37..0000000 --- a/nipype-auto-conv/specs/functional_qc_callables.py +++ /dev/null @@ 
-1 +0,0 @@ -"""Module to put any functions that are referred to in FunctionalQC.yaml""" diff --git a/nipype-auto-conv/specs/gather_timeseries_callables.py b/nipype-auto-conv/specs/gather_timeseries_callables.py deleted file mode 100644 index d5f2a6e..0000000 --- a/nipype-auto-conv/specs/gather_timeseries_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in GatherTimeseries.yaml""" diff --git a/nipype-auto-conv/specs/gcor_callables.py b/nipype-auto-conv/specs/gcor_callables.py deleted file mode 100644 index d268a02..0000000 --- a/nipype-auto-conv/specs/gcor_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in GCOR.yaml""" diff --git a/nipype-auto-conv/specs/harmonize_callables.py b/nipype-auto-conv/specs/harmonize_callables.py deleted file mode 100644 index 9904b86..0000000 --- a/nipype-auto-conv/specs/harmonize_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in Harmonize.yaml""" diff --git a/nipype-auto-conv/specs/add_provenance.yaml b/nipype-auto-conv/specs/interfaces/add_provenance.yaml similarity index 91% rename from nipype-auto-conv/specs/add_provenance.yaml rename to nipype-auto-conv/specs/interfaces/add_provenance.yaml index 5ac2dbc..806d983 100644 --- a/nipype-auto-conv/specs/add_provenance.yaml +++ b/nipype-auto-conv/specs/interfaces/add_provenance.yaml @@ -20,12 +20,15 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_file: generic/file - # type=file|default=: input file air_msk: generic/file # type=file|default=: air mask file + in_file: generic/file + # type=file|default=: input file rot_msk: generic/file # type=file|default=: rotation mask file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) outputs: @@ -42,6 +45,8 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + out_prov: out_prov_callable + # type=dict: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -59,7 +64,7 @@ tests: modality: # type=str|default='': provenance type imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/add_provenance_callables.py b/nipype-auto-conv/specs/interfaces/add_provenance_callables.py new file mode 100644 index 0000000..50b54c8 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/add_provenance_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of AddProvenance.yaml""" + + +def out_prov_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_prov"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/artifact_mask.yaml b/nipype-auto-conv/specs/interfaces/artifact_mask.yaml similarity index 93% rename from nipype-auto-conv/specs/artifact_mask.yaml rename to nipype-auto-conv/specs/interfaces/artifact_mask.yaml index 7fbe87d..9fa1f22 100644 --- a/nipype-auto-conv/specs/artifact_mask.yaml +++ b/nipype-auto-conv/specs/interfaces/artifact_mask.yaml @@ -22,12 +22,15 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_file: generic/file - # type=file|default=: File to be plotted head_mask: generic/file # type=file|default=: head mask + in_file: generic/file + # type=file|default=: File to be plotted ind2std_xfm: generic/file # type=file|default=: individual to standard affine transform + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -41,12 +44,12 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- out_hat_msk: generic/file - # type=file: output "hat" mask - out_art_msk: generic/file - # type=file: output artifacts mask out_air_msk: generic/file # type=file: output "hat" mask, without artifacts + out_art_msk: generic/file + # type=file: output artifacts mask + out_hat_msk: generic/file + # type=file: output "hat" mask callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields @@ -71,7 +74,7 @@ tests: zscore: # type=float|default=10.0: z-score to consider artifacts imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/artifact_mask_callables.py b/nipype-auto-conv/specs/interfaces/artifact_mask_callables.py new file mode 100644 index 0000000..3f3378f --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/artifact_mask_callables.py @@ -0,0 +1,27 @@ +"""Module to put any functions that are referred to in the "callables" section of ArtifactMask.yaml""" + + +def out_air_msk_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_air_msk"] + + +def out_art_msk_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_art_msk"] + + +def out_hat_msk_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_hat_msk"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/cc_segmentation.yaml b/nipype-auto-conv/specs/interfaces/cc_segmentation.yaml new file mode 100644 index 0000000..c29c245 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/cc_segmentation.yaml @@ -0,0 +1,87 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.diffusion.CCSegmentation' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# Computes :abbr:`QC (Quality Control)` measures on the input DWI EPI scan. +task_name: CCSegmentation +nipype_name: CCSegmentation +nipype_module: mriqc.interfaces.diffusion +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
+ in_cfa: generic/file + # type=file|default=: color FA file + in_fa: generic/file + # type=file|default=: fractional anisotropy (FA) file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_mask: generic/file + # type=file: output mask of the corpus callosum + wm_finalmask: generic/file + # type=file: output mask of the white-matter after binary opening + wm_mask: generic/file + # type=file: output mask of the white-matter (thresholded) + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_fa: + # type=file|default=: fractional anisotropy (FA) file + in_cfa: + # type=file|default=: color FA file + min_rgb: + # type=tuple|default=(0.4, 0.008, 0.008): minimum RGB within the CC + max_rgb: + # type=tuple|default=(1.1, 0.25, 0.25): maximum RGB within the CC + wm_threshold: + # type=float|default=0.35: WM segmentation threshold + clean_mask: + # type=bool|default=False: run a final cleanup step on mask + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/cc_segmentation_callables.py b/nipype-auto-conv/specs/interfaces/cc_segmentation_callables.py new file mode 100644 index 0000000..973767b --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/cc_segmentation_callables.py @@ -0,0 +1,27 @@ +"""Module to put any functions that are referred to in the "callables" section of CCSegmentation.yaml""" + + +def out_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_mask"] + + +def wm_finalmask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["wm_finalmask"] + + +def wm_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["wm_mask"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/compute_qi2.yaml b/nipype-auto-conv/specs/interfaces/compute_qi2.yaml similarity index 90% rename from nipype-auto-conv/specs/compute_qi2.yaml rename to nipype-auto-conv/specs/interfaces/compute_qi2.yaml index ac11ad2..2606c71 100644 --- a/nipype-auto-conv/specs/compute_qi2.yaml +++ b/nipype-auto-conv/specs/interfaces/compute_qi2.yaml @@ -22,10 +22,13 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_file: generic/file - # type=file|default=: File to be plotted air_msk: generic/file # type=file|default=: air (without artifacts) mask + in_file: generic/file + # type=file|default=: File to be plotted + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) outputs: @@ -44,6 +47,8 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + qi2: qi2_callable + # type=float: computed QI2 value templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -57,7 +62,7 @@ tests: air_msk: # type=file|default=: air (without artifacts) mask imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/compute_qi2_callables.py b/nipype-auto-conv/specs/interfaces/compute_qi2_callables.py new file mode 100644 index 0000000..86a6ecc --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/compute_qi2_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of ComputeQI2.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +def qi2_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["qi2"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/conform_image.yaml b/nipype-auto-conv/specs/interfaces/conform_image.yaml new file mode 100644 index 0000000..e381b1d --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/conform_image.yaml @@ -0,0 +1,130 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.common.conform_image.ConformImage' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# +# Conforms an input image. +# +# List of nifti datatypes: +# +# .. note: Original Analyze 7.5 types +# +# DT_NONE 0 +# DT_UNKNOWN 0 / what it says, dude / +# DT_BINARY 1 / binary (1 bit/voxel) / +# DT_UNSIGNED_CHAR 2 / unsigned char (8 bits/voxel) / +# DT_SIGNED_SHORT 4 / signed short (16 bits/voxel) / +# DT_SIGNED_INT 8 / signed int (32 bits/voxel) / +# DT_FLOAT 16 / float (32 bits/voxel) / +# DT_COMPLEX 32 / complex (64 bits/voxel) / +# DT_DOUBLE 64 / double (64 bits/voxel) / +# DT_RGB 128 / RGB triple (24 bits/voxel) / +# DT_ALL 255 / not very useful (?) / +# +# .. note: Added names for the same data types +# +# DT_UINT8 2 +# DT_INT16 4 +# DT_INT32 8 +# DT_FLOAT32 16 +# DT_COMPLEX64 32 +# DT_FLOAT64 64 +# DT_RGB24 128 +# +# .. note: New codes for NIfTI +# +# DT_INT8 256 / signed char (8 bits) / +# DT_UINT16 512 / unsigned short (16 bits) / +# DT_UINT32 768 / unsigned int (32 bits) / +# DT_INT64 1024 / long long (64 bits) / +# DT_UINT64 1280 / unsigned long long (64 bits) / +# DT_FLOAT128 1536 / long double (128 bits) / +# DT_COMPLEX128 1792 / double pair (128 bits) / +# DT_COMPLEX256 2048 / long double pair (256 bits) / +# NIFTI_TYPE_UINT8 2 /! unsigned char. / +# NIFTI_TYPE_INT16 4 /! signed short. / +# NIFTI_TYPE_INT32 8 /! signed int. 
/ +# NIFTI_TYPE_FLOAT32 16 /! 32 bit float. / +# NIFTI_TYPE_COMPLEX64 32 /! 64 bit complex = 2 32 bit floats. / +# NIFTI_TYPE_FLOAT64 64 /! 64 bit float = double. / +# NIFTI_TYPE_RGB24 128 /! 3 8 bit bytes. / +# NIFTI_TYPE_INT8 256 /! signed char. / +# NIFTI_TYPE_UINT16 512 /! unsigned short. / +# NIFTI_TYPE_UINT32 768 /! unsigned int. / +# NIFTI_TYPE_INT64 1024 /! signed long long. / +# NIFTI_TYPE_UINT64 1280 /! unsigned long long. / +# NIFTI_TYPE_FLOAT128 1536 /! 128 bit float = long double. / +# NIFTI_TYPE_COMPLEX128 1792 /! 128 bit complex = 2 64 bit floats. / +# NIFTI_TYPE_COMPLEX256 2048 /! 256 bit complex = 2 128 bit floats / +# +# +task_name: ConformImage +nipype_name: ConformImage +nipype_module: mriqc.interfaces.common.conform_image +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + in_file: generic/file + # type=file|default=: input image + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
+ out_file: generic/file + # type=file: output conformed file + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: input image + check_ras: + # type=bool|default=True: check that orientation is RAS + check_dtype: + # type=bool|default=True: check data type + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/conform_image_callables.py b/nipype-auto-conv/specs/interfaces/conform_image_callables.py new file mode 100644 index 0000000..a26857a --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/conform_image_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of ConformImage.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/correct_signal_drift.yaml b/nipype-auto-conv/specs/interfaces/correct_signal_drift.yaml similarity index 87% rename from nipype-auto-conv/specs/correct_signal_drift.yaml rename to nipype-auto-conv/specs/interfaces/correct_signal_drift.yaml index f49fe2e..5726c69 100644 --- a/nipype-auto-conv/specs/correct_signal_drift.yaml +++ b/nipype-auto-conv/specs/interfaces/correct_signal_drift.yaml @@ -20,8 +20,6 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- in_file: generic/file - # type=file|default=: a 4D file with all low-b volumes bias_file: generic/file # type=file|default=: a B1 bias field brainmask_file: generic/file @@ -30,6 +28,11 @@ inputs: # type=file|default=: bvalues file full_epi: generic/file # type=file|default=: a whole DWI dataset to be corrected for drift + in_file: generic/file + # type=file|default=: a 4D file with (exclusively) realigned low-b volumes + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -44,7 +47,7 @@ outputs: # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. out_file: generic/file - # type=file: input file after drift correction + # type=file: a 4D file with (exclusively) realigned, drift-corrected low-b volumes out_full_file: generic/file # type=file: full DWI input after drift correction callables: @@ -59,7 +62,7 @@ tests: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) in_file: - # type=file|default=: a 4D file with all low-b volumes + # type=file|default=: a 4D file with (exclusively) realigned low-b volumes bias_file: # type=file|default=: a B1 bias field brainmask_file: @@ -71,7 +74,7 @@ tests: full_epi: # type=file|default=: a whole DWI dataset to be corrected for drift imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/correct_signal_drift_callables.py b/nipype-auto-conv/specs/interfaces/correct_signal_drift_callables.py new file mode 100644 index 0000000..f8f9d08 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/correct_signal_drift_callables.py @@ -0,0 +1,34 @@ +"""Module to put any functions that are referred to in the "callables" section of CorrectSignalDrift.yaml""" + + +def b0_drift_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["b0_drift"] + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +def out_full_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_full_file"] + + +def signal_drift_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["signal_drift"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/extract_b0.yaml b/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml similarity index 83% rename from 
nipype-auto-conv/specs/extract_b0.yaml rename to nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml index 86611b2..62db871 100644 --- a/nipype-auto-conv/specs/extract_b0.yaml +++ b/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml @@ -1,14 +1,14 @@ # This file is used to manually specify the semi-automatic conversion of -# 'mriqc.interfaces.diffusion.ExtractB0' from Nipype to Pydra. +# 'mriqc.interfaces.datalad.DataladIdentityInterface' from Nipype to Pydra. # # Please fill-in/edit the fields below where appropriate # # Docs # ---- -# Extract all b=0 volumes from a dwi series. -task_name: ExtractB0 -nipype_name: ExtractB0 -nipype_module: mriqc.interfaces.diffusion +# Sneaks a ``datalad get`` in paths, if datalad is available. +task_name: DataladIdentityInterface +nipype_name: DataladIdentityInterface +nipype_module: mriqc.interfaces.datalad inputs: omit: # list[str] - fields to omit from the Pydra interface @@ -20,8 +20,9 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_file: generic/file - # type=file|default=: dwi file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -35,8 +36,6 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
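The "mime-like" strings accepted by these `types:` overrides (generic/file, medimage/nifti-gz, generic/file+list-of, ...) name classes from the fileformats package: the part before the slash looks like a subpackage name and the dashed part like a CamelCase class, with a `+list-of` qualifier marking multi-object fields. A rough illustration of that assumed naming rule (the real resolution is done by nipype2pydra and fileformats, and the `+list-of` qualifier is not handled here):

def mime_to_class_path(mime_like: str) -> str:
    """Map a mime-like type string to a fileformats class path (assumed rule)."""
    namespace, name = mime_like.split("/")
    class_name = "".join(part.capitalize() for part in name.split("-"))
    return f"fileformats.{namespace}.{class_name}"


assert mime_to_class_path("generic/file") == "fileformats.generic.File"
assert mime_to_class_path("medimage/nifti-gz") == "fileformats.medimage.NiftiGz"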
- out_file: generic/file - # type=file: output b0 file callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields @@ -48,12 +47,8 @@ tests: - inputs: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: dwi file - b0_ixs: - # type=list|default=[]: Index of b0s imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py b/nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py new file mode 100644 index 0000000..d5225ae --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py @@ -0,0 +1,6 @@ +"""Module to put any functions that are referred to in the "callables" section of DataladIdentityInterface.yaml""" + + +# Original source at L139 of /interfaces/datalad.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + raise NotImplementedError diff --git a/nipype-auto-conv/specs/interfaces/derivatives_data_sink.yaml b/nipype-auto-conv/specs/interfaces/derivatives_data_sink.yaml new file mode 100644 index 0000000..3272310 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/derivatives_data_sink.yaml @@ -0,0 +1,91 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.DerivativesDataSink' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# +task_name: DerivativesDataSink +nipype_name: DerivativesDataSink +nipype_module: mriqc.interfaces +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + base_directory: generic/directory + # type=directory|default='': Path to the base directory for storing data. + in_file: generic/file+list-of + # type=inputmultiobject|default=[]: the object to be saved + source_file: generic/file+list-of + # type=inputmultiobject|default=[]: the source file(s) to extract entities from + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_file: generic/file+list-of + # type=outputmultiobject: + out_meta: generic/file+list-of + # type=outputmultiobject: + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + base_directory: + # type=directory|default='': Path to the base directory for storing data. + check_hdr: + # type=bool|default=True: fix headers of NIfTI outputs + compress: + # type=inputmultiobject|default=[]: whether ``in_file`` should be compressed (True), uncompressed (False) or left unmodified (None, default). + data_dtype: + # type=str|default='': NumPy datatype to coerce NIfTI data to, or `source` tomatch the input file dtype + dismiss_entities: + # type=inputmultiobject|default=[]: a list entities that will not be propagated from the source file + in_file: + # type=inputmultiobject|default=[]: the object to be saved + meta_dict: + # type=dict|default={}: an input dictionary containing metadata + source_file: + # type=inputmultiobject|default=[]: the source file(s) to extract entities from + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/derivatives_data_sink_callables.py b/nipype-auto-conv/specs/interfaces/derivatives_data_sink_callables.py new file mode 100644 index 0000000..7447954 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/derivatives_data_sink_callables.py @@ -0,0 +1,34 @@ +"""Module to put any functions that are referred to in the "callables" section of DerivativesDataSink.yaml""" + + +def compression_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["compression"] + + +def fixed_hdr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fixed_hdr"] + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +def out_meta_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_meta"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/diffusion_model.yaml b/nipype-auto-conv/specs/interfaces/diffusion_model.yaml new file mode 100644 index 0000000..a507a4f --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/diffusion_model.yaml @@ -0,0 +1,99 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.diffusion.DiffusionModel' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# +# Fit a :obj:`~dipy.reconst.dki.DiffusionKurtosisModel` on the dataset. +# +# If ``n_shells`` is set to 1, then a :obj:`~dipy.reconst.dti.TensorModel` +# is used. +# +# +task_name: DiffusionModel +nipype_name: DiffusionModel +nipype_module: mriqc.interfaces.diffusion +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + brain_mask: generic/file + # type=file|default=: brain mask file + bvec_file: generic/file + # type=file|default=: b-vectors + in_file: generic/file + # type=file|default=: dwi file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_cfa: generic/file + # type=file: output color FA file + out_fa: generic/file + # type=file: output FA file + out_fa_degenerate: generic/file + # type=file: binary mask of values outside [0, 1] in the "raw" FA map + out_fa_nans: generic/file + # type=file: binary mask of NaN values in the "raw" FA map + out_md: generic/file + # type=file: output MD file + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: dwi file + bvals: + # type=list|default=[]: bval table + bvec_file: + # type=file|default=: b-vectors + brain_mask: + # type=file|default=: brain mask file + decimals: + # type=int|default=3: round output maps for reliability + n_shells: + # type=int|default=0: number of shells + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/diffusion_model_callables.py b/nipype-auto-conv/specs/interfaces/diffusion_model_callables.py new file mode 100644 index 0000000..a197637 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/diffusion_model_callables.py @@ -0,0 +1,41 @@ +"""Module to put any functions that are referred to in the "callables" section of DiffusionModel.yaml""" + + +def out_cfa_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_cfa"] + + +def out_fa_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_fa"] + + +def out_fa_degenerate_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_fa_degenerate"] + + +def out_fa_nans_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_fa_nans"] + + +def out_md_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_md"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/diffusion_qc.yaml b/nipype-auto-conv/specs/interfaces/diffusion_qc.yaml new file mode 100644 index 0000000..dd3c1a2 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/diffusion_qc.yaml @@ -0,0 +1,165 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.diffusion.DiffusionQC' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# Computes :abbr:`QC (Quality Control)` measures on the input DWI EPI scan. +task_name: DiffusionQC +nipype_name: DiffusionQC +nipype_module: mriqc.interfaces.diffusion +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
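Each *_callables.py file emitted so far repeats the identical four-line wrapper once per output field; diffusion_model_callables.py above contains five verbatim copies. The repetition is deliberate, keeping each spec self-contained, but the underlying contract is just a keyed lookup, which a small factory makes explicit. A sketch, with the stubbed _list_outputs standing in for the converter-in-lined method:

def make_output_callable(field_name):
    """Build a wrapper that extracts one named output (same contract as above)."""

    def _callable(output_dir, inputs, stdout, stderr):
        outputs = _list_outputs(
            output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
        )
        return outputs[field_name]

    _callable.__name__ = f"{field_name}_callable"
    return _callable


def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
    return {}  # stub, as in the generated modules


out_fa_callable = make_output_callable("out_fa")
out_md_callable = make_output_callable("out_md")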
+ brain_mask: generic/file + # type=file|default=: input probabilistic brain mask + cc_mask: generic/file + # type=file|default=: input binary mask of the corpus callosum + in_b0: generic/file + # type=file|default=: input b=0 average + in_bval_file: generic/file + # type=file|default=: original b-vals file + in_cfa: generic/file + # type=file|default=: output color FA file + in_fa: generic/file + # type=file|default=: input FA map + in_fa_degenerate: generic/file + # type=file|default=: binary mask of values outside [0, 1] in the "raw" FA map + in_fa_nans: generic/file + # type=file|default=: binary mask of NaN values in the "raw" FA map + in_fd: generic/file + # type=file|default=: motion parameters for FD computation + in_file: generic/file + # type=file|default=: original EPI 4D file + in_md: generic/file + # type=file|default=: input MD map + in_shells: generic/file+list-of + # type=inputmultiobject|default=[]: DWI data after HMC and split by shells (indexed by in_bval) + spikes_mask: generic/file + # type=file|default=: input binary mask of spiking voxels + wm_mask: generic/file + # type=file|default=: input probabilistic white-matter mask + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
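The `callables:` section that follows maps each non-file output of DiffusionQC (bdiffs, efc, ..., summary) to a function of the same name in the adjacent diffusion_qc_callables.py. Conceptually, the generated task resolves those outputs by invoking every mapped function with the same four arguments; a minimal sketch of that collection step (the loop is an assumption about the generated code, not a verbatim excerpt):

from diffusion_qc_callables import out_qc_callable, snr_cc_callable

# field name -> resolver, mirroring two entries of the `callables:` mapping below
OUTPUT_CALLABLES = {
    "out_qc": out_qc_callable,
    "snr_cc": snr_cc_callable,
}


def collect_outputs(output_dir, inputs, stdout, stderr):
    return {
        name: resolve(output_dir, inputs, stdout, stderr)
        for name, resolve in OUTPUT_CALLABLES.items()
    }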
+ callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + bdiffs: bdiffs_callable + # type=dict: + efc: efc_callable + # type=dict: + fa_degenerate: fa_degenerate_callable + # type=float: + fa_nans: fa_nans_callable + # type=float: + fber: fber_callable + # type=dict: + fd: fd_callable + # type=dict: + ndc: ndc_callable + # type=float: + out_qc: out_qc_callable + # type=dict: output flattened dictionary with all measures + sigma: sigma_callable + # type=dict: + snr_cc: snr_cc_callable + # type=dict: + spikes: spikes_callable + # type=dict: + summary: summary_callable + # type=dict: + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: original EPI 4D file + in_b0: + # type=file|default=: input b=0 average + in_shells: + # type=inputmultiobject|default=[]: DWI data after HMC and split by shells (indexed by in_bval) + in_shells_bval: + # type=list|default=[]: list of unique b-values (one per shell), ordered by growing intensity + in_bval_file: + # type=file|default=: original b-vals file + in_bvec: + # type=list|default=[]: a list of shell-wise splits of b-vectors lists -- first list are b=0 + in_bvec_rotated: + # type=list|default=[]: b-vectors after rotating by the head-motion correction transform + in_bvec_diff: + # type=list|default=[]: list of angle deviations from the original b-vectors table + in_fa: + # type=file|default=: input FA map + in_fa_nans: + # type=file|default=: binary mask of NaN values in the "raw" FA map + in_fa_degenerate: + # type=file|default=: binary mask of values outside [0, 1] in the "raw" FA map + in_cfa: + # type=file|default=: output color FA file + in_md: + # type=file|default=: input MD map + brain_mask: + # type=file|default=: input probabilistic brain mask + wm_mask: + # type=file|default=: input probabilistic white-matter mask + cc_mask: + # type=file|default=: input binary mask of the corpus callosum + spikes_mask: + # type=file|default=: input binary mask of spiking voxels + noise_floor: + # type=float|default=0.0: noise-floor map estimated by means of PCA + direction: + # type=enum|default='all'|allowed['-x','-y','all','x','y']: direction for GSR computation + in_fd: + # type=file|default=: motion parameters for FD computation + fd_thres: + # type=float|default=0.2: FD threshold for orientation exclusion based on head motion + in_fwhm: + # type=list|default=[]: smoothness estimated with AFNI + qspace_neighbors: + # type=list|default=[]: q-space nearest neighbor pairs + piesno_sigma: + # type=float|default=-1.0: noise sigma calculated with PIESNO + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be 
considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/diffusion_qc_callables.py b/nipype-auto-conv/specs/interfaces/diffusion_qc_callables.py new file mode 100644 index 0000000..4b638ab --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/diffusion_qc_callables.py @@ -0,0 +1,90 @@ +"""Module to put any functions that are referred to in the "callables" section of DiffusionQC.yaml""" + + +def bdiffs_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["bdiffs"] + + +def efc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["efc"] + + +def fa_degenerate_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fa_degenerate"] + + +def fa_nans_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fa_nans"] + + +def fber_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fber"] + + +def fd_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fd"] + + +def ndc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["ndc"] + + +def out_qc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_qc"] + + +def sigma_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["sigma"] + + +def snr_cc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["snr_cc"] + + +def spikes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["spikes"] + + +def summary_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["summary"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/ensure_size.yaml b/nipype-auto-conv/specs/interfaces/ensure_size.yaml similarity index 91% rename from nipype-auto-conv/specs/ensure_size.yaml rename to nipype-auto-conv/specs/interfaces/ensure_size.yaml index c1a7ee5..4063c18 100644 --- a/nipype-auto-conv/specs/ensure_size.yaml +++ b/nipype-auto-conv/specs/interfaces/ensure_size.yaml @@ -10,7 +10,7 @@ # task_name: EnsureSize 
nipype_name: EnsureSize -nipype_module: mriqc.interfaces.common +nipype_module: mriqc.interfaces.common.ensure_size inputs: omit: # list[str] - fields to omit from the Pydra interface @@ -26,6 +26,9 @@ inputs: # type=file|default=: input image in_mask: generic/file # type=file|default=: input mask + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -61,7 +64,7 @@ tests: pixel_size: # type=float|default=2.0: desired pixel size (mm) imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/ensure_size_callables.py b/nipype-auto-conv/specs/interfaces/ensure_size_callables.py new file mode 100644 index 0000000..96d1a94 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/ensure_size_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of EnsureSize.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +def out_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_mask"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/dipy_dti.yaml b/nipype-auto-conv/specs/interfaces/extract_orientations.yaml similarity index 77% rename from nipype-auto-conv/specs/dipy_dti.yaml rename to nipype-auto-conv/specs/interfaces/extract_orientations.yaml index ddc1c36..8da5f32 100644 --- a/nipype-auto-conv/specs/dipy_dti.yaml +++ b/nipype-auto-conv/specs/interfaces/extract_orientations.yaml @@ -1,13 +1,13 @@ # This file is used to manually specify the semi-automatic conversion of -# 'mriqc.interfaces.diffusion.DipyDTI' from Nipype to Pydra. +# 'mriqc.interfaces.diffusion.ExtractOrientations' from Nipype to Pydra. # # Please fill-in/edit the fields below where appropriate # # Docs # ---- -# Split a DWI dataset into . -task_name: DipyDTI -nipype_name: DipyDTI +# Extract all b=0 volumes from a dwi series. +task_name: ExtractOrientations +nipype_name: ExtractOrientations nipype_module: mriqc.interfaces.diffusion inputs: omit: @@ -20,12 +20,13 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
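The `tests:` stanzas in these specs (see ensure_size.yaml just above) all carry the same `timeout: 10` and `xfail: true` defaults: the generated unit test only checks that the task initialises within the time limit, and is expected to fail until the spec has been reviewed. A rough pytest sketch of what such a generated test plausibly looks like; the import path and constructor usage are assumptions based on the package layout, and the timeout marker requires the pytest-timeout plugin:

import pytest


@pytest.mark.xfail(reason="spec still carries 'xfail: true' pending review")
@pytest.mark.timeout(10)  # assumes the pytest-timeout plugin is installed
def test_ensure_size_initialisation():
    from pydra.tasks.mriqc.auto import EnsureSize  # hypothetical import path

    task = EnsureSize(pixel_size=2.0)
    assert task.inputs.pixel_size == 2.0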
+ in_bvec_file: generic/file + # type=file|default=: b-vectors file in_file: generic/file # type=file|default=: dwi file - bvec_file: generic/file - # type=file|default=: b-vectors - brainmask: generic/file - # type=file|default=: brain mask file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -39,10 +40,8 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - out_fa: generic/file - # type=file: output FA file - out_md: generic/file - # type=file: output MD file + out_file: generic/file + # type=file: output b0 file callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields @@ -56,18 +55,12 @@ tests: # (if not specified, will try to choose a sensible value) in_file: # type=file|default=: dwi file - bvals: - # type=list|default=[]: bval table - bvec_file: - # type=file|default=: b-vectors - brainmask: - # type=file|default=: brain mask file - free_water_model: - # type=bool|default=False: use free water model - b_threshold: - # type=float|default=1100: use only inner shells of the data + indices: + # type=list|default=[]: indices to be extracted + in_bvec_file: + # type=file|default=: b-vectors file imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/extract_orientations_callables.py b/nipype-auto-conv/specs/interfaces/extract_orientations_callables.py new file mode 100644 index 0000000..8a003f8 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/extract_orientations_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of ExtractOrientations.yaml""" + + +def out_bvec_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bvec"] + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/filter_shells.yaml b/nipype-auto-conv/specs/interfaces/filter_shells.yaml similarity index 92% rename from nipype-auto-conv/specs/filter_shells.yaml rename to nipype-auto-conv/specs/interfaces/filter_shells.yaml index 07d39d8..c6eba3d 100644 --- a/nipype-auto-conv/specs/filter_shells.yaml +++ b/nipype-auto-conv/specs/interfaces/filter_shells.yaml @@ -20,10 +20,13 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the 
format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_file: generic/file - # type=file|default=: dwi file bvec_file: generic/file # type=file|default=: b-vectors + in_file: generic/file + # type=file|default=: dwi file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -37,12 +40,12 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - out_file: generic/file - # type=file: filtered DWI file - out_bvec_file: generic/file - # type=file: filtered bvecs file out_bval_file: generic/file # type=file: filtered bvals file + out_bvec_file: generic/file + # type=file: filtered bvecs file + out_file: generic/file + # type=file: filtered DWI file callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields @@ -63,7 +66,7 @@ tests: b_threshold: # type=float|default=1100: b-values threshold imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/filter_shells_callables.py b/nipype-auto-conv/specs/interfaces/filter_shells_callables.py new file mode 100644 index 0000000..69b9897 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/filter_shells_callables.py @@ -0,0 +1,34 @@ +"""Module to put any functions that are referred to in the "callables" section of FilterShells.yaml""" + + +def out_bval_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bval_file"] + + +def out_bvals_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bvals"] + + +def out_bvec_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bvec_file"] + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/functional_qc.yaml b/nipype-auto-conv/specs/interfaces/functional_qc.yaml similarity index 82% rename from nipype-auto-conv/specs/functional_qc.yaml rename to nipype-auto-conv/specs/interfaces/functional_qc.yaml index 45a9d44..a7e18c2 100644 --- a/nipype-auto-conv/specs/functional_qc.yaml +++ b/nipype-auto-conv/specs/interfaces/functional_qc.yaml @@ -24,18 +24,21 @@ inputs: # from the nipype 
interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. + in_dvars: generic/file + # type=file|default=: input file containing DVARS in_epi: generic/file # type=file|default=: input EPI file + in_fd: generic/file + # type=file|default=: motion parameters for FD computation in_hmc: generic/file # type=file|default=: input motion corrected file - in_tsnr: generic/file - # type=file|default=: input tSNR volume in_mask: generic/file # type=file|default=: input mask - in_fd: generic/file - # type=file|default=: motion parameters for FD computation - in_dvars: generic/file - # type=file|default=: input file containing DVARS + in_tsnr: generic/file + # type=file|default=: input tSNR volume + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -52,6 +55,30 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + dvars: dvars_callable + # type=dict: + efc: efc_callable + # type=float: + fber: fber_callable + # type=float: + fd: fd_callable + # type=dict: + fwhm: fwhm_callable + # type=dict: full width half-maximum measure + gsr: gsr_callable + # type=dict: + out_qc: out_qc_callable + # type=dict: output flattened dictionary with all measures + size: size_callable + # type=dict: + snr: snr_callable + # type=float: + spacing: spacing_callable + # type=dict: + summary: summary_callable + # type=dict: + tsnr: tsnr_callable + # type=float: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -79,7 +106,7 @@ tests: in_fwhm: # type=list|default=[]: smoothness estimated with AFNI imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/functional_qc_callables.py b/nipype-auto-conv/specs/interfaces/functional_qc_callables.py new file mode 100644 index 0000000..a8cf7b3 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/functional_qc_callables.py @@ -0,0 +1,90 @@ +"""Module to put any functions that are referred to in the "callables" section of FunctionalQC.yaml""" + + +def dvars_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["dvars"] + + +def efc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["efc"] + + +def fber_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fber"] + + +def fd_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, 
stderr=stderr + ) + return outputs["fd"] + + +def fwhm_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fwhm"] + + +def gsr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["gsr"] + + +def out_qc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_qc"] + + +def size_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["size"] + + +def snr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["snr"] + + +def spacing_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["spacing"] + + +def summary_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["summary"] + + +def tsnr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["tsnr"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/gather_timeseries.yaml b/nipype-auto-conv/specs/interfaces/gather_timeseries.yaml similarity index 91% rename from nipype-auto-conv/specs/gather_timeseries.yaml rename to nipype-auto-conv/specs/interfaces/gather_timeseries.yaml index e3a324b..caf01a2 100644 --- a/nipype-auto-conv/specs/gather_timeseries.yaml +++ b/nipype-auto-conv/specs/interfaces/gather_timeseries.yaml @@ -33,6 +33,9 @@ inputs: # type=file|default=: input file containing timeseries of AFNI's outlier count quality: generic/file # type=file|default=: input file containing AFNI's Quality Index + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) outputs: @@ -51,6 +54,8 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + timeseries_metadata: timeseries_metadata_callable + # type=dict: Metadata dictionary describing columns templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -72,7 +77,7 @@ tests: quality: # type=file|default=: input file containing AFNI's Quality Index imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/gather_timeseries_callables.py b/nipype-auto-conv/specs/interfaces/gather_timeseries_callables.py new file mode 100644 index 0000000..ee78952 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/gather_timeseries_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of GatherTimeseries.yaml""" + + +def timeseries_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["timeseries_file"] + + +def timeseries_metadata_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["timeseries_metadata"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/gcor.yaml b/nipype-auto-conv/specs/interfaces/gcor.yaml similarity index 90% rename from nipype-auto-conv/specs/gcor.yaml rename to nipype-auto-conv/specs/interfaces/gcor.yaml index 58096a4..812d9ef 100644 --- a/nipype-auto-conv/specs/gcor.yaml +++ b/nipype-auto-conv/specs/interfaces/gcor.yaml @@ -40,6 +40,9 @@ inputs: # type=file|default=: input dataset to compute the GCOR over mask: generic/file # type=file|default=: mask dataset, for restricting the computation + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) outputs: @@ -56,6 +59,8 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + out: out_callable + # type=float: global correlation value templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -77,7 +82,7 @@ tests: environ: # type=dict|default={}: Environment variables imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically @@ -99,7 +104,7 @@ tests: nfirst: '4' # type=int|default=0: specify number of initial TRs to ignore imports: &id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys - module: mriqc.interfaces.transitional name: GCOR @@ -123,12 +128,12 @@ doctests: # dict[str, str] - name-value pairs for inputs to be provided to the doctest. # If the field is of file-format type and the value is None, then the # '.mock()' method of the corresponding class is used instead. - in_file: + in_file: '"func.nii"' # type=file|default=: input dataset to compute the GCOR over nfirst: '4' # type=int|default=0: specify number of initial TRs to ignore imports: *id001 - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys directive: # str - any doctest directive to place on the cmdline call, e.g. 
# doctest: +ELLIPSIS diff --git a/nipype-auto-conv/specs/interfaces/gcor_callables.py b/nipype-auto-conv/specs/interfaces/gcor_callables.py new file mode 100644 index 0000000..58a9755 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/gcor_callables.py @@ -0,0 +1,18 @@ +"""Module to put any functions that are referred to in the "callables" section of GCOR.yaml""" + + +def out_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out"] + + +# Original source at L885 of /interfaces/base/core.py +def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): + raise NotImplementedError + + +# Original source at L98 of /interfaces/transitional.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return {"out": _gcor} diff --git a/nipype-auto-conv/specs/harmonize.yaml b/nipype-auto-conv/specs/interfaces/harmonize.yaml similarity index 92% rename from nipype-auto-conv/specs/harmonize.yaml rename to nipype-auto-conv/specs/interfaces/harmonize.yaml index d070290..9e3faf1 100644 --- a/nipype-auto-conv/specs/harmonize.yaml +++ b/nipype-auto-conv/specs/interfaces/harmonize.yaml @@ -26,6 +26,9 @@ inputs: # type=file|default=: input data (after bias correction) wm_mask: generic/file # type=file|default=: white-matter mask + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -61,7 +64,7 @@ tests: thresh: # type=float|default=0.9: WM probability threshold imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/harmonize_callables.py b/nipype-auto-conv/specs/interfaces/harmonize_callables.py new file mode 100644 index 0000000..27a9ad6 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/harmonize_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of Harmonize.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml b/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml new file mode 100644 index 0000000..db0a473 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml @@ -0,0 +1,99 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.bids.IQMFileSink' from Nipype to Pydra. 
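gcor_callables.py above departs from the boilerplate stubs: its _list_outputs returns {"out": _gcor}, where _gcor is only bound once the converter in-lines the interface's run logic (GCOR scrapes the value printed by AFNI's @compute_gcor). A hand-written stand-in could recover the value from the captured stdout instead, under the assumption that the tool prints it as the final whitespace-separated token:

def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
    # Assumption: @compute_gcor prints the GCOR value as the last stdout token.
    return {"out": float(stdout.strip().split()[-1])}


def out_callable(output_dir, inputs, stdout, stderr):
    outputs = _list_outputs(
        output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr
    )
    return outputs["out"]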
+# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# +task_name: IQMFileSink +nipype_name: IQMFileSink +nipype_module: mriqc.interfaces.bids +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_dir: Path + # type=file|default=: the output directory + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_file: generic/file + # type=file: the output JSON file containing the IQMs + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=str|default='': path of input file + subject_id: + # type=str|default='': the subject id + modality: + # type=str|default='': the qc type + session_id: + # type=traitcompound|default=None: + task_id: + # type=traitcompound|default=None: + acq_id: + # type=traitcompound|default=None: + rec_id: + # type=traitcompound|default=None: + run_id: + # type=traitcompound|default=None: + dataset: + # type=str|default='': dataset identifier + dismiss_entities: + # type=list|default=['part']: + metadata: + # type=dict|default={}: + provenance: + # type=dict|default={}: + root: + # type=dict|default={}: output root dictionary + out_dir: + # type=file|default=: the output directory + _outputs: + # type=dict|default={}: + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test 
will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/iqm_file_sink_callables.py b/nipype-auto-conv/specs/interfaces/iqm_file_sink_callables.py new file mode 100644 index 0000000..37a8f1d --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/iqm_file_sink_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of IQMFileSink.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/number_of_shells.yaml b/nipype-auto-conv/specs/interfaces/number_of_shells.yaml similarity index 87% rename from nipype-auto-conv/specs/number_of_shells.yaml rename to nipype-auto-conv/specs/interfaces/number_of_shells.yaml index 6605b62..ce8f215 100644 --- a/nipype-auto-conv/specs/number_of_shells.yaml +++ b/nipype-auto-conv/specs/interfaces/number_of_shells.yaml @@ -37,6 +37,9 @@ inputs: # passed to the field in the automatically generated unittests. in_bvals: generic/file # type=file|default=: bvals file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) outputs: @@ -53,6 +56,10 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + b_dict: b_dict_callable + # type=dict: a map of b-values (including b=0) and masks + n_shells: n_shells_callable + # type=int: number of shells templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -65,8 +72,10 @@ tests: # type=file|default=: bvals file b0_threshold: # type=float|default=50: a threshold for the low-b values + dsi_threshold: + # type=int|default=11: number of shells to call a dataset DSI imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/number_of_shells_callables.py b/nipype-auto-conv/specs/interfaces/number_of_shells_callables.py new file mode 100644 index 0000000..4f14b4a --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/number_of_shells_callables.py @@ -0,0 +1,55 @@ +"""Module to put any functions that are referred to in the "callables" section of NumberOfShells.yaml""" + + +def b_dict_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["b_dict"] + + +def b_indices_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["b_indices"] + + +def b_masks_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["b_masks"] + + +def b_values_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["b_values"] + + +def models_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["models"] + + +def n_shells_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["n_shells"] + + +def out_data_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_data"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/conform_image.yaml b/nipype-auto-conv/specs/interfaces/piesno.yaml similarity index 77% rename from nipype-auto-conv/specs/conform_image.yaml rename to nipype-auto-conv/specs/interfaces/piesno.yaml index cd9a2c9..849c0ea 100644 --- a/nipype-auto-conv/specs/conform_image.yaml +++ b/nipype-auto-conv/specs/interfaces/piesno.yaml @@ -1,14 +1,14 @@ # This file is used to manually specify the semi-automatic conversion of -# 'mriqc.interfaces.common.conform_image.ConformImage' from Nipype to Pydra. 
+# 'mriqc.interfaces.diffusion.PIESNO' from Nipype to Pydra. # # Please fill-in/edit the fields below where appropriate # # Docs # ---- -# -task_name: ConformImage -nipype_name: ConformImage -nipype_module: mriqc.interfaces.common +# Computes :abbr:`QC (Quality Control)` measures on the input DWI EPI scan. +task_name: PIESNO +nipype_name: PIESNO +nipype_module: mriqc.interfaces.diffusion inputs: omit: # list[str] - fields to omit from the Pydra interface @@ -21,7 +21,10 @@ inputs: # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. in_file: generic/file - # type=file|default=: input image + # type=file|default=: a DWI 4D file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -35,11 +38,13 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - out_file: generic/file - # type=file: output conformed file + out_mask: generic/file + # type=file: a 4D binary mask of spiking voxels callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + sigma: sigma_callable + # type=float: noise sigma calculated with PIESNO templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -49,13 +54,11 @@ tests: # dict[str, str] - values to provide to inputs fields in the task initialisation # (if not specified, will try to choose a sensible value) in_file: - # type=file|default=: input image - check_ras: - # type=bool|default=True: check that orientation is RAS - check_dtype: - # type=bool|default=True: check data type + # type=file|default=: a DWI 4D file + n_channels: + # type=int|default=4: number of channels imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/piesno_callables.py b/nipype-auto-conv/specs/interfaces/piesno_callables.py new file mode 100644 index 0000000..9a70b98 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/piesno_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of PIESNO.yaml""" + + +def out_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_mask"] + + +def sigma_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["sigma"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git 
a/nipype-auto-conv/specs/read_dwi_metadata.yaml b/nipype-auto-conv/specs/interfaces/read_dwi_metadata.yaml similarity index 84% rename from nipype-auto-conv/specs/read_dwi_metadata.yaml rename to nipype-auto-conv/specs/interfaces/read_dwi_metadata.yaml index 8fc951d..44d8f17 100644 --- a/nipype-auto-conv/specs/read_dwi_metadata.yaml +++ b/nipype-auto-conv/specs/interfaces/read_dwi_metadata.yaml @@ -26,6 +26,9 @@ inputs: # type=file|default=: the input nifti file index_db: generic/directory # type=directory|default=: a PyBIDS layout cache directory + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -39,13 +42,29 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - out_bvec_file: generic/file - # type=file: corresponding bvec file out_bval_file: generic/file # type=file: corresponding bval file + out_bvec_file: generic/file + # type=file: corresponding bvec file callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + acquisition: acquisition_callable + # type=str: + out_dict: out_dict_callable + # type=dict: + reconstruction: reconstruction_callable + # type=str: + run: run_callable + # type=int: + session: session_callable + # type=str: + subject: subject_callable + # type=str: + suffix: suffix_callable + # type=str: + task: task_callable + # type=str: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -63,7 +82,7 @@ tests: index_db: # type=directory|default=: a PyBIDS layout cache directory imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/read_dwi_metadata_callables.py b/nipype-auto-conv/specs/interfaces/read_dwi_metadata_callables.py new file mode 100644 index 0000000..2d22ad3 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/read_dwi_metadata_callables.py @@ -0,0 +1,90 @@ +"""Module to put any functions that are referred to in the "callables" section of ReadDWIMetadata.yaml""" + + +def acquisition_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["acquisition"] + + +def out_bmatrix_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bmatrix"] + + +def out_bval_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bval_file"] + + +def out_bvec_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, 
inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bvec_file"] + + +def out_dict_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_dict"] + + +def qspace_neighbors_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["qspace_neighbors"] + + +def reconstruction_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["reconstruction"] + + +def run_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["run"] + + +def session_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["session"] + + +def subject_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["subject"] + + +def suffix_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["suffix"] + + +def task_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["task"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/rotate_vectors.yaml b/nipype-auto-conv/specs/interfaces/rotate_vectors.yaml new file mode 100644 index 0000000..b3f0043 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/rotate_vectors.yaml @@ -0,0 +1,77 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.diffusion.RotateVectors' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# Extract all b=0 volumes from a dwi series. +task_name: RotateVectors +nipype_name: RotateVectors +nipype_module: mriqc.interfaces.diffusion +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + in_file: generic/file + # type=file|default=: TSV file containing original b-vectors and b-values + reference: generic/file + # type=file|default=: dwi-related file providing the reference affine + transforms: generic/file + # type=file|default=: list of head-motion transforms + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. 
out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: TSV file containing original b-vectors and b-values + reference: + # type=file|default=: dwi-related file providing the reference affine + transforms: + # type=file|default=: list of head-motion transforms + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/rotate_vectors_callables.py b/nipype-auto-conv/specs/interfaces/rotate_vectors_callables.py new file mode 100644 index 0000000..58f2798 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/rotate_vectors_callables.py @@ -0,0 +1,20 @@ +"""Module to put any functions that are referred to in the "callables" section of RotateVectors.yaml""" + + +def out_bvec_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_bvec"] + + +def out_diff_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_diff"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/rotation_mask.yaml b/nipype-auto-conv/specs/interfaces/rotation_mask.yaml similarity index 91% rename from nipype-auto-conv/specs/rotation_mask.yaml rename to nipype-auto-conv/specs/interfaces/rotation_mask.yaml index d2d9411..d9cc9e7 100644 --- a/nipype-auto-conv/specs/rotation_mask.yaml +++ b/nipype-auto-conv/specs/interfaces/rotation_mask.yaml @@ -24,6 +24,9 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: input data + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -53,7 +56,7 @@ tests: in_file: # type=file|default=: input data imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/rotation_mask_callables.py b/nipype-auto-conv/specs/interfaces/rotation_mask_callables.py new file mode 100644 index 0000000..a4b7d80 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/rotation_mask_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of RotationMask.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/select_echo.yaml b/nipype-auto-conv/specs/interfaces/select_echo.yaml similarity index 88% rename from nipype-auto-conv/specs/select_echo.yaml rename to nipype-auto-conv/specs/interfaces/select_echo.yaml index 9c8c1cc..c743985 100644 --- a/nipype-auto-conv/specs/select_echo.yaml +++ b/nipype-auto-conv/specs/interfaces/select_echo.yaml @@ -26,6 +26,9 @@ inputs: # passed to the field in the automatically generated unittests. 
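+  # For illustration, a hypothetical entry in the `callable_defaults` section added
+  # below would look like the following, assuming a `te_reference_default` function
+  # were defined in the adjacent `select_echo_callables.py` (neither exists yet):
+  #
+  #   callable_defaults:
+  #     te_reference: te_reference_default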
in_files: generic/file+list-of # type=inputmultiobject|default=[]: input EPI file(s) + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -44,6 +47,10 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + echo_index: echo_index_callable + # type=int: index of the selected echo + is_multiecho: is_multiecho_callable + # type=bool: whether it is a multiecho dataset templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -59,7 +66,7 @@ tests: te_reference: # type=float|default=0.03: reference SE-EPI echo time imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/select_echo_callables.py b/nipype-auto-conv/specs/interfaces/select_echo_callables.py new file mode 100644 index 0000000..7f3e49d --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/select_echo_callables.py @@ -0,0 +1,27 @@ +"""Module to put any functions that are referred to in the "callables" section of SelectEcho.yaml""" + + +def echo_index_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["echo_index"] + + +def is_multiecho_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["is_multiecho"] + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/spikes.yaml b/nipype-auto-conv/specs/interfaces/spikes.yaml similarity index 91% rename from nipype-auto-conv/specs/spikes.yaml rename to nipype-auto-conv/specs/interfaces/spikes.yaml index 1db3062..79e9d41 100644 --- a/nipype-auto-conv/specs/spikes.yaml +++ b/nipype-auto-conv/specs/interfaces/spikes.yaml @@ -28,12 +28,15 @@ inputs: # type=file|default=: input fMRI dataset in_mask: generic/file # type=file|default=: brain mask - out_tsz: generic/file - # type=file: slice-wise z-scored timeseries (Z x N), inside brainmask - # type=file|default='spikes_tsz.txt': output file name - out_spikes: generic/file + out_spikes: Path # type=file: indices of spikes # type=file|default='spikes_idx.txt': output file name + out_tsz: Path + # type=file: slice-wise z-scored timeseries (Z x N), inside brainmask + # type=file|default='spikes_tsz.txt': output file name + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, 
dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -47,15 +50,17 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - out_tsz: generic/file - # type=file: slice-wise z-scored timeseries (Z x N), inside brainmask - # type=file|default='spikes_tsz.txt': output file name out_spikes: generic/file # type=file: indices of spikes # type=file|default='spikes_idx.txt': output file name + out_tsz: generic/file + # type=file: slice-wise z-scored timeseries (Z x N), inside brainmask + # type=file|default='spikes_tsz.txt': output file name callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + num_spikes: num_spikes_callable + # type=int: number of spikes found (total) templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -85,7 +90,7 @@ tests: # type=file: indices of spikes # type=file|default='spikes_idx.txt': output file name imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/spikes_callables.py b/nipype-auto-conv/specs/interfaces/spikes_callables.py new file mode 100644 index 0000000..1ccb440 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/spikes_callables.py @@ -0,0 +1,27 @@ +"""Module to put any functions that are referred to in the "callables" section of Spikes.yaml""" + + +def num_spikes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["num_spikes"] + + +def out_spikes_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_spikes"] + + +def out_tsz_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_tsz"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/interfaces/spiking_voxels_mask.yaml b/nipype-auto-conv/specs/interfaces/spiking_voxels_mask.yaml new file mode 100644 index 0000000..a42ba6b --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/spiking_voxels_mask.yaml @@ -0,0 +1,79 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.diffusion.SpikingVoxelsMask' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# Computes :abbr:`QC (Quality Control)` measures on the input DWI EPI scan. 
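+# For illustration only: the adjacent `spiking_voxels_mask_callables.py` is generated
+# with a placeholder `_list_outputs` that returns an undefined `_results`, so it must
+# be filled in by hand before the callables work. A minimal sketch (assuming, purely
+# hypothetically, that the mask is written to a fixed filename in the output
+# directory) might look like:
+#
+#   def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None):
+#       import os
+#       # hypothetical filename; the real one depends on how the interface is run
+#       return {"out_mask": os.path.join(output_dir, "spiking_voxels_mask.nii.gz")}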
+task_name: SpikingVoxelsMask +nipype_name: SpikingVoxelsMask +nipype_module: mriqc.interfaces.diffusion +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + brain_mask: generic/file + # type=file|default=: input probabilistic brain 3D mask + in_file: generic/file + # type=file|default=: a DWI 4D file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + out_mask: generic/file + # type=file: a 4D binary mask of spiking voxels + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: a DWI 4D file + brain_mask: + # type=file|default=: input probabilistic brain 3D mask + z_threshold: + # type=float|default=3.0: z-score threshold + b_masks: + # type=list|default=[]: list of ``n_shells`` b-value-wise indices lists + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/spiking_voxels_mask_callables.py b/nipype-auto-conv/specs/interfaces/spiking_voxels_mask_callables.py new file mode 100644 index 0000000..93d0e0b --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/spiking_voxels_mask_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of SpikingVoxelsMask.yaml""" + + +def out_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_mask"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/split_shells.yaml b/nipype-auto-conv/specs/interfaces/split_shells.yaml similarity index 89% rename from nipype-auto-conv/specs/split_shells.yaml rename to nipype-auto-conv/specs/interfaces/split_shells.yaml index 37c579e..a2b84ce 100644 --- a/nipype-auto-conv/specs/split_shells.yaml +++ b/nipype-auto-conv/specs/interfaces/split_shells.yaml @@ -22,6 +22,9 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: dwi file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -35,6 +38,8 @@ outputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
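+  # For illustration, a more specific, hypothetical override would use a mime-like
+  # type string in place of the generic one set below, e.g.:
+  #
+  #   out_file: medimage/nifti-gz+list-of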
+ out_file: generic/file+list-of + # type=outputmultiobject: output b0 file callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields @@ -51,7 +56,7 @@ tests: bvals: # type=list|default=[]: bval table imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/split_shells_callables.py b/nipype-auto-conv/specs/interfaces/split_shells_callables.py new file mode 100644 index 0000000..a614995 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/split_shells_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of SplitShells.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/structural_qc.yaml b/nipype-auto-conv/specs/interfaces/structural_qc.yaml similarity index 79% rename from nipype-auto-conv/specs/structural_qc.yaml rename to nipype-auto-conv/specs/interfaces/structural_qc.yaml index bb95b78..a9d2b0c 100644 --- a/nipype-auto-conv/specs/structural_qc.yaml +++ b/nipype-auto-conv/specs/interfaces/structural_qc.yaml @@ -24,28 +24,31 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. 
- in_file: generic/file - # type=file|default=: file to be plotted - in_noinu: generic/file - # type=file|default=: image after INU correction - in_segm: generic/file - # type=file|default=: segmentation file from FSL FAST - in_bias: generic/file - # type=file|default=: bias file - head_msk: generic/file - # type=file|default=: head mask air_msk: generic/file # type=file|default=: air mask - rot_msk: generic/file - # type=file|default=: rotation mask artifact_msk: generic/file # type=file|default=: air mask + head_msk: generic/file + # type=file|default=: head mask + in_bias: generic/file + # type=file|default=: bias file + in_file: generic/file + # type=file|default=: file to be plotted + in_noinu: generic/file + # type=file|default=: image after INU correction in_pvms: generic/file+list-of # type=inputmultiobject|default=[]: partial volume maps from FSL FAST + in_segm: generic/file + # type=file|default=: segmentation file from FSL FAST in_tpms: generic/file+list-of # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST mni_tpms: generic/file+list-of # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST + rot_msk: generic/file + # type=file|default=: rotation mask + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -64,6 +67,40 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + cjv: cjv_callable + # type=float: + cnr: cnr_callable + # type=float: + efc: efc_callable + # type=float: + fber: fber_callable + # type=float: + fwhm: fwhm_callable + # type=dict: full width half-maximum measure + icvs: icvs_callable + # type=dict: intracranial volume (ICV) fractions + inu: inu_callable + # type=dict: summary statistics of the bias field + out_qc: out_qc_callable + # type=dict: output flattened dictionary with all measures + qi_1: qi_1_callable + # type=float: + rpve: rpve_callable + # type=dict: partial volume fractions + size: size_callable + # type=dict: image sizes + snr: snr_callable + # type=dict: + snrd: snrd_callable + # type=dict: + spacing: spacing_callable + # type=dict: image sizes + summary: summary_callable + # type=dict: summary statistics per tissue + tpm_overlap: tpm_overlap_callable + # type=dict: + wm2max: wm2max_callable + # type=float: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -99,7 +136,7 @@ tests: human: # type=bool|default=True: human workflow imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/structural_qc_callables.py b/nipype-auto-conv/specs/interfaces/structural_qc_callables.py new file mode 100644 index 0000000..56f676a --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/structural_qc_callables.py @@ -0,0 +1,132 @@ +"""Module to put any functions that are referred to in the "callables" section of 
StructuralQC.yaml""" + + +def cjv_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["cjv"] + + +def cnr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["cnr"] + + +def efc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["efc"] + + +def fber_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fber"] + + +def fwhm_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["fwhm"] + + +def icvs_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["icvs"] + + +def inu_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["inu"] + + +def out_noisefit_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_noisefit"] + + +def out_qc_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_qc"] + + +def qi_1_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["qi_1"] + + +def rpve_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["rpve"] + + +def size_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["size"] + + +def snr_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["snr"] + + +def snrd_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["snrd"] + + +def spacing_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["spacing"] + + +def summary_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["summary"] + + +def tpm_overlap_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["tpm_overlap"] + + +def wm2max_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["wm2max"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git 
a/nipype-auto-conv/specs/interfaces/synth_strip.yaml b/nipype-auto-conv/specs/interfaces/synth_strip.yaml new file mode 100644 index 0000000..bdbe066 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/synth_strip.yaml @@ -0,0 +1,101 @@ +# This file is used to manually specify the semi-automatic conversion of +# 'mriqc.interfaces.synthstrip.SynthStrip' from Nipype to Pydra. +# +# Please fill-in/edit the fields below where appropriate +# +# Docs +# ---- +# +task_name: SynthStrip +nipype_name: SynthStrip +nipype_module: mriqc.interfaces.synthstrip +inputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. + in_file: generic/file + # type=file|default=: Input image to be brain extracted + model: generic/file + # type=file|default=: file containing model's weights + out_file: Path + # type=file: brain-extracted image + # type=file|default=: store brain-extracted input to file + out_mask: Path + # type=file: brain mask + # type=file|default=: store brainmask to file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields + metadata: + # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) +outputs: + omit: + # list[str] - fields to omit from the Pydra interface + rename: + # dict[str, str] - fields to rename in the Pydra interface + types: + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
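+  # For illustration, if the output filenames followed a fixed pattern, they could
+  # instead be declared under `templates:` below as pydra-style
+  # `output_file_template` values (hypothetical patterns, not taken from the
+  # nipype interface):
+  #
+  #   templates:
+  #     out_file: "{in_file}_desc-brain"
+  #     out_mask: "{in_file}_desc-brainmask"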
+ out_file: generic/file + # type=file: brain-extracted image + # type=file|default=: store brain-extracted input to file + out_mask: generic/file + # type=file: brain mask + # type=file|default=: store brainmask to file + callables: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields + templates: + # dict[str, str] - `output_file_template` values to be provided to output fields + requirements: + # dict[str, list[str]] - input fields that are required to be provided for the output field to be present +tests: +- inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: Input image to be brain extracted + use_gpu: + # type=bool|default=False: Use GPU + model: + # type=file|default=: file containing model's weights + border_mm: + # type=int|default=1: Mask border threshold in mm + out_file: + # type=file: brain-extracted image + # type=file|default=: store brain-extracted input to file + out_mask: + # type=file: brain mask + # type=file|default=: store brainmask to file + num_threads: + # type=int|default=0: Number of threads + args: + # type=str|default='': Additional parameters to the command + environ: + # type=dict|default={}: Environment variables + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file +doctests: [] diff --git a/nipype-auto-conv/specs/interfaces/synth_strip_callables.py b/nipype-auto-conv/specs/interfaces/synth_strip_callables.py new file mode 100644 index 0000000..526cf72 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/synth_strip_callables.py @@ -0,0 +1,151 @@ +"""Module to put any functions that are referred to in the "callables" section of SynthStrip.yaml""" + +import attrs +import logging +import os +from nipype import logging +from nipype.utils.filemanip import split_filename +from nipype.interfaces.base.support import NipypeInterfaceError +from nipype.interfaces.base.traits_extension import traits + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +def out_mask_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_mask"] + + +iflogger = logging.getLogger("nipype.interface") + + +# Original source at L809 of /interfaces/base/core.py +def _filename_from_source( + name, chain=None, inputs=None, stdout=None, stderr=None, output_dir=None +): + if chain is None: + chain = [] + + trait_spec = inputs.trait(name) + retval = getattr(inputs, name) + source_ext = None + if (retval is attrs.NOTHING) or "%s" in retval: + if not trait_spec.name_source: + return retval + + # Do not generate filename when excluded by other inputs + if any( + (getattr(inputs, field) is not attrs.NOTHING) + for field in trait_spec.xor or () + ): + return retval + + # Do not generate filename when required fields are missing + if not all( + (getattr(inputs, field) is not attrs.NOTHING) + for field in trait_spec.requires or () + ): + return retval + + if (retval is not attrs.NOTHING) and "%s" in retval: + name_template = retval + else: + name_template = trait_spec.name_template + if not name_template: + name_template = "%s_generated" + + ns = trait_spec.name_source + while isinstance(ns, (list, tuple)): + if len(ns) > 1: + iflogger.warning("Only one name_source per trait is allowed") + ns = ns[0] + + if not isinstance(ns, (str, bytes)): + raise ValueError( + "name_source of '{}' trait should be an input trait " + "name, but a type {} object was found".format(name, type(ns)) + ) + + if getattr(inputs, ns) is not attrs.NOTHING: + name_source = ns + source = getattr(inputs, name_source) + while isinstance(source, list): + source = source[0] + + # special treatment for files + try: + _, base, source_ext = split_filename(source) + except (AttributeError, TypeError): + base = source + else: + if name in chain: + raise NipypeInterfaceError("Mutually pointing name_sources") + + chain.append(name) + base = _filename_from_source( + ns, + chain, + inputs=inputs, + stdout=stdout, + stderr=stderr, + output_dir=output_dir, + ) + if base is not attrs.NOTHING: + _, _, source_ext = split_filename(base) + else: + # Do not generate filename when required fields are missing + return retval + + chain = None + retval = name_template % base + _, _, ext = split_filename(retval) + if trait_spec.keep_extension and (ext or source_ext): + if (ext is None or not ext) and source_ext: + retval = retval + source_ext + else: + retval = _overload_extension( + retval, + name, + inputs=inputs, + stdout=stdout, + stderr=stderr, + output_dir=output_dir, + ) + return retval + + +# Original source 
at L885 of /interfaces/base/core.py +def _gen_filename(name, inputs=None, stdout=None, stderr=None, output_dir=None): + raise NotImplementedError + + +# Original source at L891 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + metadata = dict(name_source=lambda t: t is not None) + traits = inputs.traits(**metadata) + if traits: + outputs = {} + for name, trait_spec in list(traits.items()): + out_name = name + if trait_spec.output_name is not None: + out_name = trait_spec.output_name + fname = _filename_from_source( + name, inputs=inputs, stdout=stdout, stderr=stderr, output_dir=output_dir + ) + if fname is not attrs.NOTHING: + outputs[out_name] = os.path.abspath(fname) + return outputs + + +# Original source at L888 of /interfaces/base/core.py +def _overload_extension( + value, name=None, inputs=None, stdout=None, stderr=None, output_dir=None +): + return value diff --git a/nipype-auto-conv/specs/upload_iq_ms.yaml b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml similarity index 89% rename from nipype-auto-conv/specs/upload_iq_ms.yaml rename to nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml index 54775fd..a2e5ae1 100644 --- a/nipype-auto-conv/specs/upload_iq_ms.yaml +++ b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml @@ -24,6 +24,9 @@ inputs: # passed to the field in the automatically generated unittests. in_iqms: generic/file # type=file|default=: the input IQMs-JSON file + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -40,6 +43,8 @@ outputs: callables: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` # to set to the `callable` attribute of output fields + api_id: api_id_callable + # type=traitcompound: Id for report returned by the web api templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: @@ -59,7 +64,7 @@ tests: strict: # type=bool|default=False: crash if upload was not successful imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/upload_iq_ms_callables.py b/nipype-auto-conv/specs/interfaces/upload_iq_ms_callables.py new file mode 100644 index 0000000..8bffaf8 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/upload_iq_ms_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of UploadIQMs.yaml""" + + +def api_id_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["api_id"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/weighted_stat.yaml b/nipype-auto-conv/specs/interfaces/weighted_stat.yaml similarity index 92% rename from 
nipype-auto-conv/specs/weighted_stat.yaml rename to nipype-auto-conv/specs/interfaces/weighted_stat.yaml index b3665f5..5aed7ff 100644 --- a/nipype-auto-conv/specs/weighted_stat.yaml +++ b/nipype-auto-conv/specs/interfaces/weighted_stat.yaml @@ -22,6 +22,9 @@ inputs: # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: an image + callable_defaults: + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set as the `default` method of input fields metadata: # dict[str, dict[str, any]] - additional metadata to set on any of the input fields (e.g. out_file: position: 1) outputs: @@ -55,7 +58,7 @@ tests: stat: # type=enum|default='mean'|allowed['mean','std']: statistic to compute imports: - # list[nipype2pydra.task.base.importstatement] - list import statements required by the test, with each list item + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item # consisting of 'module', 'name', and optionally 'alias' keys expected_outputs: # dict[str, str] - expected values for selected outputs, noting that tests will typically diff --git a/nipype-auto-conv/specs/interfaces/weighted_stat_callables.py b/nipype-auto-conv/specs/interfaces/weighted_stat_callables.py new file mode 100644 index 0000000..f1e00d5 --- /dev/null +++ b/nipype-auto-conv/specs/interfaces/weighted_stat_callables.py @@ -0,0 +1,13 @@ +"""Module to put any functions that are referred to in the "callables" section of WeightedStat.yaml""" + + +def out_file_callable(output_dir, inputs, stdout, stderr): + outputs = _list_outputs( + output_dir=output_dir, inputs=inputs, stdout=stdout, stderr=stderr + ) + return outputs["out_file"] + + +# Original source at L568 of /interfaces/base/core.py +def _list_outputs(inputs=None, stdout=None, stderr=None, output_dir=None): + return _results diff --git a/nipype-auto-conv/specs/number_of_shells_callables.py b/nipype-auto-conv/specs/number_of_shells_callables.py deleted file mode 100644 index 63378f6..0000000 --- a/nipype-auto-conv/specs/number_of_shells_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in NumberOfShells.yaml""" diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml new file mode 100644 index 0000000..478272a --- /dev/null +++ b/nipype-auto-conv/specs/package.yaml @@ -0,0 +1,8 @@ +# name of the package to generate, e.g. pydra.tasks.mriqc +name: pydra.tasks.mriqc +# name of the nipype package to generate from (e.g. mriqc) +nipype_name: mriqc +# The name of the global struct/dict that contains workflow inputs that are to be converted to inputs of the function along with the type of the struct, either "dict" or "class" +config_params: null +# Mappings between nipype packages and their pydra equivalents. 
Regular expressions are supported +import_translations: null diff --git a/nipype-auto-conv/specs/read_dwi_metadata_callables.py b/nipype-auto-conv/specs/read_dwi_metadata_callables.py deleted file mode 100644 index 197e018..0000000 --- a/nipype-auto-conv/specs/read_dwi_metadata_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in ReadDWIMetadata.yaml""" diff --git a/nipype-auto-conv/specs/rotation_mask_callables.py b/nipype-auto-conv/specs/rotation_mask_callables.py deleted file mode 100644 index c844375..0000000 --- a/nipype-auto-conv/specs/rotation_mask_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in RotationMask.yaml""" diff --git a/nipype-auto-conv/specs/select_echo_callables.py b/nipype-auto-conv/specs/select_echo_callables.py deleted file mode 100644 index cce5cf5..0000000 --- a/nipype-auto-conv/specs/select_echo_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in SelectEcho.yaml""" diff --git a/nipype-auto-conv/specs/spikes_callables.py b/nipype-auto-conv/specs/spikes_callables.py deleted file mode 100644 index b4f0c3b..0000000 --- a/nipype-auto-conv/specs/spikes_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in Spikes.yaml""" diff --git a/nipype-auto-conv/specs/split_shells_callables.py b/nipype-auto-conv/specs/split_shells_callables.py deleted file mode 100644 index ec593c9..0000000 --- a/nipype-auto-conv/specs/split_shells_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in SplitShells.yaml""" diff --git a/nipype-auto-conv/specs/structural_qc_callables.py b/nipype-auto-conv/specs/structural_qc_callables.py deleted file mode 100644 index 28d0952..0000000 --- a/nipype-auto-conv/specs/structural_qc_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in StructuralQC.yaml""" diff --git a/nipype-auto-conv/specs/upload_iq_ms_callables.py b/nipype-auto-conv/specs/upload_iq_ms_callables.py deleted file mode 100644 index 6cc7c99..0000000 --- a/nipype-auto-conv/specs/upload_iq_ms_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in UploadIQMs.yaml""" diff --git a/nipype-auto-conv/specs/weighted_stat_callables.py b/nipype-auto-conv/specs/weighted_stat_callables.py deleted file mode 100644 index 3ea363d..0000000 --- a/nipype-auto-conv/specs/weighted_stat_callables.py +++ /dev/null @@ -1 +0,0 @@ -"""Module to put any functions that are referred to in WeightedStat.yaml""" diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml new file mode 100644 index 0000000..107190c --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: airmsk_wf +# name of the nipype workflow constructor +nipype_name: airmsk_wf +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? 
'' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml new file mode 100644 index 0000000..2eb7b4d --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: anat_qc_workflow +# name of the nipype workflow constructor +nipype_name: anat_qc_workflow +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml new file mode 100644 index 0000000..00b6f18 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: compute_iqms +# name of the nipype workflow constructor +nipype_name: compute_iqms +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml new file mode 100644 index 0000000..1658068 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: headmsk_wf +# name of the nipype workflow constructor +nipype_name: headmsk_wf +# name of the nipype module the function is found within, e.g. 
mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml new file mode 100644 index 0000000..346d3e8 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: init_brain_tissue_segmentation +# name of the nipype workflow constructor +nipype_name: init_brain_tissue_segmentation +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml new file mode 100644 index 0000000..7eed0be --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: spatial_normalization +# name of the nipype workflow constructor +nipype_name: spatial_normalization +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? 
'' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml new file mode 100644 index 0000000..65c8701 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: init_anat_report_wf +# name of the nipype workflow constructor +nipype_name: init_anat_report_wf +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.anatomical.output +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml new file mode 100644 index 0000000..35425b3 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -0,0 +1,18 @@ +# name of the converted workflow constructor function +name: compute_iqms +# name of the nipype workflow constructor +nipype_name: compute_iqms +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.diffusion.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + "": inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + "": outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml new file mode 100644 index 0000000..39fa8fa --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -0,0 +1,18 @@ +# name of the converted workflow constructor function +name: dmri_qc_workflow +# name of the nipype workflow constructor +nipype_name: dmri_qc_workflow +# name of the nipype module the function is found within, e.g. 
mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.diffusion.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + "": inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + "": dwi_report_wf +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml new file mode 100644 index 0000000..49263f5 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: epi_mni_align +# name of the nipype workflow constructor +nipype_name: epi_mni_align +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.diffusion.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml new file mode 100644 index 0000000..7dac072 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: hmc_workflow +# name of the nipype workflow constructor +nipype_name: hmc_workflow +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.diffusion.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? 
'' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml new file mode 100644 index 0000000..965bdae --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -0,0 +1,27 @@ +# name of the converted workflow constructor function +name: init_dwi_report_wf +# name of the nipype workflow constructor +nipype_name: init_dwi_report_wf +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.diffusion.output +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + "": inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + snr: ds_report_snr + noise: ds_report_noise + fa: ds_report_fa + md: ds_report_md + hm: ds_report_hm + spikes: ds_report_spikes + norm: ds_report_norm + carpet: ds_report_carpet + background: ds_report_background + bmask: ds_report_bmask +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml new file mode 100644 index 0000000..40b5e8d --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -0,0 +1,18 @@ +# name of the converted workflow constructor function +name: compute_iqms +# name of the nipype workflow constructor +nipype_name: compute_iqms +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + "": inputnode +# Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow +output_nodes: + "": outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml new file mode 100644 index 0000000..ec1435f --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: epi_mni_align +# name of the nipype workflow constructor +nipype_name: epi_mni_align +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml new file mode 100644 index 0000000..cbebb36 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: fmri_bmsk_workflow +# name of the nipype workflow constructor +nipype_name: fmri_bmsk_workflow +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml new file mode 100644 index 0000000..c2b10d2 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: fmri_qc_workflow +# name of the nipype workflow constructor +nipype_name: fmri_qc_workflow +# name of the nipype module the function is found within, e.g. 
mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml new file mode 100644 index 0000000..77ec3b7 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: hmc +# name of the nipype workflow constructor +nipype_name: hmc +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml new file mode 100644 index 0000000..255e3cb --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: init_func_report_wf +# name of the nipype workflow constructor +nipype_name: init_func_report_wf +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.output +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? 
'' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml new file mode 100644 index 0000000..5337d81 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -0,0 +1,20 @@ +# name of the converted workflow constructor function +name: synthstrip_wf +# name of the nipype workflow constructor +nipype_name: synthstrip_wf +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.shared +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_nodes: + ? '' + : inputnode +# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +output_nodes: + ? '' + : outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: null +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/pydra/tasks/mriqc/__init__.py b/pydra/tasks/mriqc/__init__.py deleted file mode 100644 index 4dfb7cd..0000000 --- a/pydra/tasks/mriqc/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -This is a basic doctest demonstrating that the package and pydra can both be successfully -imported. - ->>> import pydra.engine ->>> import pydra.tasks.anatomical -""" -from warnings import warn -from pathlib import Path - -pkg_path = Path(__file__).parent.parent - -try: - from ._version import __version__ -except ImportError: - raise RuntimeError( - "pydra-anatomical has not been properly installed, please run " - f"`pip install -e {str(pkg_path)}` to install a development version" - ) -if "nipype" not in __version__: - try: - from .auto._version import nipype_version, nipype2pydra_version - except ImportError: - warn( - "Nipype interfaces haven't been automatically converted from their specs in " - f"`nipype-auto-conv`. 
Please run `{str(pkg_path / 'nipype-auto-conv' / 'generate')}` " - "to generated the converted Nipype interfaces in pydra.tasks.anatomical.auto" - ) - else: - n_ver = nipype_version.replace(".", "_") - n2p_ver = nipype2pydra_version.replace(".", "_") - __version__ += ( - "_" if "+" in __version__ else "+" - ) + f"nipype{n_ver}_nipype2pydra{n2p_ver}" - - -__all__ = ["__version__"] diff --git a/pydra/tasks/mriqc/latest.py b/pydra/tasks/mriqc/latest.py deleted file mode 100644 index f41e057..0000000 --- a/pydra/tasks/mriqc/latest.py +++ /dev/null @@ -1,3 +0,0 @@ -PACKAGE_VERSION = "v1" - -from .v1 import * # noqa diff --git a/pydra/tasks/mriqc/v23_2/__init__.py b/pydra/tasks/mriqc/v23_2/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyproject.toml b/pyproject.toml index a086e7a..23b9ff7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ doc = [ "sphinxcontrib-versioning", ] test = [ + "nipype2pydra", "pytest >= 4.4.0", "pytest-cov", "pytest-env", diff --git a/related-packages/conftest.py b/related-packages/conftest.py deleted file mode 100644 index 2a703c0..0000000 --- a/related-packages/conftest.py +++ /dev/null @@ -1,37 +0,0 @@ -import os -import logging -from pathlib import Path -import tempfile -import pytest - -# Set DEBUG logging for unittests - -log_level = logging.WARNING - -logger = logging.getLogger("fileformats") -logger.setLevel(log_level) - -sch = logging.StreamHandler() -sch.setLevel(log_level) -formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") -sch.setFormatter(formatter) -logger.addHandler(sch) - - -# For debugging in IDE's don't catch raised exceptions and let the IDE -# break at it -if os.getenv("_PYTEST_RAISE", "0") != "0": - - @pytest.hookimpl(tryfirst=True) - def pytest_exception_interact(call): - raise call.excinfo.value - - @pytest.hookimpl(tryfirst=True) - def pytest_internalerror(excinfo): - raise excinfo.value - - -@pytest.fixture -def work_dir(): - work_dir = tempfile.mkdtemp() - return Path(work_dir) diff --git a/related-packages/fileformats-extras/LICENSE b/related-packages/fileformats-extras/LICENSE deleted file mode 100644 index e00bcb3..0000000 --- a/related-packages/fileformats-extras/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ - Copyright 2021 Nipype developers - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/related-packages/fileformats-extras/README.rst b/related-packages/fileformats-extras/README.rst deleted file mode 100644 index b2547de..0000000 --- a/related-packages/fileformats-extras/README.rst +++ /dev/null @@ -1,29 +0,0 @@ -FileFormats-anatomical Extras -====================================== -.. image:: https://github.com/nipype/pydra-freesurfer/actions/workflows/ci-cd.yaml/badge.svg - :target: https://github.com/nipype/pydra-freesurfer/actions/workflows/ci-cd.yaml - - -This is a extras module for the `fileformats-anatomical `__ -fileformats extension package, which provides additional functionality to format classes (i.e. 
aside -from basic identification and validation), such as conversion tools, metadata parsers, test data generators, etc... - - -Quick Installation ------------------- - -This extension can be installed for Python 3 using *pip*:: - - $ pip3 install fileformats-anatomical-extras - -This will install the core package and any other dependencies - -License -------- - -This work is licensed under a -`Creative Commons Attribution 4.0 International License `_ - -.. image:: https://i.creativecommons.org/l/by/4.0/88x31.png - :target: http://creativecommons.org/licenses/by/4.0/ - :alt: Creative Commons Attribution 4.0 International License diff --git a/related-packages/fileformats-extras/fileformats/extras/medimage_anatomical/__init__.py b/related-packages/fileformats-extras/fileformats/extras/medimage_anatomical/__init__.py deleted file mode 100644 index ec83731..0000000 --- a/related-packages/fileformats-extras/fileformats/extras/medimage_anatomical/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from pathlib import Path -import typing as ty -from random import Random -from fileformats.core import FileSet -from fileformats.medimage_anatomical import ( -) - diff --git a/related-packages/fileformats-extras/pyproject.toml b/related-packages/fileformats-extras/pyproject.toml deleted file mode 100644 index 594156a..0000000 --- a/related-packages/fileformats-extras/pyproject.toml +++ /dev/null @@ -1,87 +0,0 @@ -[build-system] -requires = ["hatchling", "hatch-vcs"] -build-backend = "hatchling.build" - -[project] -name = "fileformats-medimage-anatomical-extras" -description = "Extensions to add functionality to tool-specific *fileformats* classes" -readme = "README.rst" -requires-python = ">=3.8" -dependencies = [ - "fileformats", - "fileformats-medimage-anatomical", - "pydra >= 0.23.0a" -] -license = {file = "LICENSE"} -authors = [ - {name = "Thomas G. Close", email = "tom.g.close@gmail.com"}, -] -maintainers = [ - {name = "Thomas G. Close", email = "tom.g.close@gmail.com"}, -] -keywords = [ - "file formats", - "data", -] -classifiers = [ - "Development Status :: 3 - Alpha", - "Environment :: Console", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: Apache Software License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Scientific/Engineering", -] -dynamic = ["version"] - -[project.optional-dependencies] -dev = [ - "black", - "pre-commit", - "codespell", - "flake8", - "flake8-pyproject", -] -test = [ - "pytest >=6.2.5", - "pytest-env>=0.6.2", - "pytest-cov>=2.12.1", - "codecov", -] - -converters = [ -] - -[project.urls] -repository = "https://github.com/nipype/pydra-anatomical" - -[tool.hatch.version] -source = "vcs" -raw-options = { root = "../.." 
} - -[tool.hatch.build.hooks.vcs] -version-file = "fileformats/extras/medimage_anatomical/_version.py" - -[tool.hatch.build.targets.wheel] -packages = ["fileformats"] - -[tool.black] -target-version = ['py38'] -exclude = "fileformats/extras/medimage_anatomical/_version.py" - -[tool.codespell] -ignore-words = ".codespell-ignorewords" - -[tool.flake8] -doctests = true -per-file-ignores = [ - "__init__.py:F401" -] -max-line-length = 88 -select = "C,E,F,W,B,B950" -extend-ignore = ['E203', 'E501', 'E129'] diff --git a/related-packages/fileformats/LICENSE b/related-packages/fileformats/LICENSE deleted file mode 100644 index e00bcb3..0000000 --- a/related-packages/fileformats/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ - Copyright 2021 Nipype developers - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/related-packages/fileformats/README.rst b/related-packages/fileformats/README.rst deleted file mode 100644 index dc578d5..0000000 --- a/related-packages/fileformats/README.rst +++ /dev/null @@ -1,39 +0,0 @@ -How to customise this template -============================== - -#. Rename the `related-packages/fileformats/anatomical` directory to the name of the fileformats subpackage (e.g. `medimage_fsl`) -#. Search and replace "anatomical" with the name of the fileformats subpackage the extras are to be added -#. Replace name + email placeholders in `pyproject.toml` for developers and maintainers -#. Add the extension file-format classes -#. Ensure that all the extension file-format classes are imported into the extras package root, i.e. `fileformats/anatomical` -#. Delete these instructions - -... - -FileFormats Extension - anatomical -==================================== -.. image:: https://github.com/nipype/pydra-anatomical/actions/workflows/ci-cd.yml/badge.svg - :target: https://github.com/nipype/pydra-anatomical/actions/workflows/ci-cd.yml - -This is the "anatomical" extension module for the -`fileformats `__ package - - -Quick Installation ------------------- - -This extension can be installed for Python 3 using *pip*:: - - $ pip3 install fileformats-anatomical - -This will install the core package and any other dependencies - -License -------- - -This work is licensed under a -`Creative Commons Attribution 4.0 International License `_ - -.. 
image:: https://i.creativecommons.org/l/by/4.0/88x31.png - :target: http://creativecommons.org/licenses/by/4.0/ - :alt: Creative Commons Attribution 4.0 International License diff --git a/related-packages/fileformats/fileformats/medimage_anatomical/__init__.py b/related-packages/fileformats/fileformats/medimage_anatomical/__init__.py deleted file mode 100644 index 10bd9c0..0000000 --- a/related-packages/fileformats/fileformats/medimage_anatomical/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from fileformats.generic import File \ No newline at end of file diff --git a/related-packages/fileformats/pyproject.toml b/related-packages/fileformats/pyproject.toml deleted file mode 100644 index 34a0e49..0000000 --- a/related-packages/fileformats/pyproject.toml +++ /dev/null @@ -1,84 +0,0 @@ -[build-system] -requires = ["hatchling", "hatch-vcs"] -build-backend = "hatchling.build" - -[project] -name = "fileformats-medimage-anatomical" -description = "Classes for representing different file formats in Python classes for use in type hinting in data workflows" -readme = "README.rst" -requires-python = ">=3.8" -dependencies = [ - "fileformats", - "fileformats-medimage" -] -license = {file = "LICENSE"} -authors = [ - {name = "Thomas G. Close", email = "tom.g.close@gmail.com"}, -] -maintainers = [ - {name = "Thomas G. Close", email = "tom.g.close@gmail.com"}, -] -keywords = [ - "file formats", - "data", -] -classifiers = [ - "Development Status :: 3 - Alpha", - "Environment :: Console", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: Apache Software License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Scientific/Engineering", -] -dynamic = ["version"] - -[project.optional-dependencies] -dev = [ - "black", - "pre-commit", - "codespell", - "flake8", - "flake8-pyproject", -] -test = [ - "pytest >=6.2.5", - "pytest-env>=0.6.2", - "pytest-cov>=2.12.1", - "codecov", - "fileformats-medimage-CHANGME-extras", -] - -[project.urls] -repository = "https://github.com/nipype/pydra-anatomical" - -[tool.hatch.version] -source = "vcs" -raw-options = { root = "../.." 
}
-[tool.hatch.build.hooks.vcs]
-version-file = "fileformats/medimage_anatomical/_version.py"
-[tool.hatch.build.targets.wheel]
-packages = ["fileformats"]
-[tool.black]
-target-version = ['py38']
-exclude = "fileformats/medimage_anatomical/_version.py"
-[tool.codespell]
-ignore-words = ".codespell-ignorewords"
-[tool.flake8]
-doctests = true
-per-file-ignores = [
-    "__init__.py:F401"
-]
-max-line-length = 88
-select = "C,E,F,W,B,B950"
-extend-ignore = ['E203', 'E501', 'E129']
diff --git a/report_progress.py b/report_progress.py
new file mode 100644
index 0000000..1fc0767
--- /dev/null
+++ b/report_progress.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+from pathlib import Path
+import json
+import yaml
+import click
+
+
+@click.command
+@click.argument(
+    "out_json_path",
+    type=click.Path(path_type=Path),
+)
+def report_progress(out_json_path: Path):
+    """Write a JSON report of spec test status to OUT_JSON_PATH."""
+
+    out_json_path.parent.mkdir(exist_ok=True, parents=True)
+
+    SPECS_DIR = Path(__file__).parent / "nipype-auto-conv" / "specs"
+
+    report = {}
+
+    for spec_path in SPECS_DIR.glob("*.yaml"):
+        with open(spec_path) as f:
+            spec = yaml.load(f, Loader=yaml.SafeLoader)
+
+        report[spec["task_name"]] = {
+            n: not s["xfail"] for n, s in spec["tests"].items()
+        }
+
+    with open(out_json_path, "w") as f:
+        json.dump(report, f)
+
+
+if __name__ == "__main__":
+    report_progress()
diff --git a/tools/increment_tool_version.py b/tools/increment_tool_version.py
new file mode 100755
index 0000000..e6d56ed
--- /dev/null
+++ b/tools/increment_tool_version.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+from pathlib import Path
+import inspect
+from importlib import import_module
+import click
+from looseversion import LooseVersion
+from pydra.engine.core import TaskBase
+
+
+PKG_DIR = Path(__file__).parent.parent
+TASKS_DIR = PKG_DIR / "pydra" / "tasks" / "ants"
+VERSION_GRANULARITY = (
+    2  # Number of version parts to include: 1 - major, 2 - minor, 3 - micro
+)
+
+
+@click.command(
+    help="""Increment the latest version or create a new sub-package for interfaces for
+a new release of ANTs depending on whether one already exists or not.
+
+NEW_VERSION the version of ANTs to create a new sub-package for
+"""
+)
+@click.argument("new_version", type=LooseVersion)
+def increment_tool_version(new_version: LooseVersion):
+
+    # Get the name of the sub-package, e.g. "v2_5"
+    new_subpkg_name = "_".join(str(p) for p in new_version.version[:VERSION_GRANULARITY])  # type: ignore
+    if not new_subpkg_name.startswith("v"):
+        new_subpkg_name = "v" + new_subpkg_name
+    sub_pkg_dir = TASKS_DIR / new_subpkg_name
+    if not sub_pkg_dir.exists():
+
+        prev_version = sorted(
+            (
+                p.name
+                for p in TASKS_DIR.iterdir()
+                if p.is_dir() and p.name.startswith("v")
+            ),
+            key=lambda x: LooseVersion(".".join(x.split("_"))).version,
+        )[-1]
+        prev_ver_mod = import_module(f"pydra.tasks.ants.{prev_version}")
+
+        mod_attrs = [getattr(prev_ver_mod, a) for a in dir(prev_ver_mod)]
+        task_classes = [
+            a for a in mod_attrs if inspect.isclass(a) and issubclass(a, TaskBase)
+        ]
+
+        code_str = (
+            f"from pydra.tasks.ants import {prev_version}\n"
+            "from .
import _tool_version\n" + ) + + for task_cls in task_classes: + code_str += ( + f"\n\nclass {task_cls.__name__}({prev_version}.{task_cls.__name__}):\n" + " TOOL_VERSION = _tool_version.TOOL_VERSION\n" + ) + + sub_pkg_dir.mkdir(exist_ok=True) + with open(sub_pkg_dir / "__init__.py", "w") as f: + f.write(code_str) + + with open(sub_pkg_dir / "_tool_version.py", "w") as f: + f.write(f'TOOL_VERSION = "{new_version}"\n') + + +if __name__ == "__main__": + increment_tool_version() diff --git a/tools/rename_template.py b/tools/rename_template.py new file mode 100755 index 0000000..a682d24 --- /dev/null +++ b/tools/rename_template.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +import sys +import os +import re +import fnmatch +import functools +from pathlib import Path + +PACKAGE_ROOT = Path(__file__).absolute().parent.parent + + +@functools.lru_cache() +def load_gitignore(repo): + gitignore = repo / ".gitignore" + ignore = [fnmatch.translate(".git/"), fnmatch.translate(Path(__file__).name)] + if gitignore.exists(): + ignore.extend( + fnmatch.translate(line.strip()) + for line in gitignore.read_text().splitlines() + if line.strip() and not line[0] == "#" + ) + return re.compile("|".join(ignore)) + + +cmd, new_name, *_ = sys.argv + +for root_, dirs, files in os.walk(PACKAGE_ROOT): + ignore = load_gitignore(PACKAGE_ROOT).search + for d in [d for d in dirs if ignore(f"{d}/")]: + dirs.remove(d) + for f in [f for f in files if ignore(f)]: + files.remove(f) + + root = Path(root_) + for src in list(dirs): + if "mriqc" in src: + dst = src.replace("mriqc", new_name) + print(f"Renaming: {root / src} -> {root / dst}") + os.rename(root / src, root / dst) + dirs.remove(src) + dirs.append(dst) + for fname in files: + text = Path.read_text(root / fname) + if "mriqc" in text: + print(f"Rewriting: {root / fname}") + Path.write_text(root / fname, text.replace("mriqc", new_name)) diff --git a/tools/requirements.txt b/tools/requirements.txt new file mode 100644 index 0000000..3b7ccec --- /dev/null +++ b/tools/requirements.txt @@ -0,0 +1,3 @@ +click >= 8.1.3 +looseversion >= 1.1 +pydra >= 0.23 \ No newline at end of file From 67de781766060d9b83a765a02f5eff1682210182 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 17 Apr 2024 11:04:29 +1000 Subject: [PATCH 04/47] autoformat --- .../datalad_identity_interface.yaml | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml b/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml index 62db871..306a3dd 100644 --- a/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml +++ b/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml @@ -7,7 +7,7 @@ # ---- # Sneaks a ``datalad get`` in paths, if datalad is available. 
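# A rough sketch of the behaviour described above (an assumption based on the
# public DataLad Python API, not code taken from this interface; the names
# `dataset_root` and `in_path` are illustrative only):
#
#     from datalad.api import Dataset
#
#     dataset = Dataset(str(dataset_root))
#     if dataset.is_installed():
#         dataset.get(str(in_path))  # fetch annexed file content to disk
#
# i.e. paths are passed through unchanged once their content is present locally.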
task_name: DataladIdentityInterface
-nipype_name: DataladIdentityInterface
+nipype_name: DataladIdentityInterface
 nipype_module: mriqc.interfaces.datalad
 inputs:
   omit:
@@ -44,22 +44,22 @@ outputs:
   requirements:
   # dict[str, list[str]] - input fields that are required to be provided for the output field to be present
 tests:
-- inputs:
-  # dict[str, str] - values to provide to inputs fields in the task initialisation
-  # (if not specified, will try to choose a sensible value)
-  imports:
-  # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item
-  # consisting of 'module', 'name', and optionally 'alias' keys
-  expected_outputs:
-  # dict[str, str] - expected values for selected outputs, noting that tests will typically
-  # be terminated before they complete for time-saving reasons, and therefore
-  # these values will be ignored, when running in CI
-  timeout: 10
-  # int - the value to set for the timeout in the generated test,
-  # after which the test will be considered to have been initialised
-  # successfully. Set to 0 to disable the timeout (warning, this could
-  # lead to the unittests taking a very long time to complete)
-  xfail: true
-  # bool - whether the unittest is expected to fail or not. Set to false
-  # when you are satisfied with the edits you have made to this file
+  - inputs:
+    # dict[str, str] - values to provide to inputs fields in the task initialisation
+    # (if not specified, will try to choose a sensible value)
+    imports:
+    # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item
+    # consisting of 'module', 'name', and optionally 'alias' keys
+    expected_outputs:
+    # dict[str, str] - expected values for selected outputs, noting that tests will typically
+    # be terminated before they complete for time-saving reasons, and therefore
+    # these values will be ignored, when running in CI
+    timeout: 10
+    # int - the value to set for the timeout in the generated test,
+    # after which the test will be considered to have been initialised
+    # successfully. Set to 0 to disable the timeout (warning, this could
+    # lead to the unittests taking a very long time to complete)
+    xfail: true
+    # bool - whether the unittest is expected to fail or not.
Set to false + # when you are satisfied with the edits you have made to this file doctests: [] From 71d1c1e255d0d445b99ad757a3b5d3984d62f146 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 18 Apr 2024 14:14:58 +1000 Subject: [PATCH 05/47] touched up conversion specs --- ...ce.yaml => datalad_identity_interface.yaml | 0 ...=> datalad_identity_interface_callables.py | 0 nipype-auto-conv/specs/package.yaml | 20 ++++++++++++++++++- ...anatomical.output.init_anat_report_wf.yaml | 18 +++++++++++------ ...functional.output.init_func_report_wf.yaml | 11 ++++++---- 5 files changed, 38 insertions(+), 11 deletions(-) rename nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml => datalad_identity_interface.yaml (100%) rename nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py => datalad_identity_interface_callables.py (100%) diff --git a/nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml b/datalad_identity_interface.yaml similarity index 100% rename from nipype-auto-conv/specs/interfaces/datalad_identity_interface.yaml rename to datalad_identity_interface.yaml diff --git a/nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py b/datalad_identity_interface_callables.py similarity index 100% rename from nipype-auto-conv/specs/interfaces/datalad_identity_interface_callables.py rename to datalad_identity_interface_callables.py diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 478272a..a96e008 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -3,6 +3,24 @@ name: pydra.tasks.mriqc # name of the nipype package to generate from (e.g. mriqc) nipype_name: mriqc # The name of the global struct/dict that contains workflow inputs that are to be converted to inputs of the function along with the type of the struct, either "dict" or "class" -config_params: null +config_params: + wf: + varname: config.workflow + type: struct + module: mriqc + exec: + varname: config.execution + type: struct + module: mriqc + defaults: + work_dir: Path.cwd() + nipype: + varname: config.nipype + type: struct + module: mriqc + env: + varname: config.environment + type: struct + module: mriqc # Mappings between nipype packages and their pydra equivalents. Regular expressions are supported import_translations: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index 65c8701..a8fd0a1 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -6,15 +6,21 @@ nipype_name: init_anat_report_wf nipype_module: mriqc.workflows.anatomical.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
''
-  : outputnode
+  zoom: ds_report_zoomed
+  bg: ds_report_background
+  segm: ds_report_segm
+  bmask: ds_report_bmask
+  artmask: ds_report_artmask
+  airmask: ds_report_airmask
+  headmask: ds_report_headmask
+  norm: ds_report_norm
+  noisefit: ds_report_noisefit
 # Generic regular expression substitutions to be run over the code before it is processed
-find_replace: null
+find_replace:
 # name of the workflow variable that is returned
 workflow_variable: workflow
 # the names of the nested workflows that are defined in other modules and need to be imported
-external_nested_workflows: null
+external_nested_workflows:
diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml
index 255e3cb..5a1dac7 100644
--- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml
+++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml
@@ -6,12 +6,15 @@ nipype_name: init_func_report_wf
 nipype_module: mriqc.workflows.functional.output
 # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow
 input_nodes:
-  ? ''
-  : inputnode
+  "": inputnode
 # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow
 output_nodes:
-  ? ''
-  : outputnode
+  mean: ds_report_mean
+  stdev: ds_report_stdev
+  background: ds_report_background
+  zoomed: ds_report_zoomed
+  carpet: ds_report_carpet
+
 # Generic regular expression substitutions to be run over the code before it is processed
 find_replace: null
 # name of the workflow variable that is returned
From b2f6c7d87e6b7570f21228395c010a1226548123 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Thu, 18 Apr 2024 18:19:39 +1000
Subject: [PATCH 06/47] updates to package and pyproject yamls

---
 nipype-auto-conv/specs/package.yaml | 7 ++++++-
 pyproject.toml                      | 2 --
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml
index a96e008..5c05cd9 100644
--- a/nipype-auto-conv/specs/package.yaml
+++ b/nipype-auto-conv/specs/package.yaml
@@ -23,4 +23,9 @@ config_params:
     type: struct
     module: mriqc
 # Mappings between nipype packages and their pydra equivalents. Regular expressions are supported
-import_translations: null
+import_translations:
+  - [nireports, pydra.tasks.nireports]
+  - [niworkflows, pydra.tasks.niworkflows]
+find_replace:
+  - [config\.loggers\.\w+\., logger.]
+ - [config.to_filename\(\), ""] diff --git a/pyproject.toml b/pyproject.toml index 23b9ff7..e053d9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,6 @@ dependencies = [ "fileformats >=0.8.3", "fileformats-datascience >=0.1", "fileformats-medimage >=0.4.1", - "fileformats-medimage-mriqc" ] license = {file = "LICENSE"} authors = [{name = "Nipype developers", email = "neuroimaging@python.org"}] @@ -54,7 +53,6 @@ test = [ "fileformats-extras", "fileformats-datascience-extras", "fileformats-medimage-extras", - "fileformats-medimage-mriqc-extras" ] [tool.hatch.version] From 30e5ead2216ebfb6a8dd2e5e736495df48be3f94 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Sun, 21 Apr 2024 22:02:35 +1000 Subject: [PATCH 07/47] editing conversion specs to get package to build --- .../specs/interfaces/add_provenance.yaml | 72 ++++----- .../specs/interfaces/conform_image.yaml | 94 ++++++------ .../specs/interfaces/ensure_size.yaml | 79 +++++----- .../specs/interfaces/iqm_file_sink.yaml | 120 +++++++-------- .../specs/interfaces/structural_qc.yaml | 142 +++++++++--------- .../specs/interfaces/upload_iq_ms.yaml | 82 +++++----- nipype-auto-conv/specs/package.yaml | 3 + ...c.workflows.anatomical.base.airmsk_wf.yaml | 11 +- ...lows.anatomical.base.anat_qc_workflow.yaml | 14 +- ...orkflows.anatomical.base.compute_iqms.yaml | 11 +- ....workflows.anatomical.base.headmsk_wf.yaml | 11 +- ...l.base.init_brain_tissue_segmentation.yaml | 11 +- ...anatomical.base.spatial_normalization.yaml | 11 +- ...anatomical.output.init_anat_report_wf.yaml | 3 + ...workflows.diffusion.base.compute_iqms.yaml | 5 +- ...flows.diffusion.base.dmri_qc_workflow.yaml | 7 +- ...orkflows.diffusion.base.epi_mni_align.yaml | 11 +- ...workflows.diffusion.base.hmc_workflow.yaml | 11 +- ...s.diffusion.output.init_dwi_report_wf.yaml | 5 +- ...orkflows.functional.base.compute_iqms.yaml | 6 +- ...rkflows.functional.base.epi_mni_align.yaml | 11 +- ...ws.functional.base.fmri_bmsk_workflow.yaml | 11 +- ...lows.functional.base.fmri_qc_workflow.yaml | 13 +- .../mriqc.workflows.functional.base.hmc.yaml | 11 +- ...functional.output.init_func_report_wf.yaml | 5 +- .../mriqc.workflows.shared.synthstrip_wf.yaml | 11 +- 26 files changed, 422 insertions(+), 349 deletions(-) diff --git a/nipype-auto-conv/specs/interfaces/add_provenance.yaml b/nipype-auto-conv/specs/interfaces/add_provenance.yaml index 806d983..dc890e2 100644 --- a/nipype-auto-conv/specs/interfaces/add_provenance.yaml +++ b/nipype-auto-conv/specs/interfaces/add_provenance.yaml @@ -15,11 +15,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
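  # As a purely illustrative example (hypothetical values, not part of this
  # spec), a narrower override of the entries below could read:
  #
  #   in_file: medimage/nifti-gz
  #   air_msk: medimage/nifti-gz
  #
  # where each value is the "mime-like" identifier of a fileformats class.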
air_msk: generic/file # type=file|default=: air mask file in_file: generic/file @@ -43,39 +43,43 @@ outputs: # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. callables: - # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` - # to set to the `callable` attribute of output fields + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields out_prov: out_prov_callable - # type=dict: + # type=dict: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: input file - air_msk: - # type=file|default=: air mask file - rot_msk: - # type=file|default=: rotation mask file - modality: - # type=str|default='': provenance type - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: input file + air_msk: + # type=file|default=: air mask file + rot_msk: + # type=file|default=: rotation mask file + modality: + # type=str|default='': provenance type + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. 
Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - ["config.environment.version", "''"] + - ["config.execution.debug", "False"] + - ["config.workflow.fd_thres,", "0.2, # .fd_thres"] diff --git a/nipype-auto-conv/specs/interfaces/conform_image.yaml b/nipype-auto-conv/specs/interfaces/conform_image.yaml index e381b1d..9647d5b 100644 --- a/nipype-auto-conv/specs/interfaces/conform_image.yaml +++ b/nipype-auto-conv/specs/interfaces/conform_image.yaml @@ -5,13 +5,13 @@ # # Docs # ---- -# +# # Conforms an input image. -# +# # List of nifti datatypes: -# +# # .. note: Original Analyze 7.5 types -# +# # DT_NONE 0 # DT_UNKNOWN 0 / what it says, dude / # DT_BINARY 1 / binary (1 bit/voxel) / @@ -23,9 +23,9 @@ # DT_DOUBLE 64 / double (64 bits/voxel) / # DT_RGB 128 / RGB triple (24 bits/voxel) / # DT_ALL 255 / not very useful (?) / -# +# # .. note: Added names for the same data types -# +# # DT_UINT8 2 # DT_INT16 4 # DT_INT32 8 @@ -33,9 +33,9 @@ # DT_COMPLEX64 32 # DT_FLOAT64 64 # DT_RGB24 128 -# +# # .. note: New codes for NIfTI -# +# # DT_INT8 256 / signed char (8 bits) / # DT_UINT16 512 / unsigned short (16 bits) / # DT_UINT32 768 / unsigned int (32 bits) / @@ -59,8 +59,8 @@ # NIFTI_TYPE_FLOAT128 1536 /! 128 bit float = long double. / # NIFTI_TYPE_COMPLEX128 1792 /! 128 bit complex = 2 64 bit floats. / # NIFTI_TYPE_COMPLEX256 2048 /! 256 bit complex = 2 128 bit floats / -# -# +# +# task_name: ConformImage nipype_name: ConformImage nipype_module: mriqc.interfaces.common.conform_image @@ -70,11 +70,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: input image callable_defaults: @@ -88,11 +88,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
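  # A minimal sketch of the conformance this file's docs describe (a sketch
  # assuming nibabel; not the actual implementation):
  #
  #     import nibabel as nib
  #     import numpy as np
  #
  #     img = nib.as_closest_canonical(nib.load(in_file))  # enforce RAS
  #     if img.get_data_dtype() not in (np.uint8, np.int16, np.float32):
  #         img.set_data_dtype(np.float32)  # fall back to a safe datatype
  #     img.to_filename(out_file)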
out_file: generic/file # type=file: output conformed file callables: @@ -103,28 +103,34 @@ outputs: requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: input image - check_ras: - # type=bool|default=True: check that orientation is RAS - check_dtype: - # type=bool|default=True: check data type - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: input image + check_ras: + # type=bool|default=True: check that orientation is RAS + check_dtype: + # type=bool|default=True: check data type + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - [config\.loggers\.interface, "logger"] + - [ + messages\.SUSPICIOUS_DATA_TYPE, + '"Input image {in_file} has a suspicious data type: ''{dtype}''"', + ] diff --git a/nipype-auto-conv/specs/interfaces/ensure_size.yaml b/nipype-auto-conv/specs/interfaces/ensure_size.yaml index 4063c18..18ee3b0 100644 --- a/nipype-auto-conv/specs/interfaces/ensure_size.yaml +++ b/nipype-auto-conv/specs/interfaces/ensure_size.yaml @@ -5,9 +5,9 @@ # # Docs # ---- -# +# # Checks the size of the input image and resamples it to have `pixel_size`. -# +# task_name: EnsureSize nipype_name: EnsureSize nipype_module: mriqc.interfaces.common.ensure_size @@ -17,11 +17,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. in_file: generic/file # type=file|default=: input image in_mask: generic/file @@ -37,11 +37,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_file: generic/file # type=file: output image out_mask: generic/file @@ -54,28 +54,35 @@ outputs: requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: input image - in_mask: - # type=file|default=: input mask - pixel_size: - # type=float|default=2.0: desired pixel size (mm) - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
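# (Note on the find_replace entries added below: they swap message constants
# such as messages.VOXEL_SIZE_OK for plain string literals, presumably so the
# generated task no longer needs to import mriqc's messages module at run time.)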
Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: input image + in_mask: + # type=file|default=: input mask + pixel_size: + # type=float|default=2.0: desired pixel size (mm) + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - ["config.loggers.interface", "logger"] + - [ + "messages.VOXEL_SIZE_SMALL", + "'One or more voxel dimensions (%f, %f, %f) are smaller than the requested voxel size (%f) - diff=(%f, %f, %f)'", + ] + - ["messages.VOXEL_SIZE_OK", "'Voxel size is large enough.'"] diff --git a/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml b/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml index db0a473..6618818 100644 --- a/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml +++ b/nipype-auto-conv/specs/interfaces/iqm_file_sink.yaml @@ -5,7 +5,7 @@ # # Docs # ---- -# +# task_name: IQMFileSink nipype_name: IQMFileSink nipype_module: mriqc.interfaces.bids @@ -15,11 +15,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_dir: Path # type=file|default=: the output directory callable_defaults: @@ -33,11 +33,11 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. 
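# As a side note, directory-valued inputs can be overridden with a plain Path
# rather than a file format, as this spec does for out_dir above; sketched in
# isolation (illustrative only):
#
#   types:
#     out_dir: Path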
+ # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_file: generic/file # type=file: the output JSON file containing the IQMs callables: @@ -48,52 +48,54 @@ outputs: requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=str|default='': path of input file - subject_id: - # type=str|default='': the subject id - modality: - # type=str|default='': the qc type - session_id: - # type=traitcompound|default=None: - task_id: - # type=traitcompound|default=None: - acq_id: - # type=traitcompound|default=None: - rec_id: - # type=traitcompound|default=None: - run_id: - # type=traitcompound|default=None: - dataset: - # type=str|default='': dataset identifier - dismiss_entities: - # type=list|default=['part']: - metadata: - # type=dict|default={}: - provenance: - # type=dict|default={}: - root: - # type=dict|default={}: output root dictionary - out_dir: - # type=file|default=: the output directory - _outputs: - # type=dict|default={}: - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=str|default='': path of input file + subject_id: + # type=str|default='': the subject id + modality: + # type=str|default='': the qc type + session_id: + # type=traitcompound|default=None: + task_id: + # type=traitcompound|default=None: + acq_id: + # type=traitcompound|default=None: + rec_id: + # type=traitcompound|default=None: + run_id: + # type=traitcompound|default=None: + dataset: + # type=str|default='': dataset identifier + dismiss_entities: + # type=list|default=['part']: + metadata: + # type=dict|default={}: + provenance: + # type=dict|default={}: + root: + # type=dict|default={}: output root dictionary + out_dir: + # type=file|default=: the output directory + _outputs: + # type=dict|default={}: + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - [config\.loggers\.\w+\., logger.] diff --git a/nipype-auto-conv/specs/interfaces/structural_qc.yaml b/nipype-auto-conv/specs/interfaces/structural_qc.yaml index a9d2b0c..26bf8b9 100644 --- a/nipype-auto-conv/specs/interfaces/structural_qc.yaml +++ b/nipype-auto-conv/specs/interfaces/structural_qc.yaml @@ -5,11 +5,11 @@ # # Docs # ---- -# +# # Computes anatomical :abbr:`QC (Quality Control)` measures on the # structural image given as input -# -# +# +# task_name: StructuralQC nipype_name: StructuralQC nipype_module: mriqc.interfaces.anatomical @@ -19,11 +19,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. 
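# The find_replace lists attached to these specs, like the
# [config\.loggers\.\w+\., logger.] pair above, are [regex, replacement] pairs
# applied to the generated module source before it is written out; a
# hypothetical entry, shown for shape only:
#
#   find_replace:
#     - [some\.module\.CONSTANT, "'literal value'"]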
air_msk: generic/file # type=file|default=: air mask artifact_msk: generic/file @@ -57,24 +57,24 @@ outputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. out_noisefit: generic/file # type=file: plot of background noise and chi fitting callables: - # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` - # to set to the `callable` attribute of output fields + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields cjv: cjv_callable - # type=float: + # type=float: cnr: cnr_callable - # type=float: + # type=float: efc: efc_callable - # type=float: + # type=float: fber: fber_callable - # type=float: + # type=float: fwhm: fwhm_callable # type=dict: full width half-maximum measure icvs: icvs_callable @@ -84,70 +84,76 @@ outputs: out_qc: out_qc_callable # type=dict: output flattened dictionary with all measures qi_1: qi_1_callable - # type=float: + # type=float: rpve: rpve_callable # type=dict: partial volume fractions size: size_callable # type=dict: image sizes snr: snr_callable - # type=dict: + # type=dict: snrd: snrd_callable - # type=dict: + # type=dict: spacing: spacing_callable # type=dict: image sizes summary: summary_callable # type=dict: summary statistics per tissue tpm_overlap: tpm_overlap_callable - # type=dict: + # type=dict: wm2max: wm2max_callable - # type=float: + # type=float: templates: # dict[str, str] - `output_file_template` values to be provided to output fields requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_file: - # type=file|default=: file to be plotted - in_noinu: - # type=file|default=: image after INU correction - in_segm: - # type=file|default=: segmentation file from FSL FAST - in_bias: - # type=file|default=: bias file - head_msk: - # type=file|default=: head mask - air_msk: - # type=file|default=: air mask - rot_msk: - # type=file|default=: rotation mask - artifact_msk: - # type=file|default=: air mask - in_pvms: - # type=inputmultiobject|default=[]: partial volume maps from FSL FAST - in_tpms: - # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST - mni_tpms: - # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST - in_fwhm: - # type=list|default=[]: smoothness estimated with AFNI - human: - # type=bool|default=True: human workflow - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by 
the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_file: + # type=file|default=: file to be plotted + in_noinu: + # type=file|default=: image after INU correction + in_segm: + # type=file|default=: segmentation file from FSL FAST + in_bias: + # type=file|default=: bias file + head_msk: + # type=file|default=: head mask + air_msk: + # type=file|default=: air mask + rot_msk: + # type=file|default=: rotation mask + artifact_msk: + # type=file|default=: air mask + in_pvms: + # type=inputmultiobject|default=[]: partial volume maps from FSL FAST + in_tpms: + # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST + mni_tpms: + # type=inputmultiobject|default=[]: tissue probability maps from FSL FAST + in_fwhm: + # type=list|default=[]: smoothness estimated with AFNI + human: + # type=bool|default=True: human workflow + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - [config\.loggers\.interface\., "logger."] + - ["\n File", "\n # File"] + - ['"out_noisefit": File,', '# "out_noisefit": File,'] + - ["out_noisefit,", "# out_noisefit,"] + - [out_qc = _flatten_dict\(self._results\), "out_qc = {}"] diff --git a/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml index a2e5ae1..f6c3a2b 100644 --- a/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml +++ b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml @@ -5,9 +5,9 @@ # # Docs # ---- -# +# # Upload features to MRIQCWebAPI -# +# task_name: UploadIQMs nipype_name: UploadIQMs nipype_module: mriqc.interfaces.webapi @@ -17,11 +17,11 @@ inputs: rename: # dict[str, str] - fields to rename in the Pydra interface types: - # dict[str, type] - override inferred types (use "mime-like" string for file-format types, - # e.g. 'medimage/nifti-gz'). 
For most fields the type will be correctly inferred - # from the nipype interface, but you may want to be more specific, particularly - # for file types, where specifying the format also specifies the file that will be - # passed to the field in the automatically generated unittests. + # dict[str, type] - override inferred types (use "mime-like" string for file-format types, + # e.g. 'medimage/nifti-gz'). For most fields the type will be correctly inferred + # from the nipype interface, but you may want to be more specific, particularly + # for file types, where specifying the format also specifies the file that will be + # passed to the field in the automatically generated unittests. in_iqms: generic/file # type=file|default=: the input IQMs-JSON file callable_defaults: @@ -41,8 +41,8 @@ outputs: # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. callables: - # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` - # to set to the `callable` attribute of output fields + # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` + # to set to the `callable` attribute of output fields api_id: api_id_callable # type=traitcompound: Id for report returned by the web api templates: @@ -50,32 +50,40 @@ outputs: requirements: # dict[str, list[str]] - input fields that are required to be provided for the output field to be present tests: -- inputs: - # dict[str, str] - values to provide to inputs fields in the task initialisation - # (if not specified, will try to choose a sensible value) - in_iqms: - # type=file|default=: the input IQMs-JSON file - endpoint: - # type=str|default='': URL of the POST endpoint - auth_token: - # type=str|default='': authentication token - email: - # type=str|default='': set sender email - strict: - # type=bool|default=False: crash if upload was not successful - imports: - # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item - # consisting of 'module', 'name', and optionally 'alias' keys - expected_outputs: - # dict[str, str] - expected values for selected outputs, noting that tests will typically - # be terminated before they complete for time-saving reasons, and therefore - # these values will be ignored, when running in CI - timeout: 10 - # int - the value to set for the timeout in the generated test, - # after which the test will be considered to have been initialised - # successfully. Set to 0 to disable the timeout (warning, this could - # lead to the unittests taking a very long time to complete) - xfail: true - # bool - whether the unittest is expected to fail or not. 
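# A filled-in test entry might look like the following sketch (all values are
# hypothetical; fields left unspecified fall back to auto-chosen values):
#
#   - inputs:
#       in_iqms: /hypothetical/path/iqms.json
#       endpoint: https://example.org/api
#       strict: false
#     timeout: 10
#     xfail: false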
Set to false - # when you are satisfied with the edits you have made to this file + - inputs: + # dict[str, str] - values to provide to inputs fields in the task initialisation + # (if not specified, will try to choose a sensible value) + in_iqms: + # type=file|default=: the input IQMs-JSON file + endpoint: + # type=str|default='': URL of the POST endpoint + auth_token: + # type=str|default='': authentication token + email: + # type=str|default='': set sender email + strict: + # type=bool|default=False: crash if upload was not successful + imports: + # list[nipype2pydra.task.base.explicitimport] - list import statements required by the test, with each list item + # consisting of 'module', 'name', and optionally 'alias' keys + expected_outputs: + # dict[str, str] - expected values for selected outputs, noting that tests will typically + # be terminated before they complete for time-saving reasons, and therefore + # these values will be ignored, when running in CI + timeout: 10 + # int - the value to set for the timeout in the generated test, + # after which the test will be considered to have been initialised + # successfully. Set to 0 to disable the timeout (warning, this could + # lead to the unittests taking a very long time to complete) + xfail: true + # bool - whether the unittest is expected to fail or not. Set to false + # when you are satisfied with the edits you have made to this file doctests: [] +find_replace: + - [config\.loggers\.interface, logger] + - ["return runtime", "return api_id"] + - ["messages.QC_UPLOAD_COMPLETE", "'QC metrics successfully uploaded.'"] + - ["messages.QC_UPLOAD_START", "'MRIQC Web API: submitting to <{url}>'"] + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 5c05cd9..8d028b3 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -29,3 +29,6 @@ import_translations: find_replace: - [config\.loggers\.\w+\., logger.] - [config.to_filename\(\), ""] + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml index 107190c..5f1ef36 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml @@ -6,14 +6,15 @@ nipype_name: airmsk_wf nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml index 2eb7b4d..5220024 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -6,15 +6,17 @@ nipype_name: anat_qc_workflow nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode -# Generic regular expression substitutions to be run over the code before it is processed -find_replace: null + "": outputnode # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported external_nested_workflows: null +find_replace: + - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] + - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index 00b6f18..b9c18e7 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -6,14 +6,15 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml index 1658068..14e7551 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml @@ -6,14 +6,15 @@ nipype_name: headmsk_wf nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml index 346d3e8..5ca1cf9 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml @@ -6,14 +6,15 @@ nipype_name: init_brain_tissue_segmentation nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml index 7eed0be..e306604 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -6,14 +6,15 @@ nipype_name: spatial_normalization nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index a8fd0a1..7c35e96 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -20,6 +20,9 @@ output_nodes: noisefit: ds_report_noisefit # Generic regular expression substitutions to be run over the code before it is processed find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index 35425b3..b269d84 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -11,7 +11,10 @@ input_nodes: output_nodes: "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are 
defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index 39fa8fa..d4349ed 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -11,7 +11,12 @@ input_nodes: output_nodes: "": dwi_report_wf # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] + - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml index 49263f5..4798ecc 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml @@ -6,14 +6,15 @@ nipype_name: epi_mni_align nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml index 7dac072..b7a9015 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml @@ -6,14 +6,15 @@ nipype_name: hmc_workflow nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index 965bdae..72a7d39 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -20,7 +20,10 @@ output_nodes: background: ds_report_background bmask: ds_report_bmask # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index 40b5e8d..2d13f1d 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -10,8 +10,12 @@ input_nodes: # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: "": outputnode + "data": ds_timeseries # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml index ec1435f..804da74 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -6,14 +6,15 @@ nipype_name: epi_mni_align nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml index cbebb36..133a485 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml @@ -6,14 +6,15 @@ nipype_name: fmri_bmsk_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index c2b10d2..7aa28e7 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -6,14 +6,17 @@ nipype_name: fmri_qc_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] + - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml index 77ec3b7..ead57db 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml @@ -6,14 +6,15 @@ nipype_name: hmc nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? '' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index 5a1dac7..1ef2cb4 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -16,7 +16,10 @@ output_nodes: carpet: ds_report_carpet # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml index 5337d81..a371cb6 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -6,14 +6,15 @@ nipype_name: synthstrip_wf nipype_module: mriqc.workflows.shared # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - ? '' - : inputnode + "": inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - ? 
'' - : outputnode + "": outputnode # Generic regular expression substitutions to be run over the code before it is processed -find_replace: null +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From 571219c011bbbbea47e87ba7bd16a093588fc1e6 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Mon, 22 Apr 2024 17:03:24 +1000 Subject: [PATCH 08/47] touched up package spec --- nipype-auto-conv/requirements.txt | 3 ++- nipype-auto-conv/specs/package.yaml | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/nipype-auto-conv/requirements.txt b/nipype-auto-conv/requirements.txt index 20a0b10..0366123 100644 --- a/nipype-auto-conv/requirements.txt +++ b/nipype-auto-conv/requirements.txt @@ -1 +1,2 @@ -nipype2pydra \ No newline at end of file +nipype2pydra +mriqc \ No newline at end of file diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 8d028b3..f9530f5 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -32,3 +32,5 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] +omit_modules: + - "mriqc.config" From 4bb5f2d4a57342efd04953581be7ebb0007d8faf Mon Sep 17 00:00:00 2001 From: Tom Close Date: Tue, 23 Apr 2024 12:22:10 +1000 Subject: [PATCH 09/47] omitted and find-replaces several things to get tests to run --- .../specs/interfaces/ensure_size.yaml | 4 ++++ nipype-auto-conv/specs/package.yaml | 16 ++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/nipype-auto-conv/specs/interfaces/ensure_size.yaml b/nipype-auto-conv/specs/interfaces/ensure_size.yaml index 18ee3b0..bfaa99b 100644 --- a/nipype-auto-conv/specs/interfaces/ensure_size.yaml +++ b/nipype-auto-conv/specs/interfaces/ensure_size.yaml @@ -86,3 +86,7 @@ find_replace: "'One or more voxel dimensions (%f, %f, %f) are smaller than the requested voxel size (%f) - diff=(%f, %f, %f)'", ] - ["messages.VOXEL_SIZE_OK", "'Voxel size is large enough.'"] + - [ + "load_data = Loader\\(\"mriqc\"\\)", + 'load_data = Loader("pydra.tasks.mriqc")', + ] diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index f9530f5..94c1cf0 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -22,6 +22,15 @@ config_params: varname: config.environment type: struct module: mriqc +omit_functions: + - nipype.external.due.BibTeX +omit_classes: + - niworkflows.interfaces.bids._ReadSidecarJSONOutputSpec + - mriqc.interfaces.diffusion._ReadDWIMetadataOutputSpec +omit_constants: + - nipype.utils.filemanip._cifs_table + - nipype.config + - nipype.logging # Mappings between nipype packages and their pydra equivalents. Regular expressions are supported import_translations: - [nireports, pydra.tasks.nireports] @@ -32,5 +41,12 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - ["class _ReadDWIMetadataOutputSpec.+?(?=\\n\\n)", ""] omit_modules: - "mriqc.config" +import_find_replace: + - ["from \\.\\. 
import config, logging", ""] + - ["_ReadDWIMetadataOutputSpec,", ""] + - ["from pydra.tasks.mriqc.nipype_ports.interfaces import utility as niu", ""] +copy_packages: + - mriqc.data From 210206129896dfbde1c2368d53870cd00799652c Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 25 Apr 2024 00:47:32 +1000 Subject: [PATCH 10/47] debugging workflow tests --- nipype-auto-conv/specs/package.yaml | 4 ++++ .../mriqc.workflows.anatomical.base.compute_iqms.yaml | 1 + .../mriqc.workflows.diffusion.base.compute_iqms.yaml | 1 + .../mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml | 1 + .../mriqc.workflows.functional.base.fmri_qc_workflow.yaml | 2 ++ 5 files changed, 9 insertions(+) diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 94c1cf0..7f119a0 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -18,6 +18,8 @@ config_params: varname: config.nipype type: struct module: mriqc + defaults: + omp_nthreads: 1 env: varname: config.environment type: struct @@ -42,6 +44,8 @@ find_replace: - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] - ["class _ReadDWIMetadataOutputSpec.+?(?=\\n\\n)", ""] + - ["dataset = wf_inputs\\.get\\(.*?_datalad_get\\(\\w+\\)", ""] + - ["DWI(Denoise|Heatmap)", "Dwi\\1"] omit_modules: - "mriqc.config" import_find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index b9c18e7..9f4758b 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -7,6 +7,7 @@ nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: "": inputnode + meta: meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: "": outputnode diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index b269d84..3e1fcb0 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -7,6 +7,7 @@ nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: "": inputnode + meta: meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: "": outputnode diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index d4349ed..889114e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -7,6 +7,7 @@ nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. 
its outputs will be the inputs of the workflow input_nodes: "": inputnode + b: load_bmat # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: "": dwi_report_wf diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 7aa28e7..6db5f5d 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -7,6 +7,7 @@ nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: "": inputnode + meta: meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: "": outputnode @@ -17,6 +18,7 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - ["full_files = \\[\\].*?= full_files", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From b44f35732d3074b9a42f0f93aeb5337e7e743b9e Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 25 Apr 2024 11:38:00 +1000 Subject: [PATCH 11/47] debugging workflow conversions --- nipype-auto-conv/specs/package.yaml | 2 +- ...c.workflows.anatomical.base.airmsk_wf.yaml | 4 ++-- ...lows.anatomical.base.anat_qc_workflow.yaml | 4 ++-- ...orkflows.anatomical.base.compute_iqms.yaml | 4 ++-- ....workflows.anatomical.base.headmsk_wf.yaml | 4 ++-- ...l.base.init_brain_tissue_segmentation.yaml | 4 ++-- ...anatomical.base.spatial_normalization.yaml | 4 ++-- ...anatomical.output.init_anat_report_wf.yaml | 20 ++++++++--------- ...workflows.diffusion.base.compute_iqms.yaml | 4 ++-- ...flows.diffusion.base.dmri_qc_workflow.yaml | 6 ++--- ...orkflows.diffusion.base.epi_mni_align.yaml | 4 ++-- ...workflows.diffusion.base.hmc_workflow.yaml | 4 ++-- ...s.diffusion.output.init_dwi_report_wf.yaml | 22 +++++++++---------- ...orkflows.functional.base.compute_iqms.yaml | 6 ++--- ...rkflows.functional.base.epi_mni_align.yaml | 4 ++-- ...ws.functional.base.fmri_bmsk_workflow.yaml | 4 ++-- ...lows.functional.base.fmri_qc_workflow.yaml | 4 ++-- .../mriqc.workflows.functional.base.hmc.yaml | 4 ++-- ...functional.output.init_func_report_wf.yaml | 12 +++++----- .../mriqc.workflows.shared.synthstrip_wf.yaml | 4 ++-- 20 files changed, 62 insertions(+), 62 deletions(-) diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 7f119a0..5891a48 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -45,7 +45,7 @@ find_replace: - ["logging = Logging\\(config\\)", ""] - ["class _ReadDWIMetadataOutputSpec.+?(?=\\n\\n)", ""] - ["dataset = wf_inputs\\.get\\(.*?_datalad_get\\(\\w+\\)", ""] - - ["DWI(Denoise|Heatmap)", "Dwi\\1"] + - ["DWIDenoise", "DwiDenoise"] omit_modules: - "mriqc.config" import_find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml index 5f1ef36..8a75e5f 100644 --- 
a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml @@ -6,10 +6,10 @@ nipype_name: airmsk_wf nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml index 5220024..82d3172 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -6,10 +6,10 @@ nipype_name: anat_qc_workflow nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index 9f4758b..ec5b838 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -6,11 +6,11 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" meta: meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml index 14e7551..630ef76 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml @@ -6,10 +6,10 @@ nipype_name: headmsk_wf nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml index 5ca1cf9..8cd815f 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml @@ -6,10 +6,10 @@ nipype_name: init_brain_tissue_segmentation nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml index e306604..99c6397 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -6,10 +6,10 @@ nipype_name: spatial_normalization nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index 7c35e96..e0bcace 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -6,18 +6,18 @@ nipype_name: init_anat_report_wf nipype_module: mriqc.workflows.anatomical.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - zoom: ds_report_zoomed - bg: ds_report_background - segm: ds_report_segm - bmask: ds_report_bmask - artmask: ds_report_artmask - airmask: ds_report_airmask - headmask: ds_report_headmask - norm: ds_report_norm - noisefit: ds_report_noisefit + ds_report_zoomed: zoom + ds_report_background: bg + ds_report_segm: segm + ds_report_bmask: bmask + ds_report_artmask: artmask + ds_report_airmask: airmask + ds_report_headmask: headmask + ds_report_norm: norm + ds_report_noisefit: noisefit # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index 3e1fcb0..fce0f24 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -6,11 +6,11 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" meta: meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index 889114e..b2a255e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -6,11 +6,11 @@ nipype_name: dmri_qc_workflow nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode - b: load_bmat + inputnode: "" + load_bmat: b # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": dwi_report_wf + dwi_report_wf: # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml index 4798ecc..45a3e7a 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml @@ -6,10 +6,10 @@ nipype_name: epi_mni_align nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. 
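The inversion applied throughout this patch is purely mechanical: the node name moves into the key and the workflow-level field prefix into the value. A toy illustration of the two spec forms (semantics as described by the comments above; the converter's real handling is not shown):

    # old form: {prefix: node_name}; new form: {node_name: prefix}
    old_outputs = {
        "zoom": "ds_report_zoomed",
        "bg": "ds_report_background",
        "segm": "ds_report_segm",
    }
    new_outputs = {node: prefix for prefix, node in old_outputs.items()}
    assert new_outputs == {
        "ds_report_zoomed": "zoom",
        "ds_report_background": "bg",
        "ds_report_segm": "segm",
    }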
its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml index b7a9015..c77239c 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml @@ -6,10 +6,10 @@ nipype_name: hmc_workflow nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index 72a7d39..ebfaae6 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -6,19 +6,19 @@ nipype_name: init_dwi_report_wf nipype_module: mriqc.workflows.diffusion.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - snr: ds_report_snr - noise: ds_report_noise - fa: ds_report_fa - md: ds_report_md - hm: ds_report_hm - spikes: ds_report_spikes - norm: ds_report_norm - carpet: ds_report_carpet - background: ds_report_background - bmask: ds_report_bmask + ds_report_snr: snr + ds_report_noise: noise + ds_report_fa: fa + ds_report_md: md + ds_report_hm: hm + ds_report_spikes: spikes + ds_report_norm: norm + ds_report_carpet: carpet + ds_report_background: background + ds_report_bmask: bmask # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index 2d13f1d..e34390d 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -6,11 +6,11 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - "": outputnode - "data": ds_timeseries + outputnode: "" + ds_timeseries: data # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml index 804da74..0ed909e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -6,10 +6,10 @@ nipype_name: epi_mni_align nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml index 133a485..f239826 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml @@ -6,10 +6,10 @@ nipype_name: fmri_bmsk_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 6db5f5d..9fbc5a0 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -6,11 +6,11 @@ nipype_name: fmri_qc_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" meta: meta # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml index ead57db..c98e790 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml @@ -6,10 +6,10 @@ nipype_name: hmc nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index 1ef2cb4..0de69e4 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -6,14 +6,14 @@ nipype_name: init_func_report_wf nipype_module: mriqc.workflows.functional.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - mean: ds_report_mean - stdev: ds_report_stdev - background: ds_report_background - zoomed: ds_report_zoomed - carpet: ds_report_carpet + ds_report_mean: mean + ds_report_stdev: stdev + ds_report_background: background + ds_report_zoomed: zoomed + ds_report_carpet: carpet # Generic regular expression substitutions to be run over the code before it is processed find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml index a371cb6..63b0cf2 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -6,10 +6,10 @@ nipype_name: synthstrip_wf nipype_module: mriqc.workflows.shared # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - "": inputnode + inputnode: "" # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - "": outputnode + outputnode: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] From 5db79624f3d5de902e889d88f68636b41b9e1dd0 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 25 Apr 2024 12:08:10 +1000 Subject: [PATCH 12/47] debugging compute iqms --- .../workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index fce0f24..f391ba8 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -11,6 +11,7 @@ input_nodes: # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: outputnode: "" + datasink: "" # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] From da922509950cbb25fb87dc0a45d3aa99f7272cd2 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 26 Apr 2024 10:00:50 +1000 Subject: [PATCH 13/47] cleaned up some different workflow-specific issues using find/replace --- .../mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml | 8 ++++++++ ...iqc.workflows.diffusion.output.init_dwi_report_wf.yaml | 1 + 2 files changed, 9 insertions(+) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index b2a255e..9b74b22 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -18,6 +18,14 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - [ + "workflow\\.dwidenoise\\.inputs\\.in_file", + "workflow.dwidenoise.inputs.dwi", + ] + - [ + "in_file=workflow\\.dwidenoise\\.lzout\\.out_file", + "in_file=workflow.dwidenoise.lzout.out", + ] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index ebfaae6..232365d 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -24,6 +24,7 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - ["in_file=workflow\\.lzin\\.epi_mean,\\n", ""] # multiple connections to in_file in workflow # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From db90f4de74ddcd26b196d694c4700d0761ff0ae9 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 26 Apr 2024 10:31:17 
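The dwidenoise substitutions in patch 13 compensate for the pydra task exposing different port names (dwi in, out out) than nipype's in_file/out_file. A standalone sketch of the same renames as re.sub calls, over an invented sample of generated code:

    import re

    code = (
        "workflow.dwidenoise.inputs.in_file = in_dwi\n"
        "hmc = make_node(in_file=workflow.dwidenoise.lzout.out_file)\n"
    )
    code = re.sub(
        r"workflow\.dwidenoise\.inputs\.in_file",
        "workflow.dwidenoise.inputs.dwi",
        code,
    )
    code = re.sub(
        r"in_file=workflow\.dwidenoise\.lzout\.out_file",
        "in_file=workflow.dwidenoise.lzout.out",
        code,
    )
    print(code)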
+1000 Subject: [PATCH 14/47] removed unnecessary omp_nthreads default --- nipype-auto-conv/specs/package.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml index 5891a48..9b5b3ac 100644 --- a/nipype-auto-conv/specs/package.yaml +++ b/nipype-auto-conv/specs/package.yaml @@ -18,8 +18,6 @@ config_params: varname: config.nipype type: struct module: mriqc - defaults: - omp_nthreads: 1 env: varname: config.environment type: struct From eb24b32d3f36cac9602adef211727f75ed5f972e Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 26 Apr 2024 10:36:42 +1000 Subject: [PATCH 15/47] added test_inputs to synthstrip --- .../specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml index 63b0cf2..5888939 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -19,3 +19,5 @@ find_replace: workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported external_nested_workflows: null +test_inputs: + omp_nthreads: 1 From af9097e21578a976dbb9218a09af4c89a052e2e8 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 1 May 2024 07:38:01 +1000 Subject: [PATCH 16/47] refactoring workflow input/output specifications --- ...c.workflows.anatomical.base.airmsk_wf.yaml | 10 +++++----- ...lows.anatomical.base.anat_qc_workflow.yaml | 5 +++-- ...orkflows.anatomical.base.compute_iqms.yaml | 19 +++++++++++++------ ....workflows.anatomical.base.headmsk_wf.yaml | 10 +++++----- ...l.base.init_brain_tissue_segmentation.yaml | 10 +++++----- ...anatomical.base.spatial_normalization.yaml | 10 +++++----- ...anatomical.output.init_anat_report_wf.yaml | 6 +++--- ...workflows.diffusion.base.compute_iqms.yaml | 13 ++++++------- ...flows.diffusion.base.dmri_qc_workflow.yaml | 7 +++---- ...orkflows.diffusion.base.epi_mni_align.yaml | 10 +++++----- ...workflows.diffusion.base.hmc_workflow.yaml | 4 ++-- ...s.diffusion.output.init_dwi_report_wf.yaml | 6 +++--- ...orkflows.functional.base.compute_iqms.yaml | 12 ++++++------ ...rkflows.functional.base.epi_mni_align.yaml | 10 +++++----- ...ws.functional.base.fmri_bmsk_workflow.yaml | 10 +++++----- ...lows.functional.base.fmri_qc_workflow.yaml | 7 ++++--- .../mriqc.workflows.functional.base.hmc.yaml | 10 +++++----- ...functional.output.init_func_report_wf.yaml | 4 ++-- .../mriqc.workflows.shared.synthstrip_wf.yaml | 10 +++++----- 19 files changed, 90 insertions(+), 83 deletions(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml index 8a75e5f..4cf1dc2 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml @@ -5,11 +5,11 @@ nipype_name: airmsk_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. 
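The test_inputs block added in patch 15 supplies concrete values for constructor arguments that have no usable default once the MRIQC config machinery is stripped out. A hedged sketch of the kind of generated test this could drive (the import path and test body are assumptions, not the actual generated code):

    # assumed import path; the generated package layout may differ
    from pydra.tasks.mriqc.workflows.shared import synthstrip_wf

    def test_synthstrip_wf_builds():
        # omp_nthreads comes straight from test_inputs in the spec
        wf = synthstrip_wf(omp_nthreads=1)
        assert wf is not None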
its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml index 82d3172..7388195 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -6,14 +6,15 @@ nipype_name: anat_qc_workflow nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - inputnode: "" + inputnode: # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - outputnode: "" + outputnode: # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported external_nested_workflows: null +is_external: true find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index ec5b838..4c82d8b 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -5,12 +5,19 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" - meta: meta -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# - meta +# # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode +outputs: + out_file: + node_name: datasink + field: in_file + type: medimage/nifti + mappings: + - [outputnode, out_file] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml index 630ef76..759215e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml @@ -5,11 +5,11 @@ nipype_name: headmsk_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml index 8cd815f..0610093 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml @@ -5,11 +5,11 @@ nipype_name: init_brain_tissue_segmentation # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml index 99c6397..de4c841 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -5,11 +5,11 @@ nipype_name: spatial_normalization # name of the nipype module the function is found within, e.g. 
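The new outputs block names a sink node and field instead of relying on an outputnode, so the converter presumably walks the recorded connections to find what feeds that port and promotes it to a workflow-level output. A data-only sketch of that resolution step (data shapes assumed, not nipype2pydra's internals):

    connections = [
        # (source_node, source_field, dest_node, dest_field)
        ("outputnode", "out_file", "datasink", "in_file"),
    ]

    def resolve_output(node_name: str, field: str):
        """Find the upstream port feeding node_name.field; that port becomes
        the workflow-level output in the generated pydra code."""
        for src, src_field, dst, dst_field in connections:
            if (dst, dst_field) == (node_name, field):
                return src, src_field
        raise KeyError(f"nothing feeds {node_name}.{field}")

    assert resolve_output("datasink", "in_file") == ("outputnode", "out_file")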
mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index e0bcace..484ed51 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -5,8 +5,8 @@ nipype_name: init_anat_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" +# input_nodes: +# - inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: ds_report_zoomed: zoom @@ -17,7 +17,7 @@ output_nodes: ds_report_airmask: airmask ds_report_headmask: headmask ds_report_norm: norm - ds_report_noisefit: noisefit + ds_report_noisefit: noise # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index f391ba8..c0ef280 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -5,13 +5,12 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" - meta: meta -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" - datasink: "" +# input_nodes: +# - inputnode +# - meta +# # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index 9b74b22..91e3a06 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -7,10 +7,9 @@ nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: inputnode: "" - load_bmat: b -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - dwi_report_wf: + load_bmat: "" +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +is_external: true # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml index 45a3e7a..d9178fd 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml @@ -5,11 +5,11 @@ nipype_name: epi_mni_align # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml index c77239c..fe97ed5 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml @@ -6,10 +6,10 @@ nipype_name: hmc_workflow nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - inputnode: "" + - inputnode # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_nodes: - outputnode: "" + - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index 232365d..5c3926f 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -5,8 +5,8 @@ nipype_name: init_dwi_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" +# input_nodes: +# - inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: ds_report_snr: snr @@ -17,7 +17,7 @@ output_nodes: ds_report_spikes: spikes ds_report_norm: norm ds_report_carpet: carpet - ds_report_background: background + ds_report_background: bg ds_report_bmask: bmask # Generic regular expression substitutions to be run over the code before it is processed find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index e34390d..edb2129 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -5,12 +5,12 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" - ds_timeseries: data +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode +# - ds_timeseries # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml index 0ed909e..ddd23bf 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -5,11 +5,11 @@ nipype_name: epi_mni_align # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml index f239826..4a3ce16 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml @@ -5,11 +5,11 @@ nipype_name: fmri_bmsk_workflow # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 9fbc5a0..7b3104e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -6,11 +6,12 @@ nipype_name: fmri_qc_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_nodes: - inputnode: "" - meta: meta + - inputnode + - meta # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: - outputnode: "" + - outputnode +is_external: true # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml index c98e790..718b169 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml @@ -5,11 +5,11 @@ nipype_name: hmc # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index 0de69e4..7feeff9 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -5,8 +5,8 @@ nipype_name: init_func_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" +# input_nodes: +# - inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_nodes: ds_report_mean: mean diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml index 5888939..bf72b44 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -5,11 +5,11 @@ nipype_name: synthstrip_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.shared # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: "" +# input_nodes: +# - inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. 
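Patches 16 through 18 briefly mix three syntaxes for the node specification: the dict form (inputnode: ""), the list form (- inputnode), and finally a plain scalar. A loader tolerant of all three might normalize like this (a sketch under that assumption, not the converter's actual code):

    def normalize_input_nodes(value):
        """Accept 'node', ['node', ...], or {'node': 'prefix'} and return a
        {node_name: prefix} dict, with '' when no prefix is given."""
        if isinstance(value, str):
            return {value: ""}
        if isinstance(value, list):
            return {name: "" for name in value}
        if isinstance(value, dict):
            return {k: (v or "") for k, v in value.items()}
        raise TypeError(f"unsupported input_nodes spec: {value!r}")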
its inputs will be the outputs of the workflow +# output_nodes: +# - outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] From 6a727b37ffdb71d81e5b9f9425943179075236a6 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 1 May 2024 07:42:18 +1000 Subject: [PATCH 17/47] deleted unused fmri brain mask workflow fmri_bmsk_workflow --- ...ws.functional.base.fmri_bmsk_workflow.yaml | 21 ------------------- 1 file changed, 21 deletions(-) delete mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml deleted file mode 100644 index 4a3ce16..0000000 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# name of the converted workflow constructor function -name: fmri_bmsk_workflow -# name of the nipype workflow constructor -nipype_name: fmri_bmsk_workflow -# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base -nipype_module: mriqc.workflows.functional.base -# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode -# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode -# Generic regular expression substitutions to be run over the code before it is processed -find_replace: - - ["config = NipypeConfig\\(\\)", ""] - - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - - ["logging = Logging\\(config\\)", ""] -# name of the workflow variable that is returned -workflow_variable: workflow -# the names of the nested workflows that are defined in other modules and need to be imported -external_nested_workflows: null From 5922e102a033d0b3931627ebc88e515db1c2e8a5 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 3 May 2024 13:53:55 +1000 Subject: [PATCH 18/47] Modifying syntax of how inputs/outputs are specified --- ...c.workflows.anatomical.base.airmsk_wf.yaml | 6 +-- ...lows.anatomical.base.anat_qc_workflow.yaml | 7 +-- ...orkflows.anatomical.base.compute_iqms.yaml | 8 ++-- ....workflows.anatomical.base.headmsk_wf.yaml | 6 +-- ...l.base.init_brain_tissue_segmentation.yaml | 6 +-- ...anatomical.base.spatial_normalization.yaml | 6 +-- ...anatomical.output.init_anat_report_wf.yaml | 42 ++++++++++++----- ...workflows.diffusion.base.compute_iqms.yaml | 6 +-- ...flows.diffusion.base.dmri_qc_workflow.yaml | 15 ++++-- ...orkflows.diffusion.base.epi_mni_align.yaml | 6 +-- ...workflows.diffusion.base.hmc_workflow.yaml | 6 +-- ...s.diffusion.output.init_dwi_report_wf.yaml | 47 +++++++++++++------ ...orkflows.functional.base.compute_iqms.yaml | 6 +-- ...rkflows.functional.base.epi_mni_align.yaml | 6 +-- ...lows.functional.base.fmri_qc_workflow.yaml | 8 +--- .../mriqc.workflows.functional.base.hmc.yaml | 6 +-- ...functional.output.init_func_report_wf.yaml | 25 ++++++---- .../mriqc.workflows.shared.synthstrip_wf.yaml | 6 +-- 18 files changed, 118 insertions(+), 100 deletions(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml index 
4cf1dc2..d3b72c9 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.airmsk_wf.yaml @@ -5,11 +5,9 @@ nipype_name: airmsk_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml index 7388195..8ced19a 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -5,16 +5,13 @@ nipype_name: anat_qc_workflow # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: +input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - outputnode: +output_node: outputnode # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported external_nested_workflows: null -is_external: true find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index 4c82d8b..a65b04a 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -5,18 +5,16 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # - meta # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode outputs: out_file: node_name: datasink field: in_file type: medimage/nifti - mappings: + replaces: - [outputnode, out_file] # Generic regular expression substitutions to be run over the code before it is processed find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml index 759215e..12453c0 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.headmsk_wf.yaml @@ -5,11 +5,9 @@ nipype_name: headmsk_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml index 0610093..48bcb66 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.init_brain_tissue_segmentation.yaml @@ -5,11 +5,9 @@ nipype_name: init_brain_tissue_segmentation # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml index de4c841..17a122c 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -5,11 +5,9 @@ nipype_name: spatial_normalization # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index 484ed51..0a59aaf 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -5,19 +5,35 @@ nipype_name: init_anat_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.anatomical.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - ds_report_zoomed: zoom - ds_report_background: bg - ds_report_segm: segm - ds_report_bmask: bmask - ds_report_artmask: artmask - ds_report_airmask: airmask - ds_report_headmask: headmask - ds_report_norm: norm - ds_report_noisefit: noise +input_node: inputnode +outputs: + zoom_report: + node_name: ds_report_zoomed + field: in_file + bg_report: + node_name: ds_report_background + field: in_file + segm_report: + node_name: ds_report_segm + field: in_file + bmask_report: + node_name: ds_report_bmask + field: in_file + artmask_report: + node_name: ds_report_artmask + field: in_file + airmask_report: + node_name: ds_report_airmask + field: in_file + headmask_report: + node_name: ds_report_headmask + field: in_file + norm_report: + node_name: ds_report_norm + field: in_file + noise_report: + node_name: ds_report_noisefit + field: in_file # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index c0ef280..5041084 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -5,12 +5,10 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # - meta # # Name of the node that is to be considered the output of the workflow, i.e. 
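Each entry under `outputs:` declares a workflow-level output and binds it to a
field of an internal node; `type` and `replaces` are optional extras seen in
these specs. Schematically (entry and node names here are hypothetical):

    outputs:
      example_report:            # name exposed on the converted workflow
        node_name: ds_example    # internal node supplying the value
        field: in_file           # field of that node to read
        type: medimage/nifti     # optional fileformats type hint
        replaces:                # optional [node, field] pairs this output
          - [outputnode, example]  # stands in for, as used above
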
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index 91e3a06..e990e9a 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -5,11 +5,16 @@ nipype_name: dmri_qc_workflow # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - inputnode: "" - load_bmat: "" -# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -is_external: true +input_node: inputnode +inputs: + bvals: + node_name: load_bmat + field: out_bval_file + type: medimage/bval + bvecs: + node_name: load_bmat + field: out_bvec_file + type: medimage/bvec # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml index d9178fd..5df89ec 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.epi_mni_align.yaml @@ -5,11 +5,9 @@ nipype_name: epi_mni_align # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml index fe97ed5..5de2ca9 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.hmc_workflow.yaml @@ -5,11 +5,9 @@ nipype_name: hmc_workflow # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - - inputnode +input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. 
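`inputs:` entries take the same shape, adding workflow-level inputs associated
with a field of an internal node, with fileformats-style types such as
`medimage/bval` or `medimage/bvec`. A sketch with hypothetical names:

    inputs:
      example_bvals:
        node_name: load_example   # internal node the input is wired to
        field: out_bval_file
        type: medimage/bval       # fileformats designation of the input
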
its inputs will be the outputs of the workflow -output_nodes: - - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index 5c3926f..b228e63 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -5,26 +5,45 @@ nipype_name: init_dwi_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.diffusion.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - ds_report_snr: snr - ds_report_noise: noise - ds_report_fa: fa - ds_report_md: md - ds_report_hm: hm - ds_report_spikes: spikes - ds_report_norm: norm - ds_report_carpet: carpet - ds_report_background: bg - ds_report_bmask: bmask +outputs: + snr_report: + node_name: mosaic_snr + field: out_report + noise_report: + node_name: mosaic_noise + field: out_file + fa_report: + node_name: mosaic_fa + field: out_file + md_report: + node_name: mosaic_md + field: out_file + heatmap_report: + node_name: plot_heatmap + field: out_file + spikes_report: + node_name: mosaic_spikes + field: out_file + norm_report: + node_name: inputnode + field: mni_report + carpet_report: + node_name: bigplot + field: out_file + # bg_report: # seems to be the same as the noise report + # node_name: mosaic_noise + # field: out_file + bmask_report: + node_name: plot_bmask + field: out_file # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] - - ["in_file=workflow\\.lzin\\.epi_mean,\\n", ""] # multiple connections to in_file in workflow + - ["out_file=workflow\\.lzin\\.epi_mean,\\n", ""] # multiple connections to out_file in workflow # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index edb2129..54b5257 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -5,11 +5,9 @@ nipype_name: compute_iqms # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # - ds_timeseries # Generic regular expression substitutions to be run over the code before it is processed find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml index ddd23bf..344be72 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -5,11 +5,9 @@ nipype_name: epi_mni_align # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 7b3104e..41fde9b 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -5,13 +5,9 @@ nipype_name: fmri_qc_workflow # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -input_nodes: - - inputnode - - meta +input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - - outputnode -is_external: true +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml index 718b169..970269e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.hmc.yaml @@ -5,11 +5,9 @@ nipype_name: hmc # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index 7feeff9..1fb4007 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -5,15 +5,24 @@ nipype_name: init_func_report_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.functional.output # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_nodes: - ds_report_mean: mean - ds_report_stdev: stdev - ds_report_background: background - ds_report_zoomed: zoomed - ds_report_carpet: carpet +outputs: + mean_report: + node_name: ds_report_mean + field: in_file + stdev_report: + node_name: ds_report_stdev + field: in_file + background_report: + node_name: ds_report_background + field: in_file + zoomed_report: + node_name: ds_report_zoomed + field: in_file + carpet_report: + node_name: ds_report_carpet + field: in_file # Generic regular expression substitutions to be run over the code before it is processed find_replace: diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml index bf72b44..942477a 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.shared.synthstrip_wf.yaml @@ -5,11 +5,9 @@ nipype_name: synthstrip_wf # name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base nipype_module: mriqc.workflows.shared # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow -# input_nodes: -# - inputnode +input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -# output_nodes: -# - outputnode +output_node: outputnode # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] From 80248714f663cb713050b90ebdb9f9e5b1cfc40d Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 15 May 2024 11:15:50 +1000 Subject: [PATCH 19/47] debugging conversion --- .../specs/interfaces/upload_iq_ms.yaml | 2 +- ...lows.anatomical.base.anat_qc_workflow.yaml | 9 ++-- ...orkflows.anatomical.base.compute_iqms.yaml | 16 ++++--- ...anatomical.base.spatial_normalization.yaml | 7 +++ ...anatomical.output.init_anat_report_wf.yaml | 41 ++++++++-------- ...workflows.diffusion.base.compute_iqms.yaml | 22 +++++++-- ...s.diffusion.output.init_dwi_report_wf.yaml | 11 +++-- ...orkflows.functional.base.compute_iqms.yaml | 47 +++++++++++++++++-- ...rkflows.functional.base.epi_mni_align.yaml | 17 +++++++ ...ws.functional.base.fmri_bmsk_workflow.yaml | 19 ++++++++ ...lows.functional.base.fmri_qc_workflow.yaml | 11 ++++- ...functional.output.init_func_report_wf.yaml | 29 ++++++++---- 12 files changed, 179 insertions(+), 52 deletions(-) create mode 100644 nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml diff --git a/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml index f6c3a2b..3adbf2b 100644 --- a/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml +++ b/nipype-auto-conv/specs/interfaces/upload_iq_ms.yaml @@ -22,7 +22,7 @@ inputs: # from the nipype interface, but you may want to be more specific, particularly # for file types, where specifying the format also specifies the file that will be # passed to the field in the automatically generated unittests. - in_iqms: generic/file + in_iqms: dict # type=file|default=: the input IQMs-JSON file callable_defaults: # dict[str, str] - names of methods/callable classes defined in the adjacent `*_callables.py` diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml index 8ced19a..62caab2 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.anat_qc_workflow.yaml @@ -6,15 +6,18 @@ nipype_name: anat_qc_workflow nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode -# Name of the node that is to be considered the output of the workflow, i.e. 
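For interface specs such as `upload_iq_ms.yaml`, the per-input type mapping
overrides the type inferred from the nipype interface; here the IQMs JSON is
handled as a `dict` rather than a generic file. The relevant fragment
(surrounding context abbreviated):

    in_iqms: dict   # previously generic/file
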
its inputs will be the outputs of the workflow -output_node: outputnode # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported -external_nested_workflows: null +external_nested_workflows: + - nirodents.workflows.brainextraction.init_rodent_brain_extraction_wf find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] - [BUILDING_WORKFLOW, "'Building {modality} MRIQC workflow {detail}.'"] - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - [ + "# fmt: off\\n\\s*workflow.set_output\\(\\[\\('iqmswf_measures', workflow.iqmswf.lzout.measures\\)\\]\\)", + "", + ] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml index a65b04a..078cc79 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.compute_iqms.yaml @@ -6,16 +6,18 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.anatomical.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode -# - meta -# # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_node: outputnode outputs: - out_file: - node_name: datasink - field: in_file - type: medimage/nifti + measures: + node_name: measures + field: out_qc replaces: - [outputnode, out_file] + noise_report: + node_name: getqi2 + field: out_file + export: true + replaces: + - [outputnode, noisefit] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml index 17a122c..4f7cac1 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.base.spatial_normalization.yaml @@ -8,6 +8,13 @@ nipype_module: mriqc.workflows.anatomical.base input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow output_node: outputnode +outputs: + report: + node_name: norm + field: out_report + export: true + replaces: + - ["outputnode", "out_report"] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index 0a59aaf..99b2dee 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -8,32 +8,33 @@ nipype_module: mriqc.workflows.anatomical.output input_node: inputnode outputs: zoom_report: - node_name: ds_report_zoomed - field: in_file + node_name: mosaic_zoom + field: out_file + export: true bg_report: - node_name: ds_report_background - field: in_file + node_name: mosaic_noise + field: out_file + export: true segm_report: - node_name: ds_report_segm - field: in_file + node_name: plot_artmask + field: out_file + export: true bmask_report: - node_name: ds_report_bmask - field: in_file + node_name: plot_bmask + field: out_file + export: true artmask_report: - node_name: ds_report_artmask - field: in_file + node_name: plot_artmask + field: out_file + export: true airmask_report: - node_name: ds_report_airmask - field: in_file + node_name: plot_airmask + field: out_file + export: true headmask_report: - node_name: ds_report_headmask - field: in_file - norm_report: - node_name: ds_report_norm - field: in_file - noise_report: - node_name: ds_report_noisefit - field: in_file + node_name: plot_headmask + field: out_file + export: true # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index 5041084..0abfe3b 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -6,9 +6,25 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.diffusion.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode -# - meta -# # Name of the node that is to be considered the output of the workflow, i.e. 
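Several of the new output entries carry `export: true`. Its exact semantics
are not documented in these specs; from usage it appears to request that the
value be exposed as an output of the converted workflow (this reading is an
assumption). The shape, with a hypothetical node name:

    outputs:
      example_report:
        node_name: some_plot_node  # hypothetical internal node
        field: out_file
        export: true               # assumed: always expose this output
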
its inputs will be the outputs of the workflow -output_node: outputnode +outputs: + out_file: + node_name: measures + field: out_qc + export: true + replaces: + - [outputnode, out_file] + noise_report: + node_name: getqi2 + field: out_file + export: true + replaces: + - [outputnode, noisefit] + noise_floor: + node_name: estimate_sigma + field: out + export: true + replaces: + - [outputnode, noise_floor] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index b228e63..7ee8436 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -11,33 +11,38 @@ outputs: snr_report: node_name: mosaic_snr field: out_report + export: true noise_report: node_name: mosaic_noise field: out_file + export: true fa_report: node_name: mosaic_fa field: out_file + export: true md_report: node_name: mosaic_md field: out_file + export: true heatmap_report: node_name: plot_heatmap field: out_file + export: true spikes_report: node_name: mosaic_spikes field: out_file - norm_report: - node_name: inputnode - field: mni_report + export: true carpet_report: node_name: bigplot field: out_file + export: true # bg_report: # seems to be the same as the noise report # node_name: mosaic_noise # field: out_file bmask_report: node_name: plot_bmask field: out_file + export: true # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index 54b5257..9afe5ed 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -6,9 +6,50 @@ nipype_name: compute_iqms nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode -# # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow -output_node: outputnode -# - ds_timeseries +outputs: + out_file: + node_name: measures + field: out_qc + export: true + replaces: + - [outputnode, out_file] + noise_report: + node_name: getqi2 + field: out_file + export: true + replaces: + - [outputnode, noisefit] + spikes: + node_name: spikes_fft + field: out_spikes + export: true + replaces: + - ["outputnode", "out_spikes"] + fft: + node_name: spikes_fft + field: out_fft + export: true + replaces: + - ["outputnode", "out_fft"] + n_spikes: + node_name: spikes_fft + field: spikes_num + type: field/integer + export: true + replaces: + - ["outputnode", "n_spikes"] + outliers: + node_name: outliers + field: out_file + export: true + replaces: + - ["outputnode", "outliers"] + dvars: + node_name: dvnode + field: out_all + export: true + replaces: + - ["outputnode", "out_dvars"] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml index 344be72..4e1f2ee 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.epi_mni_align.yaml @@ -8,6 +8,23 @@ nipype_module: mriqc.workflows.functional.base input_node: inputnode # # Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow output_node: outputnode +outputs: + epi_parc: + node_name: invt + field: output_image + replaces: + - ["outputnode", "epi_parc"] + epi_mni: + node_name: norm + field: warped_image + replaces: + - ["outputnode", "epi_mri"] + report: + node_name: norm + field: mni_report + export: true + replaces: + - ["outputnode", "out_report"] # Generic regular expression substitutions to be run over the code before it is processed find_replace: - ["config = NipypeConfig\\(\\)", ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml new file mode 100644 index 0000000..ff05860 --- /dev/null +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_bmsk_workflow.yaml @@ -0,0 +1,19 @@ +# name of the converted workflow constructor function +name: fmri_bmsk_workflow +# name of the nipype workflow constructor +nipype_name: fmri_bmsk_workflow +# name of the nipype module the function is found within, e.g. mriqc.workflows.anatomical.base +nipype_module: mriqc.workflows.functional.base +# Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow +input_node: inputnode +# # Name of the node that is to be considered the output of the workflow, i.e. 
its inputs will be the outputs of the workflow +output_node: outputnode +# Generic regular expression substitutions to be run over the code before it is processed +find_replace: + - ["config = NipypeConfig\\(\\)", ""] + - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] + - ["logging = Logging\\(config\\)", ""] +# name of the workflow variable that is returned +workflow_variable: workflow +# the names of the nested workflows that are defined in other modules and need to be imported +external_nested_workflows: null diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 41fde9b..7ad54d1 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -6,8 +6,15 @@ nipype_name: fmri_qc_workflow nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode -# Name of the node that is to be considered the output of the workflow, i.e. its inputs will be the outputs of the workflow -output_node: outputnode +inputs: + bvals: + node_name: load_bmat + field: out_bval_file + type: medimage/bval + bvecs: + node_name: load_bmat + field: out_bvec_file + type: medimage/bvec # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index 1fb4007..c782268 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -9,20 +9,29 @@ input_node: inputnode # Name of the node that is to be considered the output of the workflow, i.e. 
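As the recurring comment says, `find_replace` entries are generic regular
expression substitutions run over the extracted source before it is processed,
so regex metacharacters in the pattern must be escaped. Each entry is a
[pattern, replacement] pair, and an empty replacement deletes the match, as in
the logger cleanup used throughout these specs:

    find_replace:
      - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""]
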
its inputs will be the outputs of the workflow outputs: mean_report: - node_name: ds_report_mean - field: in_file + node_name: mosaic_mean + field: out_file + export: true stdev_report: - node_name: ds_report_stdev - field: in_file + node_name: mosaic_stddev + field: out_file + export: true background_report: - node_name: ds_report_background - field: in_file + node_name: mosaic_noise + field: out_file + export: true zoomed_report: - node_name: ds_report_zoomed - field: in_file + node_name: mosaic_zoom + field: out_file + export: true carpet_report: - node_name: ds_report_carpet - field: in_file + node_name: bigplot + field: out_file + export: true + spikes_report: + node_name: mosaic_spikes + field: out_file + export: true # Generic regular expression substitutions to be run over the code before it is processed find_replace: From 67fd0f1a9aba0ae76daf622db5c8accb857ad921 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 15 May 2024 12:38:06 +1000 Subject: [PATCH 20/47] removed early return if not verbose so all outputs are generated by anat report workflow --- .../mriqc.workflows.anatomical.output.init_anat_report_wf.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml index 99b2dee..9af5b76 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.anatomical.output.init_anat_report_wf.yaml @@ -16,7 +16,7 @@ outputs: field: out_file export: true segm_report: - node_name: plot_artmask + node_name: plot_segm field: out_file export: true bmask_report: @@ -40,6 +40,7 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - ["if not verbose:\\n\\s*return workflow", ""] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From fd6773320881f7c703e01dee20680ff70e273772 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Wed, 15 May 2024 17:19:20 +1000 Subject: [PATCH 21/47] got diffusion workflow to build successfully --- .../mriqc.workflows.diffusion.base.compute_iqms.yaml | 6 ------ .../mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml | 8 ++++++++ ...iqc.workflows.diffusion.output.init_dwi_report_wf.yaml | 3 +++ 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml index 0abfe3b..0c560b8 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.compute_iqms.yaml @@ -13,12 +13,6 @@ outputs: export: true replaces: - [outputnode, out_file] - noise_report: - node_name: getqi2 - field: out_file - export: true - replaces: - - [outputnode, noisefit] noise_floor: node_name: estimate_sigma field: out diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml index e990e9a..9d094af 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml 
+++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.base.dmri_qc_workflow.yaml @@ -15,6 +15,10 @@ inputs: node_name: load_bmat field: out_bvec_file type: medimage/bvec + qspace_neighbors: + node_name: load_bmat + field: qspace_neighbors + # type: field/integer+list-of.list-of # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] @@ -30,6 +34,10 @@ find_replace: "in_file=workflow\\.dwidenoise\\.lzout\\.out_file", "in_file=workflow.dwidenoise.lzout.out", ] + # - [ + # "workflow.set_output\\(\\n(\\s*)\\[\\(\"dwi_report_wf_spikes_report\", workflow.dwi_report_wf.lzout.spikes_report\\)\\n(\\s*)\\]\\n(\\s*)\\)", + # "if wf_fft_spikes_detector:\\n workflow.set_output(\\n \\1[(\"dwi_report_wf_spikes_report\", workflow.dwi_report_wf.lzout.spikes_report)\\n \\2]\\n \\3)", + # ] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml index 7ee8436..c837d27 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.diffusion.output.init_dwi_report_wf.yaml @@ -49,6 +49,9 @@ find_replace: - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] - ["out_file=workflow\\.lzin\\.epi_mean,\\n", ""] # multiple connections to out_file in workflow + - ["if True:\\n\\s*return workflow", ""] + - ["if wf_fft_spikes_detector:", "if True: # wf_fft_spikes_detector:"] + - ["if not verbose:", "if False: # not verbose:"] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From 7c4bd38282b09ffa770d29b9bec1d8df17a3e909 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 16 May 2024 09:31:14 +1000 Subject: [PATCH 22/47] added metadata input --- ...c.workflows.functional.base.fmri_qc_workflow.yaml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index 7ad54d1..a41418c 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -7,14 +7,10 @@ nipype_module: mriqc.workflows.functional.base # Name of the node that is to be considered the input of the workflow, i.e. its outputs will be the inputs of the workflow input_node: inputnode inputs: - bvals: - node_name: load_bmat - field: out_bval_file - type: medimage/bval - bvecs: - node_name: load_bmat - field: out_bvec_file - type: medimage/bvec + metadata: + node_name: meta + field: out_dict + type: dict # Generic regular expression substitutions to be run over the code before it is processed find_replace: - [from pydra.tasks.mriqc.messages import BUILDING_WORKFLOW, ""] From f81b54f90b2533f495fac15414e96a52ba7581e4 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Thu, 16 May 2024 15:35:26 +1000 Subject: [PATCH 23/47] all workflows build!! 
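This relies on `find_replace` rewrites that disable conditionals guarding
optional outputs in the original constructors, so that every declared output
is always created, for example:

    find_replace:
      - [
          "if wf_fft_spikes_detector:",
          "if True: # wf_fft_spikes_detector: - disabled to ensure all outputs are generated",
        ]
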
--- ...c.workflows.functional.base.compute_iqms.yaml | 16 +++++++--------- ...rkflows.functional.base.fmri_qc_workflow.yaml | 4 ++++ ...ws.functional.output.init_func_report_wf.yaml | 4 ++++ 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml index 9afe5ed..84b4660 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.compute_iqms.yaml @@ -13,12 +13,6 @@ outputs: export: true replaces: - [outputnode, out_file] - noise_report: - node_name: getqi2 - field: out_file - export: true - replaces: - - [outputnode, noisefit] spikes: node_name: spikes_fft field: out_spikes @@ -31,13 +25,13 @@ outputs: export: true replaces: - ["outputnode", "out_fft"] - n_spikes: + spikes_num: node_name: spikes_fft - field: spikes_num + field: n_spikes type: field/integer export: true replaces: - - ["outputnode", "n_spikes"] + - ["outputnode", "spikes_num"] outliers: node_name: outliers field: out_file @@ -55,6 +49,10 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - [ + "if wf_fft_spikes_detector:", + "if True: # wf_fft_spikes_detector: - disabled to ensure all outputs are generated", + ] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml index a41418c..d6796e0 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.base.fmri_qc_workflow.yaml @@ -19,6 +19,10 @@ find_replace: - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] - ["full_files = \\[\\].*?= full_files", ""] + - [ + "# fmt: off\\n\\s*workflow.set_output\\(\\[\\('iqmswf_out_file', workflow.iqmswf.lzout.out_file\\)\\]\\)", + "", + ] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported diff --git a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml index c782268..36c527e 100644 --- a/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml +++ b/nipype-auto-conv/specs/workflows/mriqc.workflows.functional.output.init_func_report_wf.yaml @@ -38,6 +38,10 @@ find_replace: - ["config = NipypeConfig\\(\\)", ""] - ["iflogger = logging.getLogger\\(\"nipype.interface\"\\)", ""] - ["logging = Logging\\(config\\)", ""] + - [ + "if wf_fft_spikes_detector:", + "if True: # wf_fft_spikes_detector: - disabled so output is always created", + ] # name of the workflow variable that is returned workflow_variable: workflow # the names of the nested workflows that are defined in other modules and need to be imported From 9ab4ff8072bf4c4b79eb73352b27d7845a648abd Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 17 May 2024 22:50:24 
+1000 Subject: [PATCH 24/47] added pydra task dependencies --- pyproject.toml | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e053d9d..f087361 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,10 +12,16 @@ dependencies = [ "fileformats >=0.8.3", "fileformats-datascience >=0.1", "fileformats-medimage >=0.4.1", + "pydra-ants", + "pydra-afni", + "pydra-fsl", + "pydra-mrtrix3 >=3.0.3a0", +] +license = { file = "LICENSE" } +authors = [{ name = "Nipype developers", email = "neuroimaging@python.org" }] +maintainers = [ + { name = "Nipype developers", email = "neuroimaging@python.org" }, ] -license = {file = "LICENSE"} -authors = [{name = "Nipype developers", email = "neuroimaging@python.org"}] -maintainers = [{name = "Nipype developers", email = "neuroimaging@python.org"}] keywords = ["pydra"] classifiers = [ "Development Status :: 2 - Pre-Alpha", @@ -30,10 +36,7 @@ classifiers = [ dynamic = ["version"] [project.optional-dependencies] -dev = [ - "black", - "pre-commit", -] +dev = ["black", "pre-commit"] doc = [ "packaging", "sphinx >=2.1.2", @@ -53,6 +56,9 @@ test = [ "fileformats-extras", "fileformats-datascience-extras", "fileformats-medimage-extras", + "fileformats-medimage-afni-extras", + "fileformats-medimage-mrtrix3-extras", + "fileformats-medimage-fsl-extras", ] [tool.hatch.version] @@ -74,9 +80,7 @@ ignore-words = ".codespell-ignorewords" [tool.flake8] doctests = true -per-file-ignores = [ - "__init__.py:F401,F403" -] +per-file-ignores = ["__init__.py:F401,F403"] max-line-length = 88 select = "C,E,F,W,B,B950" extend-ignore = ['E203', 'E501', 'E129', 'W503'] From 328233b9f761ad703f8e2993fa451cfa4f36fa4b Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 17 May 2024 23:33:57 +1000 Subject: [PATCH 25/47] removed fileformats from ci-cd --- .github/workflows/ci-cd.yaml | 104 ++--------------------------------- 1 file changed, 5 insertions(+), 99 deletions(-) diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index 42c8af7..fef93ba 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -30,7 +30,7 @@ jobs: - name: Install build dependencies run: python -m pip install --upgrade pip - name: Install requirements - run: python -m pip install ./related-packages/fileformats -r ./nipype-auto-conv/requirements.txt + run: python -m pip install -r ./nipype-auto-conv/requirements.txt - name: Run automatic Nipype > Pydra conversion run: ./nipype-auto-conv/generate - uses: actions/upload-artifact@v3 @@ -77,41 +77,12 @@ jobs: python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - name: Install task package run: | - pip install "./related-packages/fileformats[dev]" "related-packages/fileformats-extras[dev]" pip install ${{ matrix.pip-flags }} ".[dev]" - python -c "import pydra.tasks.anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import fileformats.medimage_anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - python -c "import fileformats.extras.medimage_anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - - fileformats-test: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.8', '3.11'] - steps: - - uses: actions/checkout@v4 - - name: Revert version to most recent tag on upstream update - if: github.event_name == 'repository_dispatch' - run: git checkout $(git tag 
-l | tail -n 1 | awk -F post '{print $1}') - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install build dependencies - run: | - python -m pip install --upgrade pip - - name: Install task package - run: | - pip install "./related-packages/fileformats[test]" "./related-packages/fileformats-extras[test]" - python -c "import fileformats.medimage_anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - - name: Test fileformats with pytest - run: | - cd ./fileformats - pytest -sv --cov fileformats.medimage_anatomical --cov fileformats.extras.medimage_anatomical --cov-report xml . + test: - needs: [nipype-conv, fileformats-test] + needs: [nipype-conv] runs-on: ubuntu-22.04 strategy: matrix: @@ -170,7 +141,7 @@ jobs: python -m pip install --upgrade pip - name: Install task package run: | - pip install "./related-packages/fileformats" "./related-packages/fileformats-extras" ".[test]" + pip install ".[test]" python -c "import pydra.tasks.anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')" - name: Test with pytest @@ -180,75 +151,10 @@ jobs: - uses: codecov/codecov-action@v3 if: ${{ always() }} with: - files: coverage.xml,./fileformats/coverage.xml + files: coverage.xml name: pydra-anatomical - deploy-fileformats: - needs: [devcheck, test] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - name: Install build tools - run: python -m pip install build twine - - name: Build source and wheel distributions - run: python -m build ./related-packages/fileformats - - name: Check distributions - run: twine check ./related-packages/fileformats/dist/* - - name: Check for PyPI token on tag - id: deployable - if: (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) || github.event_name == 'repository_dispatch' - env: - PYPI_API_TOKEN: "${{ secrets.PYPI_FILEFORMATS_API_TOKEN }}" - run: if [ -n "$PYPI_API_TOKEN" ]; then echo "DEPLOY=true" >> $GITHUB_OUTPUT; fi - - name: Upload to PyPI - if: steps.deployable.outputs.DEPLOY - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_FILEFORMATS_API_TOKEN }} - packages-dir: ./related-packages/fileformats/dist - - deploy-fileformats-extras: - needs: [deploy-fileformats] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - submodules: recursive - fetch-depth: 0 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - name: Install build tools - run: python -m pip install build twine - - name: Build source and wheel distributions - run: python -m build ./related-packages/fileformats-extras - - name: Check distributions - run: twine check ./related-packages/fileformats-extras/dist/* - - name: Check for PyPI token on tag - id: deployable - if: (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) || github.event_name == 'repository_dispatch' - env: - PYPI_API_TOKEN: "${{ secrets.PYPI_FILEFORMATS_EXTRAS_API_TOKEN }}" - run: if [ -n "$PYPI_API_TOKEN" ]; then echo "DEPLOY=true" >> $GITHUB_OUTPUT; fi - - name: Upload to PyPI - if: steps.deployable.outputs.DEPLOY - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ 
secrets.PYPI_FILEFORMATS_EXTRAS_API_TOKEN }} - packages-dir: ./related-packages/fileformats-extras/dist - deploy: - needs: [deploy-fileformats-extras] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From 86c37c453070349568fa1dc4f753b8788f3fda14 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 17 May 2024 23:37:13 +1000 Subject: [PATCH 26/47] added job dependency in gha --- .github/workflows/ci-cd.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index fef93ba..5ee4b8f 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -16,7 +16,6 @@ on: types: [create-release] jobs: - nipype-conv: runs-on: ubuntu-latest steps: @@ -155,6 +154,7 @@ jobs: name: pydra-anatomical deploy: + needs: [test] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 From 6b8d42eb6a5ec8c7f2347f95f6272e97e5ce3307 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Fri, 17 May 2024 23:52:43 +1000 Subject: [PATCH 27/47] added nipy to nipype2pydra deps --- nipype-auto-conv/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype-auto-conv/requirements.txt b/nipype-auto-conv/requirements.txt index 0366123..0bdabfa 100644 --- a/nipype-auto-conv/requirements.txt +++ b/nipype-auto-conv/requirements.txt @@ -1,2 +1,3 @@ nipype2pydra -mriqc \ No newline at end of file +mriqc +nipy \ No newline at end of file From 72b4223fb08e46a9927c88ee9d9a0f71966b8782 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Sat, 18 May 2024 19:32:44 +1000 Subject: [PATCH 28/47] touched up ci-cd workflow --- .github/workflows/ci-cd.yaml | 2 +- pydra/tasks/mriqc/workflows/__init__.py | 83 +- .../mriqc/workflows/anatomical/__init__.py | 15 + .../tasks/mriqc/workflows/anatomical/base.py | 1457 +++++++---------- .../mriqc/workflows/anatomical/output.py | 345 ++-- pydra/tasks/mriqc/workflows/core.py | 59 - .../mriqc/workflows/diffusion/__init__.py | 10 + pydra/tasks/mriqc/workflows/diffusion/base.py | 1059 ++++++------ .../tasks/mriqc/workflows/diffusion/output.py | 542 +++--- .../mriqc/workflows/functional/__init__.py | 2 + .../tasks/mriqc/workflows/functional/base.py | 1331 ++++++++------- .../mriqc/workflows/functional/output.py | 544 +++--- pydra/tasks/mriqc/workflows/shared.py | 133 +- pydra/tasks/mriqc/workflows/utils.py | 245 +-- 14 files changed, 2566 insertions(+), 3261 deletions(-) delete mode 100644 pydra/tasks/mriqc/workflows/core.py diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index 5ee4b8f..e0c4f71 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -170,7 +170,7 @@ jobs: if: github.event_name == 'repository_dispatch' run: | TAG=$(git tag -l | tail -n 1 | awk -F post '{print $1}') - POST=$(python -c "from pydra.tasks.anatomical.auto._version import *; print(post_release)") + POST=$(python -c "from pydra.tasks.anatomical.auto._post_release import *; print(post_release)") git checkout $TAG git add -f pydra/tasks/anatomical/auto/_version.py git commit -am"added auto-generated version to make new tag for package version" diff --git a/pydra/tasks/mriqc/workflows/__init__.py b/pydra/tasks/mriqc/workflows/__init__.py index 36ff3a6..7757ed1 100644 --- a/pydra/tasks/mriqc/workflows/__init__.py +++ b/pydra/tasks/mriqc/workflows/__init__.py @@ -1,42 +1,41 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache 
License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -""" -.. automodule:: mriqc.workflows.anatomical - :members: - :undoc-members: - :show-inheritance: - - -.. automodule:: mriqc.workflows.functional - :members: - :undoc-members: - :show-inheritance: - -""" -from mriqc.workflows.anatomical.base import anat_qc_workflow -from mriqc.workflows.functional.base import fmri_qc_workflow - -__all__ = [ - "anat_qc_workflow", - "fmri_qc_workflow", -] +from .anatomical import ( + _binarize, + _enhance, + _get_mod, + _pop, + airmsk_wf, + anat_qc_workflow, + compute_iqms, + gradient_threshold, + headmsk_wf, + image_gradient, + init_anat_report_wf, + init_brain_tissue_segmentation, + spatial_normalization, +) +from .diffusion import ( + _bvals_report, + _carpet_parcellation, + _estimate_sigma, + _filter_metadata, + _get_tr, + _get_wm, + compute_iqms, + dmri_qc_workflow, + epi_mni_align, + hmc_workflow, + init_dwi_report_wf, +) +from .functional import ( + _carpet_parcellation, + _get_tr, + compute_iqms, + epi_mni_align, + fmri_bmsk_workflow, + fmri_qc_workflow, + hmc, + init_func_report_wf, + spikes_mask, +) +from .shared import synthstrip_wf +from .utils import _tofloat, generate_filename, get_fwhmx, slice_wise_fft, spectrum_mask diff --git a/pydra/tasks/mriqc/workflows/anatomical/__init__.py b/pydra/tasks/mriqc/workflows/anatomical/__init__.py index e69de29..c38580a 100644 --- a/pydra/tasks/mriqc/workflows/anatomical/__init__.py +++ b/pydra/tasks/mriqc/workflows/anatomical/__init__.py @@ -0,0 +1,15 @@ +from .base import ( + _binarize, + _enhance, + _get_mod, + _pop, + airmsk_wf, + anat_qc_workflow, + compute_iqms, + gradient_threshold, + headmsk_wf, + image_gradient, + init_brain_tissue_segmentation, + spatial_normalization, +) +from .output import init_anat_report_wf diff --git a/pydra/tasks/mriqc/workflows/anatomical/base.py b/pydra/tasks/mriqc/workflows/anatomical/base.py index 030e8e3..4fbeb37 100644 --- a/pydra/tasks/mriqc/workflows/anatomical/base.py +++ b/pydra/tasks/mriqc/workflows/anatomical/base.py @@ -1,85 +1,43 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -""" -Anatomical workflow -=================== - -.. image :: _static/anatomical_workflow_source.svg - -The anatomical workflow follows the following steps: - -#. Conform (reorientations, revise data types) input data and read - associated metadata. -#. Skull-stripping (AFNI). -#. Calculate head mask -- :py:func:`headmsk_wf`. -#. Spatial Normalization to MNI (ANTs) -#. Calculate air mask above the nasial-cerebelum plane -- :py:func:`airmsk_wf`. -#. Brain tissue segmentation (FAST). -#. Extraction of IQMs -- :py:func:`compute_iqms`. -#. Individual-reports generation -- - :py:func:`~mriqc.workflows.anatomical.output.init_anat_report_wf`. - -This workflow is orchestrated by :py:func:`anat_qc_workflow`. - -For the skull-stripping, we use ``afni_wf`` from ``niworkflows.anat.skullstrip``: - -.. workflow:: - - from niworkflows.anat.skullstrip import afni_wf - from mriqc.testing import mock_config - with mock_config(): - wf = afni_wf() -""" -import pydra -from pydra import Workflow -from pydra.tasks.mriqc.auto import ( - UploadIQMs, +import attrs +import logging +from pathlib import Path +from pydra.engine import Workflow +from pydra.engine.specs import BaseSpec, SpecInfo +from pydra.engine.task import FunctionTask +import pydra.mark +from pydra.tasks.mriqc.interfaces import ( ArtifactMask, ComputeQI2, ConformImage, - IQMFileSink, RotationMask, StructuralQC, ) -from mriqc.messages import BUILDING_WORKFLOW - -from mriqc import config - -# from mriqc.interfaces.reports import AddProvenance -# from mriqc.interfaces.datalad import DataladIdentityInterface -from mriqc.messages import BUILDING_WORKFLOW -from pydra.tasks.mriqc.workflows.utils import get_fwhmx from pydra.tasks.mriqc.workflows.anatomical.output import init_anat_report_wf - - -# from nipype.interfaces import utility as niu -# from nipype.pipeline import engine as pe - -from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms +from pydra.tasks.mriqc.workflows.utils import get_fwhmx +from pydra.tasks.niworkflows.interfaces.fixes import ( + FixHeaderApplyTransforms as ApplyTransforms, +) from templateflow.api import get as get_template - - -def anat_qc_workflow(modality, name="anatMRIQC"): +import typing as ty + + +logger = logging.getLogger(__name__) + + +def anat_qc_workflow( + exec_ants_float=False, + exec_datalad_get=True, + exec_debug=False, + exec_no_sub=False, + exec_verbose_reports=False, + exec_work_dir=None, + in_file=attrs.NOTHING, + name="anatMRIQC", + nipype_omp_nthreads=12, + wf_inputs=None, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): """ One-subject-one-session-one-run pipeline to extract the NR-IQMs from anatomical images @@ -95,552 +53,364 @@ def anat_qc_workflow(modality, name="anatMRIQC"): """ from pydra.tasks.mriqc.workflows.shared import synthstrip_wf - dataset = config.workflow.inputs.get("t1w", []) + config.workflow.inputs.get( - "t2w", [] - ) - - message = BUILDING_WORKFLOW.format( - modality="anatomical", - detail=( - f"for {len(dataset)} NIfTI files." - if len(dataset) > 2 - else f"({' and '.join(('<%s>' % v for v in dataset))})." 
- ), - ) - config.loggers.workflow.info(message) + if exec_work_dir is None: + exec_work_dir = Path.cwd() # Initialize workflow workflow = Workflow( - name=name, input_spec=["in_file"] - ) # specifying `input_spec` to contain ["in_file"] makes a field accessible at workflow.lzin.in_file + name=name, + input_spec={"in_file": ty.Any}, + output_spec={ + "anat_report_wf_airmask_report": ty.Any, + "anat_report_wf_artmask_report": ty.Any, + "anat_report_wf_bg_report": ty.Any, + "anat_report_wf_bmask_report": ty.Any, + "anat_report_wf_headmask_report": ty.Any, + "anat_report_wf_segm_report": ty.Any, + "anat_report_wf_zoom_report": ty.Any, + "iqmswf_noise_report": ty.Any, + "norm_report": ty.Any, + }, + in_file=in_file, + ) + + # Define workflow, inputs and outputs + # 0. Get data # 1. Reorient anatomical image - # to_ras = pe.Node(ConformImage(check_dtype=False), name="conform") workflow.add( - ConformImage(in_file=workflow.lzin.in_file, check_dtype=False, name="to_ras") + ConformImage(check_dtype=False, in_file=workflow.lzin.in_file, name="to_ras") ) - # 2. species specific skull-stripping - # if config.workflow.species.lower() == "human": - workflow.add( - synthstrip_wf( - omp_nthreads=config.nipype.omp_nthreads, - in_files=workflow.to_ras.lzout.out_file, - name="skull_stripping", + if wf_species.lower() == "human": + workflow.add( + synthstrip_wf( + omp_nthreads=nipype_omp_nthreads, + in_files=workflow.to_ras.lzout.out_file, + name="skull_stripping", + ) ) - ) - ss_bias_field = "bias_image" - # else: - # from nirodents.workflows.brainextraction import init_rodent_brain_extraction_wf - - # skull_stripping = init_rodent_brain_extraction_wf(template_id=config.workflow.template_id) - # ss_bias_field = "final_n4.bias_image" + ss_bias_field = "outputnode.bias_image" + else: + from nirodents.workflows.brainextraction import init_rodent_brain_extraction_wf + skull_stripping = init_rodent_brain_extraction_wf(template_id=wf_template_id) + ss_bias_field = "final_n4.bias_image" # 3. Head mask workflow.add( - headmsk_wf( - in_file=workflow.skull_stripping.lzout.out_corrected, - brain_mask=workflow.skull_stripping.lzout.out_mask, - omp_nthreads=config.nipype.omp_nthreads, - name="hmsk", - ) + headmsk_wf(omp_nthreads=nipype_omp_nthreads, wf_species=wf_species, name="hmsk") ) # 4. Spatial Normalization, using ANTs workflow.add( spatial_normalization( - **{modality: workflow.lzin.in_file}, - moving_image=workflow.skull_stripping.lzout.outcorrected, - moving_mask=workflow.skullstripping.lzout.out_mask, + nipype_omp_nthreads=nipype_omp_nthreads, + wf_species=wf_species, + exec_ants_float=exec_ants_float, + exec_debug=exec_debug, + wf_template_id=wf_template_id, + modality=workflow.lzin.in_file, name="norm", ) ) # 5. Air mask (with and without artifacts) - workflow.add(airmsk_wf(ind2std_xfm=workflow.norm.lzout.ind2std_xfm, name="amw")) + workflow.add( + airmsk_wf( + head_mask=workflow.hmsk.lzout.out_file, + in_file=workflow.to_ras.lzout.out_file, + ind2std_xfm=workflow.norm.lzout.ind2std_xfm, + name="amw", + ) + ) # 6. Brain tissue segmentation workflow.add( init_brain_tissue_segmentation( - brainmask=workflow.skull_stripping.lzout.out_mask, + nipype_omp_nthreads=nipype_omp_nthreads, + in_file=workflow.hmsk.lzout.out_denoised, std_tpms=workflow.norm.lzout.out_tpms, name="bts", ) ) - # 7. 
Compute IQMs workflow.add( compute_iqms( - in_file=workflow.lzin.in_file, + wf_species=wf_species, + headmask=workflow.hmsk.lzout.out_file, + pvms=workflow.bts.lzout.out_pvms, + segmentation=workflow.bts.lzout.out_segm, + rotmask=workflow.amw.lzout.rot_mask, + artmask=workflow.amw.lzout.art_mask, + hatmask=workflow.amw.lzout.hat_mask, + airmask=workflow.amw.lzout.air_mask, + in_ras=workflow.to_ras.lzout.out_file, std_tpms=workflow.norm.lzout.out_tpms, name="iqmswf", ) ) - # Reports workflow.add( init_anat_report_wf( - mni_report=workflow.norm.lzout.out_report, name="anat_report_wf" + exec_verbose_reports=exec_verbose_reports, + exec_work_dir=exec_work_dir, + wf_species=wf_species, + segmentation=workflow.bts.lzout.out_segm, + artmask=workflow.amw.lzout.art_mask, + airmask=workflow.amw.lzout.air_mask, + headmask=workflow.hmsk.lzout.out_file, + in_ras=workflow.to_ras.lzout.out_file, + name="anat_report_wf", ) ) - # Connect all nodes # fmt: off - workflow.add_connections([ - (norm, hmsk, [("outputnode.out_tpms", "inputnode.in_tpms")]), - (to_ras, amw, [("out_file", "inputnode.in_file")]), - (skull_stripping, amw, [("outputnode.out_mask", "inputnode.in_mask")]), - (hmsk, amw, [("outputnode.out_file", "inputnode.head_mask")]), - (to_ras, iqmswf, [("out_file", "inputnode.in_ras")]), - (skull_stripping, iqmswf, [("outputnode.out_corrected", "inputnode.inu_corrected"), - (ss_bias_field, "inputnode.in_inu"), - ("outputnode.out_mask", "inputnode.brainmask")]), - (amw, iqmswf, [("outputnode.air_mask", "inputnode.airmask"), - ("outputnode.hat_mask", "inputnode.hatmask"), - ("outputnode.art_mask", "inputnode.artmask"), - ("outputnode.rot_mask", "inputnode.rotmask")]), - (hmsk, bts, [("outputnode.out_denoised", "inputnode.in_file")]), - (bts, iqmswf, [("outputnode.out_segm", "inputnode.segmentation"), - ("outputnode.out_pvms", "inputnode.pvms")]), - (hmsk, iqmswf, [("outputnode.out_file", "inputnode.headmask")]), - (to_ras, anat_report_wf, [("out_file", "inputnode.in_ras")]), - (skull_stripping, anat_report_wf, [ - ("outputnode.out_corrected", "inputnode.inu_corrected"), - ("outputnode.out_mask", "inputnode.brainmask")]), - (hmsk, anat_report_wf, [("outputnode.out_file", "inputnode.headmask")]), - (amw, anat_report_wf, [ - ("outputnode.air_mask", "inputnode.airmask"), - ("outputnode.art_mask", "inputnode.artmask"), - ("outputnode.rot_mask", "inputnode.rotmask"), - ]), - (bts, anat_report_wf, [("outputnode.out_segm", "inputnode.segmentation")]), - (iqmswf, anat_report_wf, [("outputnode.noisefit", "inputnode.noisefit")]), - (iqmswf, anat_report_wf, [("outputnode.out_file", "inputnode.in_iqms")]), - (iqmswf, outputnode, [("outputnode.out_file", "out_json")]), - ]) - # fmt: on - - # # Upload metrics - # @pydra.mark.task - # def upload_iqms(in_iqms, endpoint, auth_token, strict): - # from mriqc.interfaces.webapi import UploadIQMs + workflow.norm.inputs.modality = workflow.lzin.in_file + workflow.hmsk.inputs.in_file = workflow.skull_stripping.lzout.out_corrected + workflow.hmsk.inputs.brainmask = workflow.skull_stripping.lzout.out_mask + workflow.bts.inputs.brainmask = workflow.skull_stripping.lzout.out_mask + workflow.norm.inputs.moving_image = workflow.skull_stripping.lzout.out_corrected + workflow.norm.inputs.moving_mask = workflow.skull_stripping.lzout.out_mask + workflow.hmsk.inputs.in_tpms = workflow.norm.lzout.out_tpms - # upldwf = UploadIQMs( - # in_iqms=in_iqms, endpoint=endpoint, auth_token=auth_token, strict=strict - # ) - # return upldwf.api_id + workflow.iqmswf.inputs.inu_corrected = 
workflow.skull_stripping.lzout.out_corrected + workflow.iqmswf.inputs.in_inu = workflow.skull_stripping.lzout.bias_image + workflow.iqmswf.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - # # fmt: off - # @pydra.mark.task - # def upload_metrics(endpoint, auth_token, strict, in_iqms): - # upload_iqms_result = upload_iqms(in_iqms=in_iqms, endpoint=endpoint, auth_token=auth_token, strict=strict) - # return upload_iqms_result + workflow.anat_report_wf.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - # returns the result - workflow.add( - UploadIQMs( - endpoint=config.execution.webapi_url, - auth_token=config.execution.webapi_token, - strict=config.execution.upload_strict, - in_iqms=workflow.iqmswf.lzout.outputnode.out_file, - name="upldwf", - ) - ) - # workflow.ad_connections( - # [ - # (iqmswf, upldwf, [("outputnode.out_file", "in_iqms")]), - # (upldwf, anat_report_wf, [("api_id", "inputnode.api_id")]), - # ] - # ) - - # # Original Code - # if not config.execution.no_sub: - # from mriqc.interfaces.webapi import UploadIQMs - - # upldwf = pe.Node( - # UploadIQMs( - # endpoint=config.execution.webapi_url, - # auth_token=config.execution.webapi_token, - # strict=config.execution.upload_strict, - # ), - # name="UploadMetrics", - # ) - - # # fmt: off - # workflow.ad_connections([ - # (iqmswf, upldwf, [("outputnode.out_file", "in_iqms")]), - # (upldwf, anat_report_wf, [("api_id", "inputnode.api_id")]), - # ]) - # # fmt: on - - return workflow - - -def spatial_normalization(name="SpatialNormalization"): - """Create a simplified workflow to perform fast spatial normalization.""" - from niworkflows.interfaces.reportlets.registration import ( - SpatialNormalizationRPT as RobustMNINormalization, - ) - - # Have the template id handy - tpl_id = config.workflow.template_id - - # Define workflow interface - # workflow = pe.Workflow(name=name) - workflow = Workflow(name=name, input_spec=input_spec) - - # # Define input and output nodes - # inputnode = Node(interface=input_spec, name="inputnode") - - # # inputnode = pe.Node( - # # niu.IdentityInterface(fields=["moving_image", "moving_mask", "modality"]), - # # name="inputnode", - # # ) - # outputnode = Node(interface=output_spec, name="outputnode") - - # outputnode = pe.Node( - # niu.IdentityInterface(fields=["out_tpms", "out_report", "ind2std_xfm"]), - # name="outputnode", - # ) - - # # Spatial normalization - # @pydra.mark.task - # def spatial_normalization( - # flavor, - # num_threads, - # ants_float, - # template, - # generate_report, - # species, - # tpl_id, - # ): - - # no pe.node here - workflow.add( - RobustMNINormalization( - flavor=["testing", "fast"][config.execution.debug], - num_threads=config.nipype.omp_nthreads, - float=config.execution.ants_float, - template=tpl_id, - generate_report=True, - name="SpatialNormalization", - # Request all MultiProc processes when ants_nthreads > n_procs - num_threads=config.nipype.omp_nthreads, - mem_gb=3, - ) + # fmt: on + # Upload metrics + if not exec_no_sub: + from pydra.tasks.mriqc.interfaces.webapi import UploadIQMs + + pass + # fmt: off + pass + pass + # fmt: on + workflow.set_output([("norm_report", workflow.norm.lzout.report)]) + workflow.set_output([("iqmswf_noise_report", workflow.iqmswf.lzout.noise_report)]) + workflow.set_output( + [("anat_report_wf_bg_report", workflow.anat_report_wf.lzout.bg_report)] + ) + workflow.set_output( + [ + ( + "anat_report_wf_artmask_report", + workflow.anat_report_wf.lzout.artmask_report, + ) + ] ) - if config.workflow.species.lower() == "human": - 
norm.inputs.reference_mask = str( - get_template(tpl_id, resolution=2, desc="brain", suffix="mask") - ) - else: - norm.inputs.reference_image = str(get_template(tpl_id, suffix="T2w")) - norm.inputs.reference_mask = str( - get_template(tpl_id, desc="brain", suffix="mask")[0] - ) - - return workflow - - -# Create a Pydra workflow -wf = Workflow(name="SpatialNormalizationWorkflow") - -# Define input parameters -flavor = ["testing", "fast"][config.execution.debug] -num_threads = config.nipype.omp_nthreads -ants_float = config.execution.ants_float -template = tpl_id -generate_report = True -species = config.workflow.species.lower() -tpl_id = "your_template_id_here" # Replace with actual value - -# Add the spatial normalization task to the workflow -wf.add( - spatial_normalization( - flavor=flavor, - num_threads=num_threads, - ants_float=ants_float, - template=template, - generate_report=generate_report, - species=species, - tpl_id=tpl_id, - ), - name="SpatialNormalization", - num_threads=config.nipype.omp_nthreads, - mem_gb=3, -) - -# Execute the workflow -with pydra.Submitter(plugin="cf") as sub: - wf(submitter=sub) - - #### up to here (20/03/2024) - - # Draft conversion - - import pydra - - -@pydra.mark.task -def project_tpm_to_t1w_space(template_id, species, workflow_name): - from niworkflows.interfaces.ants import ApplyTransforms - from mriqc.utils.misc import get_template - - if species.lower() == "human": - resolution = 1 - else: - resolution = None - - tpms_std2t1w = ApplyTransforms( - dimension=3, - default_value=0, - interpolation="Gaussian", - float=config.execution.ants_float, + workflow.set_output( + [ + ( + "anat_report_wf_headmask_report", + workflow.anat_report_wf.lzout.headmask_report, + ) + ] ) - tpms_std2t1w.inputs.input_image = [ - str(p) - for p in get_template( - template_id, - suffix="probseg", - resolution=resolution, - label=["CSF", "GM", "WM"], - ) - ] - return tpms_std2t1w - - -@pydra.mark.task -def project_segmentation_to_t1w_space(template_id, species, workflow_name): - from niworkflows.interfaces.ants import ApplyTransforms - from mriqc.utils.misc import get_template - - if species.lower() == "human": - resolution = 1 - else: - resolution = None - - tpms_std2t1w = ApplyTransforms( - dimension=3, - default_value=0, - interpolation="Linear", - float=config.execution.ants_float, + workflow.set_output( + [("anat_report_wf_bmask_report", workflow.anat_report_wf.lzout.bmask_report)] ) - tpms_std2t1w.inputs.input_image = [ - str(p) - for p in get_template( - template_id, - suffix="probseg", - resolution=resolution, - label=["CSF", "GM", "WM"], - ) - ] - return tpms_std2t1w - - -@pydra.mark.task -def init_brain_tissue_segmentation(template_id, species, workflow_name): - from nipype.interfaces.ants import Atropos - - workflow = pydra.Workflow(name=workflow_name) - - format_tpm_names = pydra.Node( - name="format_tpm_names", - function=_format_tpm_names, - input_names=["in_files"], - output_names=["file_format"], - iterfield=["in_files"], + workflow.set_output( + [("anat_report_wf_zoom_report", workflow.anat_report_wf.lzout.zoom_report)] ) - format_tpm_names.inputs.in_files = get_template( - template_id, - suffix="probseg", - resolution=(1 if species.lower() == "human" else None), - label=["CSF", "GM", "WM"], + workflow.set_output( + [ + ( + "anat_report_wf_airmask_report", + workflow.anat_report_wf.lzout.airmask_report, + ) + ] ) - - segment = pydra.Node( - Atropos( - initialization="PriorProbabilityImages", - number_of_tissue_classes=3, - prior_weighting=0.1, - 
mrf_radius=[1, 1, 1], - mrf_smoothing_factor=0.01, - save_posteriors=True, - out_classified_image_name="segment.nii.gz", - output_posteriors_name_template="segment_%02d.nii.gz", - num_threads=config.nipype.omp_nthreads, - ), - name="segmentation", + workflow.set_output( + [("anat_report_wf_segm_report", workflow.anat_report_wf.lzout.segm_report)] ) - workflow.add(format_tpm_names) - workflow.add(segment) - workflow.connect(format_tpm_names, "file_format", segment, "prior_image") - return workflow -@pydra.mark.task -def compute_iqms(template_id, species, workflow_name): - from niworkflows.interfaces.bids import ReadSidecarJSON - from mriqc.interfaces.anatomical import Harmonize - from mriqc.workflows.utils import _tofloat - from niworkflows.interfaces.bids import ReadSidecarJSON - from mriqc.interfaces.utils import AddProvenance - from mriqc.interfaces.anatomical import ComputeQI2 - from mriqc.workflows.utils import _pop - - inputnode = pydra.Input( - name="inputnode", - spec=[ - "in_file", - "brainmask", - "airmask", - "artmask", - "headmask", - "rotmask", - "hatmask", - "segmentation", - "inu_corrected", - "in_inu", - "pvms", - "metadata", - "std_tpms", - ], - ) - outputnode = pydra.Output(name="outputnode", spec=["out_file", "noisefit"]) +def airmsk_wf( + head_mask=attrs.NOTHING, + in_file=attrs.NOTHING, + ind2std_xfm=attrs.NOTHING, + name="AirMaskWorkflow", +): + """ + Calculate air, artifacts and "hat" masks to evaluate noise in the background. - meta = pydra.Node( - ReadSidecarJSON(index_db=config.execution.bids_database_dir), name="metadata" - ) + This workflow mostly addresses the implementation of Step 1 in [Mortamet2009]_. + This work proposes to look at the signal distribution in the background, where + no signals are expected, to evaluate the spread of the noise. + It is in the background where [Mortamet2009]_ proposed to also look at the presence + of ghosts and artifacts, where they are very easy to isolate. - addprov = pydra.Node( - AddProvenance(), name="provenance", run_without_submitting=True - ) + However, [Mortamet2009]_ proposes not to look at the background around the face + because of the likely signal leakage through the phase-encoding axis sourcing from + eyeballs (and their motion). + To avoid that, [Mortamet2009]_ proposed atlas-based identification of two landmarks + (nasion and cerebellar projection on to the occipital bone). + MRIQC, for simplicity, used a such a mask created in MNI152NLin2009cAsym space and + projected it on to the individual. + Such a solution is inadequate because it doesn't drop full in-plane slices as there + will be a large rotation of the individual's tilt of the head with respect to the + template. + The new implementation (23.1.x series) follows [Mortamet2009]_ more closely, + projecting the two landmarks from the template space and leveraging + *NiTransforms* to do that. - getqi2 = pydra.Node(ComputeQI2(), name="ComputeQI2") + .. 
workflow:: - measures = pydra.Node(StructuralQC(human=species.lower() == "human"), "measures") + from mriqc.testing import mock_config + from mriqc.workflows.anatomical.base import airmsk_wf + with mock_config(): + wf = airmsk_wf() - datasink = pydra.Node( - IQMFileSink( - out_dir=config.execution.output_dir, - dataset=config.execution.dsname, - ), - name="datasink", - run_without_submitting=True, + """ + workflow = Workflow( + name=name, + input_spec={"head_mask": ty.Any, "in_file": ty.Any, "ind2std_xfm": ty.Any}, + output_spec={ + "air_mask": ty.Any, + "art_mask": ty.Any, + "hat_mask": ty.Any, + "rot_mask": ty.Any, + }, + head_mask=head_mask, + in_file=in_file, + ind2std_xfm=ind2std_xfm, + ) + + workflow.add(RotationMask(in_file=workflow.lzin.in_file, name="rotmsk")) + workflow.add( + ArtifactMask( + head_mask=workflow.lzin.head_mask, + in_file=workflow.lzin.in_file, + ind2std_xfm=workflow.lzin.ind2std_xfm, + name="qi1", + ) ) - - workflow = pydra.Workflow(name=workflow_name) - workflow.add(meta, addprov, getqi2, measures, datasink) + # fmt: off + workflow.set_output([('hat_mask', workflow.qi1.lzout.out_hat_msk)]) + workflow.set_output([('air_mask', workflow.qi1.lzout.out_air_msk)]) + workflow.set_output([('art_mask', workflow.qi1.lzout.out_art_msk)]) + workflow.set_output([('rot_mask', workflow.rotmsk.lzout.out_file)]) + # fmt: on return workflow -@pydra.mark.task -def headmsk_wf(name="HeadMaskWorkflow", omp_nthreads=1): - from niworkflows.interfaces.nibabel import ApplyMask +def headmsk_wf( + brainmask=attrs.NOTHING, + in_file=attrs.NOTHING, + in_tpms=attrs.NOTHING, + name="HeadMaskWorkflow", + omp_nthreads=1, + wf_species="human", +): + """ + Computes a head mask as in [Mortamet2009]_. - inputnode = pydra.Input(name="inputnode", spec=["in_file", "brainmask", "in_tpms"]) - outputnode = pydra.Output(name="outputnode", spec=["out_file", "out_denoised"]) + .. 
workflow:: - enhance = pydra.Node( - niu.Function( - input_names=["in_file", "wm_tpm"], - output_names=["out_file"], - function=_enhance, - ), - name="Enhance", - num_threads=omp_nthreads, - ) + from mriqc.testing import mock_config + from mriqc.workflows.anatomical.base import headmsk_wf + with mock_config(): + wf = headmsk_wf() - gradient = pydra.Node( - niu.Function( - input_names=["in_file", "brainmask", "sigma"], - output_names=["out_file"], - function=image_gradient, - ), - name="Grad", - num_threads=omp_nthreads, - ) + """ + from pydra.tasks.niworkflows.interfaces.nibabel import ApplyMask - thresh = pydra.Node( - niu.Function( - input_names=["in_file", "brainmask", "aniso", "thresh"], - output_names=["out_file"], - function=gradient_threshold, - ), - name="GradientThreshold", - num_threads=omp_nthreads, + workflow = Workflow( + name=name, + input_spec={"brainmask": ty.Any, "in_file": ty.Any, "in_tpms": ty.Any}, + output_spec={"out_denoised": ty.Any, "out_file": ty.Any}, + brainmask=brainmask, + in_file=in_file, + in_tpms=in_tpms, ) - apply_mask = pydra.Node(ApplyMask(), name="apply_mask") - - workflow = pydra.Workflow(name=name) - workflow.add(enhance, gradient, thresh, apply_mask) - - return workflow - # end of draft conversion + def _select_wm(inlist): + return [f for f in inlist if "WM" in f][0] - # Project standard TPMs into T1w space - tpms_std2t1w = pe.MapNode( - ApplyTransforms( - dimension=3, - default_value=0, - interpolation="Gaussian", - float=config.execution.ants_float, - ), - iterfield=["input_image"], - name="tpms_std2t1w", + workflow.add( + FunctionTask( + func=_enhance, + input_spec=SpecInfo( + name="FunctionIn", + bases=(BaseSpec,), + fields=[("in_file", ty.Any), ("wm_tpm", ty.Any)], + ), + output_spec=SpecInfo( + name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] + ), + in_file=workflow.lzin.in_file, + wm_tpm=workflow.lzin.in_tpms, + name="enhance", + ) ) - tpms_std2t1w.inputs.input_image = [ - str(p) - for p in get_template( - config.workflow.template_id, - suffix="probseg", - resolution=(1 if config.workflow.species.lower() == "human" else None), - label=["CSF", "GM", "WM"], + workflow.add( + FunctionTask( + func=image_gradient, + input_spec=SpecInfo( + name="FunctionIn", + bases=(BaseSpec,), + fields=[("in_file", ty.Any), ("brainmask", ty.Any), ("sigma", ty.Any)], + ), + output_spec=SpecInfo( + name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] + ), + brainmask=workflow.lzin.brainmask, + in_file=workflow.enhance.lzout.out_file, + name="gradient", ) - ] - - # Project MNI segmentation to T1 space - tpms_std2t1w = pe.MapNode( - ApplyTransforms( - dimension=3, - default_value=0, - interpolation="Linear", - float=config.execution.ants_float, - ), - iterfield=["input_image"], - name="tpms_std2t1w", ) - tpms_std2t1w.inputs.input_image = [ - str(p) - for p in get_template( - config.workflow.template_id, - suffix="probseg", - resolution=(1 if config.workflow.species.lower() == "human" else None), - label=["CSF", "GM", "WM"], + workflow.add( + FunctionTask( + func=gradient_threshold, + input_spec=SpecInfo( + name="FunctionIn", + bases=(BaseSpec,), + fields=[ + ("in_file", ty.Any), + ("brainmask", ty.Any), + ("aniso", ty.Any), + ("thresh", ty.Any), + ], + ), + output_spec=SpecInfo( + name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] + ), + brainmask=workflow.lzin.brainmask, + in_file=workflow.gradient.lzout.out_file, + name="thresh", ) - ] - + ) + if wf_species != "human": + workflow.gradient.inputs.sigma = 3.0 + 
workflow.thresh.inputs.aniso = True + workflow.thresh.inputs.thresh = 4.0 + workflow.add( + ApplyMask( + in_file=workflow.enhance.lzout.out_file, + in_mask=workflow.lzin.brainmask, + name="apply_mask", + ) + ) # fmt: off - workflow.connect([ - (inputnode, norm, [("moving_image", "moving_image"), - ("moving_mask", "moving_mask"), - ("modality", "reference")]), - (inputnode, tpms_std2t1w, [("moving_image", "reference_image")]), - (norm, tpms_std2t1w, [ - ("inverse_composite_transform", "transforms"), - ]), - (norm, outputnode, [ - ("composite_transform", "ind2std_xfm"), - ("out_report", "out_report"), - ]), - (tpms_std2t1w, outputnode, [("output_image", "out_tpms")]), - ]) + workflow.enhance.inputs.wm_tpm = workflow.lzin.in_tpms + workflow.set_output([('out_file', workflow.thresh.lzout.out_file)]) + workflow.set_output([('out_denoised', workflow.apply_mask.lzout.out_file)]) # fmt: on return workflow -def init_brain_tissue_segmentation(name="brain_tissue_segmentation"): +def init_brain_tissue_segmentation( + brainmask=attrs.NOTHING, + in_file=attrs.NOTHING, + name="brain_tissue_segmentation", + nipype_omp_nthreads=12, + std_tpms=attrs.NOTHING, +): """ Setup a workflow for brain tissue segmentation. @@ -652,433 +422,334 @@ def init_brain_tissue_segmentation(name="brain_tissue_segmentation"): wf = init_brain_tissue_segmentation() """ - from nipype.interfaces.ants import Atropos + from pydra.tasks.ants.auto import Atropos def _format_tpm_names(in_files, fname_string=None): + import glob from pathlib import Path import nibabel as nb - import glob out_path = Path.cwd().absolute() - # copy files to cwd and rename iteratively for count, fname in enumerate(in_files): img = nb.load(fname) extension = "".join(Path(fname).suffixes) out_fname = f"priors_{1 + count:02}{extension}" nb.save(img, Path(out_path, out_fname)) - if fname_string is None: fname_string = f"priors_%02d{extension}" - out_files = [ str(prior) for prior in glob.glob(str(Path(out_path, f"priors*{extension}"))) ] - # return path with c-style format string for Atropos file_format = str(Path(out_path, fname_string)) return file_format, out_files - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface(fields=["in_file", "brainmask", "std_tpms"]), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["out_segm", "out_pvms"]), - name="outputnode", + workflow = Workflow( + name=name, + input_spec={"brainmask": ty.Any, "in_file": ty.Any, "std_tpms": ty.Any}, + output_spec={"out_pvms": ty.Any, "out_segm": ty.Any}, + brainmask=brainmask, + in_file=in_file, + std_tpms=std_tpms, ) - format_tpm_names = pe.Node( - niu.Function( - input_names=["in_files"], - output_names=["file_format"], - function=_format_tpm_names, + workflow.add( + FunctionTask( execution={"keep_inputs": True, "remove_unnecessary_outputs": False}, - ), - name="format_tpm_names", + func=_format_tpm_names, + input_spec=SpecInfo( + name="FunctionIn", bases=(BaseSpec,), fields=[("in_files", ty.Any)] + ), + output_spec=SpecInfo( + name="FunctionOut", bases=(BaseSpec,), fields=[("file_format", ty.Any)] + ), + in_files=workflow.lzin.std_tpms, + name="format_tpm_names", + ) ) - - segment = pe.Node( + workflow.add( Atropos( initialization="PriorProbabilityImages", - number_of_tissue_classes=3, - prior_weighting=0.1, mrf_radius=[1, 1, 1], mrf_smoothing_factor=0.01, - save_posteriors=True, + num_threads=nipype_omp_nthreads, + number_of_tissue_classes=3, out_classified_image_name="segment.nii.gz", 
output_posteriors_name_template="segment_%02d.nii.gz", - num_threads=config.nipype.omp_nthreads, - ), - name="segmentation", - mem_gb=5, - num_threads=config.nipype.omp_nthreads, + prior_weighting=0.1, + save_posteriors=True, + intensity_images=workflow.lzin.in_file, + mask_image=workflow.lzin.brainmask, + name="segment", + ) ) - # fmt: off - workflow.connect([ - (inputnode, segment, [("in_file", "intensity_images"), - ("brainmask", "mask_image")]), - (inputnode, format_tpm_names, [('std_tpms', 'in_files')]), - (format_tpm_names, segment, [(('file_format', _pop), 'prior_image')]), - (segment, outputnode, [("classified_image", "out_segm"), - ("posteriors", "out_pvms")]), - ]) - # fmt: on - return workflow + @pydra.mark.task + def format_tpm_names_file_format_to_segment_prior_image_callable(in_: ty.Any) -> ty.Any: + return _pop(in_) -def compute_iqms(name="ComputeIQMs"): - """ - Setup the workflow that actually computes the IQMs. + workflow.add(format_tpm_names_file_format_to_segment_prior_image_callable(in_=workflow.format_tpm_names.lzout.file_format, name="format_tpm_names_file_format_to_segment_prior_image_callable")) - .. workflow:: + workflow.segment.inputs.prior_image = workflow.format_tpm_names_file_format_to_segment_prior_image_callable.lzout.out + workflow.set_output([('out_segm', workflow.segment.lzout.classified_image)]) + workflow.set_output([('out_pvms', workflow.segment.lzout.posteriors)]) + # fmt: on - from mriqc.workflows.anatomical.base import compute_iqms - from mriqc.testing import mock_config - with mock_config(): - wf = compute_iqms() + return workflow - """ - from niworkflows.interfaces.bids import ReadSidecarJSON - - from mriqc.interfaces.anatomical import Harmonize - from mriqc.workflows.utils import _tofloat - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_file", - "in_ras", - "brainmask", - "airmask", - "artmask", - "headmask", - "rotmask", - "hatmask", - "segmentation", - "inu_corrected", - "in_inu", - "pvms", - "metadata", - "std_tpms", - ] - ), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["out_file", "noisefit"]), - name="outputnode", - ) - # Extract metadata - meta = pe.Node( - ReadSidecarJSON(index_db=config.execution.bids_database_dir), name="metadata" +def spatial_normalization( + exec_ants_float=False, + exec_debug=False, + modality=attrs.NOTHING, + moving_image=attrs.NOTHING, + moving_mask=attrs.NOTHING, + name="SpatialNormalization", + nipype_omp_nthreads=12, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): + """Create a simplified workflow to perform fast spatial normalization.""" + from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( + SpatialNormalizationRPT as RobustMNINormalization, ) - # Add provenance - addprov = pe.Node(AddProvenance(), name="provenance", run_without_submitting=True) - - # AFNI check smoothing - fwhm_interface = get_fwhmx() - - fwhm = pe.Node(fwhm_interface, name="smoothness") - - # Harmonize - homog = pe.Node(Harmonize(), name="harmonize") - if config.workflow.species.lower() != "human": - homog.inputs.erodemsk = False - homog.inputs.thresh = 0.8 - - # Mortamet's QI2 - getqi2 = pe.Node(ComputeQI2(), name="ComputeQI2") - - # Compute python-coded measures - measures = pe.Node( - StructuralQC(human=config.workflow.species.lower() == "human"), "measures" + # Have the template id handy + tpl_id = wf_template_id + # Define workflow interface + workflow = Workflow( + name=name, + input_spec={"modality": ty.Any, 
"moving_image": ty.Any, "moving_mask": ty.Any}, + output_spec={"ind2std_xfm": ty.Any, "out_tpms": ty.Any, "report": ty.Any}, + modality=modality, + moving_image=moving_image, + moving_mask=moving_mask, ) - datasink = pe.Node( - IQMFileSink( - out_dir=config.execution.output_dir, - dataset=config.execution.dsname, - ), - name="datasink", - run_without_submitting=True, + # Spatial normalization + workflow.add( + RobustMNINormalization( + flavor=["testing", "fast"][exec_debug], + float=exec_ants_float, + generate_report=True, + num_threads=nipype_omp_nthreads, + template=tpl_id, + moving_image=workflow.lzin.moving_image, + moving_mask=workflow.lzin.moving_mask, + reference=workflow.lzin.modality, + name="norm", + ) ) - - def _getwm(inlist): - return inlist[-1] - + if wf_species.lower() == "human": + workflow.norm.inputs.reference_mask = str( + get_template(tpl_id, resolution=2, desc="brain", suffix="mask") + ) + else: + workflow.norm.inputs.reference_image = str(get_template(tpl_id, suffix="T2w")) + workflow.norm.inputs.reference_mask = str( + get_template(tpl_id, desc="brain", suffix="mask")[0] + ) + # Project standard TPMs into T1w space + workflow.add( + ApplyTransforms( + default_value=0, + dimension=3, + float=exec_ants_float, + interpolation="Gaussian", + reference_image=workflow.lzin.moving_image, + transforms=workflow.norm.lzout.inverse_composite_transform, + name="tpms_std2t1w", + ) + ) + workflow.tpms_std2t1w.inputs.input_image = [ + str(p) + for p in get_template( + wf_template_id, + suffix="probseg", + resolution=(1 if wf_species.lower() == "human" else None), + label=["CSF", "GM", "WM"], + ) + ] # fmt: off - workflow.connect([ - (inputnode, meta, [("in_file", "in_file")]), - (inputnode, datasink, [("in_file", "in_file"), - (("in_file", _get_mod), "modality")]), - (inputnode, addprov, [(("in_file", _get_mod), "modality")]), - (meta, datasink, [("subject", "subject_id"), - ("session", "session_id"), - ("task", "task_id"), - ("acquisition", "acq_id"), - ("reconstruction", "rec_id"), - ("run", "run_id"), - ("out_dict", "metadata")]), - (inputnode, addprov, [("in_file", "in_file"), - ("airmask", "air_msk"), - ("rotmask", "rot_msk")]), - (inputnode, getqi2, [("in_ras", "in_file"), - ("hatmask", "air_msk")]), - (inputnode, homog, [("inu_corrected", "in_file"), - (("pvms", _getwm), "wm_mask")]), - (inputnode, measures, [("in_inu", "in_bias"), - ("in_ras", "in_file"), - ("airmask", "air_msk"), - ("headmask", "head_msk"), - ("artmask", "artifact_msk"), - ("rotmask", "rot_msk"), - ("segmentation", "in_segm"), - ("pvms", "in_pvms"), - ("std_tpms", "mni_tpms")]), - (inputnode, fwhm, [("in_ras", "in_file"), - ("brainmask", "mask")]), - (homog, measures, [("out_file", "in_noinu")]), - (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]), - (measures, datasink, [("out_qc", "root")]), - (addprov, datasink, [("out_prov", "provenance")]), - (getqi2, datasink, [("qi2", "qi_2")]), - (getqi2, outputnode, [("out_file", "noisefit")]), - (datasink, outputnode, [("out_file", "out_file")]), - ]) + workflow.set_output([('ind2std_xfm', workflow.norm.lzout.composite_transform)]) + workflow.set_output([('report', workflow.norm.lzout.out_report)]) + workflow.set_output([('out_tpms', workflow.tpms_std2t1w.lzout.output_image)]) # fmt: on return workflow -def headmsk_wf(name="HeadMaskWorkflow", omp_nthreads=1): +def compute_iqms( + airmask=attrs.NOTHING, + artmask=attrs.NOTHING, + brainmask=attrs.NOTHING, + hatmask=attrs.NOTHING, + headmask=attrs.NOTHING, + in_inu=attrs.NOTHING, + in_ras=attrs.NOTHING, + 
inu_corrected=attrs.NOTHING, + name="ComputeIQMs", + pvms=attrs.NOTHING, + rotmask=attrs.NOTHING, + segmentation=attrs.NOTHING, + std_tpms=attrs.NOTHING, + wf_species="human", +): """ - Computes a head mask as in [Mortamet2009]_. + Setup the workflow that actually computes the IQMs. .. workflow:: + from mriqc.workflows.anatomical.base import compute_iqms from mriqc.testing import mock_config - from mriqc.workflows.anatomical.base import headmsk_wf with mock_config(): - wf = headmsk_wf() + wf = compute_iqms() """ + from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON + from pydra.tasks.mriqc.interfaces.anatomical import Harmonize + from pydra.tasks.mriqc.workflows.utils import _tofloat - from niworkflows.interfaces.nibabel import ApplyMask - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface(fields=["in_file", "brainmask", "in_tpms"]), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["out_file", "out_denoised"]), name="outputnode" + workflow = Workflow( + name=name, + input_spec={ + "airmask": ty.Any, + "artmask": ty.Any, + "brainmask": ty.Any, + "hatmask": ty.Any, + "headmask": ty.Any, + "in_inu": ty.Any, + "in_ras": ty.Any, + "inu_corrected": ty.Any, + "pvms": ty.Any, + "rotmask": ty.Any, + "segmentation": ty.Any, + "std_tpms": ty.Any, + }, + output_spec={"measures": ty.Any, "noise_report": ty.Any}, + airmask=airmask, + artmask=artmask, + brainmask=brainmask, + hatmask=hatmask, + headmask=headmask, + in_inu=in_inu, + in_ras=in_ras, + inu_corrected=inu_corrected, + pvms=pvms, + rotmask=rotmask, + segmentation=segmentation, + std_tpms=std_tpms, ) - def _select_wm(inlist): - return [f for f in inlist if "WM" in f][0] + # Extract metadata - enhance = pe.Node( - niu.Function( - input_names=["in_file", "wm_tpm"], - output_names=["out_file"], - function=_enhance, - ), - name="Enhance", - num_threads=omp_nthreads, - ) + # Add provenance - gradient = pe.Node( - niu.Function( - input_names=["in_file", "brainmask", "sigma"], - output_names=["out_file"], - function=image_gradient, - ), - name="Grad", - num_threads=omp_nthreads, + # AFNI check smoothing + fwhm_interface = get_fwhmx() + fwhm = fwhm_interface + fwhm.name = "fwhm" + fwhm.inputs.in_file = workflow.lzin.in_ras + fwhm.inputs.mask = workflow.lzin.brainmask + workflow.add(fwhm) + # Harmonize + workflow.add( + Harmonize( + in_file=workflow.lzin.inu_corrected, + wm_mask=workflow.lzin.pvms, + name="homog", + ) + ) + if wf_species.lower() != "human": + workflow.homog.inputs.erodemsk = False + workflow.homog.inputs.thresh = 0.8 + # Mortamet's QI2 + workflow.add( + ComputeQI2( + air_msk=workflow.lzin.hatmask, in_file=workflow.lzin.in_ras, name="getqi2" + ) ) - thresh = pe.Node( - niu.Function( - input_names=["in_file", "brainmask", "aniso", "thresh"], - output_names=["out_file"], - function=gradient_threshold, - ), - name="GradientThreshold", - num_threads=omp_nthreads, + # Compute python-coded measures + workflow.add( + StructuralQC( + human=wf_species.lower() == "human", + air_msk=workflow.lzin.airmask, + artifact_msk=workflow.lzin.artmask, + head_msk=workflow.lzin.headmask, + in_bias=workflow.lzin.in_inu, + in_file=workflow.lzin.in_ras, + in_noinu=workflow.homog.lzout.out_file, + in_pvms=workflow.lzin.pvms, + in_segm=workflow.lzin.segmentation, + mni_tpms=workflow.lzin.std_tpms, + rot_msk=workflow.lzin.rotmask, + name="measures", + ) ) - if config.workflow.species != "human": - gradient.inputs.sigma = 3.0 - thresh.inputs.aniso = True - thresh.inputs.thresh = 4.0 - 
apply_mask = pe.Node(ApplyMask(), name="apply_mask") + def _getwm(inlist): + return inlist[-1] # fmt: off - workflow.connect([ - (inputnode, enhance, [("in_file", "in_file"), - (("in_tpms", _select_wm), "wm_tpm")]), - (inputnode, thresh, [("brainmask", "brainmask")]), - (inputnode, gradient, [("brainmask", "brainmask")]), - (inputnode, apply_mask, [("brainmask", "in_mask")]), - (enhance, gradient, [("out_file", "in_file")]), - (gradient, thresh, [("out_file", "in_file")]), - (enhance, apply_mask, [("out_file", "in_file")]), - (thresh, outputnode, [("out_file", "out_file")]), - (apply_mask, outputnode, [("out_file", "out_denoised")]), - ]) - # fmt: on - return workflow + workflow.homog.inputs.wm_mask = workflow.lzin.pvms -def airmsk_wf(name="AirMaskWorkflow"): - """ - Calculate air, artifacts and "hat" masks to evaluate noise in the background. + @pydra.mark.task + def fwhm_fwhm_to_measures_in_fwhm_callable(in_: ty.Any) -> ty.Any: + return _tofloat(in_) - This workflow mostly addresses the implementation of Step 1 in [Mortamet2009]_. - This work proposes to look at the signal distribution in the background, where - no signals are expected, to evaluate the spread of the noise. - It is in the background where [Mortamet2009]_ proposed to also look at the presence - of ghosts and artifacts, where they are very easy to isolate. + workflow.add(fwhm_fwhm_to_measures_in_fwhm_callable(in_=workflow.fwhm.lzout.fwhm, name="fwhm_fwhm_to_measures_in_fwhm_callable")) - However, [Mortamet2009]_ proposes not to look at the background around the face - because of the likely signal leakage through the phase-encoding axis sourcing from - eyeballs (and their motion). - To avoid that, [Mortamet2009]_ proposed atlas-based identification of two landmarks - (nasion and cerebellar projection on to the occipital bone). - MRIQC, for simplicity, used a such a mask created in MNI152NLin2009cAsym space and - projected it on to the individual. - Such a solution is inadequate because it doesn't drop full in-plane slices as there - will be a large rotation of the individual's tilt of the head with respect to the - template. - The new implementation (23.1.x series) follows [Mortamet2009]_ more closely, - projecting the two landmarks from the template space and leveraging - *NiTransforms* to do that. + workflow.measures.inputs.in_fwhm = workflow.fwhm_fwhm_to_measures_in_fwhm_callable.lzout.out + workflow.set_output([('measures', workflow.measures.lzout.out_qc)]) + workflow.set_output([('noise_report', workflow.getqi2.lzout.out_file)]) - .. 
workflow:: - - from mriqc.testing import mock_config - from mriqc.workflows.anatomical.base import airmsk_wf - with mock_config(): - wf = airmsk_wf() - - """ - workflow = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_file", - "in_mask", - "head_mask", - "ind2std_xfm", - ] - ), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["hat_mask", "air_mask", "art_mask", "rot_mask"]), - name="outputnode", - ) - - rotmsk = pe.Node(RotationMask(), name="RotationMask") - qi1 = pe.Node(ArtifactMask(), name="ArtifactMask") - - # fmt: off - workflow.connect([ - (inputnode, rotmsk, [("in_file", "in_file")]), - (inputnode, qi1, [("in_file", "in_file"), - ("head_mask", "head_mask"), - ("ind2std_xfm", "ind2std_xfm")]), - (qi1, outputnode, [("out_hat_msk", "hat_mask"), - ("out_air_msk", "air_mask"), - ("out_art_msk", "art_mask")]), - (rotmsk, outputnode, [("out_file", "rot_mask")]) - ]) # fmt: on return workflow -def _binarize(in_file, threshold=0.5, out_file=None): - import os.path as op +def _enhance(in_file, wm_tpm, out_file=None): import nibabel as nb import numpy as np - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath(f"{fname}_bin{ext}") - - nii = nb.load(in_file) - data = nii.get_fdata() > threshold - - hdr = nii.header.copy() - hdr.set_data_dtype(np.uint8) - nb.Nifti1Image(data.astype(np.uint8), nii.affine, hdr).to_filename(out_file) - return out_file - - -def _enhance(in_file, wm_tpm, out_file=None): - import numpy as np - import nibabel as nb - from mriqc.workflows.utils import generate_filename + from pydra.tasks.mriqc.workflows.utils import generate_filename imnii = nb.load(in_file) data = imnii.get_fdata(dtype=np.float32) range_max = np.percentile(data[data > 0], 99.98) excess = data > range_max - wm_prob = nb.load(wm_tpm).get_fdata() wm_prob[wm_prob < 0] = 0 # Ensure no negative values wm_prob[excess] = 0 # Ensure no outliers are considered - # Calculate weighted mean and standard deviation wm_mu = np.average(data, weights=wm_prob) wm_sigma = np.sqrt(np.average((data - wm_mu) ** 2, weights=wm_prob)) - # Resample signal excess pixels data[excess] = np.random.normal(loc=wm_mu, scale=wm_sigma, size=excess.sum()) - out_file = out_file or str(generate_filename(in_file, suffix="enhanced").absolute()) nb.Nifti1Image(data, imnii.affine, imnii.header).to_filename(out_file) return out_file -def image_gradient(in_file, brainmask, sigma=4.0, out_file=None): - """Computes the magnitude gradient of an image using numpy""" - import nibabel as nb - import numpy as np - from scipy.ndimage import gaussian_gradient_magnitude as gradient - from mriqc.workflows.utils import generate_filename +def _get_mod(in_file): - imnii = nb.load(in_file) - mask = np.bool_(nb.load(brainmask).dataobj) - data = imnii.get_fdata(dtype=np.float32) - datamax = np.percentile(data.reshape(-1), 99.5) - data *= 100 / datamax - data[mask] = 100 + from pathlib import Path - zooms = np.array(imnii.header.get_zooms()[:3]) - sigma_xyz = 2 - zooms / min(zooms) - grad = gradient(data, sigma * sigma_xyz) - gradmax = np.percentile(grad.reshape(-1), 99.5) - grad *= 100.0 - grad /= gradmax - grad[mask] = 100 + in_file = Path(in_file) + extension = "".join(in_file.suffixes) + return in_file.name.replace(extension, "").split("_")[-1] - out_file = out_file or str(generate_filename(in_file, suffix="grad").absolute()) - nb.Nifti1Image(grad, imnii.affine, 
imnii.header).to_filename(out_file) - return out_file + +def _pop(inlist): + + if isinstance(inlist, (list, tuple)): + return inlist[0] + return inlist def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=False): @@ -1086,7 +757,7 @@ def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=Fal import nibabel as nb import numpy as np from scipy import ndimage as sim - from mriqc.workflows.utils import generate_filename + from pydra.tasks.mriqc.workflows.utils import generate_filename if not aniso: struct = sim.iterate_structure(sim.generate_binary_structure(3, 2), 2) @@ -1096,64 +767,82 @@ def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=Fal zooms = img.header.get_zooms() dist = max(zooms) dim = img.header["dim"][0] - x = np.ones((5) * np.ones(dim, dtype=np.int8)) np.put(x, x.size // 2, 0) dist_matrix = np.round(sim.distance_transform_edt(x, sampling=zooms), 5) struct = dist_matrix <= dist - imnii = nb.load(in_file) - hdr = imnii.header.copy() hdr.set_data_dtype(np.uint8) - data = imnii.get_fdata(dtype=np.float32) - mask = np.zeros_like(data, dtype=np.uint8) mask[data > thresh] = 1 mask = sim.binary_closing(mask, struct, iterations=2).astype(np.uint8) mask = sim.binary_erosion(mask, sim.generate_binary_structure(3, 2)).astype( np.uint8 ) - segdata = np.asanyarray(nb.load(brainmask).dataobj) > 0 segdata = sim.binary_dilation(segdata, struct, iterations=2, border_value=1).astype( np.uint8 ) mask[segdata] = 1 - # Remove small objects label_im, nb_labels = sim.label(mask) artmsk = np.zeros_like(mask) if nb_labels > 2: sizes = sim.sum(mask, label_im, list(range(nb_labels + 1))) - ordered = list(reversed(sorted(zip(sizes, list(range(nb_labels + 1)))))) + ordered = sorted(zip(sizes, list(range(nb_labels + 1))), reverse=True) for _, label in ordered[2:]: mask[label_im == label] = 0 artmsk[label_im == label] = 1 - mask = sim.binary_fill_holes(mask, struct).astype( np.uint8 ) # pylint: disable=no-member - out_file = out_file or str(generate_filename(in_file, suffix="gradmask").absolute()) nb.Nifti1Image(mask, imnii.affine, hdr).to_filename(out_file) return out_file -def _get_imgtype(in_file): - from pathlib import Path - - return int(Path(in_file).name.rstrip(".gz").rstrip(".nii").split("_")[-1][1]) +def image_gradient(in_file, brainmask, sigma=4.0, out_file=None): + """Computes the magnitude gradient of an image using numpy""" + import nibabel as nb + import numpy as np + from scipy.ndimage import gaussian_gradient_magnitude as gradient + from pydra.tasks.mriqc.workflows.utils import generate_filename + imnii = nb.load(in_file) + mask = np.bool_(nb.load(brainmask).dataobj) + data = imnii.get_fdata(dtype=np.float32) + datamax = np.percentile(data.reshape(-1), 99.5) + data *= 100 / datamax + data[mask] = 100 + zooms = np.array(imnii.header.get_zooms()[:3]) + sigma_xyz = 2 - zooms / min(zooms) + grad = gradient(data, sigma * sigma_xyz) + gradmax = np.percentile(grad.reshape(-1), 99.5) + grad *= 100.0 + grad /= gradmax + grad[mask] = 100 + out_file = out_file or str(generate_filename(in_file, suffix="grad").absolute()) + nb.Nifti1Image(grad, imnii.affine, imnii.header).to_filename(out_file) + return out_file -def _get_mod(in_file): - from pathlib import Path - return Path(in_file).name.rstrip(".gz").rstrip(".nii").split("_")[-1] +def _binarize(in_file, threshold=0.5, out_file=None): + import os.path as op + import nibabel as nb + import numpy as np -def _pop(inlist): - if isinstance(inlist, (list, tuple)): - return inlist[0] - 
return inlist + if out_file is None: + fname, ext = op.splitext(op.basename(in_file)) + if ext == ".gz": + fname, ext2 = op.splitext(fname) + ext = ext2 + ext + out_file = op.abspath(f"{fname}_bin{ext}") + nii = nb.load(in_file) + data = nii.get_fdata() > threshold + hdr = nii.header.copy() + hdr.set_data_dtype(np.uint8) + nb.Nifti1Image(data.astype(np.uint8), nii.affine, hdr).to_filename(out_file) + return out_file diff --git a/pydra/tasks/mriqc/workflows/anatomical/output.py b/pydra/tasks/mriqc/workflows/anatomical/output.py index d85163d..62f8432 100644 --- a/pydra/tasks/mriqc/workflows/anatomical/output.py +++ b/pydra/tasks/mriqc/workflows/anatomical/output.py @@ -1,33 +1,25 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2023 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -"""Writing out anatomical reportlets.""" -from mriqc import config -from mriqc.interfaces import DerivativesDataSink -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu - - -def init_anat_report_wf(name: str = "anat_report_wf"): +import attrs +import logging +from pathlib import Path +from pydra.engine import Workflow +import typing as ty + + +logger = logging.getLogger(__name__) + + +def init_anat_report_wf( + airmask=attrs.NOTHING, + artmask=attrs.NOTHING, + brainmask=attrs.NOTHING, + exec_verbose_reports=False, + exec_work_dir=None, + headmask=attrs.NOTHING, + in_ras=attrs.NOTHING, + name: str = "anat_report_wf", + segmentation=attrs.NOTHING, + wf_species="human", +): """ Generate the components of the individual report. 
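
Taken stand-alone, the converted report workflow is now a plain Pydra workflow: the former inputnode fields arrive as keyword arguments and each reportlet leaves as a named workflow output. A minimal sketch of driving it directly, assuming the converted package is importable; all file names below are placeholders:

    import pydra
    from pydra.tasks.mriqc.workflows.anatomical.output import init_anat_report_wf

    # Placeholder inputs: a RAS-oriented T1w image plus the masks and
    # segmentation that anat_qc_workflow computes upstream.
    wf = init_anat_report_wf(
        in_ras="sub-01_T1w_ras.nii.gz",
        brainmask="sub-01_brainmask.nii.gz",
        headmask="sub-01_headmask.nii.gz",
        airmask="sub-01_airmask.nii.gz",
        artmask="sub-01_artmask.nii.gz",
        segmentation="sub-01_segm.nii.gz",
        exec_verbose_reports=True,  # also build the contour reportlets
    )

    # Same submission pattern used elsewhere in this patch series.
    with pydra.Submitter(plugin="cf") as sub:
        wf(submitter=sub)

    print(wf.result().output.zoom_report)  # path to the zoomed-mosaic reportlet

Only the zoomed and background mosaics are produced when exec_verbose_reports is left at False; the brain-mask, segmentation, artifact, air-mask and head-mask contour reportlets are gated behind the verbose branch in the hunk that follows.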
@@ -39,241 +31,142 @@ def init_anat_report_wf(name: str = "anat_report_wf"): wf = init_anat_report_wf() """ - from nireports.interfaces import PlotMosaic + from pydra.tasks.nireports.interfaces import PlotMosaic # from mriqc.interfaces.reports import IndividualReport - - verbose = config.execution.verbose_reports - reportlets_dir = config.execution.work_dir / "reportlets" - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_ras", - "brainmask", - "headmask", - "airmask", - "artmask", - "rotmask", - "segmentation", - "inu_corrected", - "noisefit", - "in_iqms", - "mni_report", - "api_id", - "name_source", - ] - ), - name="inputnode", - ) - - mosaic_zoom = pe.Node( - PlotMosaic(cmap="Greys_r"), - name="PlotMosaicZoomed", - ) - - mosaic_noise = pe.Node( - PlotMosaic(only_noise=True, cmap="viridis_r"), - name="PlotMosaicNoise", - ) - if config.workflow.species.lower() in ("rat", "mouse"): - mosaic_zoom.inputs.view = ["coronal", "axial"] - mosaic_noise.inputs.view = ["coronal", "axial"] - - ds_report_zoomed = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="zoomed", - datatype="figures", - ), - name="ds_report_zoomed", - run_without_submitting=True, - ) - - ds_report_background = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="background", - datatype="figures", - ), - name="ds_report_background", - run_without_submitting=True, - ) + if exec_work_dir is None: + exec_work_dir = Path.cwd() + + verbose = exec_verbose_reports + reportlets_dir = exec_work_dir / "reportlets" + workflow = Workflow( + name=name, + input_spec={ + "airmask": ty.Any, + "artmask": ty.Any, + "brainmask": ty.Any, + "headmask": ty.Any, + "in_ras": ty.Any, + "segmentation": ty.Any, + }, + output_spec={ + "airmask_report": ty.Any, + "artmask_report": ty.Any, + "bg_report": ty.Any, + "bmask_report": ty.Any, + "headmask_report": ty.Any, + "segm_report": ty.Any, + "zoom_report": ty.Any, + }, + airmask=airmask, + artmask=artmask, + brainmask=brainmask, + headmask=headmask, + in_ras=in_ras, + segmentation=segmentation, + ) + + workflow.add( + PlotMosaic( + cmap="Greys_r", + bbox_mask_file=workflow.lzin.brainmask, + in_file=workflow.lzin.in_ras, + name="mosaic_zoom", + ) + ) + workflow.add( + PlotMosaic( + cmap="viridis_r", + only_noise=True, + in_file=workflow.lzin.in_ras, + name="mosaic_noise", + ) + ) + if wf_species.lower() in ("rat", "mouse"): + workflow.mosaic_zoom.inputs.view = ["coronal", "axial"] + workflow.mosaic_noise.inputs.view = ["coronal", "axial"] # fmt: off - workflow.connect([ - # (inputnode, rnode, [("in_iqms", "in_iqms")]), - (inputnode, mosaic_zoom, [("in_ras", "in_file"), - ("brainmask", "bbox_mask_file")]), - (inputnode, mosaic_noise, [("in_ras", "in_file")]), - (inputnode, ds_report_zoomed, [("name_source", "source_file")]), - (inputnode, ds_report_background, [("name_source", "source_file")]), - (mosaic_zoom, ds_report_zoomed, [("out_file", "in_file")]), - (mosaic_noise, ds_report_background, [("out_file", "in_file")]), - ]) + workflow.set_output([('zoom_report', workflow.mosaic_zoom.lzout.out_file)]) + workflow.set_output([('bg_report', workflow.mosaic_noise.lzout.out_file)]) # fmt: on - if not verbose: - return workflow - - from nireports.interfaces import PlotContours + from pydra.tasks.nireports.interfaces import PlotContours - display_mode = "y" if config.workflow.species.lower() in ("rat", "mouse") else "z" - plot_segm = pe.Node( + display_mode = "y" if wf_species.lower() in ("rat", "mouse") else "z" + 
workflow.add( PlotContours( + colors=["r", "g", "b"], + cut_coords=10, display_mode=display_mode, levels=[0.5, 1.5, 2.5], - cut_coords=10, - colors=["r", "g", "b"], - ), - name="PlotSegmentation", + in_contours=workflow.lzin.segmentation, + in_file=workflow.lzin.in_ras, + name="plot_segm", + ) ) - ds_report_segm = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="segmentation", - datatype="figures", - ), - name="ds_report_segm", - run_without_submitting=True, - ) - - plot_bmask = pe.Node( + workflow.add( PlotContours( - display_mode=display_mode, - levels=[0.5], colors=["r"], cut_coords=10, + display_mode=display_mode, + levels=[0.5], out_file="bmask", - ), - name="PlotBrainmask", + in_contours=workflow.lzin.brainmask, + in_file=workflow.lzin.in_ras, + name="plot_bmask", + ) ) - ds_report_bmask = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="brainmask", - datatype="figures", - ), - name="ds_report_bmask", - run_without_submitting=True, - ) - - plot_artmask = pe.Node( + workflow.add( PlotContours( - display_mode=display_mode, - levels=[0.5], colors=["r"], cut_coords=10, + display_mode=display_mode, + levels=[0.5], out_file="artmask", saturate=True, - ), - name="PlotArtmask", - ) - - ds_report_artmask = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="artifacts", - datatype="figures", - ), - name="ds_report_artmask", - run_without_submitting=True, + in_contours=workflow.lzin.artmask, + in_file=workflow.lzin.in_ras, + name="plot_artmask", + ) ) # NOTE: humans switch on these two to coronal view. - display_mode = "y" if config.workflow.species.lower() in ("rat", "mouse") else "x" - plot_airmask = pe.Node( + display_mode = "y" if wf_species.lower() in ("rat", "mouse") else "x" + workflow.add( PlotContours( - display_mode=display_mode, - levels=[0.5], colors=["r"], cut_coords=6, + display_mode=display_mode, + levels=[0.5], out_file="airmask", - ), - name="PlotAirmask", + in_contours=workflow.lzin.airmask, + in_file=workflow.lzin.in_ras, + name="plot_airmask", + ) ) - ds_report_airmask = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="airmask", - datatype="figures", - ), - name="ds_report_airmask", - run_without_submitting=True, - ) - - plot_headmask = pe.Node( + workflow.add( PlotContours( - display_mode=display_mode, - levels=[0.5], colors=["r"], cut_coords=6, + display_mode=display_mode, + levels=[0.5], out_file="headmask", - ), - name="PlotHeadmask", - ) - - ds_report_headmask = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="head", - datatype="figures", - ), - name="ds_report_headmask", - run_without_submitting=True, - ) - - ds_report_norm = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="norm", - datatype="figures", - ), - name="ds_report_norm", - run_without_submitting=True, - ) - - ds_report_noisefit = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="noisefit", - datatype="figures", - ), - name="ds_report_noisefit", - run_without_submitting=True, + in_contours=workflow.lzin.headmask, + in_file=workflow.lzin.in_ras, + name="plot_headmask", + ) ) # fmt: off - workflow.connect([ - (inputnode, ds_report_segm, [("name_source", "source_file")]), - (inputnode, ds_report_bmask, [("name_source", "source_file")]), - (inputnode, ds_report_artmask, [("name_source", "source_file")]), - (inputnode, ds_report_airmask, [("name_source", "source_file")]), - (inputnode, ds_report_headmask, [("name_source", "source_file")]), - (inputnode, 
ds_report_norm, [("mni_report", "in_file"), - ("name_source", "source_file")]), - (inputnode, ds_report_noisefit, [("noisefit", "in_file"), - ("name_source", "source_file")]), - (inputnode, plot_segm, [("in_ras", "in_file"), - ("segmentation", "in_contours")]), - (inputnode, plot_bmask, [("in_ras", "in_file"), - ("brainmask", "in_contours")]), - (inputnode, plot_headmask, [("in_ras", "in_file"), - ("headmask", "in_contours")]), - (inputnode, plot_airmask, [("in_ras", "in_file"), - ("airmask", "in_contours")]), - (inputnode, plot_artmask, [("in_ras", "in_file"), - ("artmask", "in_contours")]), - (plot_bmask, ds_report_bmask, [("out_file", "in_file")]), - (plot_segm, ds_report_segm, [("out_file", "in_file")]), - (plot_artmask, ds_report_artmask, [("out_file", "in_file")]), - (plot_headmask, ds_report_headmask, [("out_file", "in_file")]), - (plot_airmask, ds_report_airmask, [("out_file", "in_file")]), - ]) + workflow.set_output([('bmask_report', workflow.plot_bmask.lzout.out_file)]) + workflow.set_output([('segm_report', workflow.plot_segm.lzout.out_file)]) + workflow.set_output([('artmask_report', workflow.plot_artmask.lzout.out_file)]) + workflow.set_output([('headmask_report', workflow.plot_headmask.lzout.out_file)]) + workflow.set_output([('airmask_report', workflow.plot_airmask.lzout.out_file)]) # fmt: on return workflow diff --git a/pydra/tasks/mriqc/workflows/core.py b/pydra/tasks/mriqc/workflows/core.py deleted file mode 100644 index 5aa5308..0000000 --- a/pydra/tasks/mriqc/workflows/core.py +++ /dev/null @@ -1,59 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -""" -Combines the structural and functional MRI workflows. 
-""" -from nipype.pipeline.engine import Workflow -from mriqc.workflows.anatomical.base import anat_qc_workflow -from mriqc.workflows.functional.base import fmri_qc_workflow -from mriqc.workflows.diffusion.base import dmri_qc_workflow - -ANATOMICAL_KEYS = "t1w", "t2w" -FMRI_KEY = "bold" -DMRI_KEY = "dwi" - - -def init_mriqc_wf(): - """Create a multi-subject MRIQC workflow.""" - from mriqc import config - - # Create parent workflow - workflow = Workflow(name="mriqc_wf") - workflow.base_dir = config.execution.work_dir - - # Create fMRI QC workflow - if FMRI_KEY in config.workflow.inputs: - workflow.add_nodes([fmri_qc_workflow()]) - - # Create dMRI QC workflow - if DMRI_KEY in config.workflow.inputs: - workflow.add_nodes([dmri_qc_workflow()]) - - # Create sMRI QC workflow - input_keys = config.workflow.inputs.keys() - if any(key in input_keys for key in ANATOMICAL_KEYS): - workflow.add_nodes([anat_qc_workflow()]) - - # Return non-empty workflow, else None - if workflow._get_all_nodes(): - return workflow diff --git a/pydra/tasks/mriqc/workflows/diffusion/__init__.py b/pydra/tasks/mriqc/workflows/diffusion/__init__.py index e69de29..3a79cc8 100644 --- a/pydra/tasks/mriqc/workflows/diffusion/__init__.py +++ b/pydra/tasks/mriqc/workflows/diffusion/__init__.py @@ -0,0 +1,10 @@ +from .base import ( + _bvals_report, + _estimate_sigma, + _filter_metadata, + compute_iqms, + dmri_qc_workflow, + epi_mni_align, + hmc_workflow, +) +from .output import _carpet_parcellation, _get_tr, _get_wm, init_dwi_report_wf diff --git a/pydra/tasks/mriqc/workflows/diffusion/base.py b/pydra/tasks/mriqc/workflows/diffusion/base.py index 76573f5..909bfc1 100644 --- a/pydra/tasks/mriqc/workflows/diffusion/base.py +++ b/pydra/tasks/mriqc/workflows/diffusion/base.py @@ -1,57 +1,42 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2023 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -""" -Diffusion MRI workflow -====================== - -.. image :: _static/diffusion_workflow_source.svg - -The diffusion workflow follows the following steps: - -#. Sanitize (revise data types and xforms) input data, read - associated metadata and discard non-steady state frames. -#. :abbr:`HMC (head-motion correction)` based on ``3dvolreg`` from - AFNI -- :py:func:`hmc`. -#. Skull-stripping of the time-series (AFNI) -- - :py:func:`dmri_bmsk_workflow`. -#. Calculate mean time-series, and :abbr:`tSNR (temporal SNR)`. -#. Spatial Normalization to MNI (ANTs) -- :py:func:`epi_mni_align` -#. Extraction of IQMs -- :py:func:`compute_iqms`. -#. Individual-reports generation -- - :py:func:`~mriqc.workflows.diffusion.output.init_dwi_report_wf`. - -This workflow is orchestrated by :py:func:`dmri_qc_workflow`. 
-""" -from mriqc import config -from nipype.interfaces import utility as niu -from nipype.pipeline import engine as pe -from mriqc.interfaces.datalad import DataladIdentityInterface -from mriqc.workflows.diffusion.output import init_dwi_report_wf - -DEFAULT_MEMORY_MIN_GB = 0.01 - - -def dmri_qc_workflow(name="dwiMRIQC"): +import attrs +from fileformats.medimage import Bval, Bvec +import logging +import numpy as np +from pathlib import Path +from pydra.engine import Workflow +from pydra.engine.task import FunctionTask +import pydra.mark +from pydra.tasks.mriqc.workflows.diffusion.output import init_dwi_report_wf +import typing as ty + + +logger = logging.getLogger(__name__) + + +def dmri_qc_workflow( + bvals=attrs.NOTHING, + bvecs=attrs.NOTHING, + exec_ants_float=False, + exec_datalad_get=True, + exec_debug=False, + exec_float32=True, + exec_layout=None, + exec_verbose_reports=False, + exec_work_dir=None, + in_file=attrs.NOTHING, + name="dwiMRIQC", + nipype_nprocs=12, + nipype_omp_nthreads=12, + qspace_neighbors=attrs.NOTHING, + wf_biggest_file_gb=1, + wf_fd_radius=50, + wf_fd_thres=0.2, + wf_fft_spikes_detector=False, + wf_inputs=None, + wf_min_len_dwi=7, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): """ Initialize the dMRI-QC workflow. @@ -64,362 +49,307 @@ def dmri_qc_workflow(name="dwiMRIQC"): wf = dmri_qc_workflow() """ - from nipype.interfaces.afni import Volreg - from nipype.interfaces.mrtrix3.preprocess import DWIDenoise - from niworkflows.interfaces.header import SanitizeImage - from mriqc.interfaces.diffusion import ( + from pydra.tasks.afni.auto import Volreg + from pydra.tasks.mrtrix3.v3_0 import DwiDenoise + from pydra.tasks.niworkflows.interfaces.header import SanitizeImage + from pydra.tasks.niworkflows.interfaces.images import RobustAverage + from pydra.tasks.mriqc.interfaces.diffusion import ( + CCSegmentation, CorrectSignalDrift, - DipyDTI, - ExtractB0, - FilterShells, + DiffusionModel, + ExtractOrientations, NumberOfShells, + PIESNO, ReadDWIMetadata, + SpikingVoxelsMask, WeightedStat, ) - from mriqc.workflows.shared import synthstrip_wf as dmri_bmsk_workflow - from mriqc.messages import BUILDING_WORKFLOW - - workflow = pe.Workflow(name=name) - - mem_gb = config.workflow.biggest_file_gb - dataset = config.workflow.inputs.get("dwi", []) - - message = BUILDING_WORKFLOW.format( - modality="diffusion", - detail=( - f"for {len(dataset)} NIfTI files." - if len(dataset) > 2 - else f"({' and '.join(('<%s>' % v for v in dataset))})." - ), + from pydra.tasks.mriqc.workflows.shared import synthstrip_wf as dmri_bmsk_workflow + + if exec_work_dir is None: + exec_work_dir = Path.cwd() + + workflow = Workflow( + name=name, + input_spec={ + "bvals": Bval, + "bvecs": Bvec, + "in_file": ty.Any, + "qspace_neighbors": ty.Any, + }, + output_spec={ + "dwi_report_wf_bmask_report": ty.Any, + "dwi_report_wf_carpet_report": ty.Any, + "dwi_report_wf_fa_report": ty.Any, + "dwi_report_wf_heatmap_report": ty.Any, + "dwi_report_wf_md_report": ty.Any, + "dwi_report_wf_noise_report": ty.Any, + "dwi_report_wf_snr_report": ty.Any, + "dwi_report_wf_spikes_report": ty.Any, + "iqms_wf_noise_floor": ty.Any, + "iqms_wf_out_file": ty.Any, + }, + bvals=bvals, + bvecs=bvecs, + in_file=in_file, + qspace_neighbors=qspace_neighbors, ) - config.loggers.workflow.info(message) # Define workflow, inputs and outputs # 0. 
Get data, put it in RAS orientation - inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode") - inputnode.iterables = [("in_file", dataset)] - - datalad_get = pe.Node( - DataladIdentityInterface(fields=["in_file"], dataset_path=config.execution.bids_dir), - name="datalad_get", - ) - - outputnode = pe.Node( - niu.IdentityInterface(fields=["qc", "mosaic", "out_group", "out_dvars", "out_fd"]), - name="outputnode", - ) - sanitize = pe.Node( + workflow.add( SanitizeImage( + max_32bit=exec_float32, n_volumes_to_discard=0, - max_32bit=config.execution.float32, - ), - name="sanitize", - mem_gb=mem_gb * 4.0, + in_file=workflow.lzin.in_file, + name="sanitize", + ) ) - # Workflow -------------------------------------------------------- + # Read metadata & bvec/bval, estimate number of shells, extract and split B0s - # 1. Read metadata & bvec/bval, estimate number of shells, extract and split B0s - meta = pe.Node(ReadDWIMetadata(index_db=config.execution.bids_database_dir), name="metadata") - shells = pe.Node(NumberOfShells(), name="shells") - get_shells = pe.MapNode(ExtractB0(), name="get_shells", iterfield=["b0_ixs"]) - hmc_shells = pe.MapNode( - Volreg(args="-Fourier -twopass", zpad=4, outputtype="NIFTI_GZ"), - name="hmc_shells", - mem_gb=mem_gb * 2.5, - iterfield=["in_file"], + workflow.add(NumberOfShells(in_bvals=workflow.lzin.bvals, name="shells")) + workflow.add( + ExtractOrientations(in_file=workflow.sanitize.lzout.out_file, name="get_lowb") ) - - hmc_b0 = pe.Node( - Volreg(args="-Fourier -twopass", zpad=4, outputtype="NIFTI_GZ"), - name="hmc_b0", - mem_gb=mem_gb * 2.5, + # Generate B0 reference + workflow.add( + RobustAverage( + mc_method=None, in_file=workflow.sanitize.lzout.out_file, name="dwi_ref" + ) ) - - drift = pe.Node(CorrectSignalDrift(), name="drift") - - # 2. Generate B0 reference - dwi_reference_wf = init_dmriref_wf(name="dwi_reference_wf") - - # 3. Calculate brainmask - dmri_bmsk = dmri_bmsk_workflow(omp_nthreads=config.nipype.omp_nthreads) - - # 4. HMC: head motion correct - hmcwf = hmc_workflow() - - # 5. Split shells and compute some stats - averages = pe.MapNode( - WeightedStat(), - name="averages", - mem_gb=mem_gb * 1.5, - iterfield=["in_weights"], + workflow.add( + Volreg( + args="-Fourier -twopass", + outputtype="NIFTI_GZ", + zpad=4, + basefile=workflow.dwi_ref.lzout.out_file, + in_file=workflow.get_lowb.lzout.out_file, + name="hmc_b0", + ) ) - stddev = pe.MapNode( - WeightedStat(stat="std"), - name="stddev", - mem_gb=mem_gb * 1.5, - iterfield=["in_weights"], + # Calculate brainmask + workflow.add( + dmri_bmsk_workflow( + omp_nthreads=nipype_omp_nthreads, + in_files=workflow.dwi_ref.lzout.out_file, + name="dmri_bmsk", + ) ) - - # 6. 
Fit DTI model - dti_filter = pe.Node(FilterShells(), name="dti_filter") - dwidenoise = pe.Node( - DWIDenoise( + # HMC: head motion correct + workflow.add( + hmc_workflow( + wf_fd_radius=wf_fd_radius, in_bvec=workflow.lzin.bvecs, name="hmcwf" + ) + ) + workflow.add( + ExtractOrientations( + in_bvec_file=workflow.lzin.bvecs, + in_file=workflow.hmcwf.lzout.out_file, + indices=workflow.shells.lzout.b_indices, + name="get_hmc_shells", + ) + ) + # Split shells and compute some stats + workflow.add( + WeightedStat(in_weights=workflow.shells.lzout.b_masks, name="averages") + ) + workflow.add( + WeightedStat( + stat="std", in_weights=workflow.shells.lzout.b_masks, name="stddev" + ) + ) + workflow.add( + DwiDenoise( noise="noisemap.nii.gz", - nthreads=config.nipype.omp_nthreads, - ), - name="dwidenoise", - nprocs=config.nipype.omp_nthreads, + nthreads=nipype_omp_nthreads, + mask=workflow.dmri_bmsk.lzout.out_mask, + name="dwidenoise", + ) ) - dti = pe.Node( - DipyDTI(free_water_model=False), - name="dti", + workflow.add( + CorrectSignalDrift( + brainmask_file=workflow.dmri_bmsk.lzout.out_mask, + bval_file=workflow.lzin.bvals, + full_epi=workflow.sanitize.lzout.out_file, + in_file=workflow.hmc_b0.lzout.out_file, + name="drift", + ) + ) + workflow.add( + SpikingVoxelsMask( + b_masks=workflow.shells.lzout.b_masks, + brain_mask=workflow.dmri_bmsk.lzout.out_mask, + in_file=workflow.sanitize.lzout.out_file, + name="sp_mask", + ) + ) + # Fit DTI/DKI model + workflow.add( + DiffusionModel( + brain_mask=workflow.dmri_bmsk.lzout.out_mask, + bvals=workflow.shells.lzout.out_data, + bvec_file=workflow.lzin.bvecs, + in_file=workflow.dwidenoise.lzout.out, + n_shells=workflow.shells.lzout.n_shells, + name="dwimodel", + ) + ) + # Calculate CC mask + workflow.add( + CCSegmentation( + in_cfa=workflow.dwimodel.lzout.out_cfa, + in_fa=workflow.dwimodel.lzout.out_fa, + name="cc_mask", + ) + ) + # Run PIESNO noise estimation + workflow.add(PIESNO(in_file=workflow.sanitize.lzout.out_file, name="piesno")) + # EPI to MNI registration + workflow.add( + epi_mni_align( + nipype_omp_nthreads=nipype_omp_nthreads, + wf_species=wf_species, + exec_ants_float=exec_ants_float, + exec_debug=exec_debug, + nipype_nprocs=nipype_nprocs, + wf_template_id=wf_template_id, + epi_mask=workflow.dmri_bmsk.lzout.out_mask, + epi_mean=workflow.dwi_ref.lzout.out_file, + name="spatial_norm", + ) + ) + # Compute IQMs + workflow.add( + compute_iqms( + in_noise=workflow.dwidenoise.lzout.noise, + in_bvec=workflow.get_hmc_shells.lzout.out_bvec, + in_shells=workflow.get_hmc_shells.lzout.out_file, + b_values_shells=workflow.shells.lzout.b_values, + wm_mask=workflow.cc_mask.lzout.wm_finalmask, + cc_mask=workflow.cc_mask.lzout.out_mask, + brain_mask=workflow.dmri_bmsk.lzout.out_mask, + in_md=workflow.dwimodel.lzout.out_md, + in_fa_degenerate=workflow.dwimodel.lzout.out_fa_degenerate, + in_fa_nans=workflow.dwimodel.lzout.out_fa_nans, + in_cfa=workflow.dwimodel.lzout.out_cfa, + in_fa=workflow.dwimodel.lzout.out_fa, + in_bvec_diff=workflow.hmcwf.lzout.out_bvec_diff, + in_bvec_rotated=workflow.hmcwf.lzout.out_bvec, + framewise_displacement=workflow.hmcwf.lzout.out_fd, + piesno_sigma=workflow.piesno.lzout.sigma, + spikes_mask=workflow.sp_mask.lzout.out_mask, + qspace_neighbors=workflow.lzin.qspace_neighbors, + b_values_file=workflow.lzin.bvals, + in_file=workflow.lzin.in_file, + name="iqms_wf", + ) + ) + # Generate outputs + workflow.add( + init_dwi_report_wf( + exec_verbose_reports=exec_verbose_reports, + wf_biggest_file_gb=wf_biggest_file_gb, + 
wf_fd_thres=wf_fd_thres, + exec_work_dir=exec_work_dir, + wf_species=wf_species, + wf_fft_spikes_detector=wf_fft_spikes_detector, + in_parcellation=workflow.spatial_norm.lzout.epi_parc, + in_md=workflow.dwimodel.lzout.out_md, + in_fa=workflow.dwimodel.lzout.out_fa, + in_epi=workflow.drift.lzout.out_full_file, + in_stdmap=workflow.stddev.lzout.out_file, + in_avgmap=workflow.averages.lzout.out_file, + brain_mask=workflow.dmri_bmsk.lzout.out_mask, + in_bdict=workflow.shells.lzout.b_dict, + name="dwi_report_wf", + ) ) - - # 7. EPI to MNI registration - ema = epi_mni_align() - - # 8. Compute IQMs - iqmswf = compute_iqms() - - # 9. Generate outputs - dwi_report_wf = init_dwi_report_wf() - # fmt: off - workflow.connect([ - (inputnode, datalad_get, [("in_file", "in_file")]), - (inputnode, meta, [("in_file", "in_file")]), - (inputnode, dwi_report_wf, [ - ("in_file", "inputnode.name_source"), - ]), - (datalad_get, iqmswf, [("in_file", "inputnode.in_file")]), - (datalad_get, sanitize, [("in_file", "in_file")]), - (sanitize, dwi_reference_wf, [("out_file", "inputnode.in_file")]), - (shells, dwi_reference_wf, [(("b_masks", _first), "inputnode.t_mask")]), - (meta, shells, [("out_bval_file", "in_bvals")]), - (sanitize, drift, [("out_file", "full_epi")]), - (shells, get_shells, [("b_indices", "b0_ixs")]), - (sanitize, get_shells, [("out_file", "in_file")]), - (meta, drift, [("out_bval_file", "bval_file")]), - (get_shells, hmc_shells, [(("out_file", _all_but_first), "in_file")]), - (get_shells, hmc_b0, [(("out_file", _first), "in_file")]), - (dwi_reference_wf, hmc_b0, [("outputnode.ref_file", "basefile")]), - (hmc_b0, drift, [("out_file", "in_file")]), - (shells, drift, [(("b_indices", _first), "b0_ixs")]), - (dwi_reference_wf, dmri_bmsk, [("outputnode.ref_file", "inputnode.in_files")]), - (dwi_reference_wf, ema, [("outputnode.ref_file", "inputnode.epi_mean")]), - (dmri_bmsk, drift, [("outputnode.out_mask", "brainmask_file")]), - (dmri_bmsk, ema, [("outputnode.out_mask", "inputnode.epi_mask")]), - (drift, hmcwf, [("out_full_file", "inputnode.reference")]), - (drift, averages, [("out_full_file", "in_file")]), - (drift, stddev, [("out_full_file", "in_file")]), - (shells, averages, [("b_masks", "in_weights")]), - (shells, stddev, [("b_masks", "in_weights")]), - (shells, dti_filter, [("out_data", "bvals")]), - (meta, dti_filter, [("out_bvec_file", "bvec_file")]), - (drift, dti_filter, [("out_full_file", "in_file")]), - (dti_filter, dti, [("out_bvals", "bvals")]), - (dti_filter, dti, [("out_bvec_file", "bvec_file")]), - (dti_filter, dwidenoise, [("out_file", "in_file")]), - (dmri_bmsk, dwidenoise, [("outputnode.out_mask", "mask")]), - (dwidenoise, dti, [("out_file", "in_file")]), - (dmri_bmsk, dti, [("outputnode.out_mask", "brainmask")]), - (hmcwf, outputnode, [("outputnode.out_fd", "out_fd")]), - (shells, iqmswf, [("n_shells", "inputnode.n_shells"), - ("b_values", "inputnode.b_values")]), - (dwidenoise, dwi_report_wf, [("noise", "inputnode.in_noise")]), - (shells, dwi_report_wf, [("b_dict", "inputnode.in_bdict")]), - (dmri_bmsk, dwi_report_wf, [("outputnode.out_mask", "inputnode.brainmask")]), - (shells, dwi_report_wf, [("b_values", "inputnode.in_shells")]), - (averages, dwi_report_wf, [("out_file", "inputnode.in_avgmap")]), - (stddev, dwi_report_wf, [("out_file", "inputnode.in_stdmap")]), - (drift, dwi_report_wf, [("out_full_file", "inputnode.in_epi")]), - (dti, dwi_report_wf, [("out_fa", "inputnode.in_fa"), - ("out_md", "inputnode.in_md")]), - (ema, dwi_report_wf, [("outputnode.epi_parc", 
"inputnode.in_parcellation")]), - ]) - # fmt: on - return workflow + @pydra.mark.task + def shells_b_masks_to_dwi_ref_t_mask_callable(in_: ty.Any) -> ty.Any: + return _first(in_) -def compute_iqms(name="ComputeIQMs"): - """ - Initialize the workflow that actually computes the IQMs. + workflow.add(shells_b_masks_to_dwi_ref_t_mask_callable(in_=workflow.shells.lzout.b_masks, name="shells_b_masks_to_dwi_ref_t_mask_callable")) - .. workflow:: + workflow.dwi_ref.inputs.t_mask = workflow.shells_b_masks_to_dwi_ref_t_mask_callable.lzout.out - from mriqc.workflows.diffusion.base import compute_iqms - from mriqc.testing import mock_config - with mock_config(): - wf = compute_iqms() + @pydra.mark.task + def shells_b_indices_to_get_lowb_indices_callable(in_: ty.Any) -> ty.Any: + return _first(in_) - """ - from niworkflows.interfaces.bids import ReadSidecarJSON + workflow.add(shells_b_indices_to_get_lowb_indices_callable(in_=workflow.shells.lzout.b_indices, name="shells_b_indices_to_get_lowb_indices_callable")) - from mriqc.interfaces.reports import AddProvenance - from mriqc.interfaces import IQMFileSink + workflow.get_lowb.inputs.indices = workflow.shells_b_indices_to_get_lowb_indices_callable.lzout.out - # from mriqc.workflows.utils import _tofloat, get_fwhmx - # mem_gb = config.workflow.biggest_file_gb + @pydra.mark.task + def shells_b_indices_to_drift_b0_ixs_callable(in_: ty.Any) -> ty.Any: + return _first(in_) - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_file", - "n_shells", - "b_values", - ] - ), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "out_file", - "meta_sidecar", - ] - ), - name="outputnode", - ) + workflow.add(shells_b_indices_to_drift_b0_ixs_callable(in_=workflow.shells.lzout.b_indices, name="shells_b_indices_to_drift_b0_ixs_callable")) - meta = pe.Node(ReadSidecarJSON(index_db=config.execution.bids_database_dir), name="metadata") + workflow.drift.inputs.b0_ixs = workflow.shells_b_indices_to_drift_b0_ixs_callable.lzout.out + workflow.hmcwf.inputs.in_file = workflow.drift.lzout.out_full_file + workflow.averages.inputs.in_file = workflow.drift.lzout.out_full_file + workflow.stddev.inputs.in_file = workflow.drift.lzout.out_full_file - addprov = pe.Node( - AddProvenance(modality="dwi"), - name="provenance", - run_without_submitting=True, - ) + @pydra.mark.task + def averages_out_file_to_hmcwf_reference_callable(in_: ty.Any) -> ty.Any: + return _first(in_) - # Save to JSON file - datasink = pe.Node( - IQMFileSink( - modality="dwi", - out_dir=str(config.execution.output_dir), - dataset=config.execution.dsname, - ), - name="datasink", - run_without_submitting=True, - ) - - # fmt: off - workflow.connect([ - (inputnode, datasink, [("in_file", "in_file"), - ("n_shells", "NumberOfShells"), - ("b_values", "b-values")]), - (inputnode, meta, [("in_file", "in_file")]), - (inputnode, addprov, [("in_file", "in_file")]), - (addprov, datasink, [("out_prov", "provenance")]), - (meta, datasink, [("subject", "subject_id"), - ("session", "session_id"), - ("task", "task_id"), - ("acquisition", "acq_id"), - ("reconstruction", "rec_id"), - ("run", "run_id"), - ("out_dict", "metadata")]), - (datasink, outputnode, [("out_file", "out_file")]), - (meta, outputnode, [("out_dict", "meta_sidecar")]), - ]) - # fmt: on + workflow.add(averages_out_file_to_hmcwf_reference_callable(in_=workflow.averages.lzout.out_file, name="averages_out_file_to_hmcwf_reference_callable")) - # Set FD threshold - # inputnode.inputs.fd_thres = 
config.workflow.fd_thres - - # # AFNI quality measures - # fwhm_interface = get_fwhmx() - # fwhm = pe.Node(fwhm_interface, name="smoothness") - # # fwhm.inputs.acf = True # add when AFNI >= 16 - # measures = pe.Node(FunctionalQC(), name="measures", mem_gb=mem_gb * 3) - - # # fmt: off - # workflow.connect([ - # (inputnode, measures, [("epi_mean", "in_epi"), - # ("brainmask", "in_mask"), - # ("hmc_epi", "in_hmc"), - # ("hmc_fd", "in_fd"), - # ("fd_thres", "fd_thres"), - # ("in_tsnr", "in_tsnr")]), - # (inputnode, fwhm, [("epi_mean", "in_file"), - # ("brainmask", "mask")]), - # (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]), - # (measures, datasink, [("out_qc", "root")]), - # ]) - # # fmt: on - return workflow + workflow.hmcwf.inputs.reference = workflow.averages_out_file_to_hmcwf_reference_callable.lzout.out + workflow.dwidenoise.inputs.dwi = workflow.drift.lzout.out_full_file + @pydra.mark.task + def averages_out_file_to_iqms_wf_in_b0_callable(in_: ty.Any) -> ty.Any: + return _first(in_) -def init_dmriref_wf( - in_file=None, - name="init_dmriref_wf", -): - """ - Build a workflow that generates reference images for a dMRI series. - - The raw reference image is the target of :abbr:`HMC (head motion correction)`, and a - contrast-enhanced reference is the subject of distortion correction, as well as - boundary-based registration to T1w and template spaces. - - This workflow assumes only one dMRI file has been passed. - - Workflow Graph - .. workflow:: - :graph2use: orig - :simple_form: yes - - from mriqc.workflows.diffusion.base import init_dmriref_wf - wf = init_dmriref_wf() - - Parameters - ---------- - in_file : :obj:`str` - dMRI series NIfTI file - ------ - in_file : str - series NIfTI file - - Outputs - ------- - in_file : str - Validated DWI series NIfTI file - ref_file : str - Reference image to which DWI series is motion corrected - """ - from niworkflows.interfaces.images import RobustAverage - from niworkflows.interfaces.header import ValidateImage + workflow.add(averages_out_file_to_iqms_wf_in_b0_callable(in_=workflow.averages.lzout.out_file, name="averages_out_file_to_iqms_wf_in_b0_callable")) - workflow = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "t_mask"]), name="inputnode") - outputnode = pe.Node( - niu.IdentityInterface(fields=["in_file", "ref_file", "validation_report"]), - name="outputnode", + workflow.iqms_wf.inputs.in_b0 = workflow.averages_out_file_to_iqms_wf_in_b0_callable.lzout.out + # fmt: on + workflow.set_output([("iqms_wf_out_file", workflow.iqms_wf.lzout.out_file)]) + workflow.set_output([("iqms_wf_noise_floor", workflow.iqms_wf.lzout.noise_floor)]) + workflow.set_output( + [("dwi_report_wf_spikes_report", workflow.dwi_report_wf.lzout.spikes_report)] ) - - # Simplify manually setting input image - if in_file is not None: - inputnode.inputs.in_file = in_file - - val_bold = pe.Node( - ValidateImage(), - name="val_bold", - mem_gb=DEFAULT_MEMORY_MIN_GB, + workflow.set_output( + [("dwi_report_wf_carpet_report", workflow.dwi_report_wf.lzout.carpet_report)] + ) + workflow.set_output( + [("dwi_report_wf_heatmap_report", workflow.dwi_report_wf.lzout.heatmap_report)] + ) + workflow.set_output( + [("dwi_report_wf_md_report", workflow.dwi_report_wf.lzout.md_report)] + ) + workflow.set_output( + [("dwi_report_wf_fa_report", workflow.dwi_report_wf.lzout.fa_report)] + ) + workflow.set_output( + [("dwi_report_wf_noise_report", workflow.dwi_report_wf.lzout.noise_report)] + ) + workflow.set_output( + [("dwi_report_wf_bmask_report", 
workflow.dwi_report_wf.lzout.bmask_report)] + ) + workflow.set_output( + [("dwi_report_wf_snr_report", workflow.dwi_report_wf.lzout.snr_report)] ) - - gen_avg = pe.Node(RobustAverage(mc_method=None), name="gen_avg", mem_gb=1) - # fmt: off - workflow.connect([ - (inputnode, val_bold, [("in_file", "in_file")]), - (inputnode, gen_avg, [("t_mask", "t_mask")]), - (val_bold, gen_avg, [("out_file", "in_file")]), - (gen_avg, outputnode, [("out_file", "ref_file")]), - ]) - # fmt: on return workflow -def hmc_workflow(name="dMRI_HMC"): +def hmc_workflow( + in_bvec=attrs.NOTHING, + in_file=attrs.NOTHING, + name="dMRI_HMC", + reference=attrs.NOTHING, + wf_fd_radius=50, +): """ Create a :abbr:`HMC (head motion correction)` workflow for dMRI. @@ -431,46 +361,76 @@ def hmc_workflow(name="dMRI_HMC"): wf = hmc() """ - from nipype.algorithms.confounds import FramewiseDisplacement - from nipype.interfaces.afni import Volreg - - mem_gb = config.workflow.biggest_file_gb - - workflow = pe.Workflow(name=name) - - inputnode = pe.Node(niu.IdentityInterface(fields=["in_file", "reference"]), name="inputnode") - outputnode = pe.Node(niu.IdentityInterface(fields=["out_file", "out_fd"]), name="outputnode") + from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( + FramewiseDisplacement, + ) + from pydra.tasks.afni.auto import Volreg + from pydra.tasks.mriqc.interfaces.diffusion import RotateVectors + + workflow = Workflow( + name=name, + input_spec={"in_bvec": ty.Any, "in_file": ty.Any, "reference": ty.Any}, + output_spec={ + "out_bvec": ty.Any, + "out_bvec_diff": ty.Any, + "out_fd": ty.Any, + "out_file": ty.Any, + }, + in_bvec=in_bvec, + in_file=in_file, + reference=reference, + ) # calculate hmc parameters - hmc = pe.Node( - Volreg(args="-Fourier -twopass", zpad=4, outputtype="NIFTI_GZ"), - name="motion_correct", - mem_gb=mem_gb * 2.5, + workflow.add( + Volreg( + args="-Fourier -twopass", + outputtype="NIFTI_GZ", + zpad=4, + basefile=workflow.lzin.reference, + in_file=workflow.lzin.in_file, + name="hmc", + ) + ) + workflow.add( + RotateVectors( + in_file=workflow.lzin.in_bvec, + reference=workflow.lzin.reference, + transforms=workflow.hmc.lzout.oned_matrix_save, + name="bvec_rot", + ) ) - # Compute the frame-wise displacement - fdnode = pe.Node( + workflow.add( FramewiseDisplacement( normalize=False, parameter_source="AFNI", - radius=config.workflow.fd_radius, - ), - name="ComputeFD", + radius=wf_fd_radius, + in_file=workflow.hmc.lzout.oned_file, + name="fdnode", + ) ) - # fmt: off - workflow.connect([ - (inputnode, hmc, [("in_file", "in_file"), - ("reference", "basefile")]), - (hmc, outputnode, [("out_file", "out_file")]), - (hmc, fdnode, [("oned_file", "in_file")]), - (fdnode, outputnode, [("out_file", "out_fd")]), - ]) + workflow.set_output([('out_file', workflow.hmc.lzout.out_file)]) + workflow.set_output([('out_fd', workflow.fdnode.lzout.out_file)]) + workflow.set_output([('out_bvec', workflow.bvec_rot.lzout.out_bvec)]) + workflow.set_output([('out_bvec_diff', workflow.bvec_rot.lzout.out_diff)]) # fmt: on + return workflow -def epi_mni_align(name="SpatialNormalization"): +def epi_mni_align( + epi_mask=attrs.NOTHING, + epi_mean=attrs.NOTHING, + exec_ants_float=False, + exec_debug=False, + name="SpatialNormalization", + nipype_nprocs=12, + nipype_omp_nthreads=12, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): """ Estimate the transform that maps the EPI space into MNI152NLin2009cAsym. 
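NOTE: the `FramewiseDisplacement` port used by `hmc_workflow` above implements the Power et al. (2012) definition: the sum of absolute frame-to-frame differences of the six realignment parameters, with rotations (reported in degrees by AFNI's 3dvolreg) converted to arc length on a sphere of `radius` mm, hence the `wf_fd_radius=50` default. A minimal NumPy restatement, assuming an (N, 6) array ordered rotations-then-translations as 3dvolreg writes it:

    import numpy as np

    def framewise_displacement(params: np.ndarray, radius: float = 50.0) -> np.ndarray:
        """FD per Power et al.; params is (N, 6): 3 rotations (deg), 3 translations (mm)."""
        rot_mm = np.deg2rad(params[:, :3]) * radius   # rotation angle -> arc length (mm)
        motion = np.hstack([rot_mm, params[:, 3:6]])
        return np.abs(np.diff(motion, axis=0)).sum(axis=1)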
@@ -487,52 +447,53 @@ def epi_mni_align(name="SpatialNormalization"): wf = epi_mni_align() """ - from nipype.interfaces.ants import ApplyTransforms, N4BiasFieldCorrection - from niworkflows.interfaces.reportlets.registration import ( + from pydra.tasks.ants.auto import ApplyTransforms, N4BiasFieldCorrection + from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( SpatialNormalizationRPT as RobustMNINormalization, ) from templateflow.api import get as get_template # Get settings - testing = config.execution.debug - n_procs = config.nipype.nprocs - ants_nthreads = config.nipype.omp_nthreads - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface(fields=["epi_mean", "epi_mask"]), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["epi_mni", "epi_parc", "report"]), - name="outputnode", + testing = exec_debug + n_procs = nipype_nprocs + ants_nthreads = nipype_omp_nthreads + workflow = Workflow( + name=name, + input_spec={"epi_mask": ty.Any, "epi_mean": ty.Any}, + output_spec={"epi_mni": ty.Any, "epi_parc": ty.Any, "report": ty.Any}, + epi_mask=epi_mask, + epi_mean=epi_mean, ) - n4itk = pe.Node(N4BiasFieldCorrection(dimension=3, copy_header=True), name="SharpenEPI") - - norm = pe.Node( + workflow.add( + N4BiasFieldCorrection( + copy_header=True, + dimension=3, + input_image=workflow.lzin.epi_mean, + name="n4itk", + ) + ) + workflow.add( RobustMNINormalization( explicit_masking=False, flavor="testing" if testing else "precise", - float=config.execution.ants_float, + float=exec_ants_float, generate_report=True, moving="boldref", num_threads=ants_nthreads, reference="boldref", - template=config.workflow.template_id, - ), - name="EPI2MNI", - num_threads=n_procs, - mem_gb=3, + template=wf_template_id, + moving_image=workflow.n4itk.lzout.output_image, + name="norm", + ) ) - - if config.workflow.species.lower() == "human": - norm.inputs.reference_image = str( - get_template(config.workflow.template_id, resolution=2, suffix="boldref") + if wf_species.lower() == "human": + workflow.norm.inputs.reference_image = str( + get_template(wf_template_id, resolution=2, suffix="boldref") ) - norm.inputs.reference_mask = str( + workflow.norm.inputs.reference_mask = str( get_template( - config.workflow.template_id, + wf_template_id, resolution=2, desc="brain", suffix="mask", @@ -542,113 +503,245 @@ def epi_mni_align(name="SpatialNormalization"): else: from nirodents.workflows.brainextraction import _bspline_grid - n4itk.inputs.shrink_factor = 1 - n4itk.inputs.n_iterations = [50] * 4 - norm.inputs.reference_image = str(get_template(config.workflow.template_id, suffix="T2w")) - norm.inputs.reference_mask = str( + workflow.n4itk.inputs.shrink_factor = 1 + workflow.n4itk.inputs.n_iterations = [50] * 4 + workflow.norm.inputs.reference_image = str( + get_template(wf_template_id, suffix="T2w") + ) + workflow.norm.inputs.reference_mask = str( get_template( - config.workflow.template_id, + wf_template_id, desc="brain", suffix="mask", )[0] ) - - bspline_grid = pe.Node(niu.Function(function=_bspline_grid), name="bspline_grid") - + workflow.add(FunctionTask(func=_bspline_grid, name="bspline_grid")) # fmt: off - workflow.connect([ - (inputnode, bspline_grid, [('epi_mean', 'in_file')]), - (bspline_grid, n4itk, [('out', 'args')]) - ]) + workflow.bspline_grid.inputs.in_file = workflow.lzin.epi_mean + workflow.n4itk.inputs.args = workflow.bspline_grid.lzout.out # fmt: on - # Warp segmentation into EPI space - invt = pe.Node( + workflow.add( ApplyTransforms( 
- float=True, - dimension=3, default_value=0, + dimension=3, + float=True, interpolation="MultiLabel", - ), - name="ResampleSegmentation", + reference_image=workflow.lzin.epi_mean, + transforms=workflow.norm.lzout.inverse_composite_transform, + name="invt", + ) ) - - if config.workflow.species.lower() == "human": - invt.inputs.input_image = str( + if wf_species.lower() == "human": + workflow.invt.inputs.input_image = str( get_template( - config.workflow.template_id, + wf_template_id, resolution=1, desc="carpet", suffix="dseg", ) ) else: - invt.inputs.input_image = str( + workflow.invt.inputs.input_image = str( get_template( - config.workflow.template_id, + wf_template_id, suffix="dseg", )[-1] ) - # fmt: off - workflow.connect([ - (inputnode, invt, [("epi_mean", "reference_image")]), - (inputnode, n4itk, [("epi_mean", "input_image")]), - (n4itk, norm, [("output_image", "moving_image")]), - (norm, invt, [ - ("inverse_composite_transform", "transforms")]), - (invt, outputnode, [("output_image", "epi_parc")]), - (norm, outputnode, [("warped_image", "epi_mni"), - ("out_report", "report")]), - ]) + workflow.set_output([('epi_parc', workflow.invt.lzout.output_image)]) + workflow.set_output([('epi_mni', workflow.norm.lzout.warped_image)]) + workflow.set_output([('report', workflow.norm.lzout.out_report)]) # fmt: on - - if config.workflow.species.lower() == "human": - workflow.connect([(inputnode, norm, [("epi_mask", "moving_mask")])]) + if wf_species.lower() == "human": + workflow.norm.inputs.moving_mask = workflow.lzin.epi_mask return workflow -def _mean(inlist): - import numpy as np +def compute_iqms( + b_values_file=attrs.NOTHING, + b_values_shells=attrs.NOTHING, + brain_mask=attrs.NOTHING, + cc_mask=attrs.NOTHING, + framewise_displacement=attrs.NOTHING, + in_b0=attrs.NOTHING, + in_bvec=attrs.NOTHING, + in_bvec_diff=attrs.NOTHING, + in_bvec_rotated=attrs.NOTHING, + in_cfa=attrs.NOTHING, + in_fa=attrs.NOTHING, + in_fa_degenerate=attrs.NOTHING, + in_fa_nans=attrs.NOTHING, + in_file=attrs.NOTHING, + in_md=attrs.NOTHING, + in_noise=attrs.NOTHING, + in_shells=attrs.NOTHING, + name="ComputeIQMs", + piesno_sigma=attrs.NOTHING, + qspace_neighbors=attrs.NOTHING, + spikes_mask=attrs.NOTHING, + wm_mask=attrs.NOTHING, +): + """ + Initialize the workflow that actually computes the IQMs. - return np.mean(inlist) + .. 
workflow:: + from mriqc.workflows.diffusion.base import compute_iqms + from mriqc.testing import mock_config + with mock_config(): + wf = compute_iqms() -def _parse_tqual(in_file): - import numpy as np + """ + from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON + from pydra.tasks.mriqc.interfaces import IQMFileSink + from pydra.tasks.mriqc.interfaces.diffusion import DiffusionQC + from pydra.tasks.mriqc.interfaces.reports import AddProvenance - with open(in_file, "r") as fin: - lines = fin.readlines() - return np.mean([float(line.strip()) for line in lines if not line.startswith("++")]) + # from mriqc.workflows.utils import _tofloat, get_fwhmx + workflow = Workflow( + name=name, + input_spec={ + "b_values_file": ty.Any, + "b_values_shells": ty.Any, + "brain_mask": ty.Any, + "cc_mask": ty.Any, + "framewise_displacement": ty.Any, + "in_b0": ty.Any, + "in_bvec": ty.Any, + "in_bvec_diff": ty.Any, + "in_bvec_rotated": ty.Any, + "in_cfa": ty.Any, + "in_fa": ty.Any, + "in_fa_degenerate": ty.Any, + "in_fa_nans": ty.Any, + "in_file": ty.Any, + "in_md": ty.Any, + "in_noise": ty.Any, + "in_shells": ty.Any, + "piesno_sigma": ty.Any, + "qspace_neighbors": ty.Any, + "spikes_mask": ty.Any, + "wm_mask": ty.Any, + }, + output_spec={"noise_floor": ty.Any, "out_file": ty.Any}, + b_values_file=b_values_file, + b_values_shells=b_values_shells, + brain_mask=brain_mask, + cc_mask=cc_mask, + framewise_displacement=framewise_displacement, + in_b0=in_b0, + in_bvec=in_bvec, + in_bvec_diff=in_bvec_diff, + in_bvec_rotated=in_bvec_rotated, + in_cfa=in_cfa, + in_fa=in_fa, + in_fa_degenerate=in_fa_degenerate, + in_fa_nans=in_fa_nans, + in_file=in_file, + in_md=in_md, + in_noise=in_noise, + in_shells=in_shells, + piesno_sigma=piesno_sigma, + qspace_neighbors=qspace_neighbors, + spikes_mask=spikes_mask, + wm_mask=wm_mask, + ) + workflow.add( + FunctionTask( + func=_estimate_sigma, + in_file=workflow.lzin.in_noise, + mask=workflow.lzin.brain_mask, + name="estimate_sigma", + ) + ) -def _parse_tout(in_file): - import numpy as np + workflow.add( + DiffusionQC( + brain_mask=workflow.lzin.brain_mask, + cc_mask=workflow.lzin.cc_mask, + in_b0=workflow.lzin.in_b0, + in_bval_file=workflow.lzin.b_values_file, + in_bvec=workflow.lzin.in_bvec, + in_bvec_diff=workflow.lzin.in_bvec_diff, + in_bvec_rotated=workflow.lzin.in_bvec_rotated, + in_cfa=workflow.lzin.in_cfa, + in_fa=workflow.lzin.in_fa, + in_fa_degenerate=workflow.lzin.in_fa_degenerate, + in_fa_nans=workflow.lzin.in_fa_nans, + in_fd=workflow.lzin.framewise_displacement, + in_file=workflow.lzin.in_file, + in_md=workflow.lzin.in_md, + in_shells=workflow.lzin.in_shells, + in_shells_bval=workflow.lzin.b_values_shells, + piesno_sigma=workflow.lzin.piesno_sigma, + qspace_neighbors=workflow.lzin.qspace_neighbors, + spikes_mask=workflow.lzin.spikes_mask, + wm_mask=workflow.lzin.wm_mask, + name="measures", + ) + ) - data = np.loadtxt(in_file) # pylint: disable=no-member - return data.mean() + # Save to JSON file + # fmt: off -def _tolist(value): - return [value] -def _get_bvals(bmatrix): - import numpy - return numpy.squeeze(bmatrix[:, -1]).tolist() + workflow.set_output([('out_file', workflow.measures.lzout.out_qc)]) + workflow.set_output([('noise_floor', workflow.estimate_sigma.lzout.out)]) + # fmt: on + return workflow -def _first(inlist): - if isinstance(inlist, (list, tuple)): - return inlist[0] - return inlist +def _bvals_report(in_file): + import numpy as np -def _all_but_first(inlist): - if isinstance(inlist, (list, tuple)): - return inlist[1:] + bvals = [ + 
round(float(val), 2) for val in np.unique(np.round(np.loadtxt(in_file), 2)) + ] + if len(bvals) > 10: + return "Likely DSI" + return bvals + +def _estimate_sigma(in_file, mask): + + import nibabel as nb + import numpy as np + + msk = nb.load(mask).get_fdata() > 0.5 + return round( + float(np.median(nb.load(in_file).get_fdata()[msk])), + 6, + ) + + +def _filter_metadata( + in_dict, + keys=( + "global", + "dcmmeta_affine", + "dcmmeta_reorient_transform", + "dcmmeta_shape", + "dcmmeta_slice_dim", + "dcmmeta_version", + "time", + ), +): + """Drop large and partially redundant objects generated by dcm2niix.""" + for key in keys: + in_dict.pop(key, None) + return in_dict + + +def _first(inlist): + + if isinstance(inlist, (list, tuple)): + return inlist[0] return inlist diff --git a/pydra/tasks/mriqc/workflows/diffusion/output.py b/pydra/tasks/mriqc/workflows/diffusion/output.py index d7346be..d234056 100644 --- a/pydra/tasks/mriqc/workflows/diffusion/output.py +++ b/pydra/tasks/mriqc/workflows/diffusion/output.py @@ -1,38 +1,47 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2023 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -"""Writing out diffusion reportlets.""" -from mriqc import config -from mriqc.interfaces import DerivativesDataSink - -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu -from nireports.interfaces.reporting.base import ( +import attrs +import logging +from pathlib import Path +from pydra.engine import Workflow +from pydra.engine.task import FunctionTask +from pydra.tasks.nireports.interfaces.dmri import DWIHeatmap +from pydra.tasks.nireports.interfaces.reporting.base import ( SimpleBeforeAfterRPT as SimpleBeforeAfter, ) -from nireports.interfaces.dmri import DWIHeatmap - - -def init_dwi_report_wf(name="dwi_report_wf"): +import typing as ty + + +logger = logging.getLogger(__name__) + + +def init_dwi_report_wf( + brain_mask=attrs.NOTHING, + epi_mean=attrs.NOTHING, + epi_parc=attrs.NOTHING, + exec_verbose_reports=False, + exec_work_dir=None, + fd_thres=attrs.NOTHING, + hmc_epi=attrs.NOTHING, + hmc_fd=attrs.NOTHING, + in_avgmap=attrs.NOTHING, + in_bdict=attrs.NOTHING, + in_dvars=attrs.NOTHING, + in_epi=attrs.NOTHING, + in_fa=attrs.NOTHING, + in_fft=attrs.NOTHING, + in_md=attrs.NOTHING, + in_parcellation=attrs.NOTHING, + in_ras=attrs.NOTHING, + in_spikes=attrs.NOTHING, + in_stdmap=attrs.NOTHING, + meta_sidecar=attrs.NOTHING, + name="dwi_report_wf", + noise_floor=attrs.NOTHING, + outliers=attrs.NOTHING, + wf_biggest_file_gb=1, + wf_fd_thres=0.2, + wf_fft_spikes_detector=False, + wf_species="human", +): """ Write out individual reportlets. 
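NOTE: the `noise_floor` threaded into the report workflow below is produced by `_estimate_sigma` above, i.e. the median of the dwidenoise noise map within the brain mask. A toy demonstration with synthetic NIfTI files (illustrative values only):

    import nibabel as nb
    import numpy as np
    from pydra.tasks.mriqc.workflows.diffusion.base import _estimate_sigma

    rng = np.random.default_rng(0)
    nb.Nifti1Image(rng.normal(5.0, 1.0, (8, 8, 8)).astype("f4"), np.eye(4)).to_filename("noise.nii.gz")
    nb.Nifti1Image(np.ones((8, 8, 8), dtype="uint8"), np.eye(4)).to_filename("mask.nii.gz")
    print(_estimate_sigma("noise.nii.gz", "mask.nii.gz"))  # ~5.0: median inside the mask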
@@ -44,332 +53,221 @@ def init_dwi_report_wf(name="dwi_report_wf"): wf = init_dwi_report_wf() """ - from nireports.interfaces import FMRISummary - from niworkflows.interfaces.morphology import BinaryDilation, BinarySubtraction - - from nireports.interfaces import PlotMosaic, PlotSpikes - - # from mriqc.interfaces.reports import IndividualReport - - verbose = config.execution.verbose_reports - mem_gb = config.workflow.biggest_file_gb - reportlets_dir = config.execution.work_dir / "reportlets" - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_epi", - "brainmask", - "in_avgmap", - "in_stdmap", - "in_shells", - "in_fa", - "in_md", - "in_parcellation", - "in_bdict", - "in_noise", - "name_source", - ] - ), - name="inputnode", + from pydra.tasks.nireports.interfaces import FMRISummary, PlotMosaic, PlotSpikes + from pydra.tasks.niworkflows.interfaces.morphology import ( + BinaryDilation, + BinarySubtraction, ) - estimate_sigma = pe.Node( - niu.Function(function=_estimate_sigma), - name="estimate_sigma", + # from mriqc.interfaces.reports import IndividualReport + if exec_work_dir is None: + exec_work_dir = Path.cwd() + + verbose = exec_verbose_reports + mem_gb = wf_biggest_file_gb + reportlets_dir = exec_work_dir / "reportlets" + workflow = Workflow( + name=name, + input_spec={ + "brain_mask": ty.Any, + "epi_mean": ty.Any, + "epi_parc": ty.Any, + "fd_thres": ty.Any, + "hmc_epi": ty.Any, + "hmc_fd": ty.Any, + "in_avgmap": ty.Any, + "in_bdict": ty.Any, + "in_dvars": ty.Any, + "in_epi": ty.Any, + "in_fa": ty.Any, + "in_fft": ty.Any, + "in_md": ty.Any, + "in_parcellation": ty.Any, + "in_ras": ty.Any, + "in_spikes": ty.Any, + "in_stdmap": ty.Any, + "meta_sidecar": ty.Any, + "noise_floor": ty.Any, + "outliers": ty.Any, + }, + output_spec={ + "bmask_report": ty.Any, + "carpet_report": ty.Any, + "fa_report": ty.Any, + "heatmap_report": ty.Any, + "md_report": ty.Any, + "noise_report": ty.Any, + "snr_report": ty.Any, + "spikes_report": ty.Any, + }, + brain_mask=brain_mask, + epi_mean=epi_mean, + epi_parc=epi_parc, + fd_thres=fd_thres, + hmc_epi=hmc_epi, + hmc_fd=hmc_fd, + in_avgmap=in_avgmap, + in_bdict=in_bdict, + in_dvars=in_dvars, + in_epi=in_epi, + in_fa=in_fa, + in_fft=in_fft, + in_md=in_md, + in_parcellation=in_parcellation, + in_ras=in_ras, + in_spikes=in_spikes, + in_stdmap=in_stdmap, + meta_sidecar=meta_sidecar, + noise_floor=noise_floor, + outliers=outliers, ) # Set FD threshold - # inputnode.inputs.fd_thres = config.workflow.fd_thres - - mosaic_fa = pe.Node( - PlotMosaic(cmap="Greys_r"), - name="mosaic_fa", + # inputnode.inputs.fd_thres = wf_fd_thres + workflow.add( + PlotMosaic( + cmap="Greys_r", + bbox_mask_file=workflow.lzin.brain_mask, + in_file=workflow.lzin.in_fa, + name="mosaic_fa", + ) ) - mosaic_md = pe.Node( - PlotMosaic(cmap="Greys_r"), - name="mosaic_md", + workflow.add( + PlotMosaic( + cmap="Greys_r", + bbox_mask_file=workflow.lzin.brain_mask, + in_file=workflow.lzin.in_md, + name="mosaic_md", + ) ) - - mosaic_snr = pe.MapNode( + workflow.add( SimpleBeforeAfter( - fixed_params={"cmap": "viridis"}, - moving_params={"cmap": "Greys_r"}, - before_label="Average", after_label="Standard Deviation", + before_label="Average", dismiss_affine=True, - ), - name="mosaic_snr", - iterfield=["before", "after"], + fixed_params={"cmap": "viridis"}, + moving_params={"cmap": "Greys_r"}, + after=workflow.lzin.in_stdmap, + before=workflow.lzin.in_avgmap, + wm_seg=workflow.lzin.brain_mask, + name="mosaic_snr", + ) ) - - mosaic_noise = pe.MapNode( + 
workflow.add( PlotMosaic( - only_noise=True, cmap="viridis_r", - ), - name="mosaic_noise", - iterfield=["in_file"], - ) - - if config.workflow.species.lower() in ("rat", "mouse"): - mosaic_noise.inputs.view = ["coronal", "axial"] - mosaic_fa.inputs.view = ["coronal", "axial"] - mosaic_md.inputs.view = ["coronal", "axial"] - - ds_report_snr = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="avgstd", - datatype="figures", - allowed_entities=("bval",), - ), - name="ds_report_snr", - run_without_submitting=True, - iterfield=["in_file", "bval"], - ) - - ds_report_noise = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="background", - datatype="figures", - allowed_entities=("bval",), - ), - name="ds_report_noise", - run_without_submitting=True, - iterfield=["in_file", "bval"], - ) - - ds_report_fa = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="fa", - datatype="figures", - ), - name="ds_report_fa", - run_without_submitting=True, - ) - - ds_report_md = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="ad", - datatype="figures", - ), - name="ds_report_md", - run_without_submitting=True, + only_noise=True, + in_file=workflow.lzin.in_avgmap, + name="mosaic_noise", + ) ) + if wf_species.lower() in ("rat", "mouse"): + workflow.mosaic_noise.inputs.view = ["coronal", "axial"] + workflow.mosaic_fa.inputs.view = ["coronal", "axial"] + workflow.mosaic_md.inputs.view = ["coronal", "axial"] def _gen_entity(inlist): return ["00000"] + [f"{int(round(bval, 0)):05d}" for bval in inlist] # fmt: off - workflow.connect([ - (inputnode, mosaic_snr, [("in_avgmap", "before"), - ("in_stdmap", "after"), - ("brainmask", "wm_seg")]), - (inputnode, mosaic_noise, [("in_avgmap", "in_file")]), - (inputnode, mosaic_fa, [("in_fa", "in_file"), - ("brainmask", "bbox_mask_file")]), - (inputnode, mosaic_md, [("in_md", "in_file"), - ("brainmask", "bbox_mask_file")]), - (inputnode, ds_report_snr, [("name_source", "source_file"), - (("in_shells", _gen_entity), "bval")]), - (inputnode, ds_report_noise, [("name_source", "source_file"), - (("in_shells", _gen_entity), "bval")]), - (inputnode, ds_report_fa, [("name_source", "source_file")]), - (inputnode, ds_report_md, [("name_source", "source_file")]), - (mosaic_snr, ds_report_snr, [("out_report", "in_file")]), - (mosaic_noise, ds_report_noise, [("out_file", "in_file")]), - (mosaic_fa, ds_report_fa, [("out_file", "in_file")]), - (mosaic_md, ds_report_md, [("out_file", "in_file")]), - ]) - # fmt: on - get_wm = pe.Node(niu.Function(function=_get_wm), name="get_wm") - plot_heatmap = pe.Node( - DWIHeatmap(scalarmap_label="Shell-wise Fractional Anisotropy (FA)"), - name="plot_heatmap", + + workflow.set_output([('snr_report', workflow.mosaic_snr.lzout.out_report)]) + workflow.set_output([('noise_report', workflow.mosaic_noise.lzout.out_file)]) + workflow.set_output([('fa_report', workflow.mosaic_fa.lzout.out_file)]) + workflow.set_output([('md_report', workflow.mosaic_md.lzout.out_file)]) + # fmt: on + workflow.add( + FunctionTask(func=_get_wm, in_file=workflow.lzin.in_parcellation, name="get_wm") ) - ds_report_hm = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="heatmap", - datatype="figures", - ), - name="ds_report_hm", - run_without_submitting=True, + workflow.add( + DWIHeatmap( + scalarmap_label="Shell-wise Fractional Anisotropy (FA)", + b_indices=workflow.lzin.in_bdict, + in_file=workflow.lzin.in_epi, + mask_file=workflow.get_wm.lzout.out, + 
scalarmap=workflow.lzin.in_fa,
+            sigma=workflow.lzin.noise_floor,
+            name="plot_heatmap",
+        )
     )

     # fmt: off
-    workflow.connect([
-        (inputnode, get_wm, [("in_parcellation", "in_file")]),
-        (inputnode, plot_heatmap, [("in_epi", "in_file"),
-                                   ("in_fa", "scalarmap"),
-                                   ("in_bdict", "b_indices")]),
-        (inputnode, ds_report_hm, [("name_source", "source_file")]),
-        (inputnode, estimate_sigma, [("in_noise", "in_file"),
-                                     ("brainmask", "mask")]),
-        (estimate_sigma, plot_heatmap, [("out", "sigma")]),
-        (get_wm, plot_heatmap, [("out", "mask_file")]),
-        (plot_heatmap, ds_report_hm, [("out_file", "in_file")]),
-
-    ])
+    workflow.set_output([('heatmap_report', workflow.plot_heatmap.lzout.out_file)])
     # fmt: on

-    if True:
-        return workflow
-
     # Generate crown mask
     # Create the crown mask
-    dilated_mask = pe.Node(BinaryDilation(), name="dilated_mask")
-    subtract_mask = pe.Node(BinarySubtraction(), name="subtract_mask")
-    parcels = pe.Node(niu.Function(function=_carpet_parcellation), name="parcels")
-
-    bigplot = pe.Node(FMRISummary(), name="BigPlot", mem_gb=mem_gb * 3.5)
-
-    ds_report_carpet = pe.Node(
-        DerivativesDataSink(
-            base_directory=reportlets_dir,
-            desc="carpet",
-            datatype="figures",
-        ),
-        name="ds_report_carpet",
-        run_without_submitting=True,
+    workflow.add(BinaryDilation(in_mask=workflow.lzin.brain_mask, name="dilated_mask"))
+    workflow.add(
+        BinarySubtraction(
+            in_base=workflow.dilated_mask.lzout.out_mask,
+            in_subtract=workflow.lzin.brain_mask,
+            name="subtract_mask",
+        )
+    )
+    workflow.add(
+        FunctionTask(
+            func=_carpet_parcellation,
+            crown_mask=workflow.subtract_mask.lzout.out_mask,
+            segmentation=workflow.lzin.epi_parc,
+            name="parcels",
+        )
+    )
+    # Extract the RepetitionTime from the metadata dict before it reaches
+    # FMRISummary; the nipype original applied _get_tr on the connection, so a
+    # bare lazy reference would hand over the whole sidecar dict instead.
+    workflow.add(
+        FunctionTask(
+            func=_get_tr,
+            meta_dict=workflow.lzin.meta_sidecar,
+            name="get_tr",
+        )
+    )
+    workflow.add(
+        FMRISummary(
+            dvars=workflow.lzin.in_dvars,
+            fd=workflow.lzin.hmc_fd,
+            fd_thres=workflow.lzin.fd_thres,
+            in_func=workflow.lzin.hmc_epi,
+            in_segm=workflow.parcels.lzout.out,
+            outliers=workflow.lzin.outliers,
+            tr=workflow.get_tr.lzout.out,
+            name="bigplot",
+        )
     )

     # fmt: off
-    workflow.connect([
-        # (inputnode, rnode, [("in_iqms", "in_iqms")]),
-        (inputnode, bigplot, [("hmc_epi", "in_func"),
-                              ("hmc_fd", "fd"),
-                              ("fd_thres", "fd_thres"),
-                              ("in_dvars", "dvars"),
-                              ("outliers", "outliers"),
-                              (("meta_sidecar", _get_tr), "tr")]),
-        (inputnode, parcels, [("epi_parc", "segmentation")]),
-        (inputnode, dilated_mask, [("brainmask", "in_mask")]),
-        (inputnode, subtract_mask, [("brainmask", "in_subtract")]),
-        (dilated_mask, subtract_mask, [("out_mask", "in_base")]),
-        (subtract_mask, parcels, [("out_mask", "crown_mask")]),
-        (parcels, bigplot, [("out", "in_segm")]),
-        (inputnode, ds_report_carpet, [("name_source", "source_file")]),
-        (bigplot, ds_report_carpet, [("out_file", "in_file")]),
-    ])
+    workflow.set_output([('carpet_report', workflow.bigplot.lzout.out_file)])
     # fmt: on
-
-    if config.workflow.fft_spikes_detector:
-        mosaic_spikes = pe.Node(
+    # Kept unconditional: the static output spec always declares spikes_report.
+    if True:  # wf_fft_spikes_detector:
+        workflow.add(
             PlotSpikes(
-                out_file="plot_spikes.svg",
                 cmap="viridis",
+                out_file="plot_spikes.svg",
                 title="High-Frequency spikes",
-            ),
-            name="PlotSpikes",
+                name="mosaic_spikes",
+            )
         )
-
-        ds_report_spikes = pe.Node(
-            DerivativesDataSink(
-                base_directory=reportlets_dir,
-                desc="spikes",
-                datatype="figures",
-            ),
-            name="ds_report_spikes",
-            run_without_submitting=True,
-        )
-
     # fmt: off
-    workflow.connect([
-        (inputnode, ds_report_spikes, [("name_source", "source_file")]),
-        (inputnode, mosaic_spikes, [("in_ras", "in_file"),
-                                    ("in_spikes", "in_spikes"),
-                                    ("in_fft", 
"in_fft")]), - (mosaic_spikes, ds_report_spikes, [("out_file", "in_file")]), - ]) + pass + workflow.mosaic_spikes.inputs.in_file = workflow.lzin.in_ras + workflow.mosaic_spikes.inputs.in_spikes = workflow.lzin.in_spikes + workflow.mosaic_spikes.inputs.in_fft = workflow.lzin.in_fft + workflow.set_output([('spikes_report', workflow.mosaic_spikes.lzout.out_file)]) # fmt: on - - if not verbose: + if False: # not verbose: return workflow - # Verbose-reporting goes here - from nireports.interfaces import PlotContours - - mosaic_zoom = pe.Node( - PlotMosaic( - cmap="Greys_r", - ), - name="PlotMosaicZoomed", - ) + from pydra.tasks.nireports.interfaces import PlotContours - plot_bmask = pe.Node( + workflow.add( PlotContours( - display_mode="y" if config.workflow.species.lower() in ("rat", "mouse") else "z", - levels=[0.5], colors=["r"], cut_coords=10, + display_mode="y" if wf_species.lower() in ("rat", "mouse") else "z", + levels=[0.5], out_file="bmask", - ), - name="PlotBrainmask", - ) - - ds_report_zoomed = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="zoomed", - datatype="figures", - ), - name="ds_report_zoomed", - run_without_submitting=True, - ) - - ds_report_background = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="background", - datatype="figures", - ), - name="ds_report_background", - run_without_submitting=True, - ) - - ds_report_bmask = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="brainmask", - datatype="figures", - ), - name="ds_report_bmask", - run_without_submitting=True, - ) - - ds_report_norm = pe.Node( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="norm", - datatype="figures", - ), - name="ds_report_norm", - run_without_submitting=True, + in_contours=workflow.lzin.brain_mask, + in_file=workflow.lzin.epi_mean, + name="plot_bmask", + ) ) # fmt: off - workflow.connect([ - (inputnode, ds_report_norm, [("mni_report", "in_file"), - ("name_source", "source_file")]), - (inputnode, plot_bmask, [("epi_mean", "in_file"), - ("brainmask", "in_contours")]), - (inputnode, mosaic_zoom, [("epi_mean", "in_file"), - ("brainmask", "bbox_mask_file")]), - (inputnode, mosaic_noise, [("epi_mean", "in_file")]), - (inputnode, ds_report_zoomed, [("name_source", "source_file")]), - (inputnode, ds_report_background, [("name_source", "source_file")]), - (inputnode, ds_report_bmask, [("name_source", "source_file")]), - (mosaic_zoom, ds_report_zoomed, [("out_file", "in_file")]), - (mosaic_noise, ds_report_background, [("out_file", "in_file")]), - (plot_bmask, ds_report_bmask, [("out_file", "in_file")]), - ]) + workflow.set_output([('bmask_report', workflow.plot_bmask.lzout.out_file)]) # fmt: on return workflow @@ -378,11 +276,10 @@ def _gen_entity(inlist): def _carpet_parcellation(segmentation, crown_mask): """Generate the union of two masks.""" from pathlib import Path - import numpy as np import nibabel as nb + import numpy as np img = nb.load(segmentation) - lut = np.zeros((256,), dtype="uint8") lut[100:201] = 1 # Ctx GM lut[30:99] = 2 # dGM @@ -391,7 +288,6 @@ def _carpet_parcellation(segmentation, crown_mask): # Apply lookup table seg = lut[np.asanyarray(img.dataobj, dtype="uint16")] seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5 - outimg = img.__class__(seg.astype("uint8"), img.affine, img.header) outimg.set_data_dtype("uint8") out_file = Path("segments.nii.gz").absolute() @@ -400,14 +296,16 @@ def _carpet_parcellation(segmentation, crown_mask): def _get_tr(meta_dict): + return 
meta_dict.get("RepetitionTime", None) def _get_wm(in_file, radius=2): + from pathlib import Path - import numpy as np import nibabel as nb - from nipype.utils.filemanip import fname_presuffix + import numpy as np + from pydra.tasks.mriqc.nipype_ports.utils.filemanip import fname_presuffix from scipy import ndimage as ndi from skimage.morphology import ball @@ -415,7 +313,6 @@ def _get_wm(in_file, radius=2): hdr = parc.header.copy() data = np.array(parc.dataobj, dtype=hdr.get_data_dtype()) wm_mask = ndi.binary_erosion((data == 1) | (data == 2), ball(radius)) - hdr.set_data_dtype(np.uint8) out_wm = fname_presuffix(in_file, suffix="wm", newpath=str(Path.cwd())) parc.__class__( @@ -424,14 +321,3 @@ def _get_wm(in_file, radius=2): hdr, ).to_filename(out_wm) return out_wm - - -def _estimate_sigma(in_file, mask): - import numpy as np - import nibabel as nb - - msk = np.asanyarray(nb.load(mask).dataobj) > 0.5 - - return float( - np.median(nb.load(in_file).get_fdata()[msk]) - ) diff --git a/pydra/tasks/mriqc/workflows/functional/__init__.py b/pydra/tasks/mriqc/workflows/functional/__init__.py index e69de29..81e4f91 100644 --- a/pydra/tasks/mriqc/workflows/functional/__init__.py +++ b/pydra/tasks/mriqc/workflows/functional/__init__.py @@ -0,0 +1,2 @@ +from .base import compute_iqms, epi_mni_align, fmri_bmsk_workflow, fmri_qc_workflow, hmc +from .output import _carpet_parcellation, _get_tr, init_func_report_wf, spikes_mask diff --git a/pydra/tasks/mriqc/workflows/functional/base.py b/pydra/tasks/mriqc/workflows/functional/base.py index abe4ed1..90f8854 100644 --- a/pydra/tasks/mriqc/workflows/functional/base.py +++ b/pydra/tasks/mriqc/workflows/functional/base.py @@ -1,274 +1,345 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -""" -Functional workflow -=================== - -.. image :: _static/functional_workflow_source.svg - -The functional workflow follows the following steps: - -#. Sanitize (revise data types and xforms) input data, read - associated metadata and discard non-steady state frames. -#. :abbr:`HMC (head-motion correction)` based on ``3dvolreg`` from - AFNI -- :py:func:`hmc`. -#. Skull-stripping of the time-series (AFNI) -- - :py:func:`fmri_bmsk_workflow`. -#. Calculate mean time-series, and :abbr:`tSNR (temporal SNR)`. -#. Spatial Normalization to MNI (ANTs) -- :py:func:`epi_mni_align` -#. Extraction of IQMs -- :py:func:`compute_iqms`. -#. Individual-reports generation -- - :py:func:`~mriqc.workflows.functional.output.init_func_report_wf`. - -This workflow is orchestrated by :py:func:`fmri_qc_workflow`. 
-""" -from mriqc import config -from nipype.interfaces import utility as niu -from nipype.pipeline import engine as pe -from niworkflows.utils.connections import pop_file as _pop - -from mriqc.interfaces.datalad import DataladIdentityInterface -from mriqc.workflows.functional.output import init_func_report_wf - - -def fmri_qc_workflow(name="funcMRIQC"): +import attrs +import logging +from pydra.tasks.mriqc.workflows.functional.output import init_func_report_wf +from pydra.tasks.niworkflows.utils.connections import pop_file as _pop +from pathlib import Path +from pydra.engine import Workflow +from pydra.engine.specs import BaseSpec, SpecInfo +from pydra.engine.task import FunctionTask +import pydra.mark +from pydra.tasks.niworkflows.utils.connections import pop_file as _pop +import typing as ty + + +logger = logging.getLogger(__name__) + + +def fmri_bmsk_workflow(in_file=attrs.NOTHING, name="fMRIBrainMask"): """ - Initialize the (f)MRIQC workflow. + Compute a brain mask for the input :abbr:`fMRI (functional MRI)` dataset. .. workflow:: - import os.path as op - from mriqc.workflows.functional.base import fmri_qc_workflow + from mriqc.workflows.functional.base import fmri_bmsk_workflow from mriqc.testing import mock_config with mock_config(): - wf = fmri_qc_workflow() + wf = fmri_bmsk_workflow() + """ - from nipype.algorithms.confounds import TSNR, NonSteadyStateDetector - from nipype.interfaces.afni import TStat - from niworkflows.interfaces.bids import ReadSidecarJSON - from niworkflows.interfaces.header import SanitizeImage - from mriqc.messages import BUILDING_WORKFLOW - from mriqc.interfaces.functional import SelectEcho + from pydra.tasks.afni.auto import Automask - workflow = pe.Workflow(name=name) + workflow = Workflow( + name=name, + input_spec={"in_file": ty.Any}, + output_spec={"out_file": ty.Any}, + in_file=in_file, + ) - mem_gb = config.workflow.biggest_file_gb + workflow.add( + Automask(outputtype="NIFTI_GZ", in_file=workflow.lzin.in_file, name="afni_msk") + ) + # Connect brain mask extraction + # fmt: off + workflow.set_output([('out_file', workflow.afni_msk.lzout.out_file)]) + # fmt: on - dataset = config.workflow.inputs.get("bold", []) + return workflow - message = BUILDING_WORKFLOW.format( - modality="functional", - detail=( - f"for {len(dataset)} BOLD runs." - if len(dataset) > 2 - else f"({' and '.join(('<%s>' % v for v in dataset))})." - ), - ) - config.loggers.workflow.info(message) - # Define workflow, inputs and outputs - # 0. Get data, put it in RAS orientation - inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode") - inputnode.iterables = [("in_file", dataset)] +def epi_mni_align( + epi_mask=attrs.NOTHING, + epi_mean=attrs.NOTHING, + exec_ants_float=False, + exec_debug=False, + name="SpatialNormalization", + nipype_nprocs=12, + nipype_omp_nthreads=12, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): + """ + Estimate the transform that maps the EPI space into MNI152NLin2009cAsym. - datalad_get = pe.MapNode( - DataladIdentityInterface(fields=["in_file"], dataset_path=config.execution.bids_dir), - name="datalad_get", - iterfield=["in_file"], - ) + The input epi_mean is the averaged and brain-masked EPI timeseries - outputnode = pe.Node( - niu.IdentityInterface(fields=["qc", "mosaic", "out_group", "out_dvars", "out_fd"]), - name="outputnode", - ) + Returns the EPI mean resampled in MNI space (for checking out registration) and + the associated "lobe" parcellation in EPI space. 
- # Get metadata - meta = pe.MapNode(ReadSidecarJSON( - index_db=config.execution.bids_database_dir - ), name="metadata", iterfield=["in_file"]) + .. workflow:: - pick_echo = pe.Node(SelectEcho(), name="pick_echo") + from mriqc.workflows.functional.base import epi_mni_align + from mriqc.testing import mock_config + with mock_config(): + wf = epi_mni_align() - non_steady_state_detector = pe.Node(NonSteadyStateDetector(), name="non_steady_state_detector") + """ + from pydra.tasks.ants.auto import ApplyTransforms, N4BiasFieldCorrection + from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( + SpatialNormalizationRPT as RobustMNINormalization, + ) + from templateflow.api import get as get_template - sanitize = pe.MapNode( - SanitizeImage(max_32bit=config.execution.float32), - name="sanitize", - mem_gb=mem_gb * 4.0, - iterfield=["in_file"], + # Get settings + testing = exec_debug + n_procs = nipype_nprocs + ants_nthreads = nipype_omp_nthreads + workflow = Workflow( + name=name, + input_spec={"epi_mask": ty.Any, "epi_mean": ty.Any}, + output_spec={"epi_mni": ty.Any, "epi_parc": ty.Any, "report": ty.Any}, + epi_mask=epi_mask, + epi_mean=epi_mean, ) - # Workflow -------------------------------------------------------- + workflow.add( + N4BiasFieldCorrection( + copy_header=True, + dimension=3, + input_image=workflow.lzin.epi_mean, + name="n4itk", + ) + ) + workflow.add( + RobustMNINormalization( + explicit_masking=False, + flavor="testing" if testing else "precise", + float=exec_ants_float, + generate_report=True, + moving="boldref", + num_threads=ants_nthreads, + reference="boldref", + template=wf_template_id, + moving_image=workflow.n4itk.lzout.output_image, + name="norm", + ) + ) + if wf_species.lower() == "human": + workflow.norm.inputs.reference_image = str( + get_template(wf_template_id, resolution=2, suffix="boldref") + ) + workflow.norm.inputs.reference_mask = str( + get_template( + wf_template_id, + resolution=2, + desc="brain", + suffix="mask", + ) + ) + # adapt some population-specific settings + else: + from nirodents.workflows.brainextraction import _bspline_grid - # 1. 
HMC: head motion correct - hmcwf = hmc(omp_nthreads=config.nipype.omp_nthreads) + workflow.n4itk.inputs.shrink_factor = 1 + workflow.n4itk.inputs.n_iterations = [50] * 4 + workflow.norm.inputs.reference_image = str( + get_template(wf_template_id, suffix="T2w") + ) + workflow.norm.inputs.reference_mask = str( + get_template( + wf_template_id, + desc="brain", + suffix="mask", + )[0] + ) + workflow.add(FunctionTask(func=_bspline_grid, name="bspline_grid")) + # fmt: off + workflow.bspline_grid.inputs.in_file = workflow.lzin.epi_mean + workflow.n4itk.inputs.args = workflow.bspline_grid.lzout.out + # fmt: on + # Warp segmentation into EPI space + workflow.add( + ApplyTransforms( + default_value=0, + dimension=3, + float=True, + interpolation="MultiLabel", + reference_image=workflow.lzin.epi_mean, + transforms=workflow.norm.lzout.inverse_composite_transform, + name="invt", + ) + ) + if wf_species.lower() == "human": + workflow.invt.inputs.input_image = str( + get_template( + wf_template_id, + resolution=1, + desc="carpet", + suffix="dseg", + ) + ) + else: + workflow.invt.inputs.input_image = str( + get_template( + wf_template_id, + suffix="dseg", + )[-1] + ) + # fmt: off + workflow.set_output([('epi_parc', workflow.invt.lzout.output_image)]) + workflow.set_output([('epi_mni', workflow.norm.lzout.warped_image)]) + workflow.set_output([('report', workflow.norm.lzout.out_report)]) + # fmt: on + if wf_species.lower() == "human": + workflow.norm.inputs.moving_mask = workflow.lzin.epi_mask - # Set HMC settings - hmcwf.inputs.inputnode.fd_radius = config.workflow.fd_radius + return workflow - # 2. Compute mean fmri - mean = pe.MapNode( - TStat(options="-mean", outputtype="NIFTI_GZ"), - name="mean", - mem_gb=mem_gb * 1.5, - iterfield=["in_file"], - ) - # Compute TSNR using nipype implementation - tsnr = pe.MapNode( - TSNR(), - name="compute_tsnr", - mem_gb=mem_gb * 2.5, - iterfield=["in_file"], - ) +def hmc( + fd_radius=attrs.NOTHING, + in_file=attrs.NOTHING, + name="fMRI_HMC", + omp_nthreads=None, + wf_biggest_file_gb=1, + wf_deoblique=False, + wf_despike=False, +): + """ + Create a :abbr:`HMC (head motion correction)` workflow for fMRI. - # EPI to MNI registration - ema = epi_mni_align() + .. workflow:: - # 7. 
Compute IQMs - iqmswf = compute_iqms() - # Reports - func_report_wf = init_func_report_wf() + from mriqc.workflows.functional.base import hmc + from mriqc.testing import mock_config + with mock_config(): + wf = hmc() - # fmt: off + """ + from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( + FramewiseDisplacement, + ) + from pydra.tasks.afni.auto import Despike, Refit, Volreg + + mem_gb = wf_biggest_file_gb + workflow = Workflow( + name=name, + input_spec={"fd_radius": ty.Any, "in_file": ty.Any}, + output_spec={"mpars": ty.Any, "out_fd": ty.Any, "out_file": ty.Any}, + fd_radius=fd_radius, + in_file=in_file, + ) - workflow.connect([ - (inputnode, datalad_get, [("in_file", "in_file")]), - (datalad_get, meta, [("in_file", "in_file")]), - (datalad_get, pick_echo, [("in_file", "in_files")]), - (datalad_get, sanitize, [("in_file", "in_file")]), - (meta, pick_echo, [("out_dict", "metadata")]), - (pick_echo, non_steady_state_detector, [("out_file", "in_file")]), - (non_steady_state_detector, sanitize, [("n_volumes_to_discard", "n_volumes_to_discard")]), - (sanitize, hmcwf, [("out_file", "inputnode.in_file")]), - (hmcwf, mean, [("outputnode.out_file", "in_file")]), - (hmcwf, tsnr, [("outputnode.out_file", "in_file")]), - (mean, ema, [(("out_file", _pop), "inputnode.epi_mean")]), - # Feed IQMs computation - (meta, iqmswf, [("out_dict", "inputnode.metadata"), - ("subject", "inputnode.subject"), - ("session", "inputnode.session"), - ("task", "inputnode.task"), - ("acquisition", "inputnode.acquisition"), - ("reconstruction", "inputnode.reconstruction"), - ("run", "inputnode.run")]), - (datalad_get, iqmswf, [("in_file", "inputnode.in_file")]), - (sanitize, iqmswf, [("out_file", "inputnode.in_ras")]), - (mean, iqmswf, [("out_file", "inputnode.epi_mean")]), - (hmcwf, iqmswf, [("outputnode.out_file", "inputnode.hmc_epi"), - ("outputnode.out_fd", "inputnode.hmc_fd"), - ("outputnode.mpars", "inputnode.mpars")]), - (tsnr, iqmswf, [("tsnr_file", "inputnode.in_tsnr")]), - (non_steady_state_detector, iqmswf, [("n_volumes_to_discard", "inputnode.exclude_index")]), - # Feed reportlet generation - (inputnode, func_report_wf, [ - ("in_file", "inputnode.name_source"), - ]), - (sanitize, func_report_wf, [("out_file", "inputnode.in_ras")]), - (mean, func_report_wf, [("out_file", "inputnode.epi_mean")]), - (tsnr, func_report_wf, [("stddev_file", "inputnode.in_stddev")]), - (hmcwf, func_report_wf, [ - ("outputnode.out_fd", "inputnode.hmc_fd"), - ("outputnode.out_file", "inputnode.hmc_epi"), - ]), - (ema, func_report_wf, [ - ("outputnode.epi_parc", "inputnode.epi_parc"), - ("outputnode.report", "inputnode.mni_report"), - ]), - (iqmswf, func_report_wf, [ - ("outputnode.out_file", "inputnode.in_iqms"), - ("outputnode.out_dvars", "inputnode.in_dvars"), - ("outputnode.outliers", "inputnode.outliers"), - ]), - (meta, func_report_wf, [("out_dict", "inputnode.meta_sidecar")]), - (hmcwf, outputnode, [("outputnode.out_fd", "out_fd")]), - ]) + # calculate hmc parameters + workflow.add( + Volreg( + args="-Fourier -twopass", outputtype="NIFTI_GZ", zpad=4, name="estimate_hm" + ) + ) + # Compute the frame-wise displacement + workflow.add( + FramewiseDisplacement( + normalize=False, + parameter_source="AFNI", + in_file=workflow.estimate_hm.lzout.oned_file, + radius=workflow.lzin.fd_radius, + name="fdnode", + ) + ) + # Apply transforms to other echos + workflow.add( + FunctionTask( + func=_apply_transforms, + input_spec=SpecInfo( + name="FunctionIn", + bases=(BaseSpec,), + fields=[("in_file", ty.Any), ("in_xfm", ty.Any)], + 
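                # NOTE (editorial assumption): no output_spec is declared for this
                # FunctionTask, so Pydra falls back to a single default output named
                # "out" -- which is what set_output() wires up below as
                # workflow.apply_hmc.lzout.out.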
), + in_xfm=workflow.estimate_hm.lzout.oned_matrix_save, + name="apply_hmc", + ) + ) + # fmt: off + workflow.set_output([('out_file', workflow.apply_hmc.lzout.out)]) + workflow.set_output([('mpars', workflow.estimate_hm.lzout.oned_file)]) + workflow.set_output([('out_fd', workflow.fdnode.lzout.out_file)]) # fmt: on - - if config.workflow.fft_spikes_detector: + if not (wf_despike or wf_deoblique): # fmt: off - workflow.connect([ - (iqmswf, func_report_wf, [ - ("outputnode.out_spikes", "inputnode.in_spikes"), - ("outputnode.out_fft", "inputnode.in_fft"), - ]), - ]) + workflow.estimate_hm.inputs.in_file = workflow.lzin.in_file + workflow.apply_hmc.inputs.in_file = workflow.lzin.in_file # fmt: on + return workflow + # despiking, and deoblique + workflow.add(Refit(deoblique=True, name="deoblique_node")) + workflow.add(Despike(outputtype="NIFTI_GZ", name="despike_node")) + if wf_despike and wf_deoblique: + # fmt: off + workflow.despike_node.inputs.in_file = workflow.lzin.in_file + workflow.deoblique_node.inputs.in_file = workflow.despike_node.lzout.out_file - # population specific changes to brain masking - if config.workflow.species == "human": - from mriqc.workflows.shared import synthstrip_wf as fmri_bmsk_workflow + @pydra.mark.task + def deoblique_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: + return _pop(in_) - skullstrip_epi = fmri_bmsk_workflow(omp_nthreads=config.nipype.omp_nthreads) - # fmt: off - workflow.connect([ - (mean, skullstrip_epi, [(("out_file", _pop), "inputnode.in_files")]), - (skullstrip_epi, ema, [("outputnode.out_mask", "inputnode.epi_mask")]), - (skullstrip_epi, iqmswf, [("outputnode.out_mask", "inputnode.brainmask")]), - (skullstrip_epi, func_report_wf, [("outputnode.out_mask", "inputnode.brainmask")]), - ]) + workflow.add(deoblique_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.deoblique_node.lzout.out_file, name="deoblique_node_out_file_to_estimate_hm_in_file_callable")) + + workflow.estimate_hm.inputs.in_file = workflow.deoblique_node_out_file_to_estimate_hm_in_file_callable.lzout.out + workflow.apply_hmc.inputs.in_file = workflow.deoblique_node.lzout.out_file # fmt: on - else: - from mriqc.workflows.anatomical.base import _binarize + elif wf_despike: + # fmt: off + workflow.despike_node.inputs.in_file = workflow.lzin.in_file - binarise_labels = pe.Node( - niu.Function( - input_names=["in_file", "threshold"], - output_names=["out_file"], - function=_binarize, - ), - name="binarise_labels", - ) + @pydra.mark.task + def despike_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: + return _pop(in_) - # fmt: off - workflow.connect([ - (ema, binarise_labels, [("outputnode.epi_parc", "in_file")]), - (binarise_labels, iqmswf, [("out_file", "inputnode.brainmask")]), - (binarise_labels, func_report_wf, [("out_file", "inputnode.brainmask")]) - ]) + workflow.add(despike_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.despike_node.lzout.out_file, name="despike_node_out_file_to_estimate_hm_in_file_callable")) + + workflow.estimate_hm.inputs.in_file = workflow.despike_node_out_file_to_estimate_hm_in_file_callable.lzout.out + workflow.apply_hmc.inputs.in_file = workflow.despike_node.lzout.out_file # fmt: on + elif wf_deoblique: + # fmt: off + workflow.deoblique_node.inputs.in_file = workflow.lzin.in_file - # Upload metrics - if not config.execution.no_sub: - from mriqc.interfaces.webapi import UploadIQMs + @pydra.mark.task + def deoblique_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: + return 
_pop(in_) - upldwf = pe.MapNode(UploadIQMs( - endpoint=config.execution.webapi_url, - auth_token=config.execution.webapi_token, - strict=config.execution.upload_strict, - ), name="UploadMetrics", iterfield=["in_iqms"]) + workflow.add(deoblique_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.deoblique_node.lzout.out_file, name="deoblique_node_out_file_to_estimate_hm_in_file_callable")) - # fmt: off - workflow.connect([ - (iqmswf, upldwf, [("outputnode.out_file", "in_iqms")]), - ]) + workflow.estimate_hm.inputs.in_file = workflow.deoblique_node_out_file_to_estimate_hm_in_file_callable.lzout.out + workflow.apply_hmc.inputs.in_file = workflow.deoblique_node.lzout.out_file # fmt: on + else: + raise NotImplementedError return workflow -def compute_iqms(name="ComputeIQMs"): +def _apply_transforms(in_file, in_xfm): + + from pathlib import Path + from nitransforms.linear import load + from pydra.tasks.mriqc.utils.bids import derive_bids_fname + + realigned = load(in_xfm, fmt="afni", reference=in_file, moving=in_file).apply( + in_file + ) + out_file = derive_bids_fname( + in_file, + entity="desc-realigned", + newpath=Path.cwd(), + absolute=True, + ) + realigned.to_filename(out_file) + return str(out_file) + + +def compute_iqms( + brainmask=attrs.NOTHING, + epi_mean=attrs.NOTHING, + fd_thres=attrs.NOTHING, + hmc_epi=attrs.NOTHING, + hmc_fd=attrs.NOTHING, + in_ras=attrs.NOTHING, + in_tsnr=attrs.NOTHING, + name="ComputeIQMs", + wf_biggest_file_gb=1, + wf_fft_spikes_detector=False, +): """ Initialize the workflow that actually computes the IQMs. @@ -280,534 +351,442 @@ def compute_iqms(name="ComputeIQMs"): wf = compute_iqms() """ - from nipype.algorithms.confounds import ComputeDVARS - from nipype.interfaces.afni import OutlierCount, QualityIndex - - from mriqc.interfaces import ( + from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ComputeDVARS + from pydra.tasks.afni.auto import OutlierCount, QualityIndex + from pydra.tasks.mriqc.interfaces import ( DerivativesDataSink, FunctionalQC, + GatherTimeseries, IQMFileSink, - GatherTimeseries - ) - from mriqc.interfaces.reports import AddProvenance - from mriqc.interfaces.transitional import GCOR - from mriqc.workflows.utils import _tofloat, get_fwhmx - - mem_gb = config.workflow.biggest_file_gb - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_file", - "in_ras", - "epi_mean", - "brainmask", - "hmc_epi", - "hmc_fd", - "fd_thres", - "in_tsnr", - "metadata", - "mpars", - "exclude_index", - "subject", - "session", - "task", - "acquisition", - "reconstruction", - "run", - ] - ), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "out_file", - "out_dvars", - "outliers", - "out_spikes", - "out_fft", - ] - ), - name="outputnode", + ) + from pydra.tasks.mriqc.interfaces.reports import AddProvenance + from pydra.tasks.mriqc.interfaces.transitional import GCOR + from pydra.tasks.mriqc.workflows.utils import _tofloat, get_fwhmx + + mem_gb = wf_biggest_file_gb + workflow = Workflow( + name=name, + input_spec={ + "brainmask": ty.Any, + "epi_mean": ty.Any, + "fd_thres": ty.Any, + "hmc_epi": ty.Any, + "hmc_fd": ty.Any, + "in_ras": ty.Any, + "in_tsnr": ty.Any, + }, + output_spec={ + "dvars": ty.Any, + "fft": ty.Any, + "out_file": ty.Any, + "outliers": ty.Any, + "spikes": ty.Any, + "spikes_num": int, + }, + brainmask=brainmask, + epi_mean=epi_mean, + fd_thres=fd_thres, + hmc_epi=hmc_epi, + hmc_fd=hmc_fd, + in_ras=in_ras, + in_tsnr=in_tsnr, ) # Set FD threshold - 
inputnode.inputs.fd_thres = config.workflow.fd_thres # Compute DVARS - dvnode = pe.MapNode( - ComputeDVARS(save_plot=False, save_all=True), - name="ComputeDVARS", - mem_gb=mem_gb * 3, - iterfield=["in_file"], + workflow.add( + ComputeDVARS( + save_all=True, + save_plot=False, + in_file=workflow.lzin.hmc_epi, + in_mask=workflow.lzin.brainmask, + name="dvnode", + ) ) - # AFNI quality measures - fwhm = pe.MapNode(get_fwhmx(), name="smoothness", iterfield=["in_file"]) - fwhm.inputs.acf = True # Only AFNI >= 16 - - outliers = pe.MapNode( - OutlierCount(fraction=True, out_file="outliers.out"), - name="outliers", - mem_gb=mem_gb * 2.5, - iterfield=["in_file"], + fwhm = get_fwhmx() + fwhm.name = "fwhm" + fwhm.inputs.in_file = workflow.lzin.epi_mean + fwhm.inputs.mask = workflow.lzin.brainmask + workflow.add(fwhm) + workflow.fwhm.inputs.acf = True # Only AFNI >= 16 + workflow.add( + OutlierCount( + fraction=True, + out_file="outliers.out", + in_file=workflow.lzin.hmc_epi, + mask=workflow.lzin.brainmask, + name="outliers", + ) ) - quality = pe.MapNode( - QualityIndex(automask=True), - out_file="quality.out", - name="quality", - mem_gb=mem_gb * 3, - iterfield=["in_file"], + workflow.add( + FunctionalQC( + fd_thres=workflow.lzin.fd_thres, + in_epi=workflow.lzin.epi_mean, + in_fd=workflow.lzin.hmc_fd, + in_hmc=workflow.lzin.hmc_epi, + in_mask=workflow.lzin.brainmask, + in_tsnr=workflow.lzin.in_tsnr, + name="measures", + ) ) - gcor = pe.MapNode(GCOR(), name="gcor", mem_gb=mem_gb * 2, iterfield=["in_file"]) + # fmt: off + workflow.set_output([('dvars', workflow.dvnode.lzout.out_all)]) - measures = pe.MapNode( - FunctionalQC(), - name="measures", - mem_gb=mem_gb * 3, - iterfield=["in_epi", "in_hmc", "in_tsnr", "in_dvars", "in_fwhm"], - ) + @pydra.mark.task + def fwhm_fwhm_to_measures_in_fwhm_callable(in_: ty.Any) -> ty.Any: + return _tofloat(in_) - timeseries = pe.MapNode( - GatherTimeseries(mpars_source="AFNI"), - name="timeseries", - mem_gb=mem_gb * 3, - iterfield=["dvars", "outliers", "quality", "fd"] - ) + workflow.add(fwhm_fwhm_to_measures_in_fwhm_callable(in_=workflow.fwhm.lzout.fwhm, name="fwhm_fwhm_to_measures_in_fwhm_callable")) - # fmt: off - workflow.connect([ - (inputnode, dvnode, [("hmc_epi", "in_file"), - ("brainmask", "in_mask")]), - (inputnode, measures, [("epi_mean", "in_epi"), - ("brainmask", "in_mask"), - ("hmc_epi", "in_hmc"), - ("hmc_fd", "in_fd"), - ("fd_thres", "fd_thres"), - ("in_tsnr", "in_tsnr")]), - (inputnode, fwhm, [("epi_mean", "in_file"), - ("brainmask", "mask")]), - (inputnode, quality, [("hmc_epi", "in_file")]), - (inputnode, outliers, [("hmc_epi", "in_file"), - ("brainmask", "mask")]), - (inputnode, gcor, [("hmc_epi", "in_file"), - ("brainmask", "mask")]), - (dvnode, measures, [("out_all", "in_dvars")]), - (fwhm, measures, [(("fwhm", _tofloat), "in_fwhm")]), - (dvnode, outputnode, [("out_all", "out_dvars")]), - (outliers, outputnode, [("out_file", "outliers")]), - (outliers, timeseries, [("out_file", "outliers")]), - (quality, timeseries, [("out_file", "quality")]), - (dvnode, timeseries, [("out_all", "dvars")]), - (inputnode, timeseries, [("hmc_fd", "fd"), ("mpars", "mpars")]), - ]) + workflow.measures.inputs.in_fwhm = workflow.fwhm_fwhm_to_measures_in_fwhm_callable.lzout.out + workflow.set_output([('outliers', workflow.outliers.lzout.out_file)]) # fmt: on - addprov = pe.MapNode( - AddProvenance(modality="bold"), - name="provenance", - run_without_submitting=True, - iterfield=["in_file"], - ) - # Save to JSON file - datasink = pe.MapNode( - IQMFileSink( - 
modality="bold", - out_dir=str(config.execution.output_dir), - dataset=config.execution.dsname, - ), - name="datasink", - run_without_submitting=True, - iterfield=["in_file", "root", "metadata", "provenance"], - ) # Save timeseries TSV file - ds_timeseries = pe.MapNode( - DerivativesDataSink( - base_directory=str(config.execution.output_dir), - suffix="timeseries" - ), - name="ds_timeseries", - run_without_submitting=True, - iterfield=["in_file", "source_file", "meta_dict"], - ) # fmt: off - workflow.connect([ - (inputnode, addprov, [("in_file", "in_file")]), - (inputnode, datasink, [("in_file", "in_file"), - ("exclude_index", "dummy_trs"), - (("subject", _pop), "subject_id"), - (("session", _pop), "session_id"), - (("task", _pop), "task_id"), - (("acquisition", _pop), "acq_id"), - (("reconstruction", _pop), "rec_id"), - (("run", _pop), "run_id"), - ("metadata", "metadata")]), - (addprov, datasink, [("out_prov", "provenance")]), - (outliers, datasink, [(("out_file", _parse_tout), "aor")]), - (gcor, datasink, [(("out", _tofloat), "gcor")]), - (quality, datasink, [(("out_file", _parse_tqual), "aqi")]), - (measures, datasink, [("out_qc", "root")]), - (datasink, outputnode, [("out_file", "out_file")]), - (inputnode, ds_timeseries, [("in_file", "source_file")]), - (timeseries, ds_timeseries, [("timeseries_file", "in_file"), - ("timeseries_metadata", "meta_dict")]), - ]) - # fmt: on - - # FFT spikes finder - if config.workflow.fft_spikes_detector: - from mriqc.workflows.utils import slice_wise_fft - - spikes_fft = pe.MapNode( - niu.Function( - input_names=["in_file"], - output_names=["n_spikes", "out_spikes", "out_fft"], - function=slice_wise_fft, - ), - name="SpikesFinderFFT", - iterfield=["in_file"], - ) - - # fmt: off - workflow.connect([ - (inputnode, spikes_fft, [("in_ras", "in_file")]), - (spikes_fft, outputnode, [("out_spikes", "out_spikes"), - ("out_fft", "out_fft")]), - (spikes_fft, datasink, [("n_spikes", "spikes_num")]) - ]) - # fmt: on - return workflow -def fmri_bmsk_workflow(name="fMRIBrainMask"): - """ - Compute a brain mask for the input :abbr:`fMRI (functional MRI)` dataset. - .. workflow:: - from mriqc.workflows.functional.base import fmri_bmsk_workflow - from mriqc.testing import mock_config - with mock_config(): - wf = fmri_bmsk_workflow() - """ - from nipype.interfaces.afni import Automask - workflow = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode") - outputnode = pe.Node(niu.IdentityInterface(fields=["out_file"]), name="outputnode") - afni_msk = pe.Node(Automask(outputtype="NIFTI_GZ"), name="afni_msk") + workflow.set_output([('out_file', workflow.measures.lzout.out_qc)]) - # Connect brain mask extraction - # fmt: off - workflow.connect([ - (inputnode, afni_msk, [("in_file", "in_file")]), - (afni_msk, outputnode, [("out_file", "out_file")]) - ]) # fmt: on - return workflow - - -def hmc(name="fMRI_HMC", omp_nthreads=None): - """ - Create a :abbr:`HMC (head motion correction)` workflow for fMRI. - - .. 
workflow:: - - from mriqc.workflows.functional.base import hmc - from mriqc.testing import mock_config - with mock_config(): - wf = hmc() - - """ - from nipype.algorithms.confounds import FramewiseDisplacement - from nipype.interfaces.afni import Despike, Refit, Volreg - - mem_gb = config.workflow.biggest_file_gb - - workflow = pe.Workflow(name=name) + # FFT spikes finder + if True: # wf_fft_spikes_detector: - disabled to ensure all outputs are generated + from pydra.tasks.mriqc.workflows.utils import slice_wise_fft + + workflow.add( + FunctionTask( + func=slice_wise_fft, + input_spec=SpecInfo( + name="FunctionIn", bases=(BaseSpec,), fields=[("in_file", ty.Any)] + ), + output_spec=SpecInfo( + name="FunctionOut", + bases=(BaseSpec,), + fields=[ + ("n_spikes", ty.Any), + ("out_spikes", ty.Any), + ("out_fft", ty.Any), + ], + ), + name="spikes_fft", + ) + ) + # fmt: off + workflow.spikes_fft.inputs.in_file = workflow.lzin.in_ras + workflow.set_output([('spikes', workflow.spikes_fft.lzout.out_spikes)]) + workflow.set_output([('fft', workflow.spikes_fft.lzout.out_fft)]) + workflow.set_output([('spikes_num', workflow.spikes_fft.lzout.n_spikes)]) + # fmt: on - inputnode = pe.Node( - niu.IdentityInterface(fields=["in_file", "fd_radius"]), - name="inputnode", - ) + return workflow - outputnode = pe.Node( - niu.IdentityInterface(fields=["out_file", "out_fd", "mpars"]), - name="outputnode", - ) - # calculate hmc parameters - estimate_hm = pe.Node( - Volreg(args="-Fourier -twopass", zpad=4, outputtype="NIFTI_GZ"), - name="estimate_hm", - mem_gb=mem_gb * 2.5, - ) +def _parse_tout(in_file): - # Compute the frame-wise displacement - fdnode = pe.Node( - FramewiseDisplacement(normalize=False, parameter_source="AFNI"), - name="ComputeFD", - ) + if isinstance(in_file, (list, tuple)): + return ( + [_parse_tout(f) for f in in_file] + if len(in_file) > 1 + else _parse_tout(in_file[0]) + ) + import numpy as np - # Apply transforms to other echos - apply_hmc = pe.MapNode( - niu.Function(function=_apply_transforms, input_names=["in_file", "in_xfm"]), - name="apply_hmc", - iterfield=["in_file"], - # NiTransforms is a memory hog, so ensure only one process is running at a time - num_threads=config.environment.cpu_count, - ) + data = np.loadtxt(in_file) # pylint: disable=no-member + return data.mean() - # fmt: off - workflow.connect([ - (inputnode, fdnode, [("fd_radius", "radius")]), - (estimate_hm, apply_hmc, [("oned_matrix_save", "in_xfm")]), - (apply_hmc, outputnode, [("out", "out_file")]), - (estimate_hm, fdnode, [("oned_file", "in_file")]), - (estimate_hm, outputnode, [("oned_file", "mpars")]), - (fdnode, outputnode, [("out_file", "out_fd")]), - ]) - # fmt: on - if not (config.workflow.despike or config.workflow.deoblique): - # fmt: off - workflow.connect([ - (inputnode, estimate_hm, [(("in_file", _pop), "in_file")]), - (inputnode, apply_hmc, [("in_file", "in_file")]), - ]) - # fmt: on - return workflow +def _parse_tqual(in_file): - # despiking, and deoblique - deoblique_node = pe.MapNode( - Refit(deoblique=True), - name="deoblique", - iterfield=["in_file"], - ) - despike_node = pe.MapNode( - Despike(outputtype="NIFTI_GZ"), - name="despike", - iterfield=["in_file"], - ) - if config.workflow.despike and config.workflow.deoblique: - # fmt: off - workflow.connect([ - (inputnode, despike_node, [("in_file", "in_file")]), - (despike_node, deoblique_node, [("out_file", "in_file")]), - (deoblique_node, estimate_hm, [(("out_file", _pop), "in_file")]), - (deoblique_node, apply_hmc, [("out_file", "in_file")]), - ]) - # fmt: 
on - elif config.workflow.despike: - # fmt: off - workflow.connect([ - (inputnode, despike_node, [("in_file", "in_file")]), - (despike_node, estimate_hm, [(("out_file", _pop), "in_file")]), - (despike_node, apply_hmc, [("out_file", "in_file")]), - ]) - # fmt: on - elif config.workflow.deoblique: - # fmt: off - workflow.connect([ - (inputnode, deoblique_node, [("in_file", "in_file")]), - (deoblique_node, estimate_hm, [(("out_file", _pop), "in_file")]), - (deoblique_node, apply_hmc, [("out_file", "in_file")]), - ]) - # fmt: on - else: - raise NotImplementedError + if isinstance(in_file, (list, tuple)): + return ( + [_parse_tqual(f) for f in in_file] + if len(in_file) > 1 + else _parse_tqual(in_file[0]) + ) + import numpy as np - return workflow + with open(in_file) as fin: + lines = fin.readlines() + return np.mean([float(line.strip()) for line in lines if not line.startswith("++")]) -def epi_mni_align(name="SpatialNormalization"): +def fmri_qc_workflow( + exec_ants_float=False, + exec_datalad_get=True, + exec_debug=False, + exec_float32=True, + exec_no_sub=False, + exec_verbose_reports=False, + exec_work_dir=None, + in_file=attrs.NOTHING, + metadata=attrs.NOTHING, + name="funcMRIQC", + nipype_nprocs=12, + nipype_omp_nthreads=12, + wf_biggest_file_gb=1, + wf_deoblique=False, + wf_despike=False, + wf_fd_radius=50, + wf_fft_spikes_detector=False, + wf_inputs=None, + wf_min_len_bold=5, + wf_species="human", + wf_template_id="MNI152NLin2009cAsym", +): """ - Estimate the transform that maps the EPI space into MNI152NLin2009cAsym. - - The input epi_mean is the averaged and brain-masked EPI timeseries - - Returns the EPI mean resampled in MNI space (for checking out registration) and - the associated "lobe" parcellation in EPI space. + Initialize the (f)MRIQC workflow. .. 
workflow:: - from mriqc.workflows.functional.base import epi_mni_align + import os.path as op + from mriqc.workflows.functional.base import fmri_qc_workflow from mriqc.testing import mock_config with mock_config(): - wf = epi_mni_align() + wf = fmri_qc_workflow() """ - from nipype.interfaces.ants import ApplyTransforms, N4BiasFieldCorrection - from niworkflows.interfaces.reportlets.registration import ( - SpatialNormalizationRPT as RobustMNINormalization, + from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( + NonSteadyStateDetector, + TSNR, + ) + from pydra.tasks.afni.auto import TStat + from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON + from pydra.tasks.niworkflows.interfaces.header import SanitizeImage + from pydra.tasks.mriqc.interfaces.functional import SelectEcho + + from pydra.tasks.mriqc.utils.misc import _flatten_list as flatten + + if exec_work_dir is None: + exec_work_dir = Path.cwd() + + workflow = Workflow( + name=name, + input_spec={"in_file": ty.Any, "metadata": dict}, + output_spec={ + "ema_report": ty.Any, + "func_report_wf_background_report": ty.Any, + "func_report_wf_carpet_report": ty.Any, + "func_report_wf_mean_report": ty.Any, + "func_report_wf_spikes_report": ty.Any, + "func_report_wf_stdev_report": ty.Any, + "func_report_wf_zoomed_report": ty.Any, + "iqmswf_dvars": ty.Any, + "iqmswf_fft": ty.Any, + "iqmswf_out_file": ty.Any, + "iqmswf_outliers": ty.Any, + "iqmswf_spikes": ty.Any, + "iqmswf_spikes_num": ty.Any, + }, + in_file=in_file, + metadata=metadata, ) - from templateflow.api import get as get_template - # Get settings - testing = config.execution.debug - n_procs = config.nipype.nprocs - ants_nthreads = config.nipype.omp_nthreads + mem_gb = wf_biggest_file_gb + - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface(fields=["epi_mean", "epi_mask"]), - name="inputnode", - ) - outputnode = pe.Node( - niu.IdentityInterface(fields=["epi_mni", "epi_parc", "report"]), - name="outputnode", - ) + + # Define workflow, inputs and outputs + # 0. 
Get data, put it in RAS orientation
 
-    n4itk = pe.Node(N4BiasFieldCorrection(dimension=3, copy_header=True), name="SharpenEPI")
 
+    # Get metadata
 
-    norm = pe.Node(
-        RobustMNINormalization(
-            explicit_masking=False,
-            flavor="testing" if testing else "precise",
-            float=config.execution.ants_float,
-            generate_report=True,
-            moving="boldref",
-            num_threads=ants_nthreads,
-            reference="boldref",
-            template=config.workflow.template_id,
-        ),
-        name="EPI2MNI",
-        num_threads=n_procs,
-        mem_gb=3,
+    workflow.add(
+        SelectEcho(
+            in_files=workflow.lzin.in_file,
+            metadata=workflow.lzin.metadata,
+            name="pick_echo",
+        )
     )
-
-    if config.workflow.species.lower() == "human":
-        norm.inputs.reference_image = str(
-            get_template(config.workflow.template_id, resolution=2, suffix="boldref")
+    workflow.add(
+        NonSteadyStateDetector(
+            in_file=workflow.pick_echo.lzout.out_file, name="non_steady_state_detector"
         )
-        norm.inputs.reference_mask = str(
-            get_template(
-                config.workflow.template_id,
-                resolution=2,
-                desc="brain",
-                suffix="mask",
-            )
+    )
+    workflow.add(
+        SanitizeImage(
+            max_32bit=exec_float32,
+            in_file=workflow.lzin.in_file,
+            n_volumes_to_discard=workflow.non_steady_state_detector.lzout.n_volumes_to_discard,
+            name="sanitize",
        )
-        # adapt some population-specific settings
-    else:
-        from nirodents.workflows.brainextraction import _bspline_grid
-
-        n4itk.inputs.shrink_factor = 1
-        n4itk.inputs.n_iterations = [50] * 4
-        norm.inputs.reference_image = str(get_template(config.workflow.template_id, suffix="T2w"))
-        norm.inputs.reference_mask = str(
-            get_template(
-                config.workflow.template_id,
-                desc="brain",
-                suffix="mask",
-            )[0]
+    )
+    # Workflow --------------------------------------------------------
+    # 1. HMC: head motion correct
+    workflow.add(
+        hmc(
+            omp_nthreads=nipype_omp_nthreads,
+            wf_biggest_file_gb=wf_biggest_file_gb,
+            wf_deoblique=wf_deoblique,
+            wf_despike=wf_despike,
+            in_file=workflow.sanitize.lzout.out_file,
+            name="hmcwf",
+        )
     )
-
-        bspline_grid = pe.Node(niu.Function(function=_bspline_grid), name="bspline_grid")
-
-        # fmt: off
-        workflow.connect([
-            (inputnode, bspline_grid, [('epi_mean', 'in_file')]),
-            (bspline_grid, n4itk, [('out', 'args')])
-        ])
-        # fmt: on
-
-    # Warp segmentation into EPI space
-    invt = pe.Node(
-        ApplyTransforms(
-            float=True,
-            dimension=3,
-            default_value=0,
-            interpolation="MultiLabel",
-        ),
-        name="ResampleSegmentation",
-    )
-
-    if config.workflow.species.lower() == "human":
-        invt.inputs.input_image = str(
-            get_template(
-                config.workflow.template_id,
-                resolution=1,
-                desc="carpet",
-                suffix="dseg",
-            )
+    # Set HMC settings
+    workflow.hmcwf.inputs.fd_radius = wf_fd_radius
+    # 2. Compute mean fmri
+    workflow.add(
+        TStat(
+            options="-mean",
+            outputtype="NIFTI_GZ",
+            in_file=workflow.hmcwf.lzout.out_file,
+            name="mean",
        )
-    else:
-        invt.inputs.input_image = str(
-            get_template(
-                config.workflow.template_id,
-                suffix="dseg",
-            )[-1]
+    )
+    # Compute TSNR using nipype implementation
+    workflow.add(TSNR(in_file=workflow.hmcwf.lzout.out_file, name="tsnr"))
+    # EPI to MNI registration
+    workflow.add(
+        epi_mni_align(
+            nipype_omp_nthreads=nipype_omp_nthreads,
+            wf_species=wf_species,
+            exec_ants_float=exec_ants_float,
+            exec_debug=exec_debug,
+            nipype_nprocs=nipype_nprocs,
+            wf_template_id=wf_template_id,
+            name="ema",
+        )
+    )
-    # fmt: off
-    workflow.connect([
-        (inputnode, invt, [("epi_mean", "reference_image")]),
-        (inputnode, n4itk, [("epi_mean", "input_image")]),
-        (n4itk, norm, [("output_image", "moving_image")]),
-        (norm, invt, [
-            ("inverse_composite_transform", "transforms")]),
-        (invt, outputnode, [("output_image", "epi_parc")]),
-        (norm, outputnode, [("warped_image", "epi_mni"),
-                            ("out_report", "report")]),
-    ])
-    # fmt: on
-    if config.workflow.species.lower() == "human":
-        workflow.connect([(inputnode, norm, [("epi_mask", "moving_mask")])])
-
-    return workflow
+    # 7. Compute IQMs
+    workflow.add(
+        compute_iqms(
+            wf_biggest_file_gb=wf_biggest_file_gb,
+            wf_fft_spikes_detector=wf_fft_spikes_detector,
+            in_tsnr=workflow.tsnr.lzout.tsnr_file,
+            hmc_fd=workflow.hmcwf.lzout.out_fd,
+            hmc_epi=workflow.hmcwf.lzout.out_file,
+            epi_mean=workflow.mean.lzout.out_file,
+            in_ras=workflow.sanitize.lzout.out_file,
+            name="iqmswf",
+        )
+    )
+    # Reports
+    workflow.add(
+        init_func_report_wf(
+            exec_verbose_reports=exec_verbose_reports,
+            wf_biggest_file_gb=wf_biggest_file_gb,
+            exec_work_dir=exec_work_dir,
+            wf_species=wf_species,
+            wf_fft_spikes_detector=wf_fft_spikes_detector,
+            meta_sidecar=workflow.lzin.metadata,
+            epi_parc=workflow.ema.lzout.epi_parc,
+            hmc_epi=workflow.hmcwf.lzout.out_file,
+            hmc_fd=workflow.hmcwf.lzout.out_fd,
+            in_stddev=workflow.tsnr.lzout.stddev_file,
+            epi_mean=workflow.mean.lzout.out_file,
+            in_ras=workflow.sanitize.lzout.out_file,
+            name="func_report_wf",
+        )
+    )
     # fmt: off
 
+    @pydra.mark.task
+    def mean_out_file_to_ema_epi_mean_callable(in_: ty.Any) -> ty.Any:
+        return _pop(in_)
 
+    workflow.add(mean_out_file_to_ema_epi_mean_callable(in_=workflow.mean.lzout.out_file, name="mean_out_file_to_ema_epi_mean_callable"))
 
+    workflow.ema.inputs.epi_mean = workflow.mean_out_file_to_ema_epi_mean_callable.lzout.out
 
     # fmt: on
+    # NOTE: the spikes/fft outputs are exported unconditionally below, so no
+    # extra wiring is needed here when wf_fft_spikes_detector is enabled.
+    # population specific changes to brain masking
+    if wf_species == "human":
+        from pydra.tasks.mriqc.workflows.shared import (
+            synthstrip_wf as fmri_bmsk_workflow,
+        )
 
+        workflow.add(
+            fmri_bmsk_workflow(omp_nthreads=nipype_omp_nthreads, name="skullstrip_epi")
+        )
+        # fmt: off
 
+        @pydra.mark.task
+        def mean_out_file_to_skullstrip_epi_in_files_callable(in_: ty.Any) -> ty.Any:
+            return _pop(in_)
 
+        workflow.add(mean_out_file_to_skullstrip_epi_in_files_callable(in_=workflow.mean.lzout.out_file, name="mean_out_file_to_skullstrip_epi_in_files_callable"))
+        workflow.skullstrip_epi.inputs.in_files = workflow.mean_out_file_to_skullstrip_epi_in_files_callable.lzout.out
+        workflow.ema.inputs.epi_mask = workflow.skullstrip_epi.lzout.out_mask
+        workflow.iqmswf.inputs.brainmask = workflow.skullstrip_epi.lzout.out_mask
+        workflow.func_report_wf.inputs.brainmask = workflow.skullstrip_epi.lzout.out_mask
+        # fmt: on
     else:
+        from pydra.tasks.mriqc.workflows.anatomical.base import _binarize
+
+        workflow.add(
+            FunctionTask(
+                func=_binarize,
+                input_spec=SpecInfo(
+                    name="FunctionIn",
+                    bases=(BaseSpec,),
+                    fields=[("in_file", ty.Any), ("threshold", ty.Any)],
+                ),
+                output_spec=SpecInfo(
+                    name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)]
+                ),
+                name="binarise_labels",
+            )
+        )
+        # fmt: off
+        workflow.binarise_labels.inputs.in_file = workflow.ema.lzout.epi_parc
+        workflow.iqmswf.inputs.brainmask = workflow.binarise_labels.lzout.out_file
+        workflow.func_report_wf.inputs.brainmask = workflow.binarise_labels.lzout.out_file
+        # fmt: on
+    # Upload metrics
+    if not exec_no_sub:
+        # NOTE: the UploadIQMs step of the nipype implementation has not been
+        # ported to Pydra yet; this branch is currently a placeholder.
+        pass
 
+    workflow.set_output([("ema_report", workflow.ema.lzout.report)])
+    workflow.set_output([("iqmswf_outliers", workflow.iqmswf.lzout.outliers)])
+    workflow.set_output([("iqmswf_spikes_num", workflow.iqmswf.lzout.spikes_num)])
+    workflow.set_output([("iqmswf_fft", workflow.iqmswf.lzout.fft)])
+    workflow.set_output([("iqmswf_spikes", workflow.iqmswf.lzout.spikes)])
+    workflow.set_output([("iqmswf_out_file", workflow.iqmswf.lzout.out_file)])
+    workflow.set_output([("iqmswf_dvars", workflow.iqmswf.lzout.dvars)])
+    workflow.set_output(
+        [("func_report_wf_carpet_report", workflow.func_report_wf.lzout.carpet_report)]
+    )
+    workflow.set_output(
+        [("func_report_wf_stdev_report", workflow.func_report_wf.lzout.stdev_report)]
+    )
+    workflow.set_output(
+        [("func_report_wf_zoomed_report", workflow.func_report_wf.lzout.zoomed_report)]
+    )
+    workflow.set_output(
+        [("func_report_wf_spikes_report", workflow.func_report_wf.lzout.spikes_report)]
+    )
+    workflow.set_output(
+        [("func_report_wf_mean_report", workflow.func_report_wf.lzout.mean_report)]
+    )
+    workflow.set_output(
+        [
+            (
+                "func_report_wf_background_report",
+                workflow.func_report_wf.lzout.background_report,
+            )
+        ]
+    )
 
+    return workflow
diff --git a/pydra/tasks/mriqc/workflows/functional/output.py b/pydra/tasks/mriqc/workflows/functional/output.py
index 72a1d23..81bb2a8 100644
--- a/pydra/tasks/mriqc/workflows/functional/output.py
+++ b/pydra/tasks/mriqc/workflows/functional/output.py
@@ -1,34 +1,36 @@
-# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
-# vi: set ft=python sts=4 ts=4 sw=4 et:
-#
-# Copyright 2021 The NiPreps Developers
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License. 
-# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -"""Writing out functional reportlets.""" -from mriqc import config -from mriqc.interfaces import DerivativesDataSink - -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu - - -def init_func_report_wf(name="func_report_wf"): +import attrs +import logging +from pathlib import Path +from pydra.engine import Workflow +from pydra.engine.specs import BaseSpec, SpecInfo +from pydra.engine.task import FunctionTask +import typing as ty + + +logger = logging.getLogger(__name__) + + +def init_func_report_wf( + brainmask=attrs.NOTHING, + epi_mean=attrs.NOTHING, + epi_parc=attrs.NOTHING, + exec_verbose_reports=False, + exec_work_dir=None, + fd_thres=attrs.NOTHING, + hmc_epi=attrs.NOTHING, + hmc_fd=attrs.NOTHING, + in_dvars=attrs.NOTHING, + in_fft=attrs.NOTHING, + in_ras=attrs.NOTHING, + in_spikes=attrs.NOTHING, + in_stddev=attrs.NOTHING, + meta_sidecar=attrs.NOTHING, + name="func_report_wf", + outliers=attrs.NOTHING, + wf_biggest_file_gb=1, + wf_fft_spikes_detector=False, + wf_species="human", +): """ Write out individual reportlets. @@ -40,310 +42,229 @@ def init_func_report_wf(name="func_report_wf"): wf = init_func_report_wf() """ - from nireports.interfaces import FMRISummary - from niworkflows.interfaces.morphology import BinaryDilation, BinarySubtraction - - from nireports.interfaces import PlotMosaic, PlotSpikes - from mriqc.interfaces.functional import Spikes + from pydra.tasks.nireports.interfaces import FMRISummary, PlotMosaic, PlotSpikes + from pydra.tasks.niworkflows.interfaces.morphology import ( + BinaryDilation, + BinarySubtraction, + ) + from pydra.tasks.mriqc.interfaces.functional import Spikes # from mriqc.interfaces.reports import IndividualReport - - verbose = config.execution.verbose_reports - mem_gb = config.workflow.biggest_file_gb - reportlets_dir = config.execution.work_dir / "reportlets" - - workflow = pe.Workflow(name=name) - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - "in_ras", - "hmc_epi", - "epi_mean", - "brainmask", - "hmc_fd", - "fd_thres", - "epi_parc", - "in_dvars", - "in_stddev", - "outliers", - "in_spikes", - "in_fft", - "in_iqms", - "mni_report", - "ica_report", - "meta_sidecar", - "name_source", - ] - ), - name="inputnode", + if exec_work_dir is None: + exec_work_dir = Path.cwd() + + verbose = exec_verbose_reports + mem_gb = wf_biggest_file_gb + reportlets_dir = exec_work_dir / "reportlets" + workflow = Workflow( + name=name, + input_spec={ + "brainmask": ty.Any, + "epi_mean": ty.Any, + "epi_parc": ty.Any, + "fd_thres": ty.Any, + "hmc_epi": ty.Any, + "hmc_fd": ty.Any, + "in_dvars": ty.Any, + "in_fft": ty.Any, + "in_ras": ty.Any, + "in_spikes": ty.Any, + "in_stddev": ty.Any, + "meta_sidecar": ty.Any, + "outliers": ty.Any, + }, + output_spec={ + "background_report": ty.Any, + "carpet_report": ty.Any, + "mean_report": ty.Any, + "spikes_report": ty.Any, + "stdev_report": ty.Any, + "zoomed_report": ty.Any, + }, + brainmask=brainmask, + epi_mean=epi_mean, + epi_parc=epi_parc, + fd_thres=fd_thres, + hmc_epi=hmc_epi, + hmc_fd=hmc_fd, + in_dvars=in_dvars, + in_fft=in_fft, + in_ras=in_ras, + in_spikes=in_spikes, + in_stddev=in_stddev, + meta_sidecar=meta_sidecar, + outliers=outliers, ) # Set FD threshold - inputnode.inputs.fd_thres = config.workflow.fd_thres - spmask = pe.MapNode( - niu.Function( - input_names=["in_file", "in_mask"], - output_names=["out_file", 
"out_plot"], - function=spikes_mask, - ), - name="SpikesMask", - mem_gb=mem_gb * 3.5, - iterfield=["in_file"], + workflow.add( + FunctionTask( + func=spikes_mask, + input_spec=SpecInfo( + name="FunctionIn", + bases=(BaseSpec,), + fields=[("in_file", ty.Any), ("in_mask", ty.Any)], + ), + output_spec=SpecInfo( + name="FunctionOut", + bases=(BaseSpec,), + fields=[("out_file", ty.Any), ("out_plot", ty.Any)], + ), + in_file=workflow.lzin.in_ras, + name="spmask", + ) ) - - spikes_bg = pe.MapNode( - Spikes(no_zscore=True, detrend=False), - name="SpikesFinderBgMask", - mem_gb=mem_gb * 2.5, - iterfield=["in_file", "in_mask"], + workflow.add( + Spikes( + detrend=False, + no_zscore=True, + in_file=workflow.lzin.in_ras, + in_mask=workflow.spmask.lzout.out_file, + name="spikes_bg", + ) ) - # Generate crown mask # Create the crown mask - dilated_mask = pe.Node(BinaryDilation(), name="dilated_mask") - subtract_mask = pe.Node(BinarySubtraction(), name="subtract_mask") - parcels = pe.Node(niu.Function(function=_carpet_parcellation), name="parcels") - - bigplot = pe.MapNode( - FMRISummary(), - name="BigPlot", - mem_gb=mem_gb * 3.5, - iterfield=["in_func", "dvars", "outliers", "in_spikes_bg"], + workflow.add(BinaryDilation(in_mask=workflow.lzin.brainmask, name="dilated_mask")) + workflow.add( + BinarySubtraction( + in_base=workflow.dilated_mask.lzout.out_mask, + in_subtract=workflow.lzin.brainmask, + name="subtract_mask", + ) + ) + workflow.add( + FunctionTask( + func=_carpet_parcellation, + crown_mask=workflow.subtract_mask.lzout.out_mask, + segmentation=workflow.lzin.epi_parc, + name="parcels", + ) + ) + workflow.add( + FMRISummary( + dvars=workflow.lzin.in_dvars, + fd=workflow.lzin.hmc_fd, + fd_thres=workflow.lzin.fd_thres, + in_func=workflow.lzin.hmc_epi, + in_segm=workflow.parcels.lzout.out, + in_spikes_bg=workflow.spikes_bg.lzout.out_tsz, + outliers=workflow.lzin.outliers, + tr=workflow.lzin.meta_sidecar, + name="bigplot", + ) ) - # fmt: off - workflow.connect([ - (inputnode, spikes_bg, [("in_ras", "in_file")]), - (inputnode, spmask, [("in_ras", "in_file")]), - (inputnode, bigplot, [("hmc_epi", "in_func"), - ("hmc_fd", "fd"), - ("fd_thres", "fd_thres"), - ("in_dvars", "dvars"), - ("outliers", "outliers"), - (("meta_sidecar", _get_tr), "tr")]), - (inputnode, parcels, [("epi_parc", "segmentation")]), - (inputnode, dilated_mask, [("brainmask", "in_mask")]), - (inputnode, subtract_mask, [("brainmask", "in_subtract")]), - (spmask, spikes_bg, [("out_file", "in_mask")]), - (dilated_mask, subtract_mask, [("out_mask", "in_base")]), - (subtract_mask, parcels, [("out_mask", "crown_mask")]), - (parcels, bigplot, [("out", "in_segm")]), - (spikes_bg, bigplot, [("out_tsz", "in_spikes_bg")]), - ]) + workflow.bigplot.inputs.tr = workflow.lzin.meta_sidecar # fmt: on - - mosaic_mean = pe.MapNode( + workflow.add( PlotMosaic( - out_file="plot_func_mean_mosaic1.svg", cmap="Greys_r", - ), - name="PlotMosaicMean", - iterfield=["in_file"], + out_file="plot_func_mean_mosaic1.svg", + in_file=workflow.lzin.epi_mean, + name="mosaic_mean", + ) ) - - mosaic_stddev = pe.MapNode( + workflow.add( PlotMosaic( - out_file="plot_func_stddev_mosaic2_stddev.svg", cmap="viridis", - ), - name="PlotMosaicSD", - iterfield=["in_file"], + out_file="plot_func_stddev_mosaic2_stddev.svg", + in_file=workflow.lzin.in_stddev, + name="mosaic_stddev", + ) ) - - mosaic_zoom = pe.MapNode( + workflow.add( PlotMosaic( cmap="Greys_r", - ), - name="PlotMosaicZoomed", - iterfield=["in_file"], + bbox_mask_file=workflow.lzin.brainmask, + 
in_file=workflow.lzin.epi_mean, + name="mosaic_zoom", + ) ) - - mosaic_noise = pe.MapNode( + workflow.add( PlotMosaic( - only_noise=True, cmap="viridis_r", - ), - name="PlotMosaicNoise", - iterfield=["in_file"], - ) - - if config.workflow.species.lower() in ("rat", "mouse"): - mosaic_mean.inputs.view = ["coronal", "axial"] - mosaic_stddev.inputs.view = ["coronal", "axial"] - mosaic_zoom.inputs.view = ["coronal", "axial"] - mosaic_noise.inputs.view = ["coronal", "axial"] - - ds_report_mean = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="mean", - datatype="figures", - dismiss_entities=("part",), - ), - name="ds_report_mean", - run_without_submitting=True, - iterfield=["in_file", "source_file"], - ) - - ds_report_stdev = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="stdev", - datatype="figures", - dismiss_entities=("part",), - ), - name="ds_report_stdev", - run_without_submitting=True, - iterfield=["in_file", "source_file"], - ) - - ds_report_background = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="background", - datatype="figures", - dismiss_entities=("part",), - ), - name="ds_report_background", - run_without_submitting=True, - iterfield=["in_file", "source_file"], - ) - - ds_report_zoomed = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="zoomed", - datatype="figures", - dismiss_entities=("part",), - ), - name="ds_report_zoomed", - run_without_submitting=True, - iterfield=["in_file", "source_file"], - ) - - ds_report_carpet = pe.MapNode( - DerivativesDataSink( - base_directory=reportlets_dir, - desc="carpet", - datatype="figures", - dismiss_entities=("part",), - ), - name="ds_report_carpet", - run_without_submitting=True, - iterfield=["in_file", "source_file"], + only_noise=True, + in_file=workflow.lzin.epi_mean, + name="mosaic_noise", + ) ) + if wf_species.lower() in ("rat", "mouse"): + workflow.mosaic_mean.inputs.view = ["coronal", "axial"] + workflow.mosaic_stddev.inputs.view = ["coronal", "axial"] + workflow.mosaic_zoom.inputs.view = ["coronal", "axial"] + workflow.mosaic_noise.inputs.view = ["coronal", "axial"] # fmt: off - workflow.connect([ - # (inputnode, rnode, [("in_iqms", "in_iqms")]), - (inputnode, mosaic_mean, [("epi_mean", "in_file")]), - (inputnode, mosaic_stddev, [("in_stddev", "in_file")]), - (inputnode, ds_report_mean, [("name_source", "source_file")]), - (inputnode, ds_report_stdev, [("name_source", "source_file")]), - (inputnode, ds_report_background, [("name_source", "source_file")]), - (inputnode, ds_report_zoomed, [("name_source", "source_file")]), - (inputnode, ds_report_carpet, [("name_source", "source_file")]), - (inputnode, mosaic_zoom, [("epi_mean", "in_file"), - ("brainmask", "bbox_mask_file")]), - (inputnode, mosaic_noise, [("epi_mean", "in_file")]), - (mosaic_mean, ds_report_mean, [("out_file", "in_file")]), - (mosaic_stddev, ds_report_stdev, [("out_file", "in_file")]), - (mosaic_noise, ds_report_background, [("out_file", "in_file")]), - (mosaic_zoom, ds_report_zoomed, [("out_file", "in_file")]), - (bigplot, ds_report_carpet, [("out_file", "in_file")]), - ]) + workflow.set_output([('mean_report', workflow.mosaic_mean.lzout.out_file)]) + workflow.set_output([('stdev_report', workflow.mosaic_stddev.lzout.out_file)]) + workflow.set_output([('background_report', workflow.mosaic_noise.lzout.out_file)]) + workflow.set_output([('zoomed_report', workflow.mosaic_zoom.lzout.out_file)]) + workflow.set_output([('carpet_report', 
workflow.bigplot.lzout.out_file)])
     # fmt: on
-
-    if config.workflow.fft_spikes_detector:
-        mosaic_spikes = pe.Node(
+    if True:  # wf_fft_spikes_detector: - disabled so output is always created
+        workflow.add(
             PlotSpikes(
-                out_file="plot_spikes.svg",
                 cmap="viridis",
+                out_file="plot_spikes.svg",
                 title="High-Frequency spikes",
-            ),
-            name="PlotSpikes",
+                name="mosaic_spikes",
+            )
         )
-
-        ds_report_spikes = pe.Node(
-            DerivativesDataSink(
-                base_directory=reportlets_dir,
-                desc="spikes",
-                datatype="figures",
-                dismiss_entities=("part",),
-            ),
-            name="ds_report_spikes",
-            run_without_submitting=True,
-        )
-
         # fmt: off
-        workflow.connect([
-            (inputnode, ds_report_spikes, [("name_source", "source_file")]),
-            (inputnode, mosaic_spikes, [("in_ras", "in_file"),
-                                        ("in_spikes", "in_spikes"),
-                                        ("in_fft", "in_fft")]),
-            (mosaic_spikes, ds_report_spikes, [("out_file", "in_file")]),
-        ])
+        workflow.mosaic_spikes.inputs.in_file = workflow.lzin.in_ras
+        workflow.mosaic_spikes.inputs.in_spikes = workflow.lzin.in_spikes
+        workflow.mosaic_spikes.inputs.in_fft = workflow.lzin.in_fft
+        workflow.set_output([('spikes_report', workflow.mosaic_spikes.lzout.out_file)])
         # fmt: on
-
     if not verbose:
         return workflow
-    # Verbose-reporting goes here
-    from niworkflows.utils.connections import pop_file as _pop
-    from nireports.interfaces import PlotContours
-
-    plot_bmask = pe.Node(
-        PlotContours(
-            display_mode="y" if config.workflow.species.lower() in ("rat", "mouse") else "z",
-            levels=[0.5],
-            colors=["r"],
-            cut_coords=10,
-            out_file="bmask",
-        ),
-        name="PlotBrainmask",
-    )
-
-    ds_report_bmask = pe.Node(
-        DerivativesDataSink(
-            base_directory=reportlets_dir,
-            desc="brainmask",
-            datatype="figures",
-            dismiss_entities=("part", "echo"),
-        ),
-        name="ds_report_bmask",
-        run_without_submitting=True,
-    )
-
-    ds_report_norm = pe.Node(
-        DerivativesDataSink(
-            base_directory=reportlets_dir,
-            desc="norm",
-            datatype="figures",
-            dismiss_entities=("part", "echo"),
-        ),
-        name="ds_report_norm",
-        run_without_submitting=True,
-    )
+    # NOTE: the verbose reportlets of the nipype implementation (brain-mask
+    # contours and normalization report) have not been ported to Pydra yet.
 
     # fmt: off
-    workflow.connect([
-        (inputnode, ds_report_norm, [("mni_report", "in_file"),
-                                     ("name_source", "source_file")]),
-        (inputnode, plot_bmask, [(("epi_mean", _pop), "in_file"),
-                                 ("brainmask", "in_contours")]),
-        (inputnode, ds_report_bmask, [("name_source", "source_file")]),
-        (plot_bmask, ds_report_bmask, [(("out_file", _pop), "in_file")]),
-    ])
     # fmt: on
 
     return workflow
 
 
+def _carpet_parcellation(segmentation, crown_mask):
+    """Generate the union of two masks."""
+    from pathlib import Path
+    import nibabel as nb
+    import numpy as np
+
+    img = nb.load(segmentation)
+    lut = np.zeros((256,), dtype="uint8")
+    lut[100:201] = 1  # Ctx GM
+    lut[30:99] = 2  # dGM
+    lut[1:11] = 3  # WM+CSF
+    lut[255] = 4  # Cerebellum
+    # Apply lookup table
+    seg = lut[np.asanyarray(img.dataobj, dtype="uint16")]
+    seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5
+    outimg = img.__class__(seg.astype("uint8"), img.affine, img.header)
+    outimg.set_data_dtype("uint8")
+    out_file = Path("segments.nii.gz").absolute()
+    outimg.to_filename(out_file)
+    return str(out_file)
+
+
+def _get_tr(meta_dict):
+
+    if isinstance(meta_dict, (list, tuple)):
+        meta_dict = meta_dict[0]
+    return meta_dict.get("RepetitionTime", None)
+
+
 def spikes_mask(in_file, in_mask=None, out_file=None):
     """Calculate a mask in which to check for :abbr:`EM (electromagnetic)` spikes."""
     import 
os.path as op - import nibabel as nb import numpy as np from nilearn.image import mean_img @@ -357,10 +278,8 @@ def spikes_mask(in_file, in_mask=None, out_file=None): ext = ext2 + ext out_file = op.abspath(f"{fname}_spmask{ext}") out_plot = op.abspath(f"{fname}_spmask.pdf") - in_4d_nii = nb.load(in_file) orientation = nb.aff2axcodes(in_4d_nii.affine) - if in_mask: mask_data = np.asanyarray(nb.load(in_mask).dataobj) a = np.where(mask_data != 0) @@ -370,60 +289,27 @@ def spikes_mask(in_file, in_mask=None, out_file=None): np.max(a[2]) - np.min(a[2]), ) longest_axis = np.argmax(bbox) - # Input here is a binarized and intersected mask data from previous section - dil_mask = nd.binary_dilation(mask_data, iterations=int(mask_data.shape[longest_axis] / 9)) - + dil_mask = nd.binary_dilation( + mask_data, iterations=int(mask_data.shape[longest_axis] / 9) + ) rep = list(mask_data.shape) rep[longest_axis] = -1 new_mask_2d = dil_mask.max(axis=longest_axis).reshape(rep) - rep = [1, 1, 1] rep[longest_axis] = mask_data.shape[longest_axis] new_mask_3d = np.logical_not(np.tile(new_mask_2d, rep)) else: new_mask_3d = np.zeros(in_4d_nii.shape[:3]) == 1 - if orientation[0] in ("L", "R"): new_mask_3d[0:2, :, :] = True new_mask_3d[-3:-1, :, :] = True else: new_mask_3d[:, 0:2, :] = True new_mask_3d[:, -3:-1, :] = True - - mask_nii = nb.Nifti1Image(new_mask_3d.astype(np.uint8), in_4d_nii.affine, in_4d_nii.header) + mask_nii = nb.Nifti1Image( + new_mask_3d.astype(np.uint8), in_4d_nii.affine, in_4d_nii.header + ) mask_nii.to_filename(out_file) - plot_roi(mask_nii, mean_img(in_4d_nii), output_file=out_plot) return out_file, out_plot - - -def _carpet_parcellation(segmentation, crown_mask): - """Generate the union of two masks.""" - from pathlib import Path - import numpy as np - import nibabel as nb - - img = nb.load(segmentation) - - lut = np.zeros((256,), dtype="uint8") - lut[100:201] = 1 # Ctx GM - lut[30:99] = 2 # dGM - lut[1:11] = 3 # WM+CSF - lut[255] = 4 # Cerebellum - # Apply lookup table - seg = lut[np.asanyarray(img.dataobj, dtype="uint16")] - seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5 - - outimg = img.__class__(seg.astype("uint8"), img.affine, img.header) - outimg.set_data_dtype("uint8") - out_file = Path("segments.nii.gz").absolute() - outimg.to_filename(out_file) - return str(out_file) - - -def _get_tr(meta_dict): - if isinstance(meta_dict, (list, tuple)): - meta_dict = meta_dict[0] - - return meta_dict.get("RepetitionTime", None) diff --git a/pydra/tasks/mriqc/workflows/shared.py b/pydra/tasks/mriqc/workflows/shared.py index 1144569..28367d2 100644 --- a/pydra/tasks/mriqc/workflows/shared.py +++ b/pydra/tasks/mriqc/workflows/shared.py @@ -1,94 +1,77 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2023 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
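# Editor's sketch (not part of the patch): the fallback branch of spikes_mask()
# above, shown on a toy volume. When no brain mask is supplied, only two-voxel
# slabs at the in-plane edges are treated as background. The shape and the
# orientation tuple here are hypothetical stand-ins for real nibabel outputs.
import numpy as np

shape = (64, 64, 32)           # toy (x, y, z) grid
orientation = ("R", "A", "S")  # placeholder for nb.aff2axcodes(img.affine)
new_mask_3d = np.zeros(shape, dtype=bool)
if orientation[0] in ("L", "R"):   # first axis is left-right: mask its edges
    new_mask_3d[0:2, :, :] = True
    new_mask_3d[-3:-1, :, :] = True
else:                              # otherwise mask the second axis' edges
    new_mask_3d[:, 0:2, :] = True
    new_mask_3d[:, -3:-1, :] = True
print(new_mask_3d.sum())  # 4 slabs of 64 x 32 voxels -> 8192 background voxels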
-# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -"""Shared workflows.""" -# from nipype.interfaces import utility as niu -# from nipype.pipeline import engine as pe +import attrs +import logging +from pydra.engine import Workflow +import typing as ty -import pydra -def synthstrip_wf(name="synthstrip_wf", omp_nthreads=None): - """Create a brain-extraction workflow using SynthStrip.""" - from pydra.tasks.ants import N4BiasFieldCorrection - from pydra.tasks.nibabel import IntensityClip, ApplyMask - from pydra.tasks.synthstrip import SynthStrip +logger = logging.getLogger(__name__) + - wf = pydra.Workflow(name=name, input_spec=["in_files"]) +def synthstrip_wf(in_files=attrs.NOTHING, name="synthstrip_wf", omp_nthreads=None): + """Create a brain-extraction workflow using SynthStrip.""" + from pydra.tasks.ants.auto import N4BiasFieldCorrection + from pydra.tasks.niworkflows.interfaces.nibabel import ApplyMask, IntensityClip + from pydra.tasks.mriqc.interfaces.synthstrip import SynthStrip - wf.add( - IntensityClip(p_min=10, p_max=99.9, in_file=wf.lzin.in_files), name="pre_clip" + # truncate target intensity for N4 correction + workflow = Workflow( + name=name, + input_spec={"in_files": ty.Any}, + output_spec={ + "bias_image": ty.Any, + "out_brain": ty.Any, + "out_corrected": ty.Any, + "out_mask": ty.Any, + }, + in_files=in_files, ) - wf.add( + workflow.add( + IntensityClip( + p_max=99.9, p_min=10, in_file=workflow.lzin.in_files, name="pre_clip" + ) + ) + workflow.add( N4BiasFieldCorrection( + copy_header=True, dimension=3, num_threads=omp_nthreads, rescale_intensities=True, - copy_header=True, - input_image=wf.pre_clip.lzout.out_file, - ), - name="pre_n4", + input_image=workflow.pre_clip.lzout.out_file, + name="pre_n4", + ) ) - - post_n4 = pe.Node( + workflow.add( N4BiasFieldCorrection( + copy_header=True, dimension=3, + n_iterations=[50] * 4, + num_threads=omp_nthreads, save_bias=True, + input_image=workflow.pre_clip.lzout.out_file, + name="post_n4", + ) + ) + workflow.add( + SynthStrip( num_threads=omp_nthreads, - n_iterations=[50] * 4, - copy_header=True, - ), - name="post_n4", + in_file=workflow.pre_n4.lzout.output_image, + name="synthstrip", + ) ) - - synthstrip = pe.Node( - SynthStrip(num_threads=omp_nthreads), - name="synthstrip", - num_threads=omp_nthreads, + workflow.add( + ApplyMask( + in_file=workflow.post_n4.lzout.output_image, + in_mask=workflow.synthstrip.lzout.out_mask, + name="final_masked", + ) ) - - final_masked = pe.Node(ApplyMask(), name="final_masked") - - workflow = pe.Workflow(name=name) # fmt: off - workflow.connect([ - (inputnode, pre_clip, [("in_files", "in_file")]), - (pre_clip, pre_n4, [("out_file", "input_image")]), - (pre_n4, synthstrip, [("output_image", "in_file")]), - (synthstrip, post_n4, [("out_mask", "weight_image")]), - (synthstrip, final_masked, [("out_mask", "in_mask")]), - (pre_clip, post_n4, [("out_file", "input_image")]), - (post_n4, final_masked, [("output_image", "in_file")]), - (final_masked, outputnode, [("out_file", "out_brain")]), - (post_n4, outputnode, [("bias_image", "bias_image")]), - (synthstrip, outputnode, [("out_mask", "out_mask")]), - (post_n4, outputnode, [("output_image", "out_corrected")]), - ]) - - wf.set_output( - ("out_corrected",), - ("out_brain", ), - ("bias_image", ), - ("out_mask"), - ) + workflow.post_n4.inputs.weight_image = workflow.synthstrip.lzout.out_mask + workflow.set_output([('out_brain', 
workflow.final_masked.lzout.out_file)]) + workflow.set_output([('bias_image', workflow.post_n4.lzout.bias_image)]) + workflow.set_output([('out_mask', workflow.synthstrip.lzout.out_mask)]) + workflow.set_output([('out_corrected', workflow.post_n4.lzout.output_image)]) # fmt: on - return wf + + return workflow diff --git a/pydra/tasks/mriqc/workflows/utils.py b/pydra/tasks/mriqc/workflows/utils.py index 10c30f1..193cb2f 100644 --- a/pydra/tasks/mriqc/workflows/utils.py +++ b/pydra/tasks/mriqc/workflows/utils.py @@ -1,118 +1,109 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -"""Helper functions for the workflows.""" +import logging +from pathlib import Path + + +logger = logging.getLogger(__name__) def _tofloat(inlist): + if isinstance(inlist, (list, tuple)): return ( - [_tofloat(el) for el in inlist] if len(inlist) > 1 - else _tofloat(inlist[0]) + [_tofloat(el) for el in inlist] if len(inlist) > 1 else _tofloat(inlist[0]) ) return float(inlist) -def fwhm_dict(fwhm): - """Convert a list of FWHM into a dictionary""" - fwhm = [float(f) for f in fwhm] - return { - "fwhm_x": fwhm[0], - "fwhm_y": fwhm[1], - "fwhm_z": fwhm[2], - "fwhm_avg": fwhm[3], - } +def generate_filename(in_file, dirname=None, suffix="", extension=None): + """ + Generate a nipype-like filename. + >>> str(generate_filename("/path/to/input.nii.gz").relative_to(Path.cwd())) + 'input.nii.gz' -def thresh_image(in_file, thres=0.5, out_file=None): - """Thresholds an image""" - import os.path as op - import numpy as np - import nibabel as nb + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", + ... )) + '/other/path/input.nii.gz' - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath(f"{fname}_thresh{ext}") + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", extension="tsv", + ... )) + '/other/path/input.tsv' - im = nb.load(in_file) - data = np.asanyarray(im.dataobj) - data[data < thres] = 0 - data[data > 0] = 1 - nb.Nifti1Image(data, im.affine, im.header).to_filename(out_file) - return out_file + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", extension=".tsv", + ... )) + '/other/path/input.tsv' + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", extension="", + ... )) + '/other/path/input' -def spectrum_mask(size): - """Creates a mask to filter the image of size size""" - import numpy as np - from scipy.ndimage.morphology import distance_transform_edt as distance + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="_mod", + ... 
)) + '/other/path/input_mod' - ftmask = np.ones(size) + >>> str(generate_filename( + ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="mod", + ... )) + '/other/path/input_mod' - # Set zeros on corners - # ftmask[0, 0] = 0 - # ftmask[size[0] - 1, size[1] - 1] = 0 - # ftmask[0, size[1] - 1] = 0 - # ftmask[size[0] - 1, 0] = 0 - ftmask[size[0] // 2, size[1] // 2] = 0 + >>> str(generate_filename( + ... "/path/to/input", dirname="/other/path", extension="tsv", suffix="mod", + ... )) + '/other/path/input_mod.tsv' - # Distance transform - ftmask = distance(ftmask) - ftmask /= ftmask.max() + """ + from pathlib import Path - # Keep this just in case we want to switch to the opposite filter - ftmask *= -1.0 - ftmask += 1.0 + in_file = Path(in_file) + in_ext = "".join(in_file.suffixes) + dirname = Path.cwd() if dirname is None else Path(dirname) + if extension is not None: + extension = ( + extension if not extension or extension.startswith(".") else f".{extension}" + ) + else: + extension = in_ext + stem = in_file.name[: -len(in_ext)] if in_ext else in_file.name + if suffix and not suffix.startswith("_"): + suffix = f"_{suffix}" + return dirname / f"{stem}{suffix}{extension}" - ftmask[ftmask >= 0.4] = 1 - ftmask[ftmask < 1] = 0 - return ftmask + +def get_fwhmx(): + + from pydra.tasks.afni.auto import FWHMx, Info + + fwhm_args = {"combine": True, "detrend": True} + afni_version = Info.version() + if afni_version and afni_version >= (2017, 2, 3): + fwhm_args["args"] = "-ShowMeClassicFWHM" + fwhm_interface = FWHMx(**fwhm_args) + return fwhm_interface def slice_wise_fft(in_file, ftmask=None, spike_thres=3.0, out_prefix=None): """Search for spikes in slices using the 2D FFT""" import os.path as op - import nibabel as nb import numpy as np - from mriqc.workflows.utils import spectrum_mask from scipy.ndimage import binary_erosion, generate_binary_structure from scipy.ndimage.filters import median_filter from statsmodels.robust.scale import mad + from pydra.tasks.mriqc.workflows.utils import spectrum_mask if out_prefix is None: fname, ext = op.splitext(op.basename(in_file)) if ext == ".gz": fname, _ = op.splitext(fname) out_prefix = op.abspath(fname) - func_data = nb.load(in_file).get_fdata() - if ftmask is None: ftmask = spectrum_mask(tuple(func_data.shape[:2])) - fft_data = [] for t in range(func_data.shape[-1]): func_frame = func_data[..., t] @@ -129,119 +120,57 @@ def slice_wise_fft(in_file, ftmask=None, spike_thres=3.0, out_prefix=None): ) fft_slices.append(fftsl) fft_data.append(np.stack(fft_slices, axis=-1)) - # Recompose the 4D FFT timeseries fft_data = np.stack(fft_data, -1) - # Z-score across t, using robust statistics mu = np.median(fft_data, axis=3) sigma = np.stack([mad(fft_data, axis=3)] * fft_data.shape[-1], -1) idxs = np.where(np.abs(sigma) > 1e-4) fft_zscored = fft_data - mu[..., np.newaxis] fft_zscored[idxs] /= sigma[idxs] - # save fft z-scored out_fft = op.abspath(out_prefix + "_zsfft.nii.gz") nii = nb.Nifti1Image(fft_zscored.astype(np.float32), np.eye(4), None) nii.to_filename(out_fft) - # Find peaks spikes_list = [] for t in range(fft_zscored.shape[-1]): fft_frame = fft_zscored[..., t] - for z in range(fft_frame.shape[-1]): sl = fft_frame[..., z] if np.all(sl < spike_thres): continue - # Any zscore over spike_thres will be called a spike sl[sl <= spike_thres] = 0 sl[sl > 0] = 1 - # Erode peaks and see how many survive struct = generate_binary_structure(2, 2) sl = binary_erosion(sl.astype(np.uint8), structure=struct).astype(np.uint8) - if sl.sum() > 10: 
spikes_list.append((t, z)) - out_spikes = op.abspath(out_prefix + "_spikes.tsv") np.savetxt(out_spikes, spikes_list, fmt=b"%d", delimiter=b"\t", header="TR\tZ") - return len(spikes_list), out_spikes, out_fft -def get_fwhmx(): - from nipype.interfaces.afni import FWHMx, Info - - fwhm_args = {"combine": True, "detrend": True} - afni_version = Info.version() - - if afni_version and afni_version >= (2017, 2, 3): - fwhm_args["args"] = "-ShowMeClassicFWHM" - - fwhm_interface = FWHMx(**fwhm_args) - return fwhm_interface - - -def generate_filename(in_file, dirname=None, suffix="", extension=None): - """ - Generate a nipype-like filename. - - >>> str(generate_filename("/path/to/input.nii.gz").relative_to(Path.cwd())) - 'input.nii.gz' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", - ... )) - '/other/path/input.nii.gz' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="tsv", - ... )) - '/other/path/input.tsv' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension=".tsv", - ... )) - '/other/path/input.tsv' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", - ... )) - '/other/path/input' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="_mod", - ... )) - '/other/path/input_mod' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="mod", - ... )) - '/other/path/input_mod' - - >>> str(generate_filename( - ... "/path/to/input", dirname="/other/path", extension="tsv", suffix="mod", - ... )) - '/other/path/input_mod.tsv' - - """ - from pathlib import Path - in_file = Path(in_file) - in_ext = "".join(in_file.suffixes) - - dirname = Path.cwd() if dirname is None else Path(dirname) - - if extension is not None: - extension = extension if not extension or extension.startswith(".") else f".{extension}" - else: - extension = in_ext - - stem = in_file.name[:-len(in_ext)] if in_ext else in_file.name - - if suffix and not suffix.startswith("_"): - suffix = f"_{suffix}" +def spectrum_mask(size): + """Creates a mask to filter the image of size size""" + import numpy as np + from scipy.ndimage.morphology import distance_transform_edt as distance - return dirname / f"{stem}{suffix}{extension}" + ftmask = np.ones(size) + # Set zeros on corners + # ftmask[0, 0] = 0 + # ftmask[size[0] - 1, size[1] - 1] = 0 + # ftmask[0, size[1] - 1] = 0 + # ftmask[size[0] - 1, 0] = 0 + ftmask[size[0] // 2, size[1] // 2] = 0 + # Distance transform + ftmask = distance(ftmask) + ftmask /= ftmask.max() + # Keep this just in case we want to switch to the opposite filter + ftmask *= -1.0 + ftmask += 1.0 + ftmask[ftmask >= 0.4] = 1 + ftmask[ftmask < 1] = 0 + return ftmask From 0d9fc17cfca6cb6326186ce0b86c1d792c5b78bc Mon Sep 17 00:00:00 2001 From: Tom Close Date: Sat, 18 May 2024 19:34:27 +1000 Subject: [PATCH 29/47] removed files that shouldn't have been included in vcs --- pydra/tasks/mriqc/workflows/__init__.py | 41 - .../mriqc/workflows/anatomical/__init__.py | 15 - .../tasks/mriqc/workflows/anatomical/base.py | 848 ------------------ .../mriqc/workflows/anatomical/output.py | 172 ---- .../mriqc/workflows/diffusion/__init__.py | 10 - pydra/tasks/mriqc/workflows/diffusion/base.py | 747 --------------- .../tasks/mriqc/workflows/diffusion/output.py | 323 ------- .../mriqc/workflows/functional/__init__.py | 2 - .../tasks/mriqc/workflows/functional/base.py | 792 
---------------- .../mriqc/workflows/functional/output.py | 315 ------- pydra/tasks/mriqc/workflows/shared.py | 77 -- pydra/tasks/mriqc/workflows/utils.py | 176 ---- 12 files changed, 3518 deletions(-) delete mode 100644 pydra/tasks/mriqc/workflows/__init__.py delete mode 100644 pydra/tasks/mriqc/workflows/anatomical/__init__.py delete mode 100644 pydra/tasks/mriqc/workflows/anatomical/base.py delete mode 100644 pydra/tasks/mriqc/workflows/anatomical/output.py delete mode 100644 pydra/tasks/mriqc/workflows/diffusion/__init__.py delete mode 100644 pydra/tasks/mriqc/workflows/diffusion/base.py delete mode 100644 pydra/tasks/mriqc/workflows/diffusion/output.py delete mode 100644 pydra/tasks/mriqc/workflows/functional/__init__.py delete mode 100644 pydra/tasks/mriqc/workflows/functional/base.py delete mode 100644 pydra/tasks/mriqc/workflows/functional/output.py delete mode 100644 pydra/tasks/mriqc/workflows/shared.py delete mode 100644 pydra/tasks/mriqc/workflows/utils.py diff --git a/pydra/tasks/mriqc/workflows/__init__.py b/pydra/tasks/mriqc/workflows/__init__.py deleted file mode 100644 index 7757ed1..0000000 --- a/pydra/tasks/mriqc/workflows/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -from .anatomical import ( - _binarize, - _enhance, - _get_mod, - _pop, - airmsk_wf, - anat_qc_workflow, - compute_iqms, - gradient_threshold, - headmsk_wf, - image_gradient, - init_anat_report_wf, - init_brain_tissue_segmentation, - spatial_normalization, -) -from .diffusion import ( - _bvals_report, - _carpet_parcellation, - _estimate_sigma, - _filter_metadata, - _get_tr, - _get_wm, - compute_iqms, - dmri_qc_workflow, - epi_mni_align, - hmc_workflow, - init_dwi_report_wf, -) -from .functional import ( - _carpet_parcellation, - _get_tr, - compute_iqms, - epi_mni_align, - fmri_bmsk_workflow, - fmri_qc_workflow, - hmc, - init_func_report_wf, - spikes_mask, -) -from .shared import synthstrip_wf -from .utils import _tofloat, generate_filename, get_fwhmx, slice_wise_fft, spectrum_mask diff --git a/pydra/tasks/mriqc/workflows/anatomical/__init__.py b/pydra/tasks/mriqc/workflows/anatomical/__init__.py deleted file mode 100644 index c38580a..0000000 --- a/pydra/tasks/mriqc/workflows/anatomical/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .base import ( - _binarize, - _enhance, - _get_mod, - _pop, - airmsk_wf, - anat_qc_workflow, - compute_iqms, - gradient_threshold, - headmsk_wf, - image_gradient, - init_brain_tissue_segmentation, - spatial_normalization, -) -from .output import init_anat_report_wf diff --git a/pydra/tasks/mriqc/workflows/anatomical/base.py b/pydra/tasks/mriqc/workflows/anatomical/base.py deleted file mode 100644 index 4fbeb37..0000000 --- a/pydra/tasks/mriqc/workflows/anatomical/base.py +++ /dev/null @@ -1,848 +0,0 @@ -import attrs -import logging -from pathlib import Path -from pydra.engine import Workflow -from pydra.engine.specs import BaseSpec, SpecInfo -from pydra.engine.task import FunctionTask -import pydra.mark -from pydra.tasks.mriqc.interfaces import ( - ArtifactMask, - ComputeQI2, - ConformImage, - RotationMask, - StructuralQC, -) -from pydra.tasks.mriqc.workflows.anatomical.output import init_anat_report_wf -from pydra.tasks.mriqc.workflows.utils import get_fwhmx -from pydra.tasks.niworkflows.interfaces.fixes import ( - FixHeaderApplyTransforms as ApplyTransforms, -) -from templateflow.api import get as get_template -import typing as ty - - -logger = logging.getLogger(__name__) - - -def anat_qc_workflow( - exec_ants_float=False, - exec_datalad_get=True, - exec_debug=False, - 
exec_no_sub=False, - exec_verbose_reports=False, - exec_work_dir=None, - in_file=attrs.NOTHING, - name="anatMRIQC", - nipype_omp_nthreads=12, - wf_inputs=None, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """ - One-subject-one-session-one-run pipeline to extract the NR-IQMs from - anatomical images - - .. workflow:: - - import os.path as op - from mriqc.workflows.anatomical.base import anat_qc_workflow - from mriqc.testing import mock_config - with mock_config(): - wf = anat_qc_workflow() - - """ - from pydra.tasks.mriqc.workflows.shared import synthstrip_wf - - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - # Initialize workflow - workflow = Workflow( - name=name, - input_spec={"in_file": ty.Any}, - output_spec={ - "anat_report_wf_airmask_report": ty.Any, - "anat_report_wf_artmask_report": ty.Any, - "anat_report_wf_bg_report": ty.Any, - "anat_report_wf_bmask_report": ty.Any, - "anat_report_wf_headmask_report": ty.Any, - "anat_report_wf_segm_report": ty.Any, - "anat_report_wf_zoom_report": ty.Any, - "iqmswf_noise_report": ty.Any, - "norm_report": ty.Any, - }, - in_file=in_file, - ) - - # Define workflow, inputs and outputs - # 0. Get data - - # 1. Reorient anatomical image - workflow.add( - ConformImage(check_dtype=False, in_file=workflow.lzin.in_file, name="to_ras") - ) - # 2. species specific skull-stripping - if wf_species.lower() == "human": - workflow.add( - synthstrip_wf( - omp_nthreads=nipype_omp_nthreads, - in_files=workflow.to_ras.lzout.out_file, - name="skull_stripping", - ) - ) - ss_bias_field = "outputnode.bias_image" - else: - from nirodents.workflows.brainextraction import init_rodent_brain_extraction_wf - - skull_stripping = init_rodent_brain_extraction_wf(template_id=wf_template_id) - ss_bias_field = "final_n4.bias_image" - # 3. Head mask - workflow.add( - headmsk_wf(omp_nthreads=nipype_omp_nthreads, wf_species=wf_species, name="hmsk") - ) - # 4. Spatial Normalization, using ANTs - workflow.add( - spatial_normalization( - nipype_omp_nthreads=nipype_omp_nthreads, - wf_species=wf_species, - exec_ants_float=exec_ants_float, - exec_debug=exec_debug, - wf_template_id=wf_template_id, - modality=workflow.lzin.in_file, - name="norm", - ) - ) - # 5. Air mask (with and without artifacts) - workflow.add( - airmsk_wf( - head_mask=workflow.hmsk.lzout.out_file, - in_file=workflow.to_ras.lzout.out_file, - ind2std_xfm=workflow.norm.lzout.ind2std_xfm, - name="amw", - ) - ) - # 6. Brain tissue segmentation - workflow.add( - init_brain_tissue_segmentation( - nipype_omp_nthreads=nipype_omp_nthreads, - in_file=workflow.hmsk.lzout.out_denoised, - std_tpms=workflow.norm.lzout.out_tpms, - name="bts", - ) - ) - # 7. 
Compute IQMs - workflow.add( - compute_iqms( - wf_species=wf_species, - headmask=workflow.hmsk.lzout.out_file, - pvms=workflow.bts.lzout.out_pvms, - segmentation=workflow.bts.lzout.out_segm, - rotmask=workflow.amw.lzout.rot_mask, - artmask=workflow.amw.lzout.art_mask, - hatmask=workflow.amw.lzout.hat_mask, - airmask=workflow.amw.lzout.air_mask, - in_ras=workflow.to_ras.lzout.out_file, - std_tpms=workflow.norm.lzout.out_tpms, - name="iqmswf", - ) - ) - # Reports - workflow.add( - init_anat_report_wf( - exec_verbose_reports=exec_verbose_reports, - exec_work_dir=exec_work_dir, - wf_species=wf_species, - segmentation=workflow.bts.lzout.out_segm, - artmask=workflow.amw.lzout.art_mask, - airmask=workflow.amw.lzout.air_mask, - headmask=workflow.hmsk.lzout.out_file, - in_ras=workflow.to_ras.lzout.out_file, - name="anat_report_wf", - ) - ) - # Connect all nodes - # fmt: off - workflow.norm.inputs.modality = workflow.lzin.in_file - workflow.hmsk.inputs.in_file = workflow.skull_stripping.lzout.out_corrected - workflow.hmsk.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - workflow.bts.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - workflow.norm.inputs.moving_image = workflow.skull_stripping.lzout.out_corrected - workflow.norm.inputs.moving_mask = workflow.skull_stripping.lzout.out_mask - workflow.hmsk.inputs.in_tpms = workflow.norm.lzout.out_tpms - - workflow.iqmswf.inputs.inu_corrected = workflow.skull_stripping.lzout.out_corrected - workflow.iqmswf.inputs.in_inu = workflow.skull_stripping.lzout.bias_image - workflow.iqmswf.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - - workflow.anat_report_wf.inputs.brainmask = workflow.skull_stripping.lzout.out_mask - - # fmt: on - # Upload metrics - if not exec_no_sub: - from pydra.tasks.mriqc.interfaces.webapi import UploadIQMs - - pass - # fmt: off - pass - pass - # fmt: on - workflow.set_output([("norm_report", workflow.norm.lzout.report)]) - workflow.set_output([("iqmswf_noise_report", workflow.iqmswf.lzout.noise_report)]) - workflow.set_output( - [("anat_report_wf_bg_report", workflow.anat_report_wf.lzout.bg_report)] - ) - workflow.set_output( - [ - ( - "anat_report_wf_artmask_report", - workflow.anat_report_wf.lzout.artmask_report, - ) - ] - ) - workflow.set_output( - [ - ( - "anat_report_wf_headmask_report", - workflow.anat_report_wf.lzout.headmask_report, - ) - ] - ) - workflow.set_output( - [("anat_report_wf_bmask_report", workflow.anat_report_wf.lzout.bmask_report)] - ) - workflow.set_output( - [("anat_report_wf_zoom_report", workflow.anat_report_wf.lzout.zoom_report)] - ) - workflow.set_output( - [ - ( - "anat_report_wf_airmask_report", - workflow.anat_report_wf.lzout.airmask_report, - ) - ] - ) - workflow.set_output( - [("anat_report_wf_segm_report", workflow.anat_report_wf.lzout.segm_report)] - ) - - return workflow - - -def airmsk_wf( - head_mask=attrs.NOTHING, - in_file=attrs.NOTHING, - ind2std_xfm=attrs.NOTHING, - name="AirMaskWorkflow", -): - """ - Calculate air, artifacts and "hat" masks to evaluate noise in the background. - - This workflow mostly addresses the implementation of Step 1 in [Mortamet2009]_. - This work proposes to look at the signal distribution in the background, where - no signals are expected, to evaluate the spread of the noise. - It is in the background where [Mortamet2009]_ proposed to also look at the presence - of ghosts and artifacts, where they are very easy to isolate. 
- - However, [Mortamet2009]_ proposes not to look at the background around the face - because of the likely signal leakage through the phase-encoding axis sourcing from - eyeballs (and their motion). - To avoid that, [Mortamet2009]_ proposed atlas-based identification of two landmarks - (nasion and cerebellar projection on to the occipital bone). - MRIQC, for simplicity, used a such a mask created in MNI152NLin2009cAsym space and - projected it on to the individual. - Such a solution is inadequate because it doesn't drop full in-plane slices as there - will be a large rotation of the individual's tilt of the head with respect to the - template. - The new implementation (23.1.x series) follows [Mortamet2009]_ more closely, - projecting the two landmarks from the template space and leveraging - *NiTransforms* to do that. - - .. workflow:: - - from mriqc.testing import mock_config - from mriqc.workflows.anatomical.base import airmsk_wf - with mock_config(): - wf = airmsk_wf() - - """ - workflow = Workflow( - name=name, - input_spec={"head_mask": ty.Any, "in_file": ty.Any, "ind2std_xfm": ty.Any}, - output_spec={ - "air_mask": ty.Any, - "art_mask": ty.Any, - "hat_mask": ty.Any, - "rot_mask": ty.Any, - }, - head_mask=head_mask, - in_file=in_file, - ind2std_xfm=ind2std_xfm, - ) - - workflow.add(RotationMask(in_file=workflow.lzin.in_file, name="rotmsk")) - workflow.add( - ArtifactMask( - head_mask=workflow.lzin.head_mask, - in_file=workflow.lzin.in_file, - ind2std_xfm=workflow.lzin.ind2std_xfm, - name="qi1", - ) - ) - # fmt: off - workflow.set_output([('hat_mask', workflow.qi1.lzout.out_hat_msk)]) - workflow.set_output([('air_mask', workflow.qi1.lzout.out_air_msk)]) - workflow.set_output([('art_mask', workflow.qi1.lzout.out_art_msk)]) - workflow.set_output([('rot_mask', workflow.rotmsk.lzout.out_file)]) - # fmt: on - - return workflow - - -def headmsk_wf( - brainmask=attrs.NOTHING, - in_file=attrs.NOTHING, - in_tpms=attrs.NOTHING, - name="HeadMaskWorkflow", - omp_nthreads=1, - wf_species="human", -): - """ - Computes a head mask as in [Mortamet2009]_. - - .. 
workflow:: - - from mriqc.testing import mock_config - from mriqc.workflows.anatomical.base import headmsk_wf - with mock_config(): - wf = headmsk_wf() - - """ - from pydra.tasks.niworkflows.interfaces.nibabel import ApplyMask - - workflow = Workflow( - name=name, - input_spec={"brainmask": ty.Any, "in_file": ty.Any, "in_tpms": ty.Any}, - output_spec={"out_denoised": ty.Any, "out_file": ty.Any}, - brainmask=brainmask, - in_file=in_file, - in_tpms=in_tpms, - ) - - def _select_wm(inlist): - return [f for f in inlist if "WM" in f][0] - - workflow.add( - FunctionTask( - func=_enhance, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[("in_file", ty.Any), ("wm_tpm", ty.Any)], - ), - output_spec=SpecInfo( - name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] - ), - in_file=workflow.lzin.in_file, - wm_tpm=workflow.lzin.in_tpms, - name="enhance", - ) - ) - workflow.add( - FunctionTask( - func=image_gradient, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[("in_file", ty.Any), ("brainmask", ty.Any), ("sigma", ty.Any)], - ), - output_spec=SpecInfo( - name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] - ), - brainmask=workflow.lzin.brainmask, - in_file=workflow.enhance.lzout.out_file, - name="gradient", - ) - ) - workflow.add( - FunctionTask( - func=gradient_threshold, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[ - ("in_file", ty.Any), - ("brainmask", ty.Any), - ("aniso", ty.Any), - ("thresh", ty.Any), - ], - ), - output_spec=SpecInfo( - name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] - ), - brainmask=workflow.lzin.brainmask, - in_file=workflow.gradient.lzout.out_file, - name="thresh", - ) - ) - if wf_species != "human": - workflow.gradient.inputs.sigma = 3.0 - workflow.thresh.inputs.aniso = True - workflow.thresh.inputs.thresh = 4.0 - workflow.add( - ApplyMask( - in_file=workflow.enhance.lzout.out_file, - in_mask=workflow.lzin.brainmask, - name="apply_mask", - ) - ) - # fmt: off - workflow.enhance.inputs.wm_tpm = workflow.lzin.in_tpms - workflow.set_output([('out_file', workflow.thresh.lzout.out_file)]) - workflow.set_output([('out_denoised', workflow.apply_mask.lzout.out_file)]) - # fmt: on - - return workflow - - -def init_brain_tissue_segmentation( - brainmask=attrs.NOTHING, - in_file=attrs.NOTHING, - name="brain_tissue_segmentation", - nipype_omp_nthreads=12, - std_tpms=attrs.NOTHING, -): - """ - Setup a workflow for brain tissue segmentation. - - .. 
workflow:: - - from mriqc.workflows.anatomical.base import init_brain_tissue_segmentation - from mriqc.testing import mock_config - with mock_config(): - wf = init_brain_tissue_segmentation() - - """ - from pydra.tasks.ants.auto import Atropos - - def _format_tpm_names(in_files, fname_string=None): - import glob - from pathlib import Path - import nibabel as nb - - out_path = Path.cwd().absolute() - # copy files to cwd and rename iteratively - for count, fname in enumerate(in_files): - img = nb.load(fname) - extension = "".join(Path(fname).suffixes) - out_fname = f"priors_{1 + count:02}{extension}" - nb.save(img, Path(out_path, out_fname)) - if fname_string is None: - fname_string = f"priors_%02d{extension}" - out_files = [ - str(prior) - for prior in glob.glob(str(Path(out_path, f"priors*{extension}"))) - ] - # return path with c-style format string for Atropos - file_format = str(Path(out_path, fname_string)) - return file_format, out_files - - workflow = Workflow( - name=name, - input_spec={"brainmask": ty.Any, "in_file": ty.Any, "std_tpms": ty.Any}, - output_spec={"out_pvms": ty.Any, "out_segm": ty.Any}, - brainmask=brainmask, - in_file=in_file, - std_tpms=std_tpms, - ) - - workflow.add( - FunctionTask( - execution={"keep_inputs": True, "remove_unnecessary_outputs": False}, - func=_format_tpm_names, - input_spec=SpecInfo( - name="FunctionIn", bases=(BaseSpec,), fields=[("in_files", ty.Any)] - ), - output_spec=SpecInfo( - name="FunctionOut", bases=(BaseSpec,), fields=[("file_format", ty.Any)] - ), - in_files=workflow.lzin.std_tpms, - name="format_tpm_names", - ) - ) - workflow.add( - Atropos( - initialization="PriorProbabilityImages", - mrf_radius=[1, 1, 1], - mrf_smoothing_factor=0.01, - num_threads=nipype_omp_nthreads, - number_of_tissue_classes=3, - out_classified_image_name="segment.nii.gz", - output_posteriors_name_template="segment_%02d.nii.gz", - prior_weighting=0.1, - save_posteriors=True, - intensity_images=workflow.lzin.in_file, - mask_image=workflow.lzin.brainmask, - name="segment", - ) - ) - # fmt: off - - @pydra.mark.task - def format_tpm_names_file_format_to_segment_prior_image_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(format_tpm_names_file_format_to_segment_prior_image_callable(in_=workflow.format_tpm_names.lzout.file_format, name="format_tpm_names_file_format_to_segment_prior_image_callable")) - - workflow.segment.inputs.prior_image = workflow.format_tpm_names_file_format_to_segment_prior_image_callable.lzout.out - workflow.set_output([('out_segm', workflow.segment.lzout.classified_image)]) - workflow.set_output([('out_pvms', workflow.segment.lzout.posteriors)]) - # fmt: on - - return workflow - - -def spatial_normalization( - exec_ants_float=False, - exec_debug=False, - modality=attrs.NOTHING, - moving_image=attrs.NOTHING, - moving_mask=attrs.NOTHING, - name="SpatialNormalization", - nipype_omp_nthreads=12, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """Create a simplified workflow to perform fast spatial normalization.""" - from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( - SpatialNormalizationRPT as RobustMNINormalization, - ) - - # Have the template id handy - tpl_id = wf_template_id - # Define workflow interface - workflow = Workflow( - name=name, - input_spec={"modality": ty.Any, "moving_image": ty.Any, "moving_mask": ty.Any}, - output_spec={"ind2std_xfm": ty.Any, "out_tpms": ty.Any, "report": ty.Any}, - modality=modality, - moving_image=moving_image, - moving_mask=moving_mask, - ) - - # 
Spatial normalization - workflow.add( - RobustMNINormalization( - flavor=["testing", "fast"][exec_debug], - float=exec_ants_float, - generate_report=True, - num_threads=nipype_omp_nthreads, - template=tpl_id, - moving_image=workflow.lzin.moving_image, - moving_mask=workflow.lzin.moving_mask, - reference=workflow.lzin.modality, - name="norm", - ) - ) - if wf_species.lower() == "human": - workflow.norm.inputs.reference_mask = str( - get_template(tpl_id, resolution=2, desc="brain", suffix="mask") - ) - else: - workflow.norm.inputs.reference_image = str(get_template(tpl_id, suffix="T2w")) - workflow.norm.inputs.reference_mask = str( - get_template(tpl_id, desc="brain", suffix="mask")[0] - ) - # Project standard TPMs into T1w space - workflow.add( - ApplyTransforms( - default_value=0, - dimension=3, - float=exec_ants_float, - interpolation="Gaussian", - reference_image=workflow.lzin.moving_image, - transforms=workflow.norm.lzout.inverse_composite_transform, - name="tpms_std2t1w", - ) - ) - workflow.tpms_std2t1w.inputs.input_image = [ - str(p) - for p in get_template( - wf_template_id, - suffix="probseg", - resolution=(1 if wf_species.lower() == "human" else None), - label=["CSF", "GM", "WM"], - ) - ] - # fmt: off - workflow.set_output([('ind2std_xfm', workflow.norm.lzout.composite_transform)]) - workflow.set_output([('report', workflow.norm.lzout.out_report)]) - workflow.set_output([('out_tpms', workflow.tpms_std2t1w.lzout.output_image)]) - # fmt: on - - return workflow - - -def compute_iqms( - airmask=attrs.NOTHING, - artmask=attrs.NOTHING, - brainmask=attrs.NOTHING, - hatmask=attrs.NOTHING, - headmask=attrs.NOTHING, - in_inu=attrs.NOTHING, - in_ras=attrs.NOTHING, - inu_corrected=attrs.NOTHING, - name="ComputeIQMs", - pvms=attrs.NOTHING, - rotmask=attrs.NOTHING, - segmentation=attrs.NOTHING, - std_tpms=attrs.NOTHING, - wf_species="human", -): - """ - Setup the workflow that actually computes the IQMs. - - .. 
workflow:: - - from mriqc.workflows.anatomical.base import compute_iqms - from mriqc.testing import mock_config - with mock_config(): - wf = compute_iqms() - - """ - from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON - from pydra.tasks.mriqc.interfaces.anatomical import Harmonize - from pydra.tasks.mriqc.workflows.utils import _tofloat - - workflow = Workflow( - name=name, - input_spec={ - "airmask": ty.Any, - "artmask": ty.Any, - "brainmask": ty.Any, - "hatmask": ty.Any, - "headmask": ty.Any, - "in_inu": ty.Any, - "in_ras": ty.Any, - "inu_corrected": ty.Any, - "pvms": ty.Any, - "rotmask": ty.Any, - "segmentation": ty.Any, - "std_tpms": ty.Any, - }, - output_spec={"measures": ty.Any, "noise_report": ty.Any}, - airmask=airmask, - artmask=artmask, - brainmask=brainmask, - hatmask=hatmask, - headmask=headmask, - in_inu=in_inu, - in_ras=in_ras, - inu_corrected=inu_corrected, - pvms=pvms, - rotmask=rotmask, - segmentation=segmentation, - std_tpms=std_tpms, - ) - - # Extract metadata - - # Add provenance - - # AFNI check smoothing - fwhm_interface = get_fwhmx() - fwhm = fwhm_interface - fwhm.name = "fwhm" - fwhm.inputs.in_file = workflow.lzin.in_ras - fwhm.inputs.mask = workflow.lzin.brainmask - workflow.add(fwhm) - # Harmonize - workflow.add( - Harmonize( - in_file=workflow.lzin.inu_corrected, - wm_mask=workflow.lzin.pvms, - name="homog", - ) - ) - if wf_species.lower() != "human": - workflow.homog.inputs.erodemsk = False - workflow.homog.inputs.thresh = 0.8 - # Mortamet's QI2 - workflow.add( - ComputeQI2( - air_msk=workflow.lzin.hatmask, in_file=workflow.lzin.in_ras, name="getqi2" - ) - ) - # Compute python-coded measures - workflow.add( - StructuralQC( - human=wf_species.lower() == "human", - air_msk=workflow.lzin.airmask, - artifact_msk=workflow.lzin.artmask, - head_msk=workflow.lzin.headmask, - in_bias=workflow.lzin.in_inu, - in_file=workflow.lzin.in_ras, - in_noinu=workflow.homog.lzout.out_file, - in_pvms=workflow.lzin.pvms, - in_segm=workflow.lzin.segmentation, - mni_tpms=workflow.lzin.std_tpms, - rot_msk=workflow.lzin.rotmask, - name="measures", - ) - ) - - def _getwm(inlist): - return inlist[-1] - - # fmt: off - - - workflow.homog.inputs.wm_mask = workflow.lzin.pvms - - @pydra.mark.task - def fwhm_fwhm_to_measures_in_fwhm_callable(in_: ty.Any) -> ty.Any: - return _tofloat(in_) - - workflow.add(fwhm_fwhm_to_measures_in_fwhm_callable(in_=workflow.fwhm.lzout.fwhm, name="fwhm_fwhm_to_measures_in_fwhm_callable")) - - workflow.measures.inputs.in_fwhm = workflow.fwhm_fwhm_to_measures_in_fwhm_callable.lzout.out - workflow.set_output([('measures', workflow.measures.lzout.out_qc)]) - workflow.set_output([('noise_report', workflow.getqi2.lzout.out_file)]) - - # fmt: on - - return workflow - - -def _enhance(in_file, wm_tpm, out_file=None): - - import nibabel as nb - import numpy as np - from pydra.tasks.mriqc.workflows.utils import generate_filename - - imnii = nb.load(in_file) - data = imnii.get_fdata(dtype=np.float32) - range_max = np.percentile(data[data > 0], 99.98) - excess = data > range_max - wm_prob = nb.load(wm_tpm).get_fdata() - wm_prob[wm_prob < 0] = 0 # Ensure no negative values - wm_prob[excess] = 0 # Ensure no outliers are considered - # Calculate weighted mean and standard deviation - wm_mu = np.average(data, weights=wm_prob) - wm_sigma = np.sqrt(np.average((data - wm_mu) ** 2, weights=wm_prob)) - # Resample signal excess pixels - data[excess] = np.random.normal(loc=wm_mu, scale=wm_sigma, size=excess.sum()) - out_file = out_file or str(generate_filename(in_file, 
suffix="enhanced").absolute()) - nb.Nifti1Image(data, imnii.affine, imnii.header).to_filename(out_file) - return out_file - - -def _get_mod(in_file): - - from pathlib import Path - - in_file = Path(in_file) - extension = "".join(in_file.suffixes) - return in_file.name.replace(extension, "").split("_")[-1] - - -def _pop(inlist): - - if isinstance(inlist, (list, tuple)): - return inlist[0] - return inlist - - -def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=False): - """Compute a threshold from the histogram of the magnitude gradient image""" - import nibabel as nb - import numpy as np - from scipy import ndimage as sim - from pydra.tasks.mriqc.workflows.utils import generate_filename - - if not aniso: - struct = sim.iterate_structure(sim.generate_binary_structure(3, 2), 2) - else: - # Generate an anisotropic binary structure, taking into account slice thickness - img = nb.load(in_file) - zooms = img.header.get_zooms() - dist = max(zooms) - dim = img.header["dim"][0] - x = np.ones((5) * np.ones(dim, dtype=np.int8)) - np.put(x, x.size // 2, 0) - dist_matrix = np.round(sim.distance_transform_edt(x, sampling=zooms), 5) - struct = dist_matrix <= dist - imnii = nb.load(in_file) - hdr = imnii.header.copy() - hdr.set_data_dtype(np.uint8) - data = imnii.get_fdata(dtype=np.float32) - mask = np.zeros_like(data, dtype=np.uint8) - mask[data > thresh] = 1 - mask = sim.binary_closing(mask, struct, iterations=2).astype(np.uint8) - mask = sim.binary_erosion(mask, sim.generate_binary_structure(3, 2)).astype( - np.uint8 - ) - segdata = np.asanyarray(nb.load(brainmask).dataobj) > 0 - segdata = sim.binary_dilation(segdata, struct, iterations=2, border_value=1).astype( - np.uint8 - ) - mask[segdata] = 1 - # Remove small objects - label_im, nb_labels = sim.label(mask) - artmsk = np.zeros_like(mask) - if nb_labels > 2: - sizes = sim.sum(mask, label_im, list(range(nb_labels + 1))) - ordered = sorted(zip(sizes, list(range(nb_labels + 1))), reverse=True) - for _, label in ordered[2:]: - mask[label_im == label] = 0 - artmsk[label_im == label] = 1 - mask = sim.binary_fill_holes(mask, struct).astype( - np.uint8 - ) # pylint: disable=no-member - out_file = out_file or str(generate_filename(in_file, suffix="gradmask").absolute()) - nb.Nifti1Image(mask, imnii.affine, hdr).to_filename(out_file) - return out_file - - -def image_gradient(in_file, brainmask, sigma=4.0, out_file=None): - """Computes the magnitude gradient of an image using numpy""" - import nibabel as nb - import numpy as np - from scipy.ndimage import gaussian_gradient_magnitude as gradient - from pydra.tasks.mriqc.workflows.utils import generate_filename - - imnii = nb.load(in_file) - mask = np.bool_(nb.load(brainmask).dataobj) - data = imnii.get_fdata(dtype=np.float32) - datamax = np.percentile(data.reshape(-1), 99.5) - data *= 100 / datamax - data[mask] = 100 - zooms = np.array(imnii.header.get_zooms()[:3]) - sigma_xyz = 2 - zooms / min(zooms) - grad = gradient(data, sigma * sigma_xyz) - gradmax = np.percentile(grad.reshape(-1), 99.5) - grad *= 100.0 - grad /= gradmax - grad[mask] = 100 - out_file = out_file or str(generate_filename(in_file, suffix="grad").absolute()) - nb.Nifti1Image(grad, imnii.affine, imnii.header).to_filename(out_file) - return out_file - - -def _binarize(in_file, threshold=0.5, out_file=None): - - import os.path as op - import nibabel as nb - import numpy as np - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - 
out_file = op.abspath(f"{fname}_bin{ext}") - nii = nb.load(in_file) - data = nii.get_fdata() > threshold - hdr = nii.header.copy() - hdr.set_data_dtype(np.uint8) - nb.Nifti1Image(data.astype(np.uint8), nii.affine, hdr).to_filename(out_file) - return out_file diff --git a/pydra/tasks/mriqc/workflows/anatomical/output.py b/pydra/tasks/mriqc/workflows/anatomical/output.py deleted file mode 100644 index 62f8432..0000000 --- a/pydra/tasks/mriqc/workflows/anatomical/output.py +++ /dev/null @@ -1,172 +0,0 @@ -import attrs -import logging -from pathlib import Path -from pydra.engine import Workflow -import typing as ty - - -logger = logging.getLogger(__name__) - - -def init_anat_report_wf( - airmask=attrs.NOTHING, - artmask=attrs.NOTHING, - brainmask=attrs.NOTHING, - exec_verbose_reports=False, - exec_work_dir=None, - headmask=attrs.NOTHING, - in_ras=attrs.NOTHING, - name: str = "anat_report_wf", - segmentation=attrs.NOTHING, - wf_species="human", -): - """ - Generate the components of the individual report. - - .. workflow:: - - from mriqc.workflows.anatomical.output import init_anat_report_wf - from mriqc.testing import mock_config - with mock_config(): - wf = init_anat_report_wf() - - """ - from pydra.tasks.nireports.interfaces import PlotMosaic - - # from mriqc.interfaces.reports import IndividualReport - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - verbose = exec_verbose_reports - reportlets_dir = exec_work_dir / "reportlets" - workflow = Workflow( - name=name, - input_spec={ - "airmask": ty.Any, - "artmask": ty.Any, - "brainmask": ty.Any, - "headmask": ty.Any, - "in_ras": ty.Any, - "segmentation": ty.Any, - }, - output_spec={ - "airmask_report": ty.Any, - "artmask_report": ty.Any, - "bg_report": ty.Any, - "bmask_report": ty.Any, - "headmask_report": ty.Any, - "segm_report": ty.Any, - "zoom_report": ty.Any, - }, - airmask=airmask, - artmask=artmask, - brainmask=brainmask, - headmask=headmask, - in_ras=in_ras, - segmentation=segmentation, - ) - - workflow.add( - PlotMosaic( - cmap="Greys_r", - bbox_mask_file=workflow.lzin.brainmask, - in_file=workflow.lzin.in_ras, - name="mosaic_zoom", - ) - ) - workflow.add( - PlotMosaic( - cmap="viridis_r", - only_noise=True, - in_file=workflow.lzin.in_ras, - name="mosaic_noise", - ) - ) - if wf_species.lower() in ("rat", "mouse"): - workflow.mosaic_zoom.inputs.view = ["coronal", "axial"] - workflow.mosaic_noise.inputs.view = ["coronal", "axial"] - - # fmt: off - workflow.set_output([('zoom_report', workflow.mosaic_zoom.lzout.out_file)]) - workflow.set_output([('bg_report', workflow.mosaic_noise.lzout.out_file)]) - # fmt: on - - from pydra.tasks.nireports.interfaces import PlotContours - - display_mode = "y" if wf_species.lower() in ("rat", "mouse") else "z" - workflow.add( - PlotContours( - colors=["r", "g", "b"], - cut_coords=10, - display_mode=display_mode, - levels=[0.5, 1.5, 2.5], - in_contours=workflow.lzin.segmentation, - in_file=workflow.lzin.in_ras, - name="plot_segm", - ) - ) - - workflow.add( - PlotContours( - colors=["r"], - cut_coords=10, - display_mode=display_mode, - levels=[0.5], - out_file="bmask", - in_contours=workflow.lzin.brainmask, - in_file=workflow.lzin.in_ras, - name="plot_bmask", - ) - ) - - workflow.add( - PlotContours( - colors=["r"], - cut_coords=10, - display_mode=display_mode, - levels=[0.5], - out_file="artmask", - saturate=True, - in_contours=workflow.lzin.artmask, - in_file=workflow.lzin.in_ras, - name="plot_artmask", - ) - ) - - # NOTE: humans switch on these two to coronal view. 
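# Editor's note (not part of the original file): in nilearn/nireports,
# display_mode "x" is sagittal, "y" is coronal, and "z" is axial. The two
# contour plots below are therefore rendered coronally for rodents but
# sagittally for humans, instead of the axial ("z") view humans get above.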
- display_mode = "y" if wf_species.lower() in ("rat", "mouse") else "x" - workflow.add( - PlotContours( - colors=["r"], - cut_coords=6, - display_mode=display_mode, - levels=[0.5], - out_file="airmask", - in_contours=workflow.lzin.airmask, - in_file=workflow.lzin.in_ras, - name="plot_airmask", - ) - ) - - workflow.add( - PlotContours( - colors=["r"], - cut_coords=6, - display_mode=display_mode, - levels=[0.5], - out_file="headmask", - in_contours=workflow.lzin.headmask, - in_file=workflow.lzin.in_ras, - name="plot_headmask", - ) - ) - - # fmt: off - workflow.set_output([('bmask_report', workflow.plot_bmask.lzout.out_file)]) - workflow.set_output([('segm_report', workflow.plot_segm.lzout.out_file)]) - workflow.set_output([('artmask_report', workflow.plot_artmask.lzout.out_file)]) - workflow.set_output([('headmask_report', workflow.plot_headmask.lzout.out_file)]) - workflow.set_output([('airmask_report', workflow.plot_airmask.lzout.out_file)]) - # fmt: on - - return workflow diff --git a/pydra/tasks/mriqc/workflows/diffusion/__init__.py b/pydra/tasks/mriqc/workflows/diffusion/__init__.py deleted file mode 100644 index 3a79cc8..0000000 --- a/pydra/tasks/mriqc/workflows/diffusion/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .base import ( - _bvals_report, - _estimate_sigma, - _filter_metadata, - compute_iqms, - dmri_qc_workflow, - epi_mni_align, - hmc_workflow, -) -from .output import _carpet_parcellation, _get_tr, _get_wm, init_dwi_report_wf diff --git a/pydra/tasks/mriqc/workflows/diffusion/base.py b/pydra/tasks/mriqc/workflows/diffusion/base.py deleted file mode 100644 index 909bfc1..0000000 --- a/pydra/tasks/mriqc/workflows/diffusion/base.py +++ /dev/null @@ -1,747 +0,0 @@ -import attrs -from fileformats.medimage import Bval, Bvec -import logging -import numpy as np -from pathlib import Path -from pydra.engine import Workflow -from pydra.engine.task import FunctionTask -import pydra.mark -from pydra.tasks.mriqc.workflows.diffusion.output import init_dwi_report_wf -import typing as ty - - -logger = logging.getLogger(__name__) - - -def dmri_qc_workflow( - bvals=attrs.NOTHING, - bvecs=attrs.NOTHING, - exec_ants_float=False, - exec_datalad_get=True, - exec_debug=False, - exec_float32=True, - exec_layout=None, - exec_verbose_reports=False, - exec_work_dir=None, - in_file=attrs.NOTHING, - name="dwiMRIQC", - nipype_nprocs=12, - nipype_omp_nthreads=12, - qspace_neighbors=attrs.NOTHING, - wf_biggest_file_gb=1, - wf_fd_radius=50, - wf_fd_thres=0.2, - wf_fft_spikes_detector=False, - wf_inputs=None, - wf_min_len_dwi=7, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """ - Initialize the dMRI-QC workflow. - - .. 
workflow:: - - import os.path as op - from mriqc.workflows.diffusion.base import dmri_qc_workflow - from mriqc.testing import mock_config - with mock_config(): - wf = dmri_qc_workflow() - - """ - from pydra.tasks.afni.auto import Volreg - from pydra.tasks.mrtrix3.v3_0 import DwiDenoise - from pydra.tasks.niworkflows.interfaces.header import SanitizeImage - from pydra.tasks.niworkflows.interfaces.images import RobustAverage - from pydra.tasks.mriqc.interfaces.diffusion import ( - CCSegmentation, - CorrectSignalDrift, - DiffusionModel, - ExtractOrientations, - NumberOfShells, - PIESNO, - ReadDWIMetadata, - SpikingVoxelsMask, - WeightedStat, - ) - - from pydra.tasks.mriqc.workflows.shared import synthstrip_wf as dmri_bmsk_workflow - - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - workflow = Workflow( - name=name, - input_spec={ - "bvals": Bval, - "bvecs": Bvec, - "in_file": ty.Any, - "qspace_neighbors": ty.Any, - }, - output_spec={ - "dwi_report_wf_bmask_report": ty.Any, - "dwi_report_wf_carpet_report": ty.Any, - "dwi_report_wf_fa_report": ty.Any, - "dwi_report_wf_heatmap_report": ty.Any, - "dwi_report_wf_md_report": ty.Any, - "dwi_report_wf_noise_report": ty.Any, - "dwi_report_wf_snr_report": ty.Any, - "dwi_report_wf_spikes_report": ty.Any, - "iqms_wf_noise_floor": ty.Any, - "iqms_wf_out_file": ty.Any, - }, - bvals=bvals, - bvecs=bvecs, - in_file=in_file, - qspace_neighbors=qspace_neighbors, - ) - - # Define workflow, inputs and outputs - # 0. Get data, put it in RAS orientation - - workflow.add( - SanitizeImage( - max_32bit=exec_float32, - n_volumes_to_discard=0, - in_file=workflow.lzin.in_file, - name="sanitize", - ) - ) - # Workflow -------------------------------------------------------- - # Read metadata & bvec/bval, estimate number of shells, extract and split B0s - - workflow.add(NumberOfShells(in_bvals=workflow.lzin.bvals, name="shells")) - workflow.add( - ExtractOrientations(in_file=workflow.sanitize.lzout.out_file, name="get_lowb") - ) - # Generate B0 reference - workflow.add( - RobustAverage( - mc_method=None, in_file=workflow.sanitize.lzout.out_file, name="dwi_ref" - ) - ) - workflow.add( - Volreg( - args="-Fourier -twopass", - outputtype="NIFTI_GZ", - zpad=4, - basefile=workflow.dwi_ref.lzout.out_file, - in_file=workflow.get_lowb.lzout.out_file, - name="hmc_b0", - ) - ) - # Calculate brainmask - workflow.add( - dmri_bmsk_workflow( - omp_nthreads=nipype_omp_nthreads, - in_files=workflow.dwi_ref.lzout.out_file, - name="dmri_bmsk", - ) - ) - # HMC: head motion correct - workflow.add( - hmc_workflow( - wf_fd_radius=wf_fd_radius, in_bvec=workflow.lzin.bvecs, name="hmcwf" - ) - ) - workflow.add( - ExtractOrientations( - in_bvec_file=workflow.lzin.bvecs, - in_file=workflow.hmcwf.lzout.out_file, - indices=workflow.shells.lzout.b_indices, - name="get_hmc_shells", - ) - ) - # Split shells and compute some stats - workflow.add( - WeightedStat(in_weights=workflow.shells.lzout.b_masks, name="averages") - ) - workflow.add( - WeightedStat( - stat="std", in_weights=workflow.shells.lzout.b_masks, name="stddev" - ) - ) - workflow.add( - DwiDenoise( - noise="noisemap.nii.gz", - nthreads=nipype_omp_nthreads, - mask=workflow.dmri_bmsk.lzout.out_mask, - name="dwidenoise", - ) - ) - workflow.add( - CorrectSignalDrift( - brainmask_file=workflow.dmri_bmsk.lzout.out_mask, - bval_file=workflow.lzin.bvals, - full_epi=workflow.sanitize.lzout.out_file, - in_file=workflow.hmc_b0.lzout.out_file, - name="drift", - ) - ) - workflow.add( - SpikingVoxelsMask( - b_masks=workflow.shells.lzout.b_masks, - 
brain_mask=workflow.dmri_bmsk.lzout.out_mask, - in_file=workflow.sanitize.lzout.out_file, - name="sp_mask", - ) - ) - # Fit DTI/DKI model - workflow.add( - DiffusionModel( - brain_mask=workflow.dmri_bmsk.lzout.out_mask, - bvals=workflow.shells.lzout.out_data, - bvec_file=workflow.lzin.bvecs, - in_file=workflow.dwidenoise.lzout.out, - n_shells=workflow.shells.lzout.n_shells, - name="dwimodel", - ) - ) - # Calculate CC mask - workflow.add( - CCSegmentation( - in_cfa=workflow.dwimodel.lzout.out_cfa, - in_fa=workflow.dwimodel.lzout.out_fa, - name="cc_mask", - ) - ) - # Run PIESNO noise estimation - workflow.add(PIESNO(in_file=workflow.sanitize.lzout.out_file, name="piesno")) - # EPI to MNI registration - workflow.add( - epi_mni_align( - nipype_omp_nthreads=nipype_omp_nthreads, - wf_species=wf_species, - exec_ants_float=exec_ants_float, - exec_debug=exec_debug, - nipype_nprocs=nipype_nprocs, - wf_template_id=wf_template_id, - epi_mask=workflow.dmri_bmsk.lzout.out_mask, - epi_mean=workflow.dwi_ref.lzout.out_file, - name="spatial_norm", - ) - ) - # Compute IQMs - workflow.add( - compute_iqms( - in_noise=workflow.dwidenoise.lzout.noise, - in_bvec=workflow.get_hmc_shells.lzout.out_bvec, - in_shells=workflow.get_hmc_shells.lzout.out_file, - b_values_shells=workflow.shells.lzout.b_values, - wm_mask=workflow.cc_mask.lzout.wm_finalmask, - cc_mask=workflow.cc_mask.lzout.out_mask, - brain_mask=workflow.dmri_bmsk.lzout.out_mask, - in_md=workflow.dwimodel.lzout.out_md, - in_fa_degenerate=workflow.dwimodel.lzout.out_fa_degenerate, - in_fa_nans=workflow.dwimodel.lzout.out_fa_nans, - in_cfa=workflow.dwimodel.lzout.out_cfa, - in_fa=workflow.dwimodel.lzout.out_fa, - in_bvec_diff=workflow.hmcwf.lzout.out_bvec_diff, - in_bvec_rotated=workflow.hmcwf.lzout.out_bvec, - framewise_displacement=workflow.hmcwf.lzout.out_fd, - piesno_sigma=workflow.piesno.lzout.sigma, - spikes_mask=workflow.sp_mask.lzout.out_mask, - qspace_neighbors=workflow.lzin.qspace_neighbors, - b_values_file=workflow.lzin.bvals, - in_file=workflow.lzin.in_file, - name="iqms_wf", - ) - ) - # Generate outputs - workflow.add( - init_dwi_report_wf( - exec_verbose_reports=exec_verbose_reports, - wf_biggest_file_gb=wf_biggest_file_gb, - wf_fd_thres=wf_fd_thres, - exec_work_dir=exec_work_dir, - wf_species=wf_species, - wf_fft_spikes_detector=wf_fft_spikes_detector, - in_parcellation=workflow.spatial_norm.lzout.epi_parc, - in_md=workflow.dwimodel.lzout.out_md, - in_fa=workflow.dwimodel.lzout.out_fa, - in_epi=workflow.drift.lzout.out_full_file, - in_stdmap=workflow.stddev.lzout.out_file, - in_avgmap=workflow.averages.lzout.out_file, - brain_mask=workflow.dmri_bmsk.lzout.out_mask, - in_bdict=workflow.shells.lzout.b_dict, - name="dwi_report_wf", - ) - ) - # fmt: off - - @pydra.mark.task - def shells_b_masks_to_dwi_ref_t_mask_callable(in_: ty.Any) -> ty.Any: - return _first(in_) - - workflow.add(shells_b_masks_to_dwi_ref_t_mask_callable(in_=workflow.shells.lzout.b_masks, name="shells_b_masks_to_dwi_ref_t_mask_callable")) - - workflow.dwi_ref.inputs.t_mask = workflow.shells_b_masks_to_dwi_ref_t_mask_callable.lzout.out - - @pydra.mark.task - def shells_b_indices_to_get_lowb_indices_callable(in_: ty.Any) -> ty.Any: - return _first(in_) - - workflow.add(shells_b_indices_to_get_lowb_indices_callable(in_=workflow.shells.lzout.b_indices, name="shells_b_indices_to_get_lowb_indices_callable")) - - workflow.get_lowb.inputs.indices = workflow.shells_b_indices_to_get_lowb_indices_callable.lzout.out - - @pydra.mark.task - def shells_b_indices_to_drift_b0_ixs_callable(in_: 
ty.Any) -> ty.Any: - return _first(in_) - - workflow.add(shells_b_indices_to_drift_b0_ixs_callable(in_=workflow.shells.lzout.b_indices, name="shells_b_indices_to_drift_b0_ixs_callable")) - - workflow.drift.inputs.b0_ixs = workflow.shells_b_indices_to_drift_b0_ixs_callable.lzout.out - workflow.hmcwf.inputs.in_file = workflow.drift.lzout.out_full_file - workflow.averages.inputs.in_file = workflow.drift.lzout.out_full_file - workflow.stddev.inputs.in_file = workflow.drift.lzout.out_full_file - - @pydra.mark.task - def averages_out_file_to_hmcwf_reference_callable(in_: ty.Any) -> ty.Any: - return _first(in_) - - workflow.add(averages_out_file_to_hmcwf_reference_callable(in_=workflow.averages.lzout.out_file, name="averages_out_file_to_hmcwf_reference_callable")) - - workflow.hmcwf.inputs.reference = workflow.averages_out_file_to_hmcwf_reference_callable.lzout.out - workflow.dwidenoise.inputs.dwi = workflow.drift.lzout.out_full_file - - @pydra.mark.task - def averages_out_file_to_iqms_wf_in_b0_callable(in_: ty.Any) -> ty.Any: - return _first(in_) - - workflow.add(averages_out_file_to_iqms_wf_in_b0_callable(in_=workflow.averages.lzout.out_file, name="averages_out_file_to_iqms_wf_in_b0_callable")) - - workflow.iqms_wf.inputs.in_b0 = workflow.averages_out_file_to_iqms_wf_in_b0_callable.lzout.out - # fmt: on - workflow.set_output([("iqms_wf_out_file", workflow.iqms_wf.lzout.out_file)]) - workflow.set_output([("iqms_wf_noise_floor", workflow.iqms_wf.lzout.noise_floor)]) - workflow.set_output( - [("dwi_report_wf_spikes_report", workflow.dwi_report_wf.lzout.spikes_report)] - ) - workflow.set_output( - [("dwi_report_wf_carpet_report", workflow.dwi_report_wf.lzout.carpet_report)] - ) - workflow.set_output( - [("dwi_report_wf_heatmap_report", workflow.dwi_report_wf.lzout.heatmap_report)] - ) - workflow.set_output( - [("dwi_report_wf_md_report", workflow.dwi_report_wf.lzout.md_report)] - ) - workflow.set_output( - [("dwi_report_wf_fa_report", workflow.dwi_report_wf.lzout.fa_report)] - ) - workflow.set_output( - [("dwi_report_wf_noise_report", workflow.dwi_report_wf.lzout.noise_report)] - ) - workflow.set_output( - [("dwi_report_wf_bmask_report", workflow.dwi_report_wf.lzout.bmask_report)] - ) - workflow.set_output( - [("dwi_report_wf_snr_report", workflow.dwi_report_wf.lzout.snr_report)] - ) - - return workflow - - -def hmc_workflow( - in_bvec=attrs.NOTHING, - in_file=attrs.NOTHING, - name="dMRI_HMC", - reference=attrs.NOTHING, - wf_fd_radius=50, -): - """ - Create a :abbr:`HMC (head motion correction)` workflow for dMRI. - - .. 
workflow:: - - from mriqc.workflows.diffusion.base import hmc - from mriqc.testing import mock_config - with mock_config(): - wf = hmc() - - """ - from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( - FramewiseDisplacement, - ) - from pydra.tasks.afni.auto import Volreg - from pydra.tasks.mriqc.interfaces.diffusion import RotateVectors - - workflow = Workflow( - name=name, - input_spec={"in_bvec": ty.Any, "in_file": ty.Any, "reference": ty.Any}, - output_spec={ - "out_bvec": ty.Any, - "out_bvec_diff": ty.Any, - "out_fd": ty.Any, - "out_file": ty.Any, - }, - in_bvec=in_bvec, - in_file=in_file, - reference=reference, - ) - - # calculate hmc parameters - workflow.add( - Volreg( - args="-Fourier -twopass", - outputtype="NIFTI_GZ", - zpad=4, - basefile=workflow.lzin.reference, - in_file=workflow.lzin.in_file, - name="hmc", - ) - ) - workflow.add( - RotateVectors( - in_file=workflow.lzin.in_bvec, - reference=workflow.lzin.reference, - transforms=workflow.hmc.lzout.oned_matrix_save, - name="bvec_rot", - ) - ) - # Compute the frame-wise displacement - workflow.add( - FramewiseDisplacement( - normalize=False, - parameter_source="AFNI", - radius=wf_fd_radius, - in_file=workflow.hmc.lzout.oned_file, - name="fdnode", - ) - ) - # fmt: off - workflow.set_output([('out_file', workflow.hmc.lzout.out_file)]) - workflow.set_output([('out_fd', workflow.fdnode.lzout.out_file)]) - workflow.set_output([('out_bvec', workflow.bvec_rot.lzout.out_bvec)]) - workflow.set_output([('out_bvec_diff', workflow.bvec_rot.lzout.out_diff)]) - # fmt: on - - return workflow - - -def epi_mni_align( - epi_mask=attrs.NOTHING, - epi_mean=attrs.NOTHING, - exec_ants_float=False, - exec_debug=False, - name="SpatialNormalization", - nipype_nprocs=12, - nipype_omp_nthreads=12, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """ - Estimate the transform that maps the EPI space into MNI152NLin2009cAsym. - - The input epi_mean is the averaged and brain-masked EPI timeseries - - Returns the EPI mean resampled in MNI space (for checking out registration) and - the associated "lobe" parcellation in EPI space. - - .. 
workflow:: - - from mriqc.workflows.diffusion.base import epi_mni_align - from mriqc.testing import mock_config - with mock_config(): - wf = epi_mni_align() - - """ - from pydra.tasks.ants.auto import ApplyTransforms, N4BiasFieldCorrection - from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( - SpatialNormalizationRPT as RobustMNINormalization, - ) - from templateflow.api import get as get_template - - # Get settings - testing = exec_debug - n_procs = nipype_nprocs - ants_nthreads = nipype_omp_nthreads - workflow = Workflow( - name=name, - input_spec={"epi_mask": ty.Any, "epi_mean": ty.Any}, - output_spec={"epi_mni": ty.Any, "epi_parc": ty.Any, "report": ty.Any}, - epi_mask=epi_mask, - epi_mean=epi_mean, - ) - - workflow.add( - N4BiasFieldCorrection( - copy_header=True, - dimension=3, - input_image=workflow.lzin.epi_mean, - name="n4itk", - ) - ) - workflow.add( - RobustMNINormalization( - explicit_masking=False, - flavor="testing" if testing else "precise", - float=exec_ants_float, - generate_report=True, - moving="boldref", - num_threads=ants_nthreads, - reference="boldref", - template=wf_template_id, - moving_image=workflow.n4itk.lzout.output_image, - name="norm", - ) - ) - if wf_species.lower() == "human": - workflow.norm.inputs.reference_image = str( - get_template(wf_template_id, resolution=2, suffix="boldref") - ) - workflow.norm.inputs.reference_mask = str( - get_template( - wf_template_id, - resolution=2, - desc="brain", - suffix="mask", - ) - ) - # adapt some population-specific settings - else: - from nirodents.workflows.brainextraction import _bspline_grid - - workflow.n4itk.inputs.shrink_factor = 1 - workflow.n4itk.inputs.n_iterations = [50] * 4 - workflow.norm.inputs.reference_image = str( - get_template(wf_template_id, suffix="T2w") - ) - workflow.norm.inputs.reference_mask = str( - get_template( - wf_template_id, - desc="brain", - suffix="mask", - )[0] - ) - workflow.add(FunctionTask(func=_bspline_grid, name="bspline_grid")) - # fmt: off - workflow.bspline_grid.inputs.in_file = workflow.lzin.epi_mean - workflow.n4itk.inputs.args = workflow.bspline_grid.lzout.out - # fmt: on - # Warp segmentation into EPI space - workflow.add( - ApplyTransforms( - default_value=0, - dimension=3, - float=True, - interpolation="MultiLabel", - reference_image=workflow.lzin.epi_mean, - transforms=workflow.norm.lzout.inverse_composite_transform, - name="invt", - ) - ) - if wf_species.lower() == "human": - workflow.invt.inputs.input_image = str( - get_template( - wf_template_id, - resolution=1, - desc="carpet", - suffix="dseg", - ) - ) - else: - workflow.invt.inputs.input_image = str( - get_template( - wf_template_id, - suffix="dseg", - )[-1] - ) - # fmt: off - workflow.set_output([('epi_parc', workflow.invt.lzout.output_image)]) - workflow.set_output([('epi_mni', workflow.norm.lzout.warped_image)]) - workflow.set_output([('report', workflow.norm.lzout.out_report)]) - # fmt: on - if wf_species.lower() == "human": - workflow.norm.inputs.moving_mask = workflow.lzin.epi_mask - - return workflow - - -def compute_iqms( - b_values_file=attrs.NOTHING, - b_values_shells=attrs.NOTHING, - brain_mask=attrs.NOTHING, - cc_mask=attrs.NOTHING, - framewise_displacement=attrs.NOTHING, - in_b0=attrs.NOTHING, - in_bvec=attrs.NOTHING, - in_bvec_diff=attrs.NOTHING, - in_bvec_rotated=attrs.NOTHING, - in_cfa=attrs.NOTHING, - in_fa=attrs.NOTHING, - in_fa_degenerate=attrs.NOTHING, - in_fa_nans=attrs.NOTHING, - in_file=attrs.NOTHING, - in_md=attrs.NOTHING, - in_noise=attrs.NOTHING, - 
in_shells=attrs.NOTHING, - name="ComputeIQMs", - piesno_sigma=attrs.NOTHING, - qspace_neighbors=attrs.NOTHING, - spikes_mask=attrs.NOTHING, - wm_mask=attrs.NOTHING, -): - """ - Initialize the workflow that actually computes the IQMs. - - .. workflow:: - - from mriqc.workflows.diffusion.base import compute_iqms - from mriqc.testing import mock_config - with mock_config(): - wf = compute_iqms() - - """ - from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON - from pydra.tasks.mriqc.interfaces import IQMFileSink - from pydra.tasks.mriqc.interfaces.diffusion import DiffusionQC - from pydra.tasks.mriqc.interfaces.reports import AddProvenance - - # from mriqc.workflows.utils import _tofloat, get_fwhmx - workflow = Workflow( - name=name, - input_spec={ - "b_values_file": ty.Any, - "b_values_shells": ty.Any, - "brain_mask": ty.Any, - "cc_mask": ty.Any, - "framewise_displacement": ty.Any, - "in_b0": ty.Any, - "in_bvec": ty.Any, - "in_bvec_diff": ty.Any, - "in_bvec_rotated": ty.Any, - "in_cfa": ty.Any, - "in_fa": ty.Any, - "in_fa_degenerate": ty.Any, - "in_fa_nans": ty.Any, - "in_file": ty.Any, - "in_md": ty.Any, - "in_noise": ty.Any, - "in_shells": ty.Any, - "piesno_sigma": ty.Any, - "qspace_neighbors": ty.Any, - "spikes_mask": ty.Any, - "wm_mask": ty.Any, - }, - output_spec={"noise_floor": ty.Any, "out_file": ty.Any}, - b_values_file=b_values_file, - b_values_shells=b_values_shells, - brain_mask=brain_mask, - cc_mask=cc_mask, - framewise_displacement=framewise_displacement, - in_b0=in_b0, - in_bvec=in_bvec, - in_bvec_diff=in_bvec_diff, - in_bvec_rotated=in_bvec_rotated, - in_cfa=in_cfa, - in_fa=in_fa, - in_fa_degenerate=in_fa_degenerate, - in_fa_nans=in_fa_nans, - in_file=in_file, - in_md=in_md, - in_noise=in_noise, - in_shells=in_shells, - piesno_sigma=piesno_sigma, - qspace_neighbors=qspace_neighbors, - spikes_mask=spikes_mask, - wm_mask=wm_mask, - ) - - workflow.add( - FunctionTask( - func=_estimate_sigma, - in_file=workflow.lzin.in_noise, - mask=workflow.lzin.brain_mask, - name="estimate_sigma", - ) - ) - - workflow.add( - DiffusionQC( - brain_mask=workflow.lzin.brain_mask, - cc_mask=workflow.lzin.cc_mask, - in_b0=workflow.lzin.in_b0, - in_bval_file=workflow.lzin.b_values_file, - in_bvec=workflow.lzin.in_bvec, - in_bvec_diff=workflow.lzin.in_bvec_diff, - in_bvec_rotated=workflow.lzin.in_bvec_rotated, - in_cfa=workflow.lzin.in_cfa, - in_fa=workflow.lzin.in_fa, - in_fa_degenerate=workflow.lzin.in_fa_degenerate, - in_fa_nans=workflow.lzin.in_fa_nans, - in_fd=workflow.lzin.framewise_displacement, - in_file=workflow.lzin.in_file, - in_md=workflow.lzin.in_md, - in_shells=workflow.lzin.in_shells, - in_shells_bval=workflow.lzin.b_values_shells, - piesno_sigma=workflow.lzin.piesno_sigma, - qspace_neighbors=workflow.lzin.qspace_neighbors, - spikes_mask=workflow.lzin.spikes_mask, - wm_mask=workflow.lzin.wm_mask, - name="measures", - ) - ) - - # Save to JSON file - - # fmt: off - - - - - workflow.set_output([('out_file', workflow.measures.lzout.out_qc)]) - workflow.set_output([('noise_floor', workflow.estimate_sigma.lzout.out)]) - # fmt: on - - return workflow - - -def _bvals_report(in_file): - - import numpy as np - - bvals = [ - round(float(val), 2) for val in np.unique(np.round(np.loadtxt(in_file), 2)) - ] - if len(bvals) > 10: - return "Likely DSI" - return bvals - - -def _estimate_sigma(in_file, mask): - - import nibabel as nb - import numpy as np - - msk = nb.load(mask).get_fdata() > 0.5 - return round( - float(np.median(nb.load(in_file).get_fdata()[msk])), - 6, - ) - - -def 
_filter_metadata( - in_dict, - keys=( - "global", - "dcmmeta_affine", - "dcmmeta_reorient_transform", - "dcmmeta_shape", - "dcmmeta_slice_dim", - "dcmmeta_version", - "time", - ), -): - """Drop large and partially redundant objects generated by dcm2niix.""" - for key in keys: - in_dict.pop(key, None) - return in_dict - - -def _first(inlist): - - if isinstance(inlist, (list, tuple)): - return inlist[0] - return inlist diff --git a/pydra/tasks/mriqc/workflows/diffusion/output.py b/pydra/tasks/mriqc/workflows/diffusion/output.py deleted file mode 100644 index d234056..0000000 --- a/pydra/tasks/mriqc/workflows/diffusion/output.py +++ /dev/null @@ -1,323 +0,0 @@ -import attrs -import logging -from pathlib import Path -from pydra.engine import Workflow -from pydra.engine.task import FunctionTask -from pydra.tasks.nireports.interfaces.dmri import DWIHeatmap -from pydra.tasks.nireports.interfaces.reporting.base import ( - SimpleBeforeAfterRPT as SimpleBeforeAfter, -) -import typing as ty - - -logger = logging.getLogger(__name__) - - -def init_dwi_report_wf( - brain_mask=attrs.NOTHING, - epi_mean=attrs.NOTHING, - epi_parc=attrs.NOTHING, - exec_verbose_reports=False, - exec_work_dir=None, - fd_thres=attrs.NOTHING, - hmc_epi=attrs.NOTHING, - hmc_fd=attrs.NOTHING, - in_avgmap=attrs.NOTHING, - in_bdict=attrs.NOTHING, - in_dvars=attrs.NOTHING, - in_epi=attrs.NOTHING, - in_fa=attrs.NOTHING, - in_fft=attrs.NOTHING, - in_md=attrs.NOTHING, - in_parcellation=attrs.NOTHING, - in_ras=attrs.NOTHING, - in_spikes=attrs.NOTHING, - in_stdmap=attrs.NOTHING, - meta_sidecar=attrs.NOTHING, - name="dwi_report_wf", - noise_floor=attrs.NOTHING, - outliers=attrs.NOTHING, - wf_biggest_file_gb=1, - wf_fd_thres=0.2, - wf_fft_spikes_detector=False, - wf_species="human", -): - """ - Write out individual reportlets. - - .. 
workflow:: - - from mriqc.workflows.diffusion.output import init_dwi_report_wf - from mriqc.testing import mock_config - with mock_config(): - wf = init_dwi_report_wf() - - """ - from pydra.tasks.nireports.interfaces import FMRISummary, PlotMosaic, PlotSpikes - from pydra.tasks.niworkflows.interfaces.morphology import ( - BinaryDilation, - BinarySubtraction, - ) - - # from mriqc.interfaces.reports import IndividualReport - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - verbose = exec_verbose_reports - mem_gb = wf_biggest_file_gb - reportlets_dir = exec_work_dir / "reportlets" - workflow = Workflow( - name=name, - input_spec={ - "brain_mask": ty.Any, - "epi_mean": ty.Any, - "epi_parc": ty.Any, - "fd_thres": ty.Any, - "hmc_epi": ty.Any, - "hmc_fd": ty.Any, - "in_avgmap": ty.Any, - "in_bdict": ty.Any, - "in_dvars": ty.Any, - "in_epi": ty.Any, - "in_fa": ty.Any, - "in_fft": ty.Any, - "in_md": ty.Any, - "in_parcellation": ty.Any, - "in_ras": ty.Any, - "in_spikes": ty.Any, - "in_stdmap": ty.Any, - "meta_sidecar": ty.Any, - "noise_floor": ty.Any, - "outliers": ty.Any, - }, - output_spec={ - "bmask_report": ty.Any, - "carpet_report": ty.Any, - "fa_report": ty.Any, - "heatmap_report": ty.Any, - "md_report": ty.Any, - "noise_report": ty.Any, - "snr_report": ty.Any, - "spikes_report": ty.Any, - }, - brain_mask=brain_mask, - epi_mean=epi_mean, - epi_parc=epi_parc, - fd_thres=fd_thres, - hmc_epi=hmc_epi, - hmc_fd=hmc_fd, - in_avgmap=in_avgmap, - in_bdict=in_bdict, - in_dvars=in_dvars, - in_epi=in_epi, - in_fa=in_fa, - in_fft=in_fft, - in_md=in_md, - in_parcellation=in_parcellation, - in_ras=in_ras, - in_spikes=in_spikes, - in_stdmap=in_stdmap, - meta_sidecar=meta_sidecar, - noise_floor=noise_floor, - outliers=outliers, - ) - - # Set FD threshold - # inputnode.inputs.fd_thres = wf_fd_thres - workflow.add( - PlotMosaic( - cmap="Greys_r", - bbox_mask_file=workflow.lzin.brain_mask, - in_file=workflow.lzin.in_fa, - name="mosaic_fa", - ) - ) - workflow.add( - PlotMosaic( - cmap="Greys_r", - bbox_mask_file=workflow.lzin.brain_mask, - in_file=workflow.lzin.in_md, - name="mosaic_md", - ) - ) - workflow.add( - SimpleBeforeAfter( - after_label="Standard Deviation", - before_label="Average", - dismiss_affine=True, - fixed_params={"cmap": "viridis"}, - moving_params={"cmap": "Greys_r"}, - after=workflow.lzin.in_stdmap, - before=workflow.lzin.in_avgmap, - wm_seg=workflow.lzin.brain_mask, - name="mosaic_snr", - ) - ) - workflow.add( - PlotMosaic( - cmap="viridis_r", - only_noise=True, - in_file=workflow.lzin.in_avgmap, - name="mosaic_noise", - ) - ) - if wf_species.lower() in ("rat", "mouse"): - workflow.mosaic_noise.inputs.view = ["coronal", "axial"] - workflow.mosaic_fa.inputs.view = ["coronal", "axial"] - workflow.mosaic_md.inputs.view = ["coronal", "axial"] - - def _gen_entity(inlist): - return ["00000"] + [f"{int(round(bval, 0)):05d}" for bval in inlist] - - # fmt: off - - - workflow.set_output([('snr_report', workflow.mosaic_snr.lzout.out_report)]) - workflow.set_output([('noise_report', workflow.mosaic_noise.lzout.out_file)]) - workflow.set_output([('fa_report', workflow.mosaic_fa.lzout.out_file)]) - workflow.set_output([('md_report', workflow.mosaic_md.lzout.out_file)]) - # fmt: on - workflow.add( - FunctionTask(func=_get_wm, in_file=workflow.lzin.in_parcellation, name="get_wm") - ) - workflow.add( - DWIHeatmap( - scalarmap_label="Shell-wise Fractional Anisotropy (FA)", - b_indices=workflow.lzin.in_bdict, - in_file=workflow.lzin.in_epi, - mask_file=workflow.get_wm.lzout.out, - 
scalarmap=workflow.lzin.in_fa, - sigma=workflow.lzin.noise_floor, - name="plot_heatmap", - ) - ) - - # fmt: off - workflow.set_output([('heatmap_report', workflow.plot_heatmap.lzout.out_file)]) - # fmt: on - - # Generate crown mask - # Create the crown mask - workflow.add(BinaryDilation(in_mask=workflow.lzin.brain_mask, name="dilated_mask")) - workflow.add( - BinarySubtraction( - in_base=workflow.dilated_mask.lzout.out_mask, - in_subtract=workflow.lzin.brain_mask, - name="subtract_mask", - ) - ) - workflow.add( - FunctionTask( - func=_carpet_parcellation, - crown_mask=workflow.subtract_mask.lzout.out_mask, - segmentation=workflow.lzin.epi_parc, - name="parcels", - ) - ) - workflow.add( - FMRISummary( - dvars=workflow.lzin.in_dvars, - fd=workflow.lzin.hmc_fd, - fd_thres=workflow.lzin.fd_thres, - in_func=workflow.lzin.hmc_epi, - in_segm=workflow.parcels.lzout.out, - outliers=workflow.lzin.outliers, - tr=workflow.lzin.meta_sidecar, - name="bigplot", - ) - ) - - # fmt: off - workflow.bigplot.inputs.tr = workflow.lzin.meta_sidecar - workflow.set_output([('carpet_report', workflow.bigplot.lzout.out_file)]) - # fmt: on - if True: # wf_fft_spikes_detector: - workflow.add( - PlotSpikes( - cmap="viridis", - out_file="plot_spikes.svg", - title="High-Frequency spikes", - name="mosaic_spikes", - ) - ) - pass - # fmt: off - pass - workflow.mosaic_spikes.inputs.in_file = workflow.lzin.in_ras - workflow.mosaic_spikes.inputs.in_spikes = workflow.lzin.in_spikes - workflow.mosaic_spikes.inputs.in_fft = workflow.lzin.in_fft - workflow.set_output([('spikes_report', workflow.mosaic_spikes.lzout.out_file)]) - # fmt: on - if False: # not verbose: - return workflow - # Verbose-reporting goes here - from pydra.tasks.nireports.interfaces import PlotContours - - workflow.add( - PlotContours( - colors=["r"], - cut_coords=10, - display_mode="y" if wf_species.lower() in ("rat", "mouse") else "z", - levels=[0.5], - out_file="bmask", - in_contours=workflow.lzin.brain_mask, - in_file=workflow.lzin.epi_mean, - name="plot_bmask", - ) - ) - - # fmt: off - workflow.set_output([('bmask_report', workflow.plot_bmask.lzout.out_file)]) - # fmt: on - - return workflow - - -def _carpet_parcellation(segmentation, crown_mask): - """Generate the union of two masks.""" - from pathlib import Path - import nibabel as nb - import numpy as np - - img = nb.load(segmentation) - lut = np.zeros((256,), dtype="uint8") - lut[100:201] = 1 # Ctx GM - lut[30:99] = 2 # dGM - lut[1:11] = 3 # WM+CSF - lut[255] = 4 # Cerebellum - # Apply lookup table - seg = lut[np.asanyarray(img.dataobj, dtype="uint16")] - seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5 - outimg = img.__class__(seg.astype("uint8"), img.affine, img.header) - outimg.set_data_dtype("uint8") - out_file = Path("segments.nii.gz").absolute() - outimg.to_filename(out_file) - return str(out_file) - - -def _get_tr(meta_dict): - - return meta_dict.get("RepetitionTime", None) - - -def _get_wm(in_file, radius=2): - - from pathlib import Path - import nibabel as nb - import numpy as np - from pydra.tasks.mriqc.nipype_ports.utils.filemanip import fname_presuffix - from scipy import ndimage as ndi - from skimage.morphology import ball - - parc = nb.load(in_file) - hdr = parc.header.copy() - data = np.array(parc.dataobj, dtype=hdr.get_data_dtype()) - wm_mask = ndi.binary_erosion((data == 1) | (data == 2), ball(radius)) - hdr.set_data_dtype(np.uint8) - out_wm = fname_presuffix(in_file, suffix="wm", newpath=str(Path.cwd())) - parc.__class__( - wm_mask.astype(np.uint8), - parc.affine, - 
hdr, - ).to_filename(out_wm) - return out_wm diff --git a/pydra/tasks/mriqc/workflows/functional/__init__.py b/pydra/tasks/mriqc/workflows/functional/__init__.py deleted file mode 100644 index 81e4f91..0000000 --- a/pydra/tasks/mriqc/workflows/functional/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .base import compute_iqms, epi_mni_align, fmri_bmsk_workflow, fmri_qc_workflow, hmc -from .output import _carpet_parcellation, _get_tr, init_func_report_wf, spikes_mask diff --git a/pydra/tasks/mriqc/workflows/functional/base.py b/pydra/tasks/mriqc/workflows/functional/base.py deleted file mode 100644 index 90f8854..0000000 --- a/pydra/tasks/mriqc/workflows/functional/base.py +++ /dev/null @@ -1,792 +0,0 @@ -import attrs -import logging -from pydra.tasks.mriqc.workflows.functional.output import init_func_report_wf -from pydra.tasks.niworkflows.utils.connections import pop_file as _pop -from pathlib import Path -from pydra.engine import Workflow -from pydra.engine.specs import BaseSpec, SpecInfo -from pydra.engine.task import FunctionTask -import pydra.mark -from pydra.tasks.niworkflows.utils.connections import pop_file as _pop -import typing as ty - - -logger = logging.getLogger(__name__) - - -def fmri_bmsk_workflow(in_file=attrs.NOTHING, name="fMRIBrainMask"): - """ - Compute a brain mask for the input :abbr:`fMRI (functional MRI)` dataset. - - .. workflow:: - - from mriqc.workflows.functional.base import fmri_bmsk_workflow - from mriqc.testing import mock_config - with mock_config(): - wf = fmri_bmsk_workflow() - - - """ - from pydra.tasks.afni.auto import Automask - - workflow = Workflow( - name=name, - input_spec={"in_file": ty.Any}, - output_spec={"out_file": ty.Any}, - in_file=in_file, - ) - - workflow.add( - Automask(outputtype="NIFTI_GZ", in_file=workflow.lzin.in_file, name="afni_msk") - ) - # Connect brain mask extraction - # fmt: off - workflow.set_output([('out_file', workflow.afni_msk.lzout.out_file)]) - # fmt: on - - return workflow - - -def epi_mni_align( - epi_mask=attrs.NOTHING, - epi_mean=attrs.NOTHING, - exec_ants_float=False, - exec_debug=False, - name="SpatialNormalization", - nipype_nprocs=12, - nipype_omp_nthreads=12, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """ - Estimate the transform that maps the EPI space into MNI152NLin2009cAsym. - - The input epi_mean is the averaged and brain-masked EPI timeseries - - Returns the EPI mean resampled in MNI space (for checking out registration) and - the associated "lobe" parcellation in EPI space. - - .. 
workflow:: - - from mriqc.workflows.functional.base import epi_mni_align - from mriqc.testing import mock_config - with mock_config(): - wf = epi_mni_align() - - """ - from pydra.tasks.ants.auto import ApplyTransforms, N4BiasFieldCorrection - from pydra.tasks.niworkflows.interfaces.reportlets.registration import ( - SpatialNormalizationRPT as RobustMNINormalization, - ) - from templateflow.api import get as get_template - - # Get settings - testing = exec_debug - n_procs = nipype_nprocs - ants_nthreads = nipype_omp_nthreads - workflow = Workflow( - name=name, - input_spec={"epi_mask": ty.Any, "epi_mean": ty.Any}, - output_spec={"epi_mni": ty.Any, "epi_parc": ty.Any, "report": ty.Any}, - epi_mask=epi_mask, - epi_mean=epi_mean, - ) - - workflow.add( - N4BiasFieldCorrection( - copy_header=True, - dimension=3, - input_image=workflow.lzin.epi_mean, - name="n4itk", - ) - ) - workflow.add( - RobustMNINormalization( - explicit_masking=False, - flavor="testing" if testing else "precise", - float=exec_ants_float, - generate_report=True, - moving="boldref", - num_threads=ants_nthreads, - reference="boldref", - template=wf_template_id, - moving_image=workflow.n4itk.lzout.output_image, - name="norm", - ) - ) - if wf_species.lower() == "human": - workflow.norm.inputs.reference_image = str( - get_template(wf_template_id, resolution=2, suffix="boldref") - ) - workflow.norm.inputs.reference_mask = str( - get_template( - wf_template_id, - resolution=2, - desc="brain", - suffix="mask", - ) - ) - # adapt some population-specific settings - else: - from nirodents.workflows.brainextraction import _bspline_grid - - workflow.n4itk.inputs.shrink_factor = 1 - workflow.n4itk.inputs.n_iterations = [50] * 4 - workflow.norm.inputs.reference_image = str( - get_template(wf_template_id, suffix="T2w") - ) - workflow.norm.inputs.reference_mask = str( - get_template( - wf_template_id, - desc="brain", - suffix="mask", - )[0] - ) - workflow.add(FunctionTask(func=_bspline_grid, name="bspline_grid")) - # fmt: off - workflow.bspline_grid.inputs.in_file = workflow.lzin.epi_mean - workflow.n4itk.inputs.args = workflow.bspline_grid.lzout.out - # fmt: on - # Warp segmentation into EPI space - workflow.add( - ApplyTransforms( - default_value=0, - dimension=3, - float=True, - interpolation="MultiLabel", - reference_image=workflow.lzin.epi_mean, - transforms=workflow.norm.lzout.inverse_composite_transform, - name="invt", - ) - ) - if wf_species.lower() == "human": - workflow.invt.inputs.input_image = str( - get_template( - wf_template_id, - resolution=1, - desc="carpet", - suffix="dseg", - ) - ) - else: - workflow.invt.inputs.input_image = str( - get_template( - wf_template_id, - suffix="dseg", - )[-1] - ) - # fmt: off - workflow.set_output([('epi_parc', workflow.invt.lzout.output_image)]) - workflow.set_output([('epi_mni', workflow.norm.lzout.warped_image)]) - workflow.set_output([('report', workflow.norm.lzout.out_report)]) - # fmt: on - if wf_species.lower() == "human": - workflow.norm.inputs.moving_mask = workflow.lzin.epi_mask - - return workflow - - -def hmc( - fd_radius=attrs.NOTHING, - in_file=attrs.NOTHING, - name="fMRI_HMC", - omp_nthreads=None, - wf_biggest_file_gb=1, - wf_deoblique=False, - wf_despike=False, -): - """ - Create a :abbr:`HMC (head motion correction)` workflow for fMRI. - - .. 
workflow:: - - from mriqc.workflows.functional.base import hmc - from mriqc.testing import mock_config - with mock_config(): - wf = hmc() - - """ - from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( - FramewiseDisplacement, - ) - from pydra.tasks.afni.auto import Despike, Refit, Volreg - - mem_gb = wf_biggest_file_gb - workflow = Workflow( - name=name, - input_spec={"fd_radius": ty.Any, "in_file": ty.Any}, - output_spec={"mpars": ty.Any, "out_fd": ty.Any, "out_file": ty.Any}, - fd_radius=fd_radius, - in_file=in_file, - ) - - # calculate hmc parameters - workflow.add( - Volreg( - args="-Fourier -twopass", outputtype="NIFTI_GZ", zpad=4, name="estimate_hm" - ) - ) - # Compute the frame-wise displacement - workflow.add( - FramewiseDisplacement( - normalize=False, - parameter_source="AFNI", - in_file=workflow.estimate_hm.lzout.oned_file, - radius=workflow.lzin.fd_radius, - name="fdnode", - ) - ) - # Apply transforms to other echos - workflow.add( - FunctionTask( - func=_apply_transforms, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[("in_file", ty.Any), ("in_xfm", ty.Any)], - ), - in_xfm=workflow.estimate_hm.lzout.oned_matrix_save, - name="apply_hmc", - ) - ) - # fmt: off - workflow.set_output([('out_file', workflow.apply_hmc.lzout.out)]) - workflow.set_output([('mpars', workflow.estimate_hm.lzout.oned_file)]) - workflow.set_output([('out_fd', workflow.fdnode.lzout.out_file)]) - # fmt: on - if not (wf_despike or wf_deoblique): - # fmt: off - workflow.estimate_hm.inputs.in_file = workflow.lzin.in_file - workflow.apply_hmc.inputs.in_file = workflow.lzin.in_file - # fmt: on - return workflow - # despiking, and deoblique - workflow.add(Refit(deoblique=True, name="deoblique_node")) - workflow.add(Despike(outputtype="NIFTI_GZ", name="despike_node")) - if wf_despike and wf_deoblique: - # fmt: off - workflow.despike_node.inputs.in_file = workflow.lzin.in_file - workflow.deoblique_node.inputs.in_file = workflow.despike_node.lzout.out_file - - @pydra.mark.task - def deoblique_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(deoblique_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.deoblique_node.lzout.out_file, name="deoblique_node_out_file_to_estimate_hm_in_file_callable")) - - workflow.estimate_hm.inputs.in_file = workflow.deoblique_node_out_file_to_estimate_hm_in_file_callable.lzout.out - workflow.apply_hmc.inputs.in_file = workflow.deoblique_node.lzout.out_file - # fmt: on - elif wf_despike: - # fmt: off - workflow.despike_node.inputs.in_file = workflow.lzin.in_file - - @pydra.mark.task - def despike_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(despike_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.despike_node.lzout.out_file, name="despike_node_out_file_to_estimate_hm_in_file_callable")) - - workflow.estimate_hm.inputs.in_file = workflow.despike_node_out_file_to_estimate_hm_in_file_callable.lzout.out - workflow.apply_hmc.inputs.in_file = workflow.despike_node.lzout.out_file - # fmt: on - elif wf_deoblique: - # fmt: off - workflow.deoblique_node.inputs.in_file = workflow.lzin.in_file - - @pydra.mark.task - def deoblique_node_out_file_to_estimate_hm_in_file_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(deoblique_node_out_file_to_estimate_hm_in_file_callable(in_=workflow.deoblique_node.lzout.out_file, name="deoblique_node_out_file_to_estimate_hm_in_file_callable")) - - 
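# Annotation (illustrative sketch, not a line of the deleted file): the
# `*_callable` tasks above wrap the niworkflows `pop_file` helper (imported
# in this module as `_pop`) in `@pydra.mark.task` so that a lazy output which
# may arrive as a list can feed an input expecting a single file. Mirroring
# the `_first` helper defined in diffusion/base.py above, a minimal
# standalone version of the pattern, assuming only pydra itself, would be:
#
#     import typing as ty
#     import pydra.mark
#
#     def _pop(inlist):
#         # Take the first element of a list/tuple; pass scalars through.
#         if isinstance(inlist, (list, tuple)):
#             return inlist[0]
#         return inlist
#
#     @pydra.mark.task
#     def pop_adapter(in_: ty.Any) -> ty.Any:
#         return _pop(in_)
#
#     # Hypothetical node names, for illustration only:
#     # wf.add(pop_adapter(in_=wf.producer.lzout.out_files, name="pop"))
#     # wf.consumer.inputs.in_file = wf.pop.lzout.out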
workflow.estimate_hm.inputs.in_file = workflow.deoblique_node_out_file_to_estimate_hm_in_file_callable.lzout.out - workflow.apply_hmc.inputs.in_file = workflow.deoblique_node.lzout.out_file - # fmt: on - else: - raise NotImplementedError - - return workflow - - -def _apply_transforms(in_file, in_xfm): - - from pathlib import Path - from nitransforms.linear import load - from pydra.tasks.mriqc.utils.bids import derive_bids_fname - - realigned = load(in_xfm, fmt="afni", reference=in_file, moving=in_file).apply( - in_file - ) - out_file = derive_bids_fname( - in_file, - entity="desc-realigned", - newpath=Path.cwd(), - absolute=True, - ) - realigned.to_filename(out_file) - return str(out_file) - - -def compute_iqms( - brainmask=attrs.NOTHING, - epi_mean=attrs.NOTHING, - fd_thres=attrs.NOTHING, - hmc_epi=attrs.NOTHING, - hmc_fd=attrs.NOTHING, - in_ras=attrs.NOTHING, - in_tsnr=attrs.NOTHING, - name="ComputeIQMs", - wf_biggest_file_gb=1, - wf_fft_spikes_detector=False, -): - """ - Initialize the workflow that actually computes the IQMs. - - .. workflow:: - - from mriqc.workflows.functional.base import compute_iqms - from mriqc.testing import mock_config - with mock_config(): - wf = compute_iqms() - - """ - from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ComputeDVARS - from pydra.tasks.afni.auto import OutlierCount, QualityIndex - from pydra.tasks.mriqc.interfaces import ( - DerivativesDataSink, - FunctionalQC, - GatherTimeseries, - IQMFileSink, - ) - from pydra.tasks.mriqc.interfaces.reports import AddProvenance - from pydra.tasks.mriqc.interfaces.transitional import GCOR - from pydra.tasks.mriqc.workflows.utils import _tofloat, get_fwhmx - - mem_gb = wf_biggest_file_gb - workflow = Workflow( - name=name, - input_spec={ - "brainmask": ty.Any, - "epi_mean": ty.Any, - "fd_thres": ty.Any, - "hmc_epi": ty.Any, - "hmc_fd": ty.Any, - "in_ras": ty.Any, - "in_tsnr": ty.Any, - }, - output_spec={ - "dvars": ty.Any, - "fft": ty.Any, - "out_file": ty.Any, - "outliers": ty.Any, - "spikes": ty.Any, - "spikes_num": int, - }, - brainmask=brainmask, - epi_mean=epi_mean, - fd_thres=fd_thres, - hmc_epi=hmc_epi, - hmc_fd=hmc_fd, - in_ras=in_ras, - in_tsnr=in_tsnr, - ) - - # Set FD threshold - - # Compute DVARS - workflow.add( - ComputeDVARS( - save_all=True, - save_plot=False, - in_file=workflow.lzin.hmc_epi, - in_mask=workflow.lzin.brainmask, - name="dvnode", - ) - ) - # AFNI quality measures - fwhm = get_fwhmx() - fwhm.name = "fwhm" - fwhm.inputs.in_file = workflow.lzin.epi_mean - fwhm.inputs.mask = workflow.lzin.brainmask - workflow.add(fwhm) - workflow.fwhm.inputs.acf = True # Only AFNI >= 16 - workflow.add( - OutlierCount( - fraction=True, - out_file="outliers.out", - in_file=workflow.lzin.hmc_epi, - mask=workflow.lzin.brainmask, - name="outliers", - ) - ) - - workflow.add( - FunctionalQC( - fd_thres=workflow.lzin.fd_thres, - in_epi=workflow.lzin.epi_mean, - in_fd=workflow.lzin.hmc_fd, - in_hmc=workflow.lzin.hmc_epi, - in_mask=workflow.lzin.brainmask, - in_tsnr=workflow.lzin.in_tsnr, - name="measures", - ) - ) - - # fmt: off - workflow.set_output([('dvars', workflow.dvnode.lzout.out_all)]) - - @pydra.mark.task - def fwhm_fwhm_to_measures_in_fwhm_callable(in_: ty.Any) -> ty.Any: - return _tofloat(in_) - - workflow.add(fwhm_fwhm_to_measures_in_fwhm_callable(in_=workflow.fwhm.lzout.fwhm, name="fwhm_fwhm_to_measures_in_fwhm_callable")) - - workflow.measures.inputs.in_fwhm = workflow.fwhm_fwhm_to_measures_in_fwhm_callable.lzout.out - workflow.set_output([('outliers', 
workflow.outliers.lzout.out_file)]) - # fmt: on - - # Save to JSON file - - # Save timeseries TSV file - - # fmt: off - - - - - - - - - workflow.set_output([('out_file', workflow.measures.lzout.out_qc)]) - - # fmt: on - # FFT spikes finder - if True: # wf_fft_spikes_detector: - disabled to ensure all outputs are generated - from pydra.tasks.mriqc.workflows.utils import slice_wise_fft - - workflow.add( - FunctionTask( - func=slice_wise_fft, - input_spec=SpecInfo( - name="FunctionIn", bases=(BaseSpec,), fields=[("in_file", ty.Any)] - ), - output_spec=SpecInfo( - name="FunctionOut", - bases=(BaseSpec,), - fields=[ - ("n_spikes", ty.Any), - ("out_spikes", ty.Any), - ("out_fft", ty.Any), - ], - ), - name="spikes_fft", - ) - ) - # fmt: off - workflow.spikes_fft.inputs.in_file = workflow.lzin.in_ras - workflow.set_output([('spikes', workflow.spikes_fft.lzout.out_spikes)]) - workflow.set_output([('fft', workflow.spikes_fft.lzout.out_fft)]) - workflow.set_output([('spikes_num', workflow.spikes_fft.lzout.n_spikes)]) - # fmt: on - - return workflow - - -def _parse_tout(in_file): - - if isinstance(in_file, (list, tuple)): - return ( - [_parse_tout(f) for f in in_file] - if len(in_file) > 1 - else _parse_tout(in_file[0]) - ) - import numpy as np - - data = np.loadtxt(in_file) # pylint: disable=no-member - return data.mean() - - -def _parse_tqual(in_file): - - if isinstance(in_file, (list, tuple)): - return ( - [_parse_tqual(f) for f in in_file] - if len(in_file) > 1 - else _parse_tqual(in_file[0]) - ) - import numpy as np - - with open(in_file) as fin: - lines = fin.readlines() - return np.mean([float(line.strip()) for line in lines if not line.startswith("++")]) - - -def fmri_qc_workflow( - exec_ants_float=False, - exec_datalad_get=True, - exec_debug=False, - exec_float32=True, - exec_no_sub=False, - exec_verbose_reports=False, - exec_work_dir=None, - in_file=attrs.NOTHING, - metadata=attrs.NOTHING, - name="funcMRIQC", - nipype_nprocs=12, - nipype_omp_nthreads=12, - wf_biggest_file_gb=1, - wf_deoblique=False, - wf_despike=False, - wf_fd_radius=50, - wf_fft_spikes_detector=False, - wf_inputs=None, - wf_min_len_bold=5, - wf_species="human", - wf_template_id="MNI152NLin2009cAsym", -): - """ - Initialize the (f)MRIQC workflow. - - .. 
workflow:: - - import os.path as op - from mriqc.workflows.functional.base import fmri_qc_workflow - from mriqc.testing import mock_config - with mock_config(): - wf = fmri_qc_workflow() - - """ - from pydra.tasks.mriqc.nipype_ports.algorithms.confounds import ( - NonSteadyStateDetector, - TSNR, - ) - from pydra.tasks.afni.auto import TStat - from pydra.tasks.niworkflows.interfaces.bids import ReadSidecarJSON - from pydra.tasks.niworkflows.interfaces.header import SanitizeImage - from pydra.tasks.mriqc.interfaces.functional import SelectEcho - - from pydra.tasks.mriqc.utils.misc import _flatten_list as flatten - - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - workflow = Workflow( - name=name, - input_spec={"in_file": ty.Any, "metadata": dict}, - output_spec={ - "ema_report": ty.Any, - "func_report_wf_background_report": ty.Any, - "func_report_wf_carpet_report": ty.Any, - "func_report_wf_mean_report": ty.Any, - "func_report_wf_spikes_report": ty.Any, - "func_report_wf_stdev_report": ty.Any, - "func_report_wf_zoomed_report": ty.Any, - "iqmswf_dvars": ty.Any, - "iqmswf_fft": ty.Any, - "iqmswf_out_file": ty.Any, - "iqmswf_outliers": ty.Any, - "iqmswf_spikes": ty.Any, - "iqmswf_spikes_num": ty.Any, - }, - in_file=in_file, - metadata=metadata, - ) - - mem_gb = wf_biggest_file_gb - - - - # Define workflow, inputs and outputs - # 0. Get data, put it in RAS orientation - - # Get metadata - - workflow.add( - SelectEcho( - in_files=workflow.lzin.in_file, - metadata=workflow.lzin.metadata, - name="pick_echo", - ) - ) - workflow.add( - NonSteadyStateDetector( - in_file=workflow.pick_echo.lzout.out_file, name="non_steady_state_detector" - ) - ) - workflow.add( - SanitizeImage( - max_32bit=exec_float32, - in_file=workflow.lzin.in_file, - n_volumes_to_discard=workflow.non_steady_state_detector.lzout.n_volumes_to_discard, - name="sanitize", - ) - ) - # Workflow -------------------------------------------------------- - # 1. HMC: head motion correct - workflow.add( - hmc( - omp_nthreads=nipype_omp_nthreads, - wf_biggest_file_gb=wf_biggest_file_gb, - wf_deoblique=wf_deoblique, - wf_despike=wf_despike, - in_file=workflow.sanitize.lzout.out_file, - name="hmcwf", - ) - ) - # Set HMC settings - workflow.inputs.fd_radius = wf_fd_radius - # 2. Compute mean fmri - workflow.add( - TStat( - options="-mean", - outputtype="NIFTI_GZ", - in_file=workflow.hmcwf.lzout.out_file, - name="mean", - ) - ) - # Compute TSNR using nipype implementation - workflow.add(TSNR(in_file=workflow.hmcwf.lzout.out_file, name="tsnr")) - # EPI to MNI registration - workflow.add( - epi_mni_align( - nipype_omp_nthreads=nipype_omp_nthreads, - wf_species=wf_species, - exec_ants_float=exec_ants_float, - exec_debug=exec_debug, - nipype_nprocs=nipype_nprocs, - wf_template_id=wf_template_id, - name="ema", - ) - ) - # 7. 
Compute IQMs - workflow.add( - compute_iqms( - wf_biggest_file_gb=wf_biggest_file_gb, - wf_fft_spikes_detector=wf_fft_spikes_detector, - in_tsnr=workflow.tsnr.lzout.tsnr_file, - hmc_fd=workflow.hmcwf.lzout.out_fd, - hmc_epi=workflow.hmcwf.lzout.out_file, - epi_mean=workflow.mean.lzout.out_file, - in_ras=workflow.sanitize.lzout.out_file, - name="iqmswf", - ) - ) - # Reports - workflow.add( - init_func_report_wf( - exec_verbose_reports=exec_verbose_reports, - wf_biggest_file_gb=wf_biggest_file_gb, - exec_work_dir=exec_work_dir, - wf_species=wf_species, - wf_fft_spikes_detector=wf_fft_spikes_detector, - meta_sidecar=workflow.lzin.metadata, - epi_parc=workflow.ema.lzout.epi_parc, - hmc_epi=workflow.hmcwf.lzout.out_file, - hmc_fd=workflow.hmcwf.lzout.out_fd, - in_stddev=workflow.tsnr.lzout.stddev_file, - epi_mean=workflow.mean.lzout.out_file, - in_ras=workflow.sanitize.lzout.out_file, - name="func_report_wf", - ) - ) - # fmt: off - - @pydra.mark.task - def mean_out_file_to_ema_epi_mean_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(mean_out_file_to_ema_epi_mean_callable(in_=workflow.mean.lzout.out_file, name="mean_out_file_to_ema_epi_mean_callable")) - - workflow.ema.inputs.epi_mean = workflow.mean_out_file_to_ema_epi_mean_callable.lzout.out - - # fmt: on - if wf_fft_spikes_detector: - # fmt: off - workflow.set_output([('iqmswf_spikes', workflow.iqmswf.lzout.spikes)]) - workflow.set_output([('iqmswf_fft', workflow.iqmswf.lzout.fft)]) - # fmt: on - # population specific changes to brain masking - if wf_species == "human": - from pydra.tasks.mriqc.workflows.shared import ( - synthstrip_wf as fmri_bmsk_workflow, - ) - - workflow.add( - fmri_bmsk_workflow(omp_nthreads=nipype_omp_nthreads, name="skullstrip_epi") - ) - # fmt: off - - @pydra.mark.task - def mean_out_file_to_skullstrip_epi_in_files_callable(in_: ty.Any) -> ty.Any: - return _pop(in_) - - workflow.add(mean_out_file_to_skullstrip_epi_in_files_callable(in_=workflow.mean.lzout.out_file, name="mean_out_file_to_skullstrip_epi_in_files_callable")) - - workflow.skullstrip_epi.inputs.in_files = workflow.mean_out_file_to_skullstrip_epi_in_files_callable.lzout.out - workflow.ema.inputs.epi_mask = workflow.skullstrip_epi.lzout.out_mask - workflow.iqmswf.inputs.brainmask = workflow.skullstrip_epi.lzout.out_mask - workflow.func_report_wf.inputs.brainmask = workflow.skullstrip_epi.lzout.out_mask - # fmt: on - else: - from pydra.tasks.mriqc.workflows.anatomical.base import _binarize - - workflow.add( - FunctionTask( - func=_binarize, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[("in_file", ty.Any), ("threshold", ty.Any)], - ), - output_spec=SpecInfo( - name="FunctionOut", bases=(BaseSpec,), fields=[("out_file", ty.Any)] - ), - name="binarise_labels", - ) - ) - # fmt: off - workflow.binarise_labels.inputs.in_file = workflow.ema.lzout.epi_parc - workflow.iqmswf.inputs.brainmask = workflow.binarise_labels.lzout.out_file - workflow.func_report_wf.inputs.brainmask = workflow.binarise_labels.lzout.out_file - # fmt: on - # Upload metrics - if not exec_no_sub: - from pydra.tasks.mriqc.interfaces.webapi import UploadIQMs - - pass - - # fmt: on - workflow.set_output([("ema_report", workflow.ema.lzout.report)]) - workflow.set_output([("iqmswf_outliers", workflow.iqmswf.lzout.outliers)]) - workflow.set_output([("iqmswf_spikes_num", workflow.iqmswf.lzout.spikes_num)]) - workflow.set_output([("iqmswf_fft", workflow.iqmswf.lzout.fft)]) - workflow.set_output([("iqmswf_spikes", workflow.iqmswf.lzout.spikes)]) - 
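# Annotation (illustrative sketch, not a line of the deleted file): the
# surrounding `set_output` calls re-export each nested sub-workflow's lazy
# outputs under flattened, prefixed names (e.g. "iqmswf_out_file"), so the
# enclosing workflow presents a single flat output spec. The same mechanism
# on a toy workflow, written in the style of the workflows above and assuming
# the same pydra version (the dict-valued `input_spec` follows this file's
# convention):
#
#     import typing as ty
#     import pydra.mark
#     from pydra.engine import Workflow
#
#     @pydra.mark.task
#     def double(x: int) -> int:
#         return 2 * x
#
#     wf = Workflow(name="toy", input_spec={"x": ty.Any}, x=3)
#     wf.add(double(x=wf.lzin.x, name="double"))
#     # Expose the node's lazy result as a named top-level output:
#     wf.set_output([("double_out", wf.double.lzout.out)])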
workflow.set_output([("iqmswf_out_file", workflow.iqmswf.lzout.out_file)]) - workflow.set_output([("iqmswf_dvars", workflow.iqmswf.lzout.dvars)]) - workflow.set_output( - [("func_report_wf_carpet_report", workflow.func_report_wf.lzout.carpet_report)] - ) - workflow.set_output( - [("func_report_wf_stdev_report", workflow.func_report_wf.lzout.stdev_report)] - ) - workflow.set_output( - [("func_report_wf_zoomed_report", workflow.func_report_wf.lzout.zoomed_report)] - ) - workflow.set_output( - [("func_report_wf_spikes_report", workflow.func_report_wf.lzout.spikes_report)] - ) - workflow.set_output( - [("func_report_wf_mean_report", workflow.func_report_wf.lzout.mean_report)] - ) - workflow.set_output( - [ - ( - "func_report_wf_background_report", - workflow.func_report_wf.lzout.background_report, - ) - ] - ) - - return workflow diff --git a/pydra/tasks/mriqc/workflows/functional/output.py b/pydra/tasks/mriqc/workflows/functional/output.py deleted file mode 100644 index 81bb2a8..0000000 --- a/pydra/tasks/mriqc/workflows/functional/output.py +++ /dev/null @@ -1,315 +0,0 @@ -import attrs -import logging -from pathlib import Path -from pydra.engine import Workflow -from pydra.engine.specs import BaseSpec, SpecInfo -from pydra.engine.task import FunctionTask -import typing as ty - - -logger = logging.getLogger(__name__) - - -def init_func_report_wf( - brainmask=attrs.NOTHING, - epi_mean=attrs.NOTHING, - epi_parc=attrs.NOTHING, - exec_verbose_reports=False, - exec_work_dir=None, - fd_thres=attrs.NOTHING, - hmc_epi=attrs.NOTHING, - hmc_fd=attrs.NOTHING, - in_dvars=attrs.NOTHING, - in_fft=attrs.NOTHING, - in_ras=attrs.NOTHING, - in_spikes=attrs.NOTHING, - in_stddev=attrs.NOTHING, - meta_sidecar=attrs.NOTHING, - name="func_report_wf", - outliers=attrs.NOTHING, - wf_biggest_file_gb=1, - wf_fft_spikes_detector=False, - wf_species="human", -): - """ - Write out individual reportlets. - - .. 
workflow:: - - from mriqc.workflows.functional.output import init_func_report_wf - from mriqc.testing import mock_config - with mock_config(): - wf = init_func_report_wf() - - """ - from pydra.tasks.nireports.interfaces import FMRISummary, PlotMosaic, PlotSpikes - from pydra.tasks.niworkflows.interfaces.morphology import ( - BinaryDilation, - BinarySubtraction, - ) - from pydra.tasks.mriqc.interfaces.functional import Spikes - - # from mriqc.interfaces.reports import IndividualReport - if exec_work_dir is None: - exec_work_dir = Path.cwd() - - verbose = exec_verbose_reports - mem_gb = wf_biggest_file_gb - reportlets_dir = exec_work_dir / "reportlets" - workflow = Workflow( - name=name, - input_spec={ - "brainmask": ty.Any, - "epi_mean": ty.Any, - "epi_parc": ty.Any, - "fd_thres": ty.Any, - "hmc_epi": ty.Any, - "hmc_fd": ty.Any, - "in_dvars": ty.Any, - "in_fft": ty.Any, - "in_ras": ty.Any, - "in_spikes": ty.Any, - "in_stddev": ty.Any, - "meta_sidecar": ty.Any, - "outliers": ty.Any, - }, - output_spec={ - "background_report": ty.Any, - "carpet_report": ty.Any, - "mean_report": ty.Any, - "spikes_report": ty.Any, - "stdev_report": ty.Any, - "zoomed_report": ty.Any, - }, - brainmask=brainmask, - epi_mean=epi_mean, - epi_parc=epi_parc, - fd_thres=fd_thres, - hmc_epi=hmc_epi, - hmc_fd=hmc_fd, - in_dvars=in_dvars, - in_fft=in_fft, - in_ras=in_ras, - in_spikes=in_spikes, - in_stddev=in_stddev, - meta_sidecar=meta_sidecar, - outliers=outliers, - ) - - # Set FD threshold - - workflow.add( - FunctionTask( - func=spikes_mask, - input_spec=SpecInfo( - name="FunctionIn", - bases=(BaseSpec,), - fields=[("in_file", ty.Any), ("in_mask", ty.Any)], - ), - output_spec=SpecInfo( - name="FunctionOut", - bases=(BaseSpec,), - fields=[("out_file", ty.Any), ("out_plot", ty.Any)], - ), - in_file=workflow.lzin.in_ras, - name="spmask", - ) - ) - workflow.add( - Spikes( - detrend=False, - no_zscore=True, - in_file=workflow.lzin.in_ras, - in_mask=workflow.spmask.lzout.out_file, - name="spikes_bg", - ) - ) - # Generate crown mask - # Create the crown mask - workflow.add(BinaryDilation(in_mask=workflow.lzin.brainmask, name="dilated_mask")) - workflow.add( - BinarySubtraction( - in_base=workflow.dilated_mask.lzout.out_mask, - in_subtract=workflow.lzin.brainmask, - name="subtract_mask", - ) - ) - workflow.add( - FunctionTask( - func=_carpet_parcellation, - crown_mask=workflow.subtract_mask.lzout.out_mask, - segmentation=workflow.lzin.epi_parc, - name="parcels", - ) - ) - workflow.add( - FMRISummary( - dvars=workflow.lzin.in_dvars, - fd=workflow.lzin.hmc_fd, - fd_thres=workflow.lzin.fd_thres, - in_func=workflow.lzin.hmc_epi, - in_segm=workflow.parcels.lzout.out, - in_spikes_bg=workflow.spikes_bg.lzout.out_tsz, - outliers=workflow.lzin.outliers, - tr=workflow.lzin.meta_sidecar, - name="bigplot", - ) - ) - # fmt: off - workflow.bigplot.inputs.tr = workflow.lzin.meta_sidecar - # fmt: on - workflow.add( - PlotMosaic( - cmap="Greys_r", - out_file="plot_func_mean_mosaic1.svg", - in_file=workflow.lzin.epi_mean, - name="mosaic_mean", - ) - ) - workflow.add( - PlotMosaic( - cmap="viridis", - out_file="plot_func_stddev_mosaic2_stddev.svg", - in_file=workflow.lzin.in_stddev, - name="mosaic_stddev", - ) - ) - workflow.add( - PlotMosaic( - cmap="Greys_r", - bbox_mask_file=workflow.lzin.brainmask, - in_file=workflow.lzin.epi_mean, - name="mosaic_zoom", - ) - ) - workflow.add( - PlotMosaic( - cmap="viridis_r", - only_noise=True, - in_file=workflow.lzin.epi_mean, - name="mosaic_noise", - ) - ) - if wf_species.lower() in ("rat", "mouse"): - 
workflow.mosaic_mean.inputs.view = ["coronal", "axial"] - workflow.mosaic_stddev.inputs.view = ["coronal", "axial"] - workflow.mosaic_zoom.inputs.view = ["coronal", "axial"] - workflow.mosaic_noise.inputs.view = ["coronal", "axial"] - - # fmt: off - workflow.set_output([('mean_report', workflow.mosaic_mean.lzout.out_file)]) - workflow.set_output([('stdev_report', workflow.mosaic_stddev.lzout.out_file)]) - workflow.set_output([('background_report', workflow.mosaic_noise.lzout.out_file)]) - workflow.set_output([('zoomed_report', workflow.mosaic_zoom.lzout.out_file)]) - workflow.set_output([('carpet_report', workflow.bigplot.lzout.out_file)]) - # fmt: on - if True: # wf_fft_spikes_detector: - disabled so output is always created - workflow.add( - PlotSpikes( - cmap="viridis", - out_file="plot_spikes.svg", - title="High-Frequency spikes", - name="mosaic_spikes", - ) - ) - pass - # fmt: off - pass - workflow.mosaic_spikes.inputs.in_file = workflow.lzin.in_ras - workflow.mosaic_spikes.inputs.in_spikes = workflow.lzin.in_spikes - workflow.mosaic_spikes.inputs.in_fft = workflow.lzin.in_fft - workflow.set_output([('spikes_report', workflow.mosaic_spikes.lzout.out_file)]) - # fmt: on - if not verbose: - return workflow - # Verbose-reporting goes here - from pydra.tasks.nireports.interfaces import PlotContours - from pydra.tasks.niworkflows.utils.connections import pop_file as _pop - - # fmt: off - - # fmt: on - - return workflow - - -def _carpet_parcellation(segmentation, crown_mask): - """Generate the union of two masks.""" - from pathlib import Path - import nibabel as nb - import numpy as np - - img = nb.load(segmentation) - lut = np.zeros((256,), dtype="uint8") - lut[100:201] = 1 # Ctx GM - lut[30:99] = 2 # dGM - lut[1:11] = 3 # WM+CSF - lut[255] = 4 # Cerebellum - # Apply lookup table - seg = lut[np.asanyarray(img.dataobj, dtype="uint16")] - seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5 - outimg = img.__class__(seg.astype("uint8"), img.affine, img.header) - outimg.set_data_dtype("uint8") - out_file = Path("segments.nii.gz").absolute() - outimg.to_filename(out_file) - return str(out_file) - - -def _get_tr(meta_dict): - - if isinstance(meta_dict, (list, tuple)): - meta_dict = meta_dict[0] - return meta_dict.get("RepetitionTime", None) - - -def spikes_mask(in_file, in_mask=None, out_file=None): - """Calculate a mask in which check for :abbr:`EM (electromagnetic)` spikes.""" - import os.path as op - import nibabel as nb - import numpy as np - from nilearn.image import mean_img - from nilearn.plotting import plot_roi - from scipy import ndimage as nd - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath(f"{fname}_spmask{ext}") - out_plot = op.abspath(f"{fname}_spmask.pdf") - in_4d_nii = nb.load(in_file) - orientation = nb.aff2axcodes(in_4d_nii.affine) - if in_mask: - mask_data = np.asanyarray(nb.load(in_mask).dataobj) - a = np.where(mask_data != 0) - bbox = ( - np.max(a[0]) - np.min(a[0]), - np.max(a[1]) - np.min(a[1]), - np.max(a[2]) - np.min(a[2]), - ) - longest_axis = np.argmax(bbox) - # Input here is a binarized and intersected mask data from previous section - dil_mask = nd.binary_dilation( - mask_data, iterations=int(mask_data.shape[longest_axis] / 9) - ) - rep = list(mask_data.shape) - rep[longest_axis] = -1 - new_mask_2d = dil_mask.max(axis=longest_axis).reshape(rep) - rep = [1, 1, 1] - rep[longest_axis] = mask_data.shape[longest_axis] - new_mask_3d = 
np.logical_not(np.tile(new_mask_2d, rep)) - else: - new_mask_3d = np.zeros(in_4d_nii.shape[:3]) == 1 - if orientation[0] in ("L", "R"): - new_mask_3d[0:2, :, :] = True - new_mask_3d[-3:-1, :, :] = True - else: - new_mask_3d[:, 0:2, :] = True - new_mask_3d[:, -3:-1, :] = True - mask_nii = nb.Nifti1Image( - new_mask_3d.astype(np.uint8), in_4d_nii.affine, in_4d_nii.header - ) - mask_nii.to_filename(out_file) - plot_roi(mask_nii, mean_img(in_4d_nii), output_file=out_plot) - return out_file, out_plot diff --git a/pydra/tasks/mriqc/workflows/shared.py b/pydra/tasks/mriqc/workflows/shared.py deleted file mode 100644 index 28367d2..0000000 --- a/pydra/tasks/mriqc/workflows/shared.py +++ /dev/null @@ -1,77 +0,0 @@ -import attrs -import logging -from pydra.engine import Workflow -import typing as ty - - -logger = logging.getLogger(__name__) - - -def synthstrip_wf(in_files=attrs.NOTHING, name="synthstrip_wf", omp_nthreads=None): - """Create a brain-extraction workflow using SynthStrip.""" - from pydra.tasks.ants.auto import N4BiasFieldCorrection - from pydra.tasks.niworkflows.interfaces.nibabel import ApplyMask, IntensityClip - from pydra.tasks.mriqc.interfaces.synthstrip import SynthStrip - - # truncate target intensity for N4 correction - workflow = Workflow( - name=name, - input_spec={"in_files": ty.Any}, - output_spec={ - "bias_image": ty.Any, - "out_brain": ty.Any, - "out_corrected": ty.Any, - "out_mask": ty.Any, - }, - in_files=in_files, - ) - - workflow.add( - IntensityClip( - p_max=99.9, p_min=10, in_file=workflow.lzin.in_files, name="pre_clip" - ) - ) - workflow.add( - N4BiasFieldCorrection( - copy_header=True, - dimension=3, - num_threads=omp_nthreads, - rescale_intensities=True, - input_image=workflow.pre_clip.lzout.out_file, - name="pre_n4", - ) - ) - workflow.add( - N4BiasFieldCorrection( - copy_header=True, - dimension=3, - n_iterations=[50] * 4, - num_threads=omp_nthreads, - save_bias=True, - input_image=workflow.pre_clip.lzout.out_file, - name="post_n4", - ) - ) - workflow.add( - SynthStrip( - num_threads=omp_nthreads, - in_file=workflow.pre_n4.lzout.output_image, - name="synthstrip", - ) - ) - workflow.add( - ApplyMask( - in_file=workflow.post_n4.lzout.output_image, - in_mask=workflow.synthstrip.lzout.out_mask, - name="final_masked", - ) - ) - # fmt: off - workflow.post_n4.inputs.weight_image = workflow.synthstrip.lzout.out_mask - workflow.set_output([('out_brain', workflow.final_masked.lzout.out_file)]) - workflow.set_output([('bias_image', workflow.post_n4.lzout.bias_image)]) - workflow.set_output([('out_mask', workflow.synthstrip.lzout.out_mask)]) - workflow.set_output([('out_corrected', workflow.post_n4.lzout.output_image)]) - # fmt: on - - return workflow diff --git a/pydra/tasks/mriqc/workflows/utils.py b/pydra/tasks/mriqc/workflows/utils.py deleted file mode 100644 index 193cb2f..0000000 --- a/pydra/tasks/mriqc/workflows/utils.py +++ /dev/null @@ -1,176 +0,0 @@ -import logging -from pathlib import Path - - -logger = logging.getLogger(__name__) - - -def _tofloat(inlist): - - if isinstance(inlist, (list, tuple)): - return ( - [_tofloat(el) for el in inlist] if len(inlist) > 1 else _tofloat(inlist[0]) - ) - return float(inlist) - - -def generate_filename(in_file, dirname=None, suffix="", extension=None): - """ - Generate a nipype-like filename. - - >>> str(generate_filename("/path/to/input.nii.gz").relative_to(Path.cwd())) - 'input.nii.gz' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", - ... 
)) - '/other/path/input.nii.gz' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="tsv", - ... )) - '/other/path/input.tsv' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension=".tsv", - ... )) - '/other/path/input.tsv' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", - ... )) - '/other/path/input' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="_mod", - ... )) - '/other/path/input_mod' - - >>> str(generate_filename( - ... "/path/to/input.nii.gz", dirname="/other/path", extension="", suffix="mod", - ... )) - '/other/path/input_mod' - - >>> str(generate_filename( - ... "/path/to/input", dirname="/other/path", extension="tsv", suffix="mod", - ... )) - '/other/path/input_mod.tsv' - - """ - from pathlib import Path - - in_file = Path(in_file) - in_ext = "".join(in_file.suffixes) - dirname = Path.cwd() if dirname is None else Path(dirname) - if extension is not None: - extension = ( - extension if not extension or extension.startswith(".") else f".{extension}" - ) - else: - extension = in_ext - stem = in_file.name[: -len(in_ext)] if in_ext else in_file.name - if suffix and not suffix.startswith("_"): - suffix = f"_{suffix}" - return dirname / f"{stem}{suffix}{extension}" - - -def get_fwhmx(): - - from pydra.tasks.afni.auto import FWHMx, Info - - fwhm_args = {"combine": True, "detrend": True} - afni_version = Info.version() - if afni_version and afni_version >= (2017, 2, 3): - fwhm_args["args"] = "-ShowMeClassicFWHM" - fwhm_interface = FWHMx(**fwhm_args) - return fwhm_interface - - -def slice_wise_fft(in_file, ftmask=None, spike_thres=3.0, out_prefix=None): - """Search for spikes in slices using the 2D FFT""" - import os.path as op - import nibabel as nb - import numpy as np - from scipy.ndimage import binary_erosion, generate_binary_structure - from scipy.ndimage.filters import median_filter - from statsmodels.robust.scale import mad - from pydra.tasks.mriqc.workflows.utils import spectrum_mask - - if out_prefix is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, _ = op.splitext(fname) - out_prefix = op.abspath(fname) - func_data = nb.load(in_file).get_fdata() - if ftmask is None: - ftmask = spectrum_mask(tuple(func_data.shape[:2])) - fft_data = [] - for t in range(func_data.shape[-1]): - func_frame = func_data[..., t] - fft_slices = [] - for z in range(func_frame.shape[2]): - sl = func_frame[..., z] - fftsl = ( - median_filter( - np.real(np.fft.fft2(sl)).astype(np.float32), - size=(5, 5), - mode="constant", - ) - * ftmask - ) - fft_slices.append(fftsl) - fft_data.append(np.stack(fft_slices, axis=-1)) - # Recompose the 4D FFT timeseries - fft_data = np.stack(fft_data, -1) - # Z-score across t, using robust statistics - mu = np.median(fft_data, axis=3) - sigma = np.stack([mad(fft_data, axis=3)] * fft_data.shape[-1], -1) - idxs = np.where(np.abs(sigma) > 1e-4) - fft_zscored = fft_data - mu[..., np.newaxis] - fft_zscored[idxs] /= sigma[idxs] - # save fft z-scored - out_fft = op.abspath(out_prefix + "_zsfft.nii.gz") - nii = nb.Nifti1Image(fft_zscored.astype(np.float32), np.eye(4), None) - nii.to_filename(out_fft) - # Find peaks - spikes_list = [] - for t in range(fft_zscored.shape[-1]): - fft_frame = fft_zscored[..., t] - for z in range(fft_frame.shape[-1]): - sl = fft_frame[..., z] - if np.all(sl < spike_thres): - continue - # Any zscore over spike_thres will be called a 
From e555fdc1f0d847335c330d4ac14fbdd0b33e07b5 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Sat, 18 May 2024 19:44:56 +1000
Subject: [PATCH 30/47] updated requirements file

---
 nipype-auto-conv/requirements.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/nipype-auto-conv/requirements.txt b/nipype-auto-conv/requirements.txt
index 0bdabfa..e0f01dd 100644
--- a/nipype-auto-conv/requirements.txt
+++ b/nipype-auto-conv/requirements.txt
@@ -1,3 +1,4 @@
 nipype2pydra
 mriqc
-nipy
\ No newline at end of file
+nipy
+datalad
\ No newline at end of file

From 84e14c048b4391d8f8e6674451b5a9b23aa2794c Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Sat, 18 May 2024 19:59:28 +1000
Subject: [PATCH 31/47] added nirodents to conv reqs

---
 nipype-auto-conv/requirements.txt | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nipype-auto-conv/requirements.txt b/nipype-auto-conv/requirements.txt
index e0f01dd..85ec865 100644
--- a/nipype-auto-conv/requirements.txt
+++ b/nipype-auto-conv/requirements.txt
@@ -1,4 +1,6 @@
 nipype2pydra
 mriqc
 nipy
-datalad
\ No newline at end of file
+datalad
+nitime
+nirodents

From 25b5743044f0ad932b0c4947d547c1e6582c5d87 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Mon, 20 May 2024 11:31:01 +1000
Subject: [PATCH 32/47] removed installation of toolkit deps from ci-cd (will mock instead)

---
 .github/workflows/ci-cd.yaml | 32 +++----------------------------
 1 file changed, 3 insertions(+), 29 deletions(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index e0c4f71..d2b249d 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -87,38 +87,12 @@ jobs:
       matrix:
         python-version: ['3.8'] # '3.11'
     steps:
+
       - name: Removed unnecessary tools to free space
         run: |
           sudo rm -rf /usr/share/dotnet
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
-      - name: Get Download cache Key
-        id: cache-key
-        run: echo "::set-output name=key::anatomical-linux-ubuntu22_amd64-7.4.1"
-      - name: Cache FreeSurfer
-        uses: actions/cache@v2
-        with:
-          path: $HOME/downloads/anatomical
-          key: ${{ steps.cache-key.outputs.key }}
-          restore-keys: |
-            anatomical-linux-ubuntu22_amd64-7.4.1
-      - name: Download FreeSurfer
-        if: steps.cache-key.outputs.key != steps.cache-hit.outputs.key
-        run: |
-          mkdir -p $HOME/downloads/anatomical
-          curl -s -o $HOME/downloads/anatomical/anatomical-linux-ubuntu22_amd64-7.4.1.tar.gz https://surfer.nmr.mgh.harvard.edu/pub/dist/anatomical/7.4.1/anatomical-linux-ubuntu22_amd64-7.4.1.tar.gz
-        shell: bash
-      - name: Install Freesurfer
-        env:
-          FREESURFER_LICENCE: ${{ secrets.FREESURFER_LICENCE }}
-        run: |
-          pushd $HOME/downloads/anatomical
-          tar -zxpf anatomical-linux-ubuntu22_amd64-7.4.1.tar.gz
-          mv anatomical $HOME/
-          popd
-          export FREESURFER_HOME=$HOME/anatomical
-          source $FREESURFER_HOME/SetUpFreeSurfer.sh
-          echo $FREESURFER_LICENCE > $FREESURFER_HOME/license.txt
-          export PATH=$FREESURFER_HOME/bin:$PATH
+
       - uses: actions/checkout@v4
       - name: Revert version to most recent tag on upstream update
         if: github.event_name == 'repository_dispatch'
         run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}')
From ca0dfbda83366ee44027b2fc06fcd85cc8488b78 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Mon, 20 May 2024 13:06:15 +1000
Subject: [PATCH 33/47] added mock tools

---
 .github/workflows/ci-cd.yaml | 15 ++++++++++++++-
 mock-tools/afni              |  3 +++
 mock-tools/antsRegistration  |  2 ++
 mock-tools/etc/fslversion    |  1 +
 mock-tools/mrconvert         |  2 ++
 5 files changed, 22 insertions(+), 1 deletion(-)
 create mode 100755 mock-tools/afni
 create mode 100755 mock-tools/antsRegistration
 create mode 100644 mock-tools/etc/fslversion
 create mode 100755 mock-tools/mrconvert

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index d2b249d..814d26b 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -85,7 +85,8 @@ jobs:
     runs-on: ubuntu-22.04
     strategy:
       matrix:
-        python-version: ['3.8'] # '3.11'
+        python-version: ['3.8', '3.12']
+
     steps:
       - name: Removed unnecessary tools to free space
         run: |
           sudo rm -rf /usr/share/dotnet
@@ -97,30 +98,42 @@ jobs:
         run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}')
+
       - name: Download tasks converted from Nipype
         uses: actions/download-artifact@v3
         with:
           name: converted-nipype
           path: pydra/tasks/anatomical/auto
+
       - name: Strip auto package from gitignore so it is included in package
         run: |
           sed -i '/\/src\/pydra\/tasks\/anatomical\/auto/d' .gitignore
+
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
+
       - name: Install build dependencies
         run: |
          python -m pip install --upgrade pip
+
       - name: Install task package
         run: |
           pip install ".[test]"
           python -c "import pydra.tasks.anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
           python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
+
+      - name: Set environment variables required for mocking tools
+        run: |
+          export PATH=$(pwd)/mock-tools:$PATH
+          export FSLDIR=$(pwd)/mock-tools
+
       - name: Test with pytest
         run: |
           pytest -sv --doctest-modules ./pydra/tasks/anatomical \
             --cov pydra.tasks.anatomical --cov-report xml
+
       - uses: codecov/codecov-action@v3
         if: ${{ always() }}
         with:
diff --git a/mock-tools/afni b/mock-tools/afni
new file mode 100755
index 0000000..695aac5
--- /dev/null
+++ b/mock-tools/afni
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+echo "Version AFNI_24_1_0"
+echo "Dummy line"
diff --git a/mock-tools/antsRegistration b/mock-tools/antsRegistration
new file mode 100755
index 0000000..bca3fb9
--- /dev/null
+++ b/mock-tools/antsRegistration
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+echo "ANTs Version: 2.5.1"
\ No newline at end of file
diff --git a/mock-tools/etc/fslversion b/mock-tools/etc/fslversion
new file mode 100644
index 0000000..41bd15e
--- /dev/null
+++ b/mock-tools/etc/fslversion
@@ -0,0 +1 @@
+6.0.7
\ No newline at end of file
diff --git a/mock-tools/mrconvert b/mock-tools/mrconvert
new file mode 100755
index 0000000..4ceec4f
--- /dev/null
+++ b/mock-tools/mrconvert
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+echo "== mrconvert 3.0.3"
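For context (not part of the patches): the mock scripts above only have to satisfy the version probes that interface packages run when they first shell out to a toolkit; the unit tests never invoke the real tools. A rough sketch of the kind of probe the `afni` stub answers, assuming `mock-tools` is on `PATH` (the exact command line and parsing used by nipype-derived interfaces differs in detail):

    import re
    import subprocess

    # The stub ignores its arguments and always prints "Version AFNI_24_1_0".
    out = subprocess.run(["afni", "--version"], capture_output=True, text=True).stdout
    match = re.search(r"AFNI_(\d+)_(\d+)_(\d+)", out)
    if match:
        version = tuple(int(g) for g in match.groups())
        print(version)  # (24, 1, 0), recent enough to pass any version gate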
From f1191913922ffacc5af79fd00eecc363b8050731 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Mon, 20 May 2024 13:42:46 +1000
Subject: [PATCH 34/47] debugging ci-cd

---
 .github/workflows/ci-cd.yaml | 30 +++++++++++++++---------------
 1 file changed, 15 insertions(+), 15 deletions(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 814d26b..7646920 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -32,10 +32,10 @@ jobs:
         run: python -m pip install -r ./nipype-auto-conv/requirements.txt
       - name: Run automatic Nipype > Pydra conversion
         run: ./nipype-auto-conv/generate
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
         with:
           name: converted-nipype
-          path: pydra/tasks/anatomical/auto
+          path: pydra/tasks/mriqc

   devcheck:
     needs: [nipype-conv]
@@ -54,13 +54,13 @@ jobs:
         if: github.event_name == 'repository_dispatch'
         run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}')
       - name: Download tasks converted from Nipype
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: converted-nipype
-          path: pydra/tasks/anatomical/auto
+          path: pydra/tasks/mriqc
       - name: Strip auto package from gitignore so it is included in package
         run: |
-          sed -i '/\/pydra\/tasks\/anatomical\/auto/d' .gitignore
+          sed -i '/\/pydra/d' .gitignore
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
@@ -100,14 +100,14 @@ jobs:
         run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}')

       - name: Download tasks converted from Nipype
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: converted-nipype
-          path: pydra/tasks/anatomical/auto
+          path: pydra/tasks/mriqc

-      - name: Strip auto package from gitignore so it is included in package
+      - name: Strip pydra package from gitignore so it is included in package
         run: |
-          sed -i '/\/src\/pydra\/tasks\/anatomical\/auto/d' .gitignore
+          sed -i '/\/pydra/d' .gitignore

       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
@@ -149,17 +149,17 @@ jobs:
           submodules: recursive
           fetch-depth: 0
       - name: Download tasks converted from Nipype
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
           name: converted-nipype
-          path: pydra/tasks/anatomical/auto
+          path: pydra/tasks/mriqc
       - name: Tag release with a post-release based on Nipype and Nipype2Pydra versions
         if: github.event_name == 'repository_dispatch'
         run: |
           TAG=$(git tag -l | tail -n 1 | awk -F post '{print $1}')
-          POST=$(python -c "from pydra.tasks.anatomical.auto._post_release import *; print(post_release)")
+          POST=$(python -c "from pydra.tasks.anatomical._post_release import *; print(post_release)")
           git checkout $TAG
-          git add -f pydra/tasks/anatomical/auto/_version.py
+          git add -f pydra/tasks/mriqc/_version.py
           git commit -am"added auto-generated version to make new tag for package version"
           git tag ${TAG}post${POST}
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
           python-version: '3.11'
       - name: Install build tools
         run: python -m pip install build twine
-      - name: Strip auto package from gitignore so it is included in package
+      - name: Strip pydra package from gitignore so it is included in package
         run: |
-          sed -i '/\/pydra\/tasks\/anatomical\/auto/d' .gitignore
+          sed -i '/\/pydra/d' .gitignore
       - name: Build source and wheel distributions
         run: python -m build .
       - name: Check distributions
From 88b929ff812d9fb39c6527b46e378f32a9097cfc Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Mon, 20 May 2024 13:53:20 +1000
Subject: [PATCH 35/47] debugging ci-cd

---
 .github/workflows/ci-cd.yaml | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 7646920..e1c3d48 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -121,7 +121,7 @@ jobs:
       - name: Install task package
         run: |
           pip install ".[test]"
-          python -c "import pydra.tasks.anatomical as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
+          python -c "import pydra.tasks.mriqc as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
           python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
@@ -131,14 +131,13 @@ jobs:

       - name: Test with pytest
         run: |
-          pytest -sv --doctest-modules ./pydra/tasks/anatomical \
-            --cov pydra.tasks.anatomical --cov-report xml
+          pytest -sv ./pydra --cov pydra.tasks.mriqc --cov-report xml

       - uses: codecov/codecov-action@v3
         if: ${{ always() }}
         with:
           files: coverage.xml
-          name: pydra-anatomical
+          name: pydra-mriqc

   deploy:
     needs: [test]
@@ -157,7 +156,7 @@ jobs:
         if: github.event_name == 'repository_dispatch'
         run: |
           TAG=$(git tag -l | tail -n 1 | awk -F post '{print $1}')
-          POST=$(python -c "from pydra.tasks.anatomical._post_release import *; print(post_release)")
+          POST=$(python -c "from pydra.tasks.mriqc._post_release import *; print(post_release)")
           git checkout $TAG
           git add -f pydra/tasks/mriqc/_version.py
           git commit -am"added auto-generated version to make new tag for package version"
           git tag ${TAG}post${POST}

From bb6557dec9ec3efb699646cf9f74bcc86338fffa Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Mon, 20 May 2024 14:19:35 +1000
Subject: [PATCH 36/47] updated release checking in ci-cd

---
 .github/workflows/ci-cd.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index e1c3d48..50d21a5 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -180,7 +180,7 @@ jobs:
           path: dist/
       - name: Check for PyPI token on tag
         id: deployable
-        if: (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) || github.event_name == 'repository_dispatch'
+        if: github.event_name == 'release'
         env:
           PYPI_API_TOKEN: "${{ secrets.PYPI_API_TOKEN }}"
         run: if [ -n "$PYPI_API_TOKEN" ]; then echo "DEPLOY=true" >> $GITHUB_OUTPUT; fi

From 33d3312a0192fb79e28cc6f5a0a171cc2c490a2c Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 21 May 2024 08:30:15 +1000
Subject: [PATCH 37/47] added in dependencies required to run

---
 pyproject.toml | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index f087361..74be9b4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,14 +8,32 @@ description = "Pydra tasks package for mriqc"
 readme = "README.rst"
 requires-python = ">=3.8"
 dependencies = [
-    "pydra >=0.22",
+    "dipy",
     "fileformats >=0.8.3",
     "fileformats-datascience >=0.1",
     "fileformats-medimage >=0.4.1",
+    "markupsafe ~= 2.0.1",
+    "matplotlib",
+    "nibabel",
+    "nilearn",
+    "migas >= 0.4.0",
+    "pandas ~=1.0",
+    "pydra >=0.22",
     "pydra-ants",
     "pydra-afni",
     "pydra-fsl",
     "pydra-mrtrix3 >=3.0.3a0",
+    "pydra-niworkflows",
+    "PyYAML",
+    "seaborn",
+    "scikit-learn",
+    "scipy",
+    "statsmodels",
+    "templateflow",
+    "nilearn",
+    "torch",
+    "toml",
+    "tomli >= 1.1.0; python_version < '3.11'",
 ]
 license = { file = "LICENSE" }
 authors = [{ name = "Nipype developers", email = "neuroimaging@python.org" }]
"scipy", + "statsmodels", + "templateflow", + "nilearn", + "torch", + "toml", + "tomli >= 1.1.0; python_version < '3.11'", ] license = { file = "LICENSE" } authors = [{ name = "Nipype developers", email = "neuroimaging@python.org" }] From 21492968cbdb385fe70a458f0c8d639dbc2fa6f3 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Tue, 21 May 2024 10:03:32 +1000 Subject: [PATCH 38/47] debugging ci-cd --- .github/workflows/ci-cd.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml index 50d21a5..71bd2f1 100644 --- a/.github/workflows/ci-cd.yaml +++ b/.github/workflows/ci-cd.yaml @@ -24,6 +24,8 @@ jobs: - name: Revert version to most recent tag on upstream update if: github.event_name == 'repository_dispatch' run: git checkout $(git tag -l | tail -n 1 | awk -F post '{print $1}') + - name: Show file tree + run: tree . - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 - name: Install build dependencies @@ -194,4 +196,4 @@ jobs: # Deploy on tags if PYPI_API_TOKEN is defined in the repository secrets. # Secrets are not accessible in the if: condition [0], so set an output variable [1] # [0] https://github.community/t/16928 -# [1] https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-output-parameter \ No newline at end of file +# [1] https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-output-parameter From 22d475f313cbc645f0f5603a8fda29b8554a0e84 Mon Sep 17 00:00:00 2001 From: Tom Close Date: Tue, 21 May 2024 15:21:57 +1000 Subject: [PATCH 39/47] added package init --- pydra/tasks/mriqc/__init__.py | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 pydra/tasks/mriqc/__init__.py diff --git a/pydra/tasks/mriqc/__init__.py b/pydra/tasks/mriqc/__init__.py new file mode 100644 index 0000000..eda89fd --- /dev/null +++ b/pydra/tasks/mriqc/__init__.py @@ -0,0 +1,34 @@ +""" +This is a basic doctest demonstrating that the package and pydra can both be successfully +imported. + +>>> import pydra.engine +>>> import pydra.tasks.mriqc +""" + +from warnings import warn +from pathlib import Path + +pkg_path = Path(__file__).parent.parent + +try: + from ._version import __version__ +except ImportError: + raise RuntimeError( + "pydra-mriqc has not been properly installed, please run " + f"`pip install -e {str(pkg_path)}` to install a development version" + ) +if "post" not in __version__: + try: + from ._post_release import post_release + except ImportError: + warn( + "Nipype interfaces haven't been automatically converted from their specs in " + f"`nipype-auto-conv`. 
From 75e8fbef1ae89452584a70c93188d3d21429f44c Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 21 May 2024 17:38:07 +1000
Subject: [PATCH 40/47] find replace >py3.8 syntax to 3.8 syntax

---
 nipype-auto-conv/specs/package.yaml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml
index 9b5b3ac..9c71387 100644
--- a/nipype-auto-conv/specs/package.yaml
+++ b/nipype-auto-conv/specs/package.yaml
@@ -44,6 +44,15 @@ find_replace:
   - ["class _ReadDWIMetadataOutputSpec.+?(?=\\n\\n)", ""]
   - ["dataset = wf_inputs\\.get\\(.*?_datalad_get\\(\\w+\\)", ""]
   - ["DWIDenoise", "DwiDenoise"]
+  - [
+      "dict\\[int, \\(float, float\\)\\]",
+      "ty.Dict[int, ty.Tuple[float, float]]",
+    ]
+  - [
+      "dict\\[str, float \\| np.ndarray\\]",
+      "ty.Dict[str, ty.Union[float, np.ndarray]]",
+    ]
+  - ["\\bdict\\[", "ty.Dict["]
 omit_modules:
   - "mriqc.config"
 import_find_replace:

From edf86983fddeb0caea83b4005809bd7e0ac145bc Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 21 May 2024 18:11:43 +1000
Subject: [PATCH 41/47] removed support for Python 3.8

---
 nipype-auto-conv/specs/package.yaml | 18 +++++++++---------
 pyproject.toml                      |  2 +-
 2 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/nipype-auto-conv/specs/package.yaml b/nipype-auto-conv/specs/package.yaml
index 9c71387..ab44c0c 100644
--- a/nipype-auto-conv/specs/package.yaml
+++ b/nipype-auto-conv/specs/package.yaml
@@ -44,15 +44,15 @@ find_replace:
   - ["class _ReadDWIMetadataOutputSpec.+?(?=\\n\\n)", ""]
   - ["dataset = wf_inputs\\.get\\(.*?_datalad_get\\(\\w+\\)", ""]
   - ["DWIDenoise", "DwiDenoise"]
-  - [
-      "dict\\[int, \\(float, float\\)\\]",
-      "ty.Dict[int, ty.Tuple[float, float]]",
-    ]
-  - [
-      "dict\\[str, float \\| np.ndarray\\]",
-      "ty.Dict[str, ty.Union[float, np.ndarray]]",
-    ]
-  - ["\\bdict\\[", "ty.Dict["]
+  # - [
+  #     "dict\\[int, \\(float, float\\)\\]",
+  #     "ty.Dict[int, ty.Tuple[float, float]]",
+  #   ]
+  # - [
+  #     "dict\\[str, float \\| np.ndarray\\]",
+  #     "ty.Dict[str, ty.Union[float, np.ndarray]]",
+  #   ]
+  # - ["\\bdict\\[", "ty.Dict["]
 omit_modules:
   - "mriqc.config"
 import_find_replace:
diff --git a/pyproject.toml b/pyproject.toml
index 74be9b4..bbe2d2e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
 name = "pydra-mriqc"
 description = "Pydra tasks package for mriqc"
 readme = "README.rst"
-requires-python = ">=3.8"
+requires-python = ">=3.10"
 dependencies = [
     "dipy",
     "fileformats >=0.8.3",
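For context (not part of the patches): each `find_replace` entry in the spec is a Python regular expression and its replacement, applied to the generated sources. The entries added in PATCH 40 and disabled again here mapped PEP 585/604 annotations back to `typing` aliases for Python 3.8. A sketch of that rewrite on a made-up signature (`ty` being the alias the generated modules use for `typing`):

    import re

    src = "def fit(x: dict[str, float | np.ndarray]) -> dict[int, (float, float)]: ..."
    src = re.sub(r"dict\[int, \(float, float\)\]", "ty.Dict[int, ty.Tuple[float, float]]", src)
    src = re.sub(r"dict\[str, float \| np.ndarray\]", "ty.Dict[str, ty.Union[float, np.ndarray]]", src)
    src = re.sub(r"\bdict\[", "ty.Dict[", src)
    print(src)
    # def fit(x: ty.Dict[str, ty.Union[float, np.ndarray]]) -> ty.Dict[int, ty.Tuple[float, float]]: ...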
From d782fecb8dcc318d7401d4e04db0ad2c89b2494f Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 21 May 2024 19:10:35 +1000
Subject: [PATCH 42/47] python versions

---
 .github/workflows/ci-cd.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 71bd2f1..1aa836c 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -44,7 +44,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['3.8', '3.11'] # Check oldest and newest versions
+        python-version: ['3.10', '3.12'] # Check oldest and newest versions
         pip-flags: ['', '--editable']
         pydra:
           - 'pydra'
@@ -87,7 +87,7 @@ jobs:
     runs-on: ubuntu-22.04
     strategy:
       matrix:
-        python-version: ['3.8', '3.12']
+        python-version: ['3.10', '3.12']

     steps:
@@ -166,7 +166,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.11'
+          python-version: '3.12'
       - name: Install build tools
         run: python -m pip install build twine
       - name: Strip pydra package from gitignore so it is included in package

From 1cdebc1684021c7f143411d869a9e07491b0aed6 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Tue, 21 May 2024 19:17:50 +1000
Subject: [PATCH 43/47] added pydra-nireports

---
 pyproject.toml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/pyproject.toml b/pyproject.toml
index bbe2d2e..df379ed 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,6 +24,7 @@ dependencies = [
     "pydra-fsl",
     "pydra-mrtrix3 >=3.0.3a0",
     "pydra-niworkflows",
+    "pydra-nireports",
     "PyYAML",
     "seaborn",
     "scikit-learn",

From a3828d000ae4a90967677e8fce738dd692ddfc82 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Wed, 22 May 2024 09:42:02 +1000
Subject: [PATCH 44/47] install dev version of pydra for testing

---
 .github/workflows/ci-cd.yaml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 1aa836c..6c946fa 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -126,6 +126,9 @@ jobs:
           python -c "import pydra.tasks.mriqc as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"
           python -c "import pydra as m; print(f'{m.__name__} {m.__version__} @ {m.__file__}')"

+      - name: Install dev Pydra version for now until it is merged
+        run: pip install --upgrade git+https://github.com/nipype/pydra.git@nipype-auto-conv
+
       - name: Set environment variables required for mocking tools
         run: |
           export PATH=$(pwd)/mock-tools:$PATH

From 83e9918bca7a7c8060bcf787af727e2d11e8ab60 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Wed, 22 May 2024 11:58:42 +1000
Subject: [PATCH 45/47] fixed branch of pydra install in ci-cd

---
 .github/workflows/ci-cd.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 6c946fa..306bc0a 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -127,7 +127,7 @@ jobs:

       - name: Install dev Pydra version for now until it is merged
-        run: pip install --upgrade git+https://github.com/nipype/pydra.git@nipype-auto-conv
+        run: pip install --upgrade git+https://github.com/nipype/pydra.git@typing-bugfixes

From 6fac852c94fc4a0483d9dca5440662930a023bfa Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Wed, 22 May 2024 13:23:32 +1000
Subject: [PATCH 46/47] debugging ci-cd

---
 .github/workflows/ci-cd.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index 306bc0a..e6c5f76 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -133,6 +133,7 @@ jobs:
         run: |
           export PATH=$(pwd)/mock-tools:$PATH
           export FSLDIR=$(pwd)/mock-tools
+          echo "PATH: $PATH"
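For context (not part of the patches): `export VAR=...` inside a step only affects that step's shell, which is why the mock tools were never found by later steps; GitHub Actions propagates state between steps through files instead. Lines appended to `$GITHUB_PATH` are prepended to `PATH` for subsequent steps, and lines appended to `$GITHUB_ENV` must be plain `NAME=value` pairs — an `export ` prefix would be taken as part of the variable name, so no `FSLDIR` would be defined. The same wiring done from Python, as a sketch:

    import os

    mock_dir = os.path.join(os.getcwd(), "mock-tools")
    with open(os.environ["GITHUB_ENV"], "a") as env_file:
        env_file.write(f"FSLDIR={mock_dir}\n")
    with open(os.environ["GITHUB_PATH"], "a") as path_file:
        path_file.write(mock_dir + "\n")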
From 77be07ec84cbbb0901ba9d25e3373319dff00f82 Mon Sep 17 00:00:00 2001
From: Tom Close
Date: Wed, 22 May 2024 15:28:45 +1000
Subject: [PATCH 47/47] updated environment variables in ci-cd

---
 .github/workflows/ci-cd.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci-cd.yaml b/.github/workflows/ci-cd.yaml
index e6c5f76..12380ac 100644
--- a/.github/workflows/ci-cd.yaml
+++ b/.github/workflows/ci-cd.yaml
@@ -131,9 +131,9 @@ jobs:
       - name: Set environment variables required for mocking tools
         run: |
-          export PATH=$(pwd)/mock-tools:$PATH
-          export FSLDIR=$(pwd)/mock-tools
-          echo "PATH: $PATH"
+          echo "$(pwd)/mock-tools" >> $GITHUB_PATH
+          echo "FSLDIR=$(pwd)/mock-tools" >> $GITHUB_ENV
+          echo "FSLOUTPUTTYPE=NIFTI_GZ" >> $GITHUB_ENV

       - name: Test with pytest
         run: |