diff --git a/master/.buildinfo b/master/.buildinfo
index 29b92a498f3..9e4440361fa 100644
--- a/master/.buildinfo
+++ b/master/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file records the configuration used when building these files. When it is not found, a full rebuild will be done.
-config: da883607b8ab75490fbd1ab786272c8f
+config: ee1a91fa07db6ab2c90fcf6a8f18d4d0
tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/master/_modules/index.html b/master/_modules/index.html
index 9bd74c0b4a7..5d47b1b4c9e 100644
--- a/master/_modules/index.html
+++ b/master/_modules/index.html
@@ -388,7 +388,7 @@
"""Nipype translation of ANTs' workflows."""
# general purpose
-from collections import OrderedDict
from multiprocessing import cpu_count
from warnings import warn
-# nipype
-from nipype.pipeline import engine as pe
from nipype.interfaces import utility as niu
from nipype.interfaces.ants import (
AI,
@@ -371,37 +368,42 @@ Source code for niworkflows.anat.ants
ThresholdImage,
)
+# nipype
+from nipype.pipeline import engine as pe
+
from ..data import load as load_data
-from ..utils.misc import get_template_specs
-from ..utils.connections import pop_file as _pop
+from ..interfaces.fixes import (
+ FixHeaderApplyTransforms as ApplyTransforms,
+)
# niworkflows
from ..interfaces.fixes import (
FixHeaderRegistration as Registration,
- FixHeaderApplyTransforms as ApplyTransforms,
)
-from ..interfaces.nibabel import ApplyMask, RegridToZooms
from ..interfaces.header import CopyXForm
-
+from ..interfaces.nibabel import ApplyMask, RegridToZooms
+from ..utils.connections import pop_file as _pop
+from ..utils.misc import get_template_specs
ATROPOS_MODELS = {
- "T1w": OrderedDict([("nclasses", 3), ("csf", 1), ("gm", 2), ("wm", 3)]),
- "T2w": OrderedDict([("nclasses", 3), ("csf", 3), ("gm", 2), ("wm", 1)]),
- "FLAIR": OrderedDict([("nclasses", 3), ("csf", 1), ("gm", 3), ("wm", 2)]),
+ 'T1w': {'nclasses': 3, 'csf': 1, 'gm': 2, 'wm': 3},
+ 'T2w': {'nclasses': 3, 'csf': 3, 'gm': 2, 'wm': 1},
+ 'FLAIR': {'nclasses': 3, 'csf': 1, 'gm': 3, 'wm': 2},
}
+T1W_MODEL = tuple(ATROPOS_MODELS['T1w'].values())
def init_brain_extraction_wf(
- name="brain_extraction_wf",
- in_template="OASIS30ANTs",
+ name='brain_extraction_wf',
+ in_template='OASIS30ANTs',
template_spec=None,
use_float=True,
- normalization_quality="precise",
+ normalization_quality='precise',
omp_nthreads=None,
mem_gb=3.0,
- bids_suffix="T1w",
+ bids_suffix='T1w',
atropos_refine=True,
atropos_use_random_seed=True,
atropos_model=None,
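
For context on the hunk above: plain dicts preserve insertion order on Python 3.7+, so dropping OrderedDict keeps the (nclasses, csf, gm, wm) ordering intact, and the hoisted T1W_MODEL constant evaluates exactly as the old inline tuple(...) call did. A minimal sketch:

    ATROPOS_MODELS = {'T1w': {'nclasses': 3, 'csf': 1, 'gm': 2, 'wm': 3}}
    T1W_MODEL = tuple(ATROPOS_MODELS['T1w'].values())
    assert T1W_MODEL == (3, 1, 2, 3)  # n classes, then CSF/GM/WM label indices
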
@@ -519,7 +521,8 @@ Source code for niworkflows.anat.ants
Output :abbr:`TPMs (tissue probability maps)` by ATROPOS
"""
- from packaging.version import parse as parseversion, Version
+ from packaging.version import Version
+ from packaging.version import parse as parseversion
from templateflow.api import get as get_template
wf = pe.Workflow(name)
@@ -527,27 +530,27 @@ Source code for niworkflows.anat.ants
template_spec = template_spec or {}
# suffix passed via spec takes precedence
- template_spec["suffix"] = template_spec.get("suffix", bids_suffix)
+ template_spec['suffix'] = template_spec.get('suffix', bids_suffix)
tpl_target_path, common_spec = get_template_specs(
- in_template, template_spec=template_spec, fallback=True,
+ in_template,
+ template_spec=template_spec,
+ fallback=True,
)
# Get probabilistic brain mask if available
tpl_mask_path = get_template(
- in_template, label="brain", suffix="probseg", **common_spec
- ) or get_template(in_template, desc="brain", suffix="mask", **common_spec)
+ in_template, label='brain', suffix='probseg', **common_spec
+ ) or get_template(in_template, desc='brain', suffix='mask', **common_spec)
if omp_nthreads is None or omp_nthreads < 1:
omp_nthreads = cpu_count()
- inputnode = pe.Node(
- niu.IdentityInterface(fields=["in_files", "in_mask"]), name="inputnode"
- )
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']), name='inputnode')
# Try to find a registration mask, set if available
tpl_regmask_path = get_template(
- in_template, desc="BrainCerebellumExtraction", suffix="mask", **common_spec
+ in_template, desc='BrainCerebellumExtraction', suffix='mask', **common_spec
)
if tpl_regmask_path:
inputnode.inputs.in_mask = str(tpl_regmask_path)
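
The template-fetching calls above follow the usual TemplateFlow query pattern: try a probabilistic brain mask first, then fall back to a binary one. A hedged sketch of the same query, with resolution=1 as an assumed example entity (in the workflow, common_spec is resolved at runtime by get_template_specs):

    from templateflow.api import get as get_template

    common_spec = {'resolution': 1}  # assumed example; real value comes from get_template_specs
    tpl_mask_path = (
        get_template('OASIS30ANTs', label='brain', suffix='probseg', **common_spec)
        or get_template('OASIS30ANTs', desc='brain', suffix='mask', **common_spec)
    )
    # returns a Path under $TEMPLATEFLOW_HOME, downloading on first use
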
@@ -555,23 +558,21 @@ Source code for niworkflows.anat.ants
outputnode = pe.Node(
niu.IdentityInterface(
fields=[
- "out_file",
- "out_mask",
- "bias_corrected",
- "bias_image",
- "out_segm",
- "out_tpms",
+ 'out_file',
+ 'out_mask',
+ 'bias_corrected',
+ 'bias_image',
+ 'out_segm',
+ 'out_tpms',
]
),
- name="outputnode",
+ name='outputnode',
)
trunc = pe.MapNode(
- ImageMath(
- operation="TruncateImageIntensity", op2="0.01 0.999 256", copy_header=True
- ),
- name="truncate_images",
- iterfield=["op1"],
+ ImageMath(operation='TruncateImageIntensity', op2='0.01 0.999 256', copy_header=True),
+ name='truncate_images',
+ iterfield=['op1'],
)
inu_n4 = pe.MapNode(
N4BiasFieldCorrection(
@@ -584,39 +585,39 @@ Source code for niworkflows.anat.ants
bspline_fitting_distance=bspline_fitting_distance,
),
n_procs=omp_nthreads,
- name="inu_n4",
- iterfield=["input_image"],
+ name='inu_n4',
+ iterfield=['input_image'],
)
res_tmpl = pe.Node(
RegridToZooms(in_file=tpl_target_path, zooms=(4, 4, 4), smooth=True),
- name="res_tmpl",
+ name='res_tmpl',
)
- res_target = pe.Node(RegridToZooms(zooms=(4, 4, 4), smooth=True), name="res_target")
+ res_target = pe.Node(RegridToZooms(zooms=(4, 4, 4), smooth=True), name='res_target')
lap_tmpl = pe.Node(
- ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True), name="lap_tmpl"
+ ImageMath(operation='Laplacian', op2='1.5 1', copy_header=True), name='lap_tmpl'
)
lap_tmpl.inputs.op1 = tpl_target_path
lap_target = pe.Node(
- ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True),
- name="lap_target",
+ ImageMath(operation='Laplacian', op2='1.5 1', copy_header=True),
+ name='lap_target',
)
- mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")
+ mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl')
mrg_tmpl.inputs.in1 = tpl_target_path
- mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
+ mrg_target = pe.Node(niu.Merge(2), name='mrg_target')
# Initialize transforms with antsAI
init_aff = pe.Node(
AI(
- metric=("Mattes", 32, "Regular", 0.25),
- transform=("Affine", 0.1),
+ metric=('Mattes', 32, 'Regular', 0.25),
+ transform=('Affine', 0.1),
search_factor=(15, 0.1),
principal_axes=False,
convergence=(10, 1e-6, 10),
verbose=True,
),
- name="init_aff",
+ name='init_aff',
n_procs=omp_nthreads,
)
@@ -626,31 +627,32 @@ Source code for niworkflows.anat.ants
except ValueError:
warn(
"antsAI's option --search-grid was added in ANTS 2.3.0 "
- f"({init_aff.interface.version} found.)"
+ f'({init_aff.interface.version} found.)',
+ stacklevel=1,
)
# Set up spatial normalization
settings_file = (
- "antsBrainExtraction_%s.json"
+ 'antsBrainExtraction_%s.json'
if use_laplacian
- else "antsBrainExtractionNoLaplacian_%s.json"
+ else 'antsBrainExtractionNoLaplacian_%s.json'
)
norm = pe.Node(
Registration(from_file=load_data(settings_file % normalization_quality)),
- name="norm",
+ name='norm',
n_procs=omp_nthreads,
mem_gb=mem_gb,
)
norm.inputs.float = use_float
- fixed_mask_trait = "fixed_image_mask"
+ fixed_mask_trait = 'fixed_image_mask'
- if norm.interface.version and parseversion(norm.interface.version) >= Version(
- "2.2.0"
- ):
- fixed_mask_trait += "s"
+ if norm.interface.version and parseversion(norm.interface.version) >= Version('2.2.0'):
+ fixed_mask_trait += 's'
map_brainmask = pe.Node(
- ApplyTransforms(interpolation="Gaussian"), name="map_brainmask", mem_gb=1,
+ ApplyTransforms(interpolation='Gaussian'),
+ name='map_brainmask',
+ mem_gb=1,
)
map_brainmask.inputs.input_image = str(tpl_mask_path)
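
The fixed_mask_trait handling above guards against a trait rename: for ANTs >= 2.2.0 the Registration interface exposes the plural fixed_image_masks. A small sketch of the version gate, using '2.3.1' as a stand-in for the detected norm.interface.version:

    from packaging.version import Version
    from packaging.version import parse as parseversion

    fixed_mask_trait = 'fixed_image_mask'
    if parseversion('2.3.1') >= Version('2.2.0'):  # stand-in for norm.interface.version
        fixed_mask_trait += 's'
    assert fixed_mask_trait == 'fixed_image_masks'
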
@@ -663,7 +665,7 @@ Source code for niworkflows.anat.ants
outside_value=0,
copy_header=True,
),
- name="thr_brainmask",
+ name='thr_brainmask',
)
# Refine INU correction
@@ -678,107 +680,105 @@ Source code for niworkflows.anat.ants
bspline_fitting_distance=bspline_fitting_distance,
),
n_procs=omp_nthreads,
- name="inu_n4_final",
- iterfield=["input_image"],
+ name='inu_n4_final',
+ iterfield=['input_image'],
)
try:
inu_n4_final.inputs.rescale_intensities = True
except ValueError:
warn(
"N4BiasFieldCorrection's --rescale-intensities option was added in ANTS 2.1.0 "
- f"({inu_n4_final.interface.version} found.) Please consider upgrading.",
+ f'({inu_n4_final.interface.version} found.) Please consider upgrading.',
UserWarning,
+ stacklevel=1,
)
# Apply mask
- apply_mask = pe.MapNode(ApplyMask(), iterfield=["in_file"], name="apply_mask")
+ apply_mask = pe.MapNode(ApplyMask(), iterfield=['in_file'], name='apply_mask')
# fmt: off
wf.connect([
- (inputnode, trunc, [("in_files", "op1")]),
- (inputnode, inu_n4_final, [("in_files", "input_image")]),
- (inputnode, init_aff, [("in_mask", "fixed_image_mask")]),
- (inputnode, norm, [("in_mask", fixed_mask_trait)]),
- (inputnode, map_brainmask, [(("in_files", _pop), "reference_image")]),
- (trunc, inu_n4, [("output_image", "input_image")]),
- (inu_n4, res_target, [(("output_image", _pop), "in_file")]),
- (res_tmpl, init_aff, [("out_file", "fixed_image")]),
- (res_target, init_aff, [("out_file", "moving_image")]),
- (init_aff, norm, [("output_transform", "initial_moving_transform")]),
+ (inputnode, trunc, [('in_files', 'op1')]),
+ (inputnode, inu_n4_final, [('in_files', 'input_image')]),
+ (inputnode, init_aff, [('in_mask', 'fixed_image_mask')]),
+ (inputnode, norm, [('in_mask', fixed_mask_trait)]),
+ (inputnode, map_brainmask, [(('in_files', _pop), 'reference_image')]),
+ (trunc, inu_n4, [('output_image', 'input_image')]),
+ (inu_n4, res_target, [(('output_image', _pop), 'in_file')]),
+ (res_tmpl, init_aff, [('out_file', 'fixed_image')]),
+ (res_target, init_aff, [('out_file', 'moving_image')]),
+ (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
(norm, map_brainmask, [
- ("reverse_transforms", "transforms"),
- ("reverse_invert_flags", "invert_transform_flags"),
+ ('reverse_transforms', 'transforms'),
+ ('reverse_invert_flags', 'invert_transform_flags'),
]),
- (map_brainmask, thr_brainmask, [("output_image", "input_image")]),
- (map_brainmask, inu_n4_final, [("output_image", "weight_image")]),
- (inu_n4_final, apply_mask, [("output_image", "in_file")]),
- (thr_brainmask, apply_mask, [("output_image", "in_mask")]),
- (thr_brainmask, outputnode, [("output_image", "out_mask")]),
- (inu_n4_final, outputnode, [("output_image", "bias_corrected"),
- ("bias_image", "bias_image")]),
- (apply_mask, outputnode, [("out_file", "out_file")]),
+ (map_brainmask, thr_brainmask, [('output_image', 'input_image')]),
+ (map_brainmask, inu_n4_final, [('output_image', 'weight_image')]),
+ (inu_n4_final, apply_mask, [('output_image', 'in_file')]),
+ (thr_brainmask, apply_mask, [('output_image', 'in_mask')]),
+ (thr_brainmask, outputnode, [('output_image', 'out_mask')]),
+ (inu_n4_final, outputnode, [('output_image', 'bias_corrected'),
+ ('bias_image', 'bias_image')]),
+ (apply_mask, outputnode, [('out_file', 'out_file')]),
])
# fmt: on
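
For readers new to Nipype, the connection lists above use the form (source_node, dest_node, [(source_port, dest_port), ...]), and a (port, function) tuple such as (('in_files', _pop), ...) applies the function to the value before it reaches the destination. A self-contained sketch:

    from nipype.interfaces import utility as niu
    from nipype.pipeline import engine as pe

    wf = pe.Workflow('demo')
    a = pe.Node(niu.IdentityInterface(fields=['x']), name='a')
    b = pe.Node(niu.IdentityInterface(fields=['y']), name='b')
    wf.connect([(a, b, [('x', 'y')])])  # route a.outputs.x into b.inputs.y
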
- wm_tpm = (
- get_template(in_template, label="WM", suffix="probseg", **common_spec) or None
- )
+ wm_tpm = get_template(in_template, label='WM', suffix='probseg', **common_spec) or None
if wm_tpm:
map_wmmask = pe.Node(
- ApplyTransforms(interpolation="Gaussian"), name="map_wmmask", mem_gb=1,
+ ApplyTransforms(interpolation='Gaussian'),
+ name='map_wmmask',
+ mem_gb=1,
)
# Add the brain stem if it is found.
- bstem_tpm = (
- get_template(in_template, label="BS", suffix="probseg", **common_spec)
- or None
- )
+ bstem_tpm = get_template(in_template, label='BS', suffix='probseg', **common_spec) or None
if bstem_tpm:
- full_wm = pe.Node(niu.Function(function=_imsum), name="full_wm")
+ full_wm = pe.Node(niu.Function(function=_imsum), name='full_wm')
full_wm.inputs.op1 = str(wm_tpm)
full_wm.inputs.op2 = str(bstem_tpm)
# fmt: off
wf.connect([
- (full_wm, map_wmmask, [("out", "input_image")])
+ (full_wm, map_wmmask, [('out', 'input_image')])
])
# fmt: on
else:
map_wmmask.inputs.input_image = str(wm_tpm)
# fmt: off
wf.disconnect([
- (map_brainmask, inu_n4_final, [("output_image", "weight_image")]),
+ (map_brainmask, inu_n4_final, [('output_image', 'weight_image')]),
])
wf.connect([
- (inputnode, map_wmmask, [(("in_files", _pop), "reference_image")]),
+ (inputnode, map_wmmask, [(('in_files', _pop), 'reference_image')]),
(norm, map_wmmask, [
- ("reverse_transforms", "transforms"),
- ("reverse_invert_flags", "invert_transform_flags"),
+ ('reverse_transforms', 'transforms'),
+ ('reverse_invert_flags', 'invert_transform_flags'),
]),
- (map_wmmask, inu_n4_final, [("output_image", "weight_image")]),
+ (map_wmmask, inu_n4_final, [('output_image', 'weight_image')]),
])
# fmt: on
if use_laplacian:
lap_tmpl = pe.Node(
- ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True),
- name="lap_tmpl",
+ ImageMath(operation='Laplacian', op2='1.5 1', copy_header=True),
+ name='lap_tmpl',
)
lap_tmpl.inputs.op1 = tpl_target_path
lap_target = pe.Node(
- ImageMath(operation="Laplacian", op2="1.5 1", copy_header=True),
- name="lap_target",
+ ImageMath(operation='Laplacian', op2='1.5 1', copy_header=True),
+ name='lap_target',
)
- mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl")
+ mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl')
mrg_tmpl.inputs.in1 = tpl_target_path
- mrg_target = pe.Node(niu.Merge(2), name="mrg_target")
+ mrg_target = pe.Node(niu.Merge(2), name='mrg_target')
# fmt: off
wf.connect([
- (inu_n4, lap_target, [(("output_image", _pop), "op1")]),
- (lap_tmpl, mrg_tmpl, [("output_image", "in2")]),
- (inu_n4, mrg_target, [("output_image", "in1")]),
- (lap_target, mrg_target, [("output_image", "in2")]),
- (mrg_tmpl, norm, [("out", "fixed_image")]),
- (mrg_target, norm, [("out", "moving_image")]),
+ (inu_n4, lap_target, [(('output_image', _pop), 'op1')]),
+ (lap_tmpl, mrg_tmpl, [('output_image', 'in2')]),
+ (inu_n4, mrg_target, [('output_image', 'in1')]),
+ (lap_target, mrg_target, [('output_image', 'in2')]),
+ (mrg_tmpl, norm, [('out', 'fixed_image')]),
+ (mrg_target, norm, [('out', 'moving_image')]),
])
# fmt: on
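
The two Merge(2) nodes above assemble [intensity, Laplacian] pairs so that norm receives two-channel fixed and moving images. Merge packs its in1/in2 values into a single list output; a standalone sketch with hypothetical filenames:

    from nipype.interfaces import utility as niu

    mrg = niu.Merge(2)
    mrg.inputs.in1 = 'T1w.nii.gz'            # hypothetical intensity image
    mrg.inputs.in2 = 'T1w_laplacian.nii.gz'  # hypothetical Laplacian channel
    res = mrg.run()
    assert res.outputs.out == ['T1w.nii.gz', 'T1w_laplacian.nii.gz']
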
@@ -786,7 +786,7 @@ Source code for niworkflows.anat.ants
norm.inputs.fixed_image = tpl_target_path
# fmt: off
wf.connect([
- (inu_n4, norm, [(("output_image", _pop), "moving_image")]),
+ (inu_n4, norm, [(('output_image', _pop), 'moving_image')]),
])
# fmt: on
@@ -803,29 +803,29 @@ Source code for niworkflows.anat.ants
# fmt: off
wf.disconnect([
- (thr_brainmask, outputnode, [("output_image", "out_mask")]),
- (inu_n4_final, outputnode, [("output_image", "bias_corrected"),
- ("bias_image", "bias_image")]),
- (apply_mask, outputnode, [("out_file", "out_file")]),
+ (thr_brainmask, outputnode, [('output_image', 'out_mask')]),
+ (inu_n4_final, outputnode, [('output_image', 'bias_corrected'),
+ ('bias_image', 'bias_image')]),
+ (apply_mask, outputnode, [('out_file', 'out_file')]),
])
wf.connect([
- (inputnode, atropos_wf, [("in_files", "inputnode.in_files")]),
- (inu_n4_final, atropos_wf, [("output_image", "inputnode.in_corrected")]),
- (thr_brainmask, atropos_wf, [("output_image", "inputnode.in_mask")]),
+ (inputnode, atropos_wf, [('in_files', 'inputnode.in_files')]),
+ (inu_n4_final, atropos_wf, [('output_image', 'inputnode.in_corrected')]),
+ (thr_brainmask, atropos_wf, [('output_image', 'inputnode.in_mask')]),
(atropos_wf, outputnode, [
- ("outputnode.out_file", "out_file"),
- ("outputnode.bias_corrected", "bias_corrected"),
- ("outputnode.bias_image", "bias_image"),
- ("outputnode.out_mask", "out_mask"),
- ("outputnode.out_segm", "out_segm"),
- ("outputnode.out_tpms", "out_tpms"),
+ ('outputnode.out_file', 'out_file'),
+ ('outputnode.bias_corrected', 'bias_corrected'),
+ ('outputnode.bias_image', 'bias_image'),
+ ('outputnode.out_mask', 'out_mask'),
+ ('outputnode.out_segm', 'out_segm'),
+ ('outputnode.out_tpms', 'out_tpms'),
]),
])
# fmt: on
if wm_tpm:
# fmt: off
wf.connect([
- (map_wmmask, atropos_wf, [("output_image", "inputnode.wm_prior")]),
+ (map_wmmask, atropos_wf, [('output_image', 'inputnode.wm_prior')]),
])
# fmt: on
return wf
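
A minimal usage sketch for the workflow defined above; the file name and thread count are illustrative only:

    from niworkflows.anat.ants import init_brain_extraction_wf

    wf = init_brain_extraction_wf(in_template='OASIS30ANTs', omp_nthreads=8)
    wf.inputs.inputnode.in_files = ['sub-01_T1w.nii.gz']  # hypothetical input
    wf.base_dir = 'work'
    # wf.run() would execute the full ANTs-style brain extraction
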
@@ -835,12 +835,12 @@ Source code for niworkflows.anat.ants
def init_atropos_wf(
- name="atropos_wf",
+ name='atropos_wf',
use_random_seed=True,
omp_nthreads=None,
mem_gb=3.0,
padding=10,
- in_segmentation_model=tuple(ATROPOS_MODELS["T1w"].values()),
+ in_segmentation_model=T1W_MODEL,
bspline_fitting_distance=200,
wm_prior=False,
):
@@ -926,30 +926,28 @@ Source code for niworkflows.anat.ants
"""
wf = pe.Workflow(name)
- out_fields = ["bias_corrected", "bias_image", "out_mask", "out_segm", "out_tpms"]
+ out_fields = ['bias_corrected', 'bias_image', 'out_mask', 'out_segm', 'out_tpms']
inputnode = pe.Node(
- niu.IdentityInterface(
- fields=["in_files", "in_corrected", "in_mask", "wm_prior"]
- ),
- name="inputnode",
+ niu.IdentityInterface(fields=['in_files', 'in_corrected', 'in_mask', 'wm_prior']),
+ name='inputnode',
)
outputnode = pe.Node(
- niu.IdentityInterface(fields=["out_file"] + out_fields), name="outputnode"
+ niu.IdentityInterface(fields=['out_file'] + out_fields), name='outputnode'
)
copy_xform = pe.Node(
- CopyXForm(fields=out_fields), name="copy_xform", run_without_submitting=True
+ CopyXForm(fields=out_fields), name='copy_xform', run_without_submitting=True
)
# Morphological dilation, radius=2
dil_brainmask = pe.Node(
- ImageMath(operation="MD", op2="2", copy_header=True), name="dil_brainmask"
+ ImageMath(operation='MD', op2='2', copy_header=True), name='dil_brainmask'
)
# Get largest connected component
get_brainmask = pe.Node(
- ImageMath(operation="GetLargestComponent", copy_header=True),
- name="get_brainmask",
+ ImageMath(operation='GetLargestComponent', copy_header=True),
+ name='get_brainmask',
)
# Run atropos (core node)
@@ -957,8 +955,8 @@ Source code for niworkflows.anat.ants
Atropos(
convergence_threshold=0.0,
dimension=3,
- initialization="KMeans",
- likelihood_model="Gaussian",
+ initialization='KMeans',
+ likelihood_model='Gaussian',
mrf_radius=[1, 1, 1],
mrf_smoothing_factor=0.1,
n_iterations=3,
@@ -966,42 +964,40 @@ Source code for niworkflows.anat.ants
save_posteriors=True,
use_random_seed=use_random_seed,
),
- name="01_atropos",
+ name='01_atropos',
n_procs=omp_nthreads,
mem_gb=mem_gb,
)
# massage outputs
pad_segm = pe.Node(
- ImageMath(operation="PadImage", op2=f"{padding}", copy_header=False),
- name="02_pad_segm",
+ ImageMath(operation='PadImage', op2=f'{padding}', copy_header=False),
+ name='02_pad_segm',
)
pad_mask = pe.Node(
- ImageMath(operation="PadImage", op2=f"{padding}", copy_header=False),
- name="03_pad_mask",
+ ImageMath(operation='PadImage', op2=f'{padding}', copy_header=False),
+ name='03_pad_mask',
)
# Split segmentation in binary masks
sel_labels = pe.Node(
- niu.Function(
- function=_select_labels, output_names=["out_wm", "out_gm", "out_csf"]
- ),
- name="04_sel_labels",
+ niu.Function(function=_select_labels, output_names=['out_wm', 'out_gm', 'out_csf']),
+ name='04_sel_labels',
)
sel_labels.inputs.labels = list(reversed(in_segmentation_model[1:]))
# Select largest components (GM, WM)
# ImageMath ${DIMENSION} ${EXTRACTION_WM} GetLargestComponent ${EXTRACTION_WM}
- get_wm = pe.Node(ImageMath(operation="GetLargestComponent"), name="05_get_wm")
- get_gm = pe.Node(ImageMath(operation="GetLargestComponent"), name="06_get_gm")
+ get_wm = pe.Node(ImageMath(operation='GetLargestComponent'), name='05_get_wm')
+ get_gm = pe.Node(ImageMath(operation='GetLargestComponent'), name='06_get_gm')
# Fill holes and calculate intersection
# ImageMath ${DIMENSION} ${EXTRACTION_TMP} FillHoles ${EXTRACTION_GM} 2
# MultiplyImages ${DIMENSION} ${EXTRACTION_GM} ${EXTRACTION_TMP} ${EXTRACTION_GM}
- fill_gm = pe.Node(ImageMath(operation="FillHoles", op2="2"), name="07_fill_gm")
+ fill_gm = pe.Node(ImageMath(operation='FillHoles', op2='2'), name='07_fill_gm')
mult_gm = pe.Node(
- MultiplyImages(dimension=3, output_product_image="08_mult_gm.nii.gz"),
- name="08_mult_gm",
+ MultiplyImages(dimension=3, output_product_image='08_mult_gm.nii.gz'),
+ name='08_mult_gm',
)
# MultiplyImages ${DIMENSION} ${EXTRACTION_WM} ${ATROPOS_WM_CLASS_LABEL} ${EXTRACTION_WM}
@@ -1010,78 +1006,72 @@ Source code for niworkflows.anat.ants
MultiplyImages(
dimension=3,
second_input=in_segmentation_model[-1],
- output_product_image="09_relabel_wm.nii.gz",
+ output_product_image='09_relabel_wm.nii.gz',
),
- name="09_relabel_wm",
+ name='09_relabel_wm',
)
- me_csf = pe.Node(ImageMath(operation="ME", op2="10"), name="10_me_csf")
+ me_csf = pe.Node(ImageMath(operation='ME', op2='10'), name='10_me_csf')
# ImageMath ${DIMENSION} ${EXTRACTION_GM} addtozero ${EXTRACTION_GM} ${EXTRACTION_TMP}
# MultiplyImages ${DIMENSION} ${EXTRACTION_GM} ${ATROPOS_GM_CLASS_LABEL} ${EXTRACTION_GM}
# ImageMath ${DIMENSION} ${EXTRACTION_SEGMENTATION} addtozero ${EXTRACTION_WM} ${EXTRACTION_GM}
- add_gm = pe.Node(ImageMath(operation="addtozero"), name="11_add_gm")
+ add_gm = pe.Node(ImageMath(operation='addtozero'), name='11_add_gm')
relabel_gm = pe.Node(
MultiplyImages(
dimension=3,
second_input=in_segmentation_model[-2],
- output_product_image="12_relabel_gm.nii.gz",
+ output_product_image='12_relabel_gm.nii.gz',
),
- name="12_relabel_gm",
+ name='12_relabel_gm',
)
- add_gm_wm = pe.Node(ImageMath(operation="addtozero"), name="13_add_gm_wm")
+ add_gm_wm = pe.Node(ImageMath(operation='addtozero'), name='13_add_gm_wm')
# Superstep 7
# Split segmentation in binary masks
sel_labels2 = pe.Node(
- niu.Function(function=_select_labels, output_names=["out_gm", "out_wm"]),
- name="14_sel_labels2",
+ niu.Function(function=_select_labels, output_names=['out_gm', 'out_wm']),
+ name='14_sel_labels2',
)
sel_labels2.inputs.labels = in_segmentation_model[2:]
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} addtozero ${EXTRACTION_MASK} ${EXTRACTION_TMP}
- add_7 = pe.Node(ImageMath(operation="addtozero"), name="15_add_7")
+ add_7 = pe.Node(ImageMath(operation='addtozero'), name='15_add_7')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} ME ${EXTRACTION_MASK} 2
- me_7 = pe.Node(ImageMath(operation="ME", op2="2"), name="16_me_7")
+ me_7 = pe.Node(ImageMath(operation='ME', op2='2'), name='16_me_7')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} GetLargestComponent ${EXTRACTION_MASK}
- comp_7 = pe.Node(ImageMath(operation="GetLargestComponent"), name="17_comp_7")
+ comp_7 = pe.Node(ImageMath(operation='GetLargestComponent'), name='17_comp_7')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} MD ${EXTRACTION_MASK} 4
- md_7 = pe.Node(ImageMath(operation="MD", op2="4"), name="18_md_7")
+ md_7 = pe.Node(ImageMath(operation='MD', op2='4'), name='18_md_7')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} FillHoles ${EXTRACTION_MASK} 2
- fill_7 = pe.Node(ImageMath(operation="FillHoles", op2="2"), name="19_fill_7")
+ fill_7 = pe.Node(ImageMath(operation='FillHoles', op2='2'), name='19_fill_7')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} addtozero ${EXTRACTION_MASK} \
# ${EXTRACTION_MASK_PRIOR_WARPED}
- add_7_2 = pe.Node(ImageMath(operation="addtozero"), name="20_add_7_2")
+ add_7_2 = pe.Node(ImageMath(operation='addtozero'), name='20_add_7_2')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} MD ${EXTRACTION_MASK} 5
- md_7_2 = pe.Node(ImageMath(operation="MD", op2="5"), name="21_md_7_2")
+ md_7_2 = pe.Node(ImageMath(operation='MD', op2='5'), name='21_md_7_2')
# ImageMath ${DIMENSION} ${EXTRACTION_MASK} ME ${EXTRACTION_MASK} 5
- me_7_2 = pe.Node(ImageMath(operation="ME", op2="5"), name="22_me_7_2")
+ me_7_2 = pe.Node(ImageMath(operation='ME', op2='5'), name='22_me_7_2')
# De-pad
depad_mask = pe.Node(
- ImageMath(operation="PadImage", op2="-%d" % padding), name="23_depad_mask"
+ ImageMath(operation='PadImage', op2='-%d' % padding), name='23_depad_mask'
)
depad_segm = pe.Node(
- ImageMath(operation="PadImage", op2="-%d" % padding), name="24_depad_segm"
- )
- depad_gm = pe.Node(
- ImageMath(operation="PadImage", op2="-%d" % padding), name="25_depad_gm"
- )
- depad_wm = pe.Node(
- ImageMath(operation="PadImage", op2="-%d" % padding), name="26_depad_wm"
- )
- depad_csf = pe.Node(
- ImageMath(operation="PadImage", op2="-%d" % padding), name="27_depad_csf"
+ ImageMath(operation='PadImage', op2='-%d' % padding), name='24_depad_segm'
)
+ depad_gm = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding), name='25_depad_gm')
+ depad_wm = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding), name='26_depad_wm')
+ depad_csf = pe.Node(ImageMath(operation='PadImage', op2='-%d' % padding), name='27_depad_csf')
- msk_conform = pe.Node(niu.Function(function=_conform_mask), name="msk_conform")
- merge_tpms = pe.Node(niu.Merge(in_segmentation_model[0]), name="merge_tpms")
+ msk_conform = pe.Node(niu.Function(function=_conform_mask), name='msk_conform')
+ merge_tpms = pe.Node(niu.Merge(in_segmentation_model[0]), name='merge_tpms')
- sel_wm = pe.Node(niu.Select(), name="sel_wm", run_without_submitting=True)
+ sel_wm = pe.Node(niu.Select(), name='sel_wm', run_without_submitting=True)
if not wm_prior:
sel_wm.inputs.index = in_segmentation_model[-1] - 1
copy_xform_wm = pe.Node(
- CopyXForm(fields=["wm_map"]), name="copy_xform_wm", run_without_submitting=True
+ CopyXForm(fields=['wm_map']), name='copy_xform_wm', run_without_submitting=True
)
# Refine INU correction
@@ -1096,8 +1086,8 @@ Source code for niworkflows.anat.ants
bspline_fitting_distance=bspline_fitting_distance,
),
n_procs=omp_nthreads,
- name="inu_n4_final",
- iterfield=["input_image"],
+ name='inu_n4_final',
+ iterfield=['input_image'],
)
try:
@@ -1105,74 +1095,75 @@ Source code for niworkflows.anat.ants
except ValueError:
warn(
"N4BiasFieldCorrection's --rescale-intensities option was added in ANTS 2.1.0 "
- f"({inu_n4_final.interface.version} found.) Please consider upgrading.",
+ f'({inu_n4_final.interface.version} found.) Please consider upgrading.',
UserWarning,
+ stacklevel=1,
)
# Apply mask
- apply_mask = pe.MapNode(ApplyMask(), iterfield=["in_file"], name="apply_mask")
+ apply_mask = pe.MapNode(ApplyMask(), iterfield=['in_file'], name='apply_mask')
# fmt: off
wf.connect([
- (inputnode, dil_brainmask, [("in_mask", "op1")]),
- (inputnode, copy_xform, [(("in_files", _pop), "hdr_file")]),
- (inputnode, copy_xform_wm, [(("in_files", _pop), "hdr_file")]),
- (inputnode, pad_mask, [("in_mask", "op1")]),
- (inputnode, atropos, [("in_corrected", "intensity_images")]),
- (inputnode, inu_n4_final, [("in_files", "input_image")]),
- (inputnode, msk_conform, [(("in_files", _pop), "in_reference")]),
- (dil_brainmask, get_brainmask, [("output_image", "op1")]),
- (get_brainmask, atropos, [("output_image", "mask_image")]),
- (atropos, pad_segm, [("classified_image", "op1")]),
- (pad_segm, sel_labels, [("output_image", "in_segm")]),
- (sel_labels, get_wm, [("out_wm", "op1")]),
- (sel_labels, get_gm, [("out_gm", "op1")]),
- (get_gm, fill_gm, [("output_image", "op1")]),
- (get_gm, mult_gm, [("output_image", "first_input")]),
- (fill_gm, mult_gm, [("output_image", "second_input")]),
- (get_wm, relabel_wm, [("output_image", "first_input")]),
- (sel_labels, me_csf, [("out_csf", "op1")]),
- (mult_gm, add_gm, [("output_product_image", "op1")]),
- (me_csf, add_gm, [("output_image", "op2")]),
- (add_gm, relabel_gm, [("output_image", "first_input")]),
- (relabel_wm, add_gm_wm, [("output_product_image", "op1")]),
- (relabel_gm, add_gm_wm, [("output_product_image", "op2")]),
- (add_gm_wm, sel_labels2, [("output_image", "in_segm")]),
- (sel_labels2, add_7, [("out_wm", "op1"), ("out_gm", "op2")]),
- (add_7, me_7, [("output_image", "op1")]),
- (me_7, comp_7, [("output_image", "op1")]),
- (comp_7, md_7, [("output_image", "op1")]),
- (md_7, fill_7, [("output_image", "op1")]),
- (fill_7, add_7_2, [("output_image", "op1")]),
- (pad_mask, add_7_2, [("output_image", "op2")]),
- (add_7_2, md_7_2, [("output_image", "op1")]),
- (md_7_2, me_7_2, [("output_image", "op1")]),
- (me_7_2, depad_mask, [("output_image", "op1")]),
- (add_gm_wm, depad_segm, [("output_image", "op1")]),
- (relabel_wm, depad_wm, [("output_product_image", "op1")]),
- (relabel_gm, depad_gm, [("output_product_image", "op1")]),
- (sel_labels, depad_csf, [("out_csf", "op1")]),
- (depad_csf, merge_tpms, [("output_image", "in1")]),
- (depad_gm, merge_tpms, [("output_image", "in2")]),
- (depad_wm, merge_tpms, [("output_image", "in3")]),
- (depad_mask, msk_conform, [("output_image", "in_mask")]),
- (msk_conform, copy_xform, [("out", "out_mask")]),
- (depad_segm, copy_xform, [("output_image", "out_segm")]),
- (merge_tpms, copy_xform, [("out", "out_tpms")]),
- (atropos, sel_wm, [("posteriors", "inlist")]),
- (sel_wm, copy_xform_wm, [("out", "wm_map")]),
- (copy_xform_wm, inu_n4_final, [("wm_map", "weight_image")]),
- (inu_n4_final, copy_xform, [("output_image", "bias_corrected"),
- ("bias_image", "bias_image")]),
- (copy_xform, apply_mask, [("bias_corrected", "in_file"),
- ("out_mask", "in_mask")]),
- (apply_mask, outputnode, [("out_file", "out_file")]),
+ (inputnode, dil_brainmask, [('in_mask', 'op1')]),
+ (inputnode, copy_xform, [(('in_files', _pop), 'hdr_file')]),
+ (inputnode, copy_xform_wm, [(('in_files', _pop), 'hdr_file')]),
+ (inputnode, pad_mask, [('in_mask', 'op1')]),
+ (inputnode, atropos, [('in_corrected', 'intensity_images')]),
+ (inputnode, inu_n4_final, [('in_files', 'input_image')]),
+ (inputnode, msk_conform, [(('in_files', _pop), 'in_reference')]),
+ (dil_brainmask, get_brainmask, [('output_image', 'op1')]),
+ (get_brainmask, atropos, [('output_image', 'mask_image')]),
+ (atropos, pad_segm, [('classified_image', 'op1')]),
+ (pad_segm, sel_labels, [('output_image', 'in_segm')]),
+ (sel_labels, get_wm, [('out_wm', 'op1')]),
+ (sel_labels, get_gm, [('out_gm', 'op1')]),
+ (get_gm, fill_gm, [('output_image', 'op1')]),
+ (get_gm, mult_gm, [('output_image', 'first_input')]),
+ (fill_gm, mult_gm, [('output_image', 'second_input')]),
+ (get_wm, relabel_wm, [('output_image', 'first_input')]),
+ (sel_labels, me_csf, [('out_csf', 'op1')]),
+ (mult_gm, add_gm, [('output_product_image', 'op1')]),
+ (me_csf, add_gm, [('output_image', 'op2')]),
+ (add_gm, relabel_gm, [('output_image', 'first_input')]),
+ (relabel_wm, add_gm_wm, [('output_product_image', 'op1')]),
+ (relabel_gm, add_gm_wm, [('output_product_image', 'op2')]),
+ (add_gm_wm, sel_labels2, [('output_image', 'in_segm')]),
+ (sel_labels2, add_7, [('out_wm', 'op1'), ('out_gm', 'op2')]),
+ (add_7, me_7, [('output_image', 'op1')]),
+ (me_7, comp_7, [('output_image', 'op1')]),
+ (comp_7, md_7, [('output_image', 'op1')]),
+ (md_7, fill_7, [('output_image', 'op1')]),
+ (fill_7, add_7_2, [('output_image', 'op1')]),
+ (pad_mask, add_7_2, [('output_image', 'op2')]),
+ (add_7_2, md_7_2, [('output_image', 'op1')]),
+ (md_7_2, me_7_2, [('output_image', 'op1')]),
+ (me_7_2, depad_mask, [('output_image', 'op1')]),
+ (add_gm_wm, depad_segm, [('output_image', 'op1')]),
+ (relabel_wm, depad_wm, [('output_product_image', 'op1')]),
+ (relabel_gm, depad_gm, [('output_product_image', 'op1')]),
+ (sel_labels, depad_csf, [('out_csf', 'op1')]),
+ (depad_csf, merge_tpms, [('output_image', 'in1')]),
+ (depad_gm, merge_tpms, [('output_image', 'in2')]),
+ (depad_wm, merge_tpms, [('output_image', 'in3')]),
+ (depad_mask, msk_conform, [('output_image', 'in_mask')]),
+ (msk_conform, copy_xform, [('out', 'out_mask')]),
+ (depad_segm, copy_xform, [('output_image', 'out_segm')]),
+ (merge_tpms, copy_xform, [('out', 'out_tpms')]),
+ (atropos, sel_wm, [('posteriors', 'inlist')]),
+ (sel_wm, copy_xform_wm, [('out', 'wm_map')]),
+ (copy_xform_wm, inu_n4_final, [('wm_map', 'weight_image')]),
+ (inu_n4_final, copy_xform, [('output_image', 'bias_corrected'),
+ ('bias_image', 'bias_image')]),
+ (copy_xform, apply_mask, [('bias_corrected', 'in_file'),
+ ('out_mask', 'in_mask')]),
+ (apply_mask, outputnode, [('out_file', 'out_file')]),
(copy_xform, outputnode, [
- ("bias_corrected", "bias_corrected"),
- ("bias_image", "bias_image"),
- ("out_mask", "out_mask"),
- ("out_segm", "out_segm"),
- ("out_tpms", "out_tpms"),
+ ('bias_corrected', 'bias_corrected'),
+ ('bias_image', 'bias_image'),
+ ('out_mask', 'out_mask'),
+ ('out_segm', 'out_segm'),
+ ('out_tpms', 'out_tpms'),
]),
])
# fmt: on
@@ -1187,27 +1178,27 @@ Source code for niworkflows.anat.ants
match_wm = pe.Node(
niu.Function(function=_matchlen),
- name="match_wm",
+ name='match_wm',
run_without_submitting=True,
)
- overlap = pe.Node(FuzzyOverlap(), name="overlap", run_without_submitting=True)
+ overlap = pe.Node(FuzzyOverlap(), name='overlap', run_without_submitting=True)
- apply_wm_prior = pe.Node(niu.Function(function=_improd), name="apply_wm_prior")
+ apply_wm_prior = pe.Node(niu.Function(function=_improd), name='apply_wm_prior')
# fmt: off
wf.disconnect([
- (copy_xform_wm, inu_n4_final, [("wm_map", "weight_image")]),
+ (copy_xform_wm, inu_n4_final, [('wm_map', 'weight_image')]),
])
wf.connect([
- (inputnode, apply_wm_prior, [("in_mask", "in_mask"),
- ("wm_prior", "op2")]),
- (inputnode, match_wm, [("wm_prior", "value")]),
- (atropos, match_wm, [("posteriors", "reference")]),
- (atropos, overlap, [("posteriors", "in_ref")]),
- (match_wm, overlap, [("out", "in_tst")]),
- (overlap, sel_wm, [(("class_fdi", _argmax), "index")]),
- (copy_xform_wm, apply_wm_prior, [("wm_map", "op1")]),
- (apply_wm_prior, inu_n4_final, [("out", "weight_image")]),
+ (inputnode, apply_wm_prior, [('in_mask', 'in_mask'),
+ ('wm_prior', 'op2')]),
+ (inputnode, match_wm, [('wm_prior', 'value')]),
+ (atropos, match_wm, [('posteriors', 'reference')]),
+ (atropos, overlap, [('posteriors', 'in_ref')]),
+ (match_wm, overlap, [('out', 'in_tst')]),
+ (overlap, sel_wm, [(('class_fdi', _argmax), 'index')]),
+ (copy_xform_wm, apply_wm_prior, [('wm_map', 'op1')]),
+ (apply_wm_prior, inu_n4_final, [('out', 'weight_image')]),
])
# fmt: on
return wf
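
And a hedged sketch of driving the segmentation sub-workflow directly, with illustrative inputs; note how the T2w model reorders the class labels relative to T1W_MODEL:

    from niworkflows.anat.ants import ATROPOS_MODELS, init_atropos_wf

    atropos_wf = init_atropos_wf(
        in_segmentation_model=tuple(ATROPOS_MODELS['T2w'].values()),  # (3, 3, 2, 1)
        omp_nthreads=4,
    )
    atropos_wf.inputs.inputnode.in_files = ['sub-01_T2w.nii.gz']         # hypothetical
    atropos_wf.inputs.inputnode.in_corrected = ['sub-01_T2w_n4.nii.gz']  # hypothetical
    atropos_wf.inputs.inputnode.in_mask = 'sub-01_brainmask.nii.gz'      # hypothetical
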
@@ -1220,9 +1211,9 @@ Source code for niworkflows.anat.ants
atropos_model=None,
atropos_refine=True,
atropos_use_random_seed=True,
- bids_suffix="T1w",
+ bids_suffix='T1w',
mem_gb=3.0,
- name="n4_only_wf",
+ name='n4_only_wf',
omp_nthreads=None,
):
"""
@@ -1296,26 +1287,24 @@ Source code for niworkflows.anat.ants
wf = pe.Workflow(name)
- inputnode = pe.Node(
- niu.IdentityInterface(fields=["in_files", "in_mask"]), name="inputnode"
- )
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_files', 'in_mask']), name='inputnode')
outputnode = pe.Node(
niu.IdentityInterface(
fields=[
- "out_file",
- "out_mask",
- "bias_corrected",
- "bias_image",
- "out_segm",
- "out_tpms",
+ 'out_file',
+ 'out_mask',
+ 'bias_corrected',
+ 'bias_image',
+ 'out_segm',
+ 'out_tpms',
]
),
- name="outputnode",
+ name='outputnode',
)
# Create brain mask
- thr_brainmask = pe.Node(Binarize(thresh_low=2), name="binarize")
+ thr_brainmask = pe.Node(Binarize(thresh_low=2), name='binarize')
# INU correction
inu_n4_final = pe.MapNode(
@@ -1329,8 +1318,8 @@ Source code for niworkflows.anat.ants
bspline_fitting_distance=200,
),
n_procs=omp_nthreads,
- name="inu_n4_final",
- iterfield=["input_image"],
+ name='inu_n4_final',
+ iterfield=['input_image'],
)
# Check ANTs version
@@ -1339,18 +1328,19 @@ Source code for niworkflows.anat.ants
except ValueError:
warn(
"N4BiasFieldCorrection's --rescale-intensities option was added in ANTS 2.1.0 "
- f"({inu_n4_final.interface.version} found.) Please consider upgrading.",
+ f'({inu_n4_final.interface.version} found.) Please consider upgrading.',
UserWarning,
+ stacklevel=1,
)
# fmt: off
wf.connect([
- (inputnode, inu_n4_final, [("in_files", "input_image")]),
- (inputnode, thr_brainmask, [(("in_files", _pop), "in_file")]),
- (thr_brainmask, outputnode, [("out_mask", "out_mask")]),
- (inu_n4_final, outputnode, [("output_image", "out_file"),
- ("output_image", "bias_corrected"),
- ("bias_image", "bias_image")]),
+ (inputnode, inu_n4_final, [('in_files', 'input_image')]),
+ (inputnode, thr_brainmask, [(('in_files', _pop), 'in_file')]),
+ (thr_brainmask, outputnode, [('out_mask', 'out_mask')]),
+ (inu_n4_final, outputnode, [('output_image', 'out_file'),
+ ('output_image', 'bias_corrected'),
+ ('bias_image', 'bias_image')]),
])
# fmt: on
@@ -1366,20 +1356,20 @@ Source code for niworkflows.anat.ants
# fmt: off
wf.disconnect([
- (inu_n4_final, outputnode, [("output_image", "out_file"),
- ("output_image", "bias_corrected"),
- ("bias_image", "bias_image")]),
+ (inu_n4_final, outputnode, [('output_image', 'out_file'),
+ ('output_image', 'bias_corrected'),
+ ('bias_image', 'bias_image')]),
])
wf.connect([
- (inputnode, atropos_wf, [("in_files", "inputnode.in_files")]),
- (inu_n4_final, atropos_wf, [("output_image", "inputnode.in_corrected")]),
- (thr_brainmask, atropos_wf, [("out_mask", "inputnode.in_mask")]),
+ (inputnode, atropos_wf, [('in_files', 'inputnode.in_files')]),
+ (inu_n4_final, atropos_wf, [('output_image', 'inputnode.in_corrected')]),
+ (thr_brainmask, atropos_wf, [('out_mask', 'inputnode.in_mask')]),
(atropos_wf, outputnode, [
- ("outputnode.out_file", "out_file"),
- ("outputnode.bias_corrected", "bias_corrected"),
- ("outputnode.bias_image", "bias_image"),
- ("outputnode.out_segm", "out_segm"),
- ("outputnode.out_tpms", "out_tpms"),
+ ('outputnode.out_file', 'out_file'),
+ ('outputnode.bias_corrected', 'bias_corrected'),
+ ('outputnode.bias_image', 'bias_image'),
+ ('outputnode.out_segm', 'out_segm'),
+ ('outputnode.out_tpms', 'out_tpms'),
]),
])
# fmt: on
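
The enclosing definition (elided by the diff) appears to be init_n4_only_wf, the INU-correction-only path that derives its mask from a simple Binarize threshold rather than template registration. A usage sketch with an illustrative input:

    from niworkflows.anat.ants import init_n4_only_wf

    wf = init_n4_only_wf(omp_nthreads=4, atropos_refine=False)
    wf.inputs.inputnode.in_files = ['sub-01_T1w.nii.gz']  # hypothetical input
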
@@ -1390,19 +1380,20 @@ Source code for niworkflows.anat.ants
def _select_labels(in_segm, labels):
from os import getcwd
- import numpy as np
+
import nibabel as nb
+ import numpy as np
from nipype.utils.filemanip import fname_presuffix
out_files = []
cwd = getcwd()
nii = nb.load(in_segm)
- label_data = np.asanyarray(nii.dataobj).astype("uint8")
+ label_data = np.asanyarray(nii.dataobj).astype('uint8')
for label in labels:
newnii = nii.__class__(np.uint8(label_data == label), nii.affine, nii.header)
- newnii.set_data_dtype("uint8")
- out_file = fname_presuffix(in_segm, suffix="_class-%02d" % label, newpath=cwd)
+ newnii.set_data_dtype('uint8')
+ out_file = fname_presuffix(in_segm, suffix='_class-%02d' % label, newpath=cwd)
newnii.to_filename(out_file)
out_files.append(out_file)
return out_files
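
What _select_labels does per label is a plain equality test against the segmentation; a toy sketch of the binarization step (labels iterate in the reversed wm/gm/csf order used above):

    import numpy as np

    segm = np.array([[[0, 1], [2, 3]]], dtype='uint8')  # toy segmentation
    for label in (3, 2, 1):
        mask = np.uint8(segm == label)  # one binary mask per tissue class
        print(label, int(mask.sum()))   # each class covers one voxel here
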
@@ -1411,14 +1402,15 @@ Source code for niworkflows.anat.ants
def _conform_mask(in_mask, in_reference):
"""Ensures the mask headers make sense and match those of the T1w"""
from pathlib import Path
- import numpy as np
+
import nibabel as nb
+ import numpy as np
from nipype.utils.filemanip import fname_presuffix
ref = nb.load(in_reference)
nii = nb.load(in_mask)
hdr = nii.header.copy()
- hdr.set_data_dtype("int16")
+ hdr.set_data_dtype('int16')
hdr.set_slope_inter(1, 0)
qform, qcode = ref.header.get_qform(coded=True)
@@ -1429,15 +1421,15 @@ Source code for niworkflows.anat.ants
if scode is not None:
hdr.set_sform(sform, int(scode))
- if "_maths" in in_mask: # Cut the name at first _maths occurrence
- ext = "".join(Path(in_mask).suffixes)
+ if '_maths' in in_mask: # Cut the name at first _maths occurrence
+ ext = ''.join(Path(in_mask).suffixes)
basename = Path(in_mask).name
- in_mask = basename.split("_maths")[0] + ext
+ in_mask = basename.split('_maths')[0] + ext
- out_file = fname_presuffix(in_mask, suffix="_mask", newpath=str(Path()))
- nii.__class__(
- np.asanyarray(nii.dataobj).astype("int16"), ref.affine, hdr
- ).to_filename(out_file)
+ out_file = fname_presuffix(in_mask, suffix='_mask', newpath=str(Path()))
+ nii.__class__(np.asanyarray(nii.dataobj).astype('int16'), ref.affine, hdr).to_filename(
+ out_file
+ )
return out_file
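
The header surgery in _conform_mask copies the coded q/sform matrices from the reference onto the mask so downstream tools agree on orientation. A self-contained sketch on toy images (codes set explicitly so the example is deterministic):

    import nibabel as nb
    import numpy as np

    affine = np.diag([2.0, 2.0, 2.0, 1.0])
    ref = nb.Nifti1Image(np.zeros((4, 4, 4), dtype='int16'), affine)
    ref.header.set_qform(affine, 1)
    ref.header.set_sform(affine, 1)
    mask = nb.Nifti1Image(np.ones((4, 4, 4), dtype='int16'), np.eye(4))

    hdr = mask.header.copy()
    qform, qcode = ref.header.get_qform(coded=True)
    if qcode is not None:
        hdr.set_qform(qform, int(qcode))
    sform, scode = ref.header.get_sform(coded=True)
    if scode is not None:
        hdr.set_sform(sform, int(scode))
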
@@ -1450,14 +1442,14 @@ Source code for niworkflows.anat.ants
im1 = nb.load(op1)
- data = im1.get_fdata(dtype="float32") + nb.load(op2).get_fdata(dtype="float32")
+ data = im1.get_fdata(dtype='float32') + nb.load(op2).get_fdata(dtype='float32')
data /= data.max()
nii = nb.Nifti1Image(data, im1.affine, im1.header)
if out_file is None:
from pathlib import Path
- out_file = str((Path() / "summap.nii.gz").absolute())
+ out_file = str((Path() / 'summap.nii.gz').absolute())
nii.to_filename(out_file)
return out_file
@@ -1468,7 +1460,7 @@ Source code for niworkflows.anat.ants
im1 = nb.load(op1)
- data = im1.get_fdata(dtype="float32") * nb.load(op2).get_fdata(dtype="float32")
+ data = im1.get_fdata(dtype='float32') * nb.load(op2).get_fdata(dtype='float32')
mskdata = nb.load(in_mask).get_fdata() > 0
data[~mskdata] = 0
data[data < 0] = 0
@@ -1479,7 +1471,7 @@ Source code for niworkflows.anat.ants
if out_file is None:
from pathlib import Path
- out_file = str((Path() / "prodmap.nii.gz").absolute())
+ out_file = str((Path() / 'prodmap.nii.gz').absolute())
nii.to_filename(out_file)
return out_file
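
The arithmetic in _improd, reduced to its essentials: weight a posterior map by a warped prior, then zero everything outside the mask and clip negatives. A toy sketch:

    import numpy as np

    post = np.array([0.2, 0.8, 0.5], dtype='float32')   # toy posterior
    prior = np.array([1.0, 0.9, 0.1], dtype='float32')  # toy warped prior
    mask = np.array([True, True, False])
    data = post * prior
    data[~mask] = 0
    data[data < 0] = 0
    print(data)  # approximately [0.2, 0.72, 0.0]
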
diff --git a/master/_modules/niworkflows/anat/coregistration.html b/master/_modules/niworkflows/anat/coregistration.html
index 99613aa6653..b3f8f59f6c1 100644
--- a/master/_modules/niworkflows/anat/coregistration.html
+++ b/master/_modules/niworkflows/anat/coregistration.html
@@ -353,11 +353,12 @@ Source code for niworkflows.anat.coregistration
# https://www.nipreps.org/community/licensing/
#
"""Workflow for the registration of EPI datasets to anatomical space via reconstructed surfaces."""
-from nipype.pipeline import engine as pe
-from nipype.interfaces import utility as niu
+
from nipype import logging
+from nipype.interfaces import utility as niu
+from nipype.pipeline import engine as pe
-LOGGER = logging.getLogger("workflow")
+LOGGER = logging.getLogger('workflow')
@@ -366,9 +367,9 @@ Source code for niworkflows.anat.coregistration
*,
omp_nthreads,
debug=False,
- epi2t1w_init="register",
+ epi2t1w_init='register',
epi2t1w_dof=6,
- name="bbreg_wf",
+ name='bbreg_wf',
use_bbr=None,
):
"""
@@ -441,9 +442,13 @@ Source code for niworkflows.anat.coregistration
# See https://github.com/nipreps/fmriprep/issues/768
from ..interfaces.freesurfer import (
PatchedBBRegisterRPT as BBRegisterRPT,
- PatchedMRICoregRPT as MRICoregRPT,
+ )
+ from ..interfaces.freesurfer import (
PatchedLTAConvert as LTAConvert,
)
+ from ..interfaces.freesurfer import (
+ PatchedMRICoregRPT as MRICoregRPT,
+ )
from ..interfaces.nitransforms import ConcatenateXFMs
workflow = Workflow(name=name)
@@ -452,64 +457,60 @@ Source code for niworkflows.anat.coregistration
`bbregister` (FreeSurfer) which implements boundary-based registration [@bbr].
Co-registration was configured with {dof} degrees of freedom{reason}.
""".format(
- dof={6: "six", 9: "nine", 12: "twelve"}[epi2t1w_dof],
- reason=""
+ dof={6: 'six', 9: 'nine', 12: 'twelve'}[epi2t1w_dof],
+ reason=''
if epi2t1w_dof == 6
- else "to account for distortions remaining in the EPI reference",
+ else 'to account for distortions remaining in the EPI reference',
)
inputnode = pe.Node(
niu.IdentityInterface(
[
- "in_file",
- "fsnative2t1w_xfm",
- "subjects_dir",
- "subject_id", # BBRegister
- "t1w_dseg", # FLIRT BBR
- "t1w_brain", # FLIRT BBR
+ 'in_file',
+ 'fsnative2t1w_xfm',
+ 'subjects_dir',
+ 'subject_id', # BBRegister
+ 't1w_dseg', # FLIRT BBR
+ 't1w_brain', # FLIRT BBR
]
),
- name="inputnode",
+ name='inputnode',
)
outputnode = pe.Node(
- niu.IdentityInterface(
- ["itk_epi_to_t1w", "itk_t1w_to_epi", "out_report", "fallback"]
- ),
- name="outputnode",
+ niu.IdentityInterface(['itk_epi_to_t1w', 'itk_t1w_to_epi', 'out_report', 'fallback']),
+ name='outputnode',
)
- if epi2t1w_init not in ("register", "header"):
- raise ValueError(f"Unknown EPI-T1w initialization option: {epi2t1w_init}")
+ if epi2t1w_init not in ('register', 'header'):
+ raise ValueError(f'Unknown EPI-T1w initialization option: {epi2t1w_init}')
# For now make BBR unconditional - in the future, we can fall back to identity,
# but adding the flexibility without testing seems a bit dangerous
- if epi2t1w_init == "header":
+ if epi2t1w_init == 'header':
if use_bbr is False:
- raise ValueError("Cannot disable BBR and use header registration")
+ raise ValueError('Cannot disable BBR and use header registration')
if use_bbr is None:
- LOGGER.warning("Initializing BBR with header; affine fallback disabled")
+ LOGGER.warning('Initializing BBR with header; affine fallback disabled')
use_bbr = True
- merge_ltas = pe.Node(niu.Merge(2), name="merge_ltas", run_without_submitting=True)
- concat_xfm = pe.Node(ConcatenateXFMs(inverse=True), name="concat_xfm")
+ merge_ltas = pe.Node(niu.Merge(2), name='merge_ltas', run_without_submitting=True)
+ concat_xfm = pe.Node(ConcatenateXFMs(inverse=True), name='concat_xfm')
# fmt:off
workflow.connect([
# Output ITK transforms
- (inputnode, merge_ltas, [("fsnative2t1w_xfm", "in2")]),
- (merge_ltas, concat_xfm, [("out", "in_xfms")]),
- (concat_xfm, outputnode, [("out_xfm", "itk_epi_to_t1w")]),
- (concat_xfm, outputnode, [("out_inv", "itk_t1w_to_epi")]),
+ (inputnode, merge_ltas, [('fsnative2t1w_xfm', 'in2')]),
+ (merge_ltas, concat_xfm, [('out', 'in_xfms')]),
+ (concat_xfm, outputnode, [('out_xfm', 'itk_epi_to_t1w')]),
+ (concat_xfm, outputnode, [('out_inv', 'itk_t1w_to_epi')]),
])
# fmt:on
if debug is True:
from ..interfaces.nibabel import RegridToZooms
- downsample = pe.Node(
- RegridToZooms(zooms=(4.0, 4.0, 4.0), smooth=True), name="downsample"
- )
- workflow.connect([(inputnode, downsample, [("in_file", "in_file")])])
+ downsample = pe.Node(RegridToZooms(zooms=(4.0, 4.0, 4.0), smooth=True), name='downsample')
+ workflow.connect([(inputnode, downsample, [('in_file', 'in_file')])])
mri_coreg = pe.Node(
MRICoregRPT(
@@ -519,31 +520,31 @@ Source code for niworkflows.anat.coregistration
linmintol=0.01,
generate_report=not use_bbr,
),
- name="mri_coreg",
+ name='mri_coreg',
n_procs=omp_nthreads,
mem_gb=5,
)
# Use mri_coreg
- if epi2t1w_init == "register":
+ if epi2t1w_init == 'register':
# fmt:off
workflow.connect([
- (inputnode, mri_coreg, [("subjects_dir", "subjects_dir"),
- ("subject_id", "subject_id")]),
+ (inputnode, mri_coreg, [('subjects_dir', 'subjects_dir'),
+ ('subject_id', 'subject_id')]),
])
# fmt:on
if not debug:
- workflow.connect(inputnode, "in_file", mri_coreg, "source_file")
+ workflow.connect(inputnode, 'in_file', mri_coreg, 'source_file')
else:
- workflow.connect(downsample, "out_file", mri_coreg, "source_file")
+ workflow.connect(downsample, 'out_file', mri_coreg, 'source_file')
# Short-circuit workflow building, use initial registration
if use_bbr is False:
# fmt:off
workflow.connect([
- (mri_coreg, outputnode, [("out_report", "out_report")]),
- (mri_coreg, merge_ltas, [("out_lta_file", "in1")]),
+ (mri_coreg, outputnode, [('out_report', 'out_report')]),
+ (mri_coreg, merge_ltas, [('out_lta_file', 'in1')]),
])
# fmt:on
outputnode.inputs.fallback = True
@@ -553,38 +554,38 @@ Source code for niworkflows.anat.coregistration
bbregister = pe.Node(
BBRegisterRPT(
dof=epi2t1w_dof,
- contrast_type="t2",
+ contrast_type='t2',
registered_file=True,
out_lta_file=True,
generate_report=True,
),
- name="bbregister",
+ name='bbregister',
mem_gb=12,
)
# fmt:off
workflow.connect([
- (inputnode, bbregister, [("subjects_dir", "subjects_dir"),
- ("subject_id", "subject_id")]),
+ (inputnode, bbregister, [('subjects_dir', 'subjects_dir'),
+ ('subject_id', 'subject_id')]),
])
# fmt:on
if not debug:
- workflow.connect(inputnode, "in_file", bbregister, "source_file")
+ workflow.connect(inputnode, 'in_file', bbregister, 'source_file')
else:
- workflow.connect(downsample, "out_file", bbregister, "source_file")
+ workflow.connect(downsample, 'out_file', bbregister, 'source_file')
- if epi2t1w_init == "header":
- bbregister.inputs.init = "header"
+ if epi2t1w_init == 'header':
+ bbregister.inputs.init = 'header'
else:
- workflow.connect([(mri_coreg, bbregister, [("out_lta_file", "init_reg_file")])])
+ workflow.connect([(mri_coreg, bbregister, [('out_lta_file', 'init_reg_file')])])
# Short-circuit workflow building, use boundary-based registration
if use_bbr is True:
# fmt:off
workflow.connect([
- (bbregister, outputnode, [("out_report", "out_report")]),
- (bbregister, merge_ltas, [("out_lta_file", "in1")]),
+ (bbregister, outputnode, [('out_report', 'out_report')]),
+ (bbregister, merge_ltas, [('out_lta_file', 'in1')]),
])
# fmt:on
@@ -592,41 +593,35 @@ Source code for niworkflows.anat.coregistration
return workflow
# Only reach this point if epi2t1w_init is "register" and use_bbr is None
- transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name="transforms")
- reports = pe.Node(niu.Merge(2), run_without_submitting=True, name="reports")
+ transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms')
+ reports = pe.Node(niu.Merge(2), run_without_submitting=True, name='reports')
lta_ras2ras = pe.MapNode(
- LTAConvert(out_lta=True), iterfield=["in_lta"], name="lta_ras2ras", mem_gb=2
- )
- compare_transforms = pe.Node(
- niu.Function(function=compare_xforms), name="compare_transforms"
+ LTAConvert(out_lta=True), iterfield=['in_lta'], name='lta_ras2ras', mem_gb=2
)
+ compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms')
- select_transform = pe.Node(
- niu.Select(), run_without_submitting=True, name="select_transform"
- )
- select_report = pe.Node(
- niu.Select(), run_without_submitting=True, name="select_report"
- )
+ select_transform = pe.Node(niu.Select(), run_without_submitting=True, name='select_transform')
+ select_report = pe.Node(niu.Select(), run_without_submitting=True, name='select_report')
# fmt:off
workflow.connect([
- (bbregister, transforms, [("out_lta_file", "in1")]),
- (mri_coreg, transforms, [("out_lta_file", "in2")]),
+ (bbregister, transforms, [('out_lta_file', 'in1')]),
+ (mri_coreg, transforms, [('out_lta_file', 'in2')]),
# Normalize LTA transforms to RAS2RAS (inputs are VOX2VOX) and compare
- (transforms, lta_ras2ras, [("out", "in_lta")]),
- (lta_ras2ras, compare_transforms, [("out_lta", "lta_list")]),
- (compare_transforms, outputnode, [("out", "fallback")]),
+ (transforms, lta_ras2ras, [('out', 'in_lta')]),
+ (lta_ras2ras, compare_transforms, [('out_lta', 'lta_list')]),
+ (compare_transforms, outputnode, [('out', 'fallback')]),
# Select output transform
- (transforms, select_transform, [("out", "inlist")]),
- (compare_transforms, select_transform, [("out", "index")]),
- (select_transform, merge_ltas, [("out", "in1")]),
+ (transforms, select_transform, [('out', 'inlist')]),
+ (compare_transforms, select_transform, [('out', 'index')]),
+ (select_transform, merge_ltas, [('out', 'in1')]),
# Select output report
- (bbregister, reports, [("out_report", "in1")]),
- (mri_coreg, reports, [("out_report", "in2")]),
- (reports, select_report, [("out", "inlist")]),
- (compare_transforms, select_report, [("out", "index")]),
- (select_report, outputnode, [("out", "out_report")]),
+ (bbregister, reports, [('out_report', 'in1')]),
+ (mri_coreg, reports, [('out_report', 'in2')]),
+ (reports, select_report, [('out', 'inlist')]),
+ (compare_transforms, select_report, [('out', 'index')]),
+ (select_report, outputnode, [('out', 'out_report')]),
])
# fmt:on
@@ -666,9 +661,10 @@ Source code for niworkflows.anat.coregistration
second transform relative to the first (default: `15`)
"""
- from niworkflows.interfaces.surf import load_transform
from nipype.algorithms.rapidart import _calc_norm_affine
+ from niworkflows.interfaces.surf import load_transform
+
bbr_affine = load_transform(lta_list[0])
fallback_affine = load_transform(lta_list[1])
diff --git a/master/_modules/niworkflows/anat/freesurfer.html b/master/_modules/niworkflows/anat/freesurfer.html
index a3065483bc5..40dfae85cb8 100644
--- a/master/_modules/niworkflows/anat/freesurfer.html
+++ b/master/_modules/niworkflows/anat/freesurfer.html
@@ -355,23 +355,26 @@ Source code for niworkflows.anat.freesurfer
"""FreeSurfer-related workflows."""
from os import getenv
-from nipype.pipeline import engine as pe
-from nipype.interfaces import utility as niu
-from nipype.interfaces import io as nio
+
from nipype.interfaces import freesurfer as fs
+from nipype.interfaces import io as nio
+from nipype.interfaces import utility as niu
+from nipype.pipeline import engine as pe
from ..interfaces.freesurfer import (
MakeMidthickness,
+)
+from ..interfaces.freesurfer import (
PatchedRobustRegister as RobustRegister,
)
from ..interfaces.surf import NormalizeSurf
+SUBJECTS_DIR = getenv('SUBJECTS_DIR')
+
-def init_gifti_surface_wf(
- name="gifti_surface_wf", subjects_dir=getenv("SUBJECTS_DIR", None)
-):
+def init_gifti_surface_wf(name='gifti_surface_wf', subjects_dir=SUBJECTS_DIR):
"""
Build a Nipype workflow to prepare GIFTI surfaces from FreeSurfer.
@@ -419,71 +422,67 @@ Source code for niworkflows.anat.freesurfer
"""
if subjects_dir is None:
- raise RuntimeError("``$SUBJECTS_DIR`` must be set")
+ raise RuntimeError('``$SUBJECTS_DIR`` must be set')
workflow = pe.Workflow(name=name)
- inputnode = pe.Node(
- niu.IdentityInterface(["in_t1w", "subject_id"]), name="inputnode"
- )
+ inputnode = pe.Node(niu.IdentityInterface(['in_t1w', 'subject_id']), name='inputnode')
outputnode = pe.Node(
- niu.IdentityInterface(["surfaces", "surf_norm", "fsnative_to_t1w_xfm"]),
- name="outputnode",
+ niu.IdentityInterface(['surfaces', 'surf_norm', 'fsnative_to_t1w_xfm']),
+ name='outputnode',
)
fssource = pe.Node(
nio.FreeSurferSource(subjects_dir=subjects_dir),
- name="fssource",
+ name='fssource',
run_without_submitting=True,
)
fsnative_2_t1_xfm = pe.Node(
- RobustRegister(auto_sens=True, est_int_scale=True), name="fsnative_2_t1_xfm"
+ RobustRegister(auto_sens=True, est_int_scale=True), name='fsnative_2_t1_xfm'
)
midthickness = pe.MapNode(
- MakeMidthickness(thickness=True, distance=0.5, out_name="midthickness"),
- iterfield="in_file",
- name="midthickness",
+ MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'),
+ iterfield='in_file',
+ name='midthickness',
)
save_midthickness = pe.Node(
nio.DataSink(parameterization=False, base_directory=subjects_dir),
- name="save_midthickness",
+ name='save_midthickness',
run_without_submitting=True,
)
surface_list = pe.Node(
niu.Merge(4, ravel_inputs=True),
- name="surface_list",
+ name='surface_list',
run_without_submitting=True,
)
- fs_2_gii = pe.MapNode(
- fs.MRIsConvert(out_datatype="gii"), iterfield="in_file", name="fs_2_gii"
- )
- fix_surfs = pe.MapNode(NormalizeSurf(), iterfield="in_file", name="fix_surfs")
+ fs_2_gii = pe.MapNode(fs.MRIsConvert(out_datatype='gii'), iterfield='in_file', name='fs_2_gii')
+ fix_surfs = pe.MapNode(NormalizeSurf(), iterfield='in_file', name='fix_surfs')
# fmt: off
workflow.connect([
- (inputnode, fssource, [("subject_id", "subject_id")]),
- (inputnode, save_midthickness, [("subject_id", "container")]),
+ (inputnode, fssource, [('subject_id', 'subject_id')]),
+ (inputnode, save_midthickness, [('subject_id', 'container')]),
# Generate fsnative-to-T1w transform
- (inputnode, fsnative_2_t1_xfm, [("in_t1w", "target_file")]),
- (fssource, fsnative_2_t1_xfm, [("orig", "source_file")]),
+ (inputnode, fsnative_2_t1_xfm, [('in_t1w', 'target_file')]),
+ (fssource, fsnative_2_t1_xfm, [('orig', 'source_file')]),
# Generate midthickness surfaces and save to FreeSurfer derivatives
- (fssource, midthickness, [("white", "in_file"), ("graymid", "graymid")]),
- (midthickness, save_midthickness, [("out_file", "surf.@graymid")]),
+ (fssource, midthickness, [('white', 'in_file'), ('graymid', 'graymid')]),
+ (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
# Produce valid GIFTI surface files (dense mesh)
(fssource, surface_list, [
- ("white", "in1"), ("pial", "in2"), ("inflated", "in3"),
+ ('white', 'in1'), ('pial', 'in2'), ('inflated', 'in3'),
]),
- (save_midthickness, surface_list, [("out_file", "in4")]),
- (surface_list, fs_2_gii, [("out", "in_file")]),
- (fs_2_gii, fix_surfs, [("converted", "in_file")]),
- (fsnative_2_t1_xfm, fix_surfs, [("out_reg_file", "transform_file")]),
- (fsnative_2_t1_xfm, outputnode, [("out_reg_file", "fsnative_to_t1w_xfm")]),
- (fix_surfs, outputnode, [("out_file", "surf_norm")]),
- (fs_2_gii, outputnode, [("converted", "surfaces")]),
+ (save_midthickness, surface_list, [('out_file', 'in4')]),
+ (surface_list, fs_2_gii, [('out', 'in_file')]),
+ (fs_2_gii, fix_surfs, [('converted', 'in_file')]),
+ (fsnative_2_t1_xfm, fix_surfs, [('out_reg_file', 'transform_file')]),
+ (fsnative_2_t1_xfm, outputnode, [('out_reg_file', 'fsnative_to_t1w_xfm')]),
+ (fix_surfs, outputnode, [('out_file', 'surf_norm')]),
+ (fs_2_gii, outputnode, [('converted', 'surfaces')]),
])
# fmt: on
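
A usage sketch for the surface-preparation workflow above; it requires a populated FreeSurfer subjects directory, and the paths and IDs here are illustrative only:

    from niworkflows.anat.freesurfer import init_gifti_surface_wf

    wf = init_gifti_surface_wf(subjects_dir='/data/freesurfer')  # hypothetical path
    wf.inputs.inputnode.subject_id = 'sub-01'
    wf.inputs.inputnode.in_t1w = 'sub-01_desc-preproc_T1w.nii.gz'  # hypothetical
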
diff --git a/master/_modules/niworkflows/anat/skullstrip.html b/master/_modules/niworkflows/anat/skullstrip.html
index 6152556ba84..d2603de2a8e 100644
--- a/master/_modules/niworkflows/anat/skullstrip.html
+++ b/master/_modules/niworkflows/anat/skullstrip.html
@@ -353,15 +353,18 @@ Source code for niworkflows.anat.skullstrip
# https://www.nipreps.org/community/licensing/
#
"""Brain extraction workflows."""
-from nipype.interfaces import afni, utility as niu
+
+from nipype.interfaces import afni
+from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe
-from ..interfaces.nibabel import Binarize
+
from ..interfaces.fixes import FixN4BiasFieldCorrection as N4BiasFieldCorrection
+from ..interfaces.nibabel import Binarize
-def afni_wf(name="AFNISkullStripWorkflow", unifize=False, n4_nthreads=1):
+def afni_wf(name='AFNISkullStripWorkflow', unifize=False, n4_nthreads=1):
"""
Create a skull-stripping workflow based on AFNI's tools.
@@ -406,12 +409,10 @@ Source code for niworkflows.anat.skullstrip
"""
workflow = pe.Workflow(name=name)
- inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode")
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
outputnode = pe.Node(
- niu.IdentityInterface(
- fields=["bias_corrected", "out_file", "out_mask", "bias_image"]
- ),
- name="outputnode",
+ niu.IdentityInterface(fields=['bias_corrected', 'out_file', 'out_mask', 'bias_image']),
+ name='outputnode',
)
inu_n4 = pe.Node(
@@ -423,51 +424,49 @@ Source code for niworkflows.anat.skullstrip
copy_header=True,
),
n_procs=n4_nthreads,
- name="inu_n4",
+ name='inu_n4',
)
- sstrip = pe.Node(afni.SkullStrip(outputtype="NIFTI_GZ"), name="skullstrip")
+ sstrip = pe.Node(afni.SkullStrip(outputtype='NIFTI_GZ'), name='skullstrip')
sstrip_orig_vol = pe.Node(
- afni.Calc(expr="a*step(b)", outputtype="NIFTI_GZ"), name="sstrip_orig_vol"
+ afni.Calc(expr='a*step(b)', outputtype='NIFTI_GZ'), name='sstrip_orig_vol'
)
- binarize = pe.Node(Binarize(thresh_low=0.0), name="binarize")
+ binarize = pe.Node(Binarize(thresh_low=0.0), name='binarize')
if unifize:
# Add two unifize steps, pre- and post- skullstripping.
- inu_uni_0 = pe.Node(
- afni.Unifize(outputtype="NIFTI_GZ"), name="unifize_pre_skullstrip"
- )
+ inu_uni_0 = pe.Node(afni.Unifize(outputtype='NIFTI_GZ'), name='unifize_pre_skullstrip')
inu_uni_1 = pe.Node(
- afni.Unifize(gm=True, outputtype="NIFTI_GZ"), name="unifize_post_skullstrip"
+ afni.Unifize(gm=True, outputtype='NIFTI_GZ'), name='unifize_post_skullstrip'
)
# fmt: off
workflow.connect([
- (inu_n4, inu_uni_0, [("output_image", "in_file")]),
- (inu_uni_0, sstrip, [("out_file", "in_file")]),
- (inu_uni_0, sstrip_orig_vol, [("out_file", "in_file_a")]),
- (sstrip_orig_vol, inu_uni_1, [("out_file", "in_file")]),
- (inu_uni_1, outputnode, [("out_file", "out_file")]),
- (inu_uni_0, outputnode, [("out_file", "bias_corrected")]),
+ (inu_n4, inu_uni_0, [('output_image', 'in_file')]),
+ (inu_uni_0, sstrip, [('out_file', 'in_file')]),
+ (inu_uni_0, sstrip_orig_vol, [('out_file', 'in_file_a')]),
+ (sstrip_orig_vol, inu_uni_1, [('out_file', 'in_file')]),
+ (inu_uni_1, outputnode, [('out_file', 'out_file')]),
+ (inu_uni_0, outputnode, [('out_file', 'bias_corrected')]),
])
# fmt: on
else:
# fmt: off
workflow.connect([
- (inputnode, sstrip_orig_vol, [("in_file", "in_file_a")]),
- (inu_n4, sstrip, [("output_image", "in_file")]),
- (sstrip_orig_vol, outputnode, [("out_file", "out_file")]),
- (inu_n4, outputnode, [("output_image", "bias_corrected")]),
+ (inputnode, sstrip_orig_vol, [('in_file', 'in_file_a')]),
+ (inu_n4, sstrip, [('output_image', 'in_file')]),
+ (sstrip_orig_vol, outputnode, [('out_file', 'out_file')]),
+ (inu_n4, outputnode, [('output_image', 'bias_corrected')]),
])
# fmt: on
# Remaining connections
# fmt: off
workflow.connect([
- (sstrip, sstrip_orig_vol, [("out_file", "in_file_b")]),
- (inputnode, inu_n4, [("in_file", "input_image")]),
- (sstrip_orig_vol, binarize, [("out_file", "in_file")]),
- (binarize, outputnode, [("out_mask", "out_mask")]),
- (inu_n4, outputnode, [("bias_image", "bias_image")]),
+ (sstrip, sstrip_orig_vol, [('out_file', 'in_file_b')]),
+ (inputnode, inu_n4, [('in_file', 'input_image')]),
+ (sstrip_orig_vol, binarize, [('out_file', 'in_file')]),
+ (binarize, outputnode, [('out_mask', 'out_mask')]),
+ (inu_n4, outputnode, [('bias_image', 'bias_image')]),
])
# fmt: on
return workflow
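For orientation, a minimal usage sketch of the workflow above (not part of the diff; the input path and work directory are hypothetical):

    from niworkflows.anat.skullstrip import afni_wf

    wf = afni_wf(name='afni_skullstrip', unifize=True, n4_nthreads=2)
    wf.inputs.inputnode.in_file = '/data/sub-01_T1w.nii.gz'  # hypothetical path
    wf.base_dir = '/tmp/work'
    wf.run()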
diff --git a/master/_modules/niworkflows/cli/boldref.html b/master/_modules/niworkflows/cli/boldref.html
index b9d34709d49..16d3bc6a930 100644
--- a/master/_modules/niworkflows/cli/boldref.html
+++ b/master/_modules/niworkflows/cli/boldref.html
@@ -353,6 +353,7 @@ Source code for niworkflows.cli.boldref
# https://www.nipreps.org/community/licensing/
#
"""Run the BOLD reference+mask workflow"""
+
import os
@@ -360,43 +361,42 @@ Source code for niworkflows.cli.boldref
def get_parser():
"""Build parser object."""
- from argparse import ArgumentParser
- from argparse import RawTextHelpFormatter, RawDescriptionHelpFormatter
+ from argparse import ArgumentParser, RawDescriptionHelpFormatter, RawTextHelpFormatter
parser = ArgumentParser(
description="""NiWorkflows Utilities""", formatter_class=RawTextHelpFormatter
)
- subparsers = parser.add_subparsers(dest="command")
+ subparsers = parser.add_subparsers(dest='command')
be_parser = subparsers.add_parser(
- "brain-extract",
+ 'brain-extract',
formatter_class=RawDescriptionHelpFormatter,
description="""Execute brain extraction and related operations (e.g., \
intensity nonuniformity correction, robust averaging, etc.)""",
)
- be_parser.add_argument("input_file", action="store", help="the input file")
- be_parser.add_argument("out_path", action="store", help="the output directory")
+ be_parser.add_argument('input_file', action='store', help='the input file')
+ be_parser.add_argument('out_path', action='store', help='the output directory')
be_parser.add_argument(
- "--modality",
- "-m",
- action="store",
- choices=("bold", "t1w"),
- default="bold",
- help="the input file",
+ '--modality',
+ '-m',
+ action='store',
+ choices=('bold', 't1w'),
+ default='bold',
+ help='the modality of the input file',
)
parser.add_argument(
- "--omp-nthreads",
- action="store",
+ '--omp-nthreads',
+ action='store',
type=int,
default=os.cpu_count(),
- help="Number of CPUs available to individual processes",
+ help='Number of CPUs available to individual processes',
)
parser.add_argument(
- "--nprocs",
- action="store",
+ '--nprocs',
+ action='store',
type=int,
default=os.cpu_count(),
- help="Number of processes that may run in parallel",
+ help='Number of processes that may run in parallel',
)
return parser
@@ -408,26 +408,29 @@ Source code for niworkflows.cli.boldref
def main(args=None):
"""Entry point."""
from nipype.utils.filemanip import hash_infile
+
from ..func.util import init_bold_reference_wf
opts = get_parser().parse_args(args=args)
wf = init_bold_reference_wf(
- opts.omp_nthreads, gen_report=True, name=hash_infile(opts.input_file),
+ opts.omp_nthreads,
+ gen_report=True,
+ name=hash_infile(opts.input_file),
)
wf.inputs.inputnode.bold_file = opts.input_file
wf.base_dir = os.getcwd()
plugin = {
- "plugin": "MultiProc",
- "plugin_args": {"nprocs": opts.nprocs},
+ 'plugin': 'MultiProc',
+ 'plugin_args': {'nprocs': opts.nprocs},
}
if opts.nprocs < 2:
- plugin = {"plugin": "Linear"}
+ plugin = {'plugin': 'Linear'}
wf.run(**plugin)
-if __name__ == "__main__":
+if __name__ == '__main__':
from sys import argv
main(args=argv[1:])
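A hedged sketch of driving this entry point programmatically, equivalent to the command line (paths are hypothetical and must exist for hash_infile to succeed). Note that top-level options precede the subcommand, while --modality belongs to the brain-extract subparser:

    from niworkflows.cli.boldref import main

    main([
        '--nprocs', '1',  # fewer than 2 processes selects the Linear plugin
        'brain-extract',
        '/data/sub-01_task-rest_bold.nii.gz',  # hypothetical input
        '/out',
        '--modality', 'bold',
    ])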
diff --git a/master/_modules/niworkflows/engine/plugin.html b/master/_modules/niworkflows/engine/plugin.html
index 074b4ce3866..b42befef603 100644
--- a/master/_modules/niworkflows/engine/plugin.html
+++ b/master/_modules/niworkflows/engine/plugin.html
@@ -354,14 +354,14 @@ Source code for niworkflows.engine.plugin
#
"""A lightweight NiPype MultiProc execution plugin."""
+import gc
+import multiprocessing as mp
import os
import sys
+from concurrent.futures import ProcessPoolExecutor
from copy import deepcopy
from time import sleep, time
-import multiprocessing as mp
-from concurrent.futures import ProcessPoolExecutor
from traceback import format_exception
-import gc
from nipype.utils.misc import str2bool
@@ -388,14 +388,14 @@ Source code for niworkflows.engine.plugin
"""
# Init variables
- result = dict(result=None, traceback=None, taskid=taskid)
+ result = {'result': None, 'traceback': None, 'taskid': taskid}
# Try and execute the node via node.run()
try:
- result["result"] = node.run(updatehash=updatehash)
+ result['result'] = node.run(updatehash=updatehash)
except: # noqa: E722, intentionally catch all here
- result["traceback"] = format_exception(*sys.exc_info())
- result["result"] = node.result
+ result['traceback'] = format_exception(*sys.exc_info())
+ result['result'] = node.result
# Return the result dictionary
return result
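To make the contract of run_node concrete, a hedged sketch (assuming node is an already-configured nipype Node) of how a caller inspects the returned dictionary, mirroring the plugin loop further below:

    result = run_node(node, updatehash=False, taskid=0)
    if result['traceback']:
        # the node crashed; the traceback is a list of strings
        print(''.join(result['traceback']))
    else:
        outputs = result['result'].outputs  # successful run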
@@ -413,7 +413,7 @@ Source code for niworkflows.engine.plugin
plugin_args = {}
self.plugin_args = plugin_args
self._config = None
- self._status_callback = plugin_args.get("status_callback")
+ self._status_callback = plugin_args.get('status_callback')
@@ -486,7 +486,7 @@ Source code for niworkflows.engine.plugin
self.proc_done = None
self.proc_pending = None
self.pending_tasks = []
- self.max_jobs = self.plugin_args.get("max_jobs", None)
+ self.max_jobs = self.plugin_args.get('max_jobs', None)
def _prerun_check(self, graph):
"""Stub method to validate/massage graph and nodes before running."""
@@ -501,7 +501,7 @@ Source code for niworkflows.engine.plugin
import numpy as np
self._config = config
- poll_sleep_secs = float(config["execution"]["poll_sleep_duration"])
+ poll_sleep_secs = float(config['execution']['poll_sleep_duration'])
self._prerun_check(graph)
# Generate appropriate structures for worker-manager model
@@ -525,15 +525,16 @@ Source code for niworkflows.engine.plugin
errors.append(exc)
else:
if result:
- if result["traceback"]:
+ if result['traceback']:
notrun.append(self._clean_queue(jobid, graph, result=result))
- errors.append("".join(result["traceback"]))
+ errors.append(''.join(result['traceback']))
else:
self._task_finished_cb(jobid)
self._remove_node_dirs()
self._clear_task(taskid)
else:
- assert self.proc_done[jobid] and self.proc_pending[jobid]
+ assert self.proc_done[jobid]
+ assert self.proc_pending[jobid]
toappend.insert(0, (taskid, jobid))
if toappend:
@@ -559,7 +560,7 @@ Source code for niworkflows.engine.plugin
if len(errors) > 1:
error, cause = (
- RuntimeError(f"{len(errors)} raised. Re-raising first."),
+ RuntimeError(f'{len(errors)} raised. Re-raising first.'),
error,
)
@@ -577,8 +578,8 @@ Source code for niworkflows.engine.plugin
tb = None
if result is not None:
- node._result = result["result"]
- tb = result["traceback"]
+ node._result = result['result']
+ tb = result['traceback']
node._traceback = tb
return report_crash(node, traceback=tb)
@@ -587,16 +588,16 @@ Source code for niworkflows.engine.plugin
def _clean_queue(self, jobid, graph, result=None):
if self._status_callback:
- self._status_callback(self.procs[jobid], "exception")
+ self._status_callback(self.procs[jobid], 'exception')
if result is None:
result = {
- "result": None,
- "traceback": "\n".join(format_exception(*sys.exc_info())),
+ 'result': None,
+ 'traceback': '\n'.join(format_exception(*sys.exc_info())),
}
crashfile = self._report_crash(self.procs[jobid], result=result)
- if str2bool(self._config["execution"]["stop_on_first_crash"]):
- raise RuntimeError("".join(result["traceback"]))
+ if str2bool(self._config['execution']['stop_on_first_crash']):
+ raise RuntimeError(''.join(result['traceback']))
if jobid in self.mapnodesubids:
# remove current jobid
self.proc_pending[jobid] = False
@@ -625,11 +626,11 @@ Source code for niworkflows.engine.plugin
self.procs.extend(mapnodesubids)
self.depidx = ssp.vstack(
(self.depidx, ssp.lil_matrix(np.zeros((numnodes, self.depidx.shape[1])))),
- "lil",
+ 'lil',
)
self.depidx = ssp.hstack(
(self.depidx, ssp.lil_matrix(np.zeros((self.depidx.shape[0], numnodes)))),
- "lil",
+ 'lil',
)
self.depidx[-numnodes:, jobid] = 1
self.proc_done = np.concatenate((self.proc_done, np.zeros(numnodes, dtype=bool)))
@@ -637,7 +638,7 @@ Source code for niworkflows.engine.plugin
return False
def _local_hash_check(self, jobid, graph):
- if not str2bool(self.procs[jobid].config["execution"]["local_hash_check"]):
+ if not str2bool(self.procs[jobid].config['execution']['local_hash_check']):
return False
try:
@@ -665,7 +666,7 @@ Source code for niworkflows.engine.plugin
This is called when a job is completed.
"""
if self._status_callback:
- self._status_callback(self.procs[jobid], "end")
+ self._status_callback(self.procs[jobid], 'end')
# Update job and worker queues
self.proc_pending[jobid] = False
# update the job dependency structure
@@ -688,7 +689,7 @@ Source code for niworkflows.engine.plugin
from networkx import to_scipy_sparse_matrix as to_scipy_sparse_array
self.procs, _ = topological_sort(graph)
- self.depidx = to_scipy_sparse_array(graph, nodelist=self.procs, format="lil")
+ self.depidx = to_scipy_sparse_array(graph, nodelist=self.procs, format='lil')
self.refidx = self.depidx.astype(int)
self.proc_done = np.zeros(len(self.procs), dtype=bool)
self.proc_pending = np.zeros(len(self.procs), dtype=bool)
@@ -700,19 +701,20 @@ Source code for niworkflows.engine.plugin
dfs_preorder = nx.dfs_preorder
except AttributeError:
dfs_preorder = nx.dfs_preorder_nodes
- subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])]
+ subnodes = list(dfs_preorder(graph, self.procs[jobid]))
for node in subnodes:
idx = self.procs.index(node)
self.proc_done[idx] = True
self.proc_pending[idx] = False
- return dict(node=self.procs[jobid], dependents=subnodes, crashfile=crashfile)
+ return {'node': self.procs[jobid], 'dependents': subnodes, 'crashfile': crashfile}
def _remove_node_dirs(self):
"""Remove directories whose outputs have already been used up."""
- import numpy as np
from shutil import rmtree
- if str2bool(self._config["execution"]["remove_node_directories"]):
+ import numpy as np
+
+ if str2bool(self._config['execution']['remove_node_directories']):
indices = np.nonzero((self.refidx.sum(axis=1) == 0).__array__())[0]
for idx in indices:
if idx in self.mapnodesubids:
@@ -779,9 +781,10 @@ Source code for niworkflows.engine.plugin
# Retrieve a nipreps-style configuration object
try:
- config = plugin_args["app_config"]
+ config = plugin_args['app_config']
except (KeyError, TypeError):
from types import SimpleNamespace
+
from nipype.utils.profiler import get_system_total_memory_gb
config = SimpleNamespace(
@@ -796,15 +799,15 @@ Source code for niworkflows.engine.plugin
)
# Read in options or set defaults.
- self.processors = self.plugin_args.get("n_procs", mp.cpu_count())
+ self.processors = self.plugin_args.get('n_procs', mp.cpu_count())
self.memory_gb = self.plugin_args.get(
- "memory_gb", # Allocate 90% of system memory
+ 'memory_gb', # Allocate 90% of system memory
config.environment.total_memory * 0.9,
)
- self.raise_insufficient = self.plugin_args.get("raise_insufficient", False)
+ self.raise_insufficient = self.plugin_args.get('raise_insufficient', False)
# Instantiate different thread pools for non-daemon processes
- mp_context = mp.get_context(self.plugin_args.get("mp_context"))
+ mp_context = mp.get_context(self.plugin_args.get('mp_context'))
self.pool = pool or ProcessPoolExecutor(
max_workers=self.processors,
initializer=config._process_initializer,
@@ -816,7 +819,7 @@ Source code for niworkflows.engine.plugin
def _async_callback(self, args):
result = args.result()
- self._taskresult[result["taskid"]] = result
+ self._taskresult[result['taskid']] = result
def _get_result(self, taskid):
return self._taskresult.get(taskid)
@@ -828,8 +831,8 @@ Source code for niworkflows.engine.plugin
self._taskid += 1
# Don't allow streaming outputs
- if getattr(node.interface, "terminal_output", "") == "stream":
- node.interface.terminal_output = "allatonce"
+ if getattr(node.interface, 'terminal_output', '') == 'stream':
+ node.interface.terminal_output = 'allatonce'
result_future = self.pool.submit(run_node, node, updatehash, self._taskid)
result_future.add_done_callback(self._async_callback)
@@ -850,7 +853,7 @@ Source code for niworkflows.engine.plugin
np.any(np.array(tasks_mem_gb) > self.memory_gb)
or np.any(np.array(tasks_num_th) > self.processors)
):
- raise RuntimeError("Insufficient resources available for job")
+ raise RuntimeError('Insufficient resources available for job')
def _postrun_check(self):
self.pool.shutdown()
@@ -894,7 +897,7 @@ Source code for niworkflows.engine.plugin
if len(jobids) + len(self.pending_tasks) == 0:
return
- jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get("scheduler"))
+ jobids = self._sort_jobs(jobids, scheduler=self.plugin_args.get('scheduler'))
# Run garbage collector before potentially submitting jobs
gc.collect()
@@ -902,13 +905,13 @@ Source code for niworkflows.engine.plugin
# Submit jobs
for jobid in jobids:
# First expand mapnodes
- if self.procs[jobid].__class__.__name__ == "MapNode":
+ if self.procs[jobid].__class__.__name__ == 'MapNode':
try:
num_subnodes = self.procs[jobid].num_subnodes()
except Exception:
traceback = format_exception(*sys.exc_info())
self._clean_queue(
- jobid, graph, result={"result": None, "traceback": traceback}
+ jobid, graph, result={'result': None, 'traceback': traceback}
)
self.proc_pending[jobid] = False
continue
@@ -942,7 +945,7 @@ Source code for niworkflows.engine.plugin
except Exception:
traceback = format_exception(*sys.exc_info())
self._clean_queue(
- jobid, graph, result={"result": None, "traceback": traceback}
+ jobid, graph, result={'result': None, 'traceback': traceback}
)
# Release resources
@@ -960,7 +963,7 @@ Source code for niworkflows.engine.plugin
# Task should be submitted to workers
# Send job to task manager and add to pending tasks
if self._status_callback:
- self._status_callback(self.procs[jobid], "start")
+ self._status_callback(self.procs[jobid], 'start')
tid = self._submit_job(deepcopy(self.procs[jobid]), updatehash=updatehash)
if tid is None:
self.proc_done[jobid] = False
@@ -970,8 +973,8 @@ Source code for niworkflows.engine.plugin
# Display stats next loop
self._stats = None
- def _sort_jobs(self, jobids, scheduler="tsort"):
- if scheduler == "mem_thread":
+ def _sort_jobs(self, jobids, scheduler='tsort'):
+ if scheduler == 'mem_thread':
return sorted(
jobids,
key=lambda item: (self.procs[item].mem_gb, self.procs[item].n_procs),
diff --git a/master/_modules/niworkflows/engine/workflows.html b/master/_modules/niworkflows/engine/workflows.html
index 00b28029857..9d87c4493d1 100644
--- a/master/_modules/niworkflows/engine/workflows.html
+++ b/master/_modules/niworkflows/engine/workflows.html
@@ -357,6 +357,7 @@ Source code for niworkflows.engine.workflows
Add special features to Nipype's vanilla workflows
"""
+
from nipype.pipeline import engine as pe
@@ -399,7 +400,7 @@ Source code for niworkflows.engine.workflows
if self.__postdesc__:
desc += [self.__postdesc__]
- return "".join(desc)
+ return ''.join(desc)
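This method stitches the workflow's self-description together from __desc__ and __postdesc__. A hedged usage sketch, assuming visit_desc() is the public collector on LiterateWorkflow (consistent with niworkflows, but not shown in this hunk):

    from niworkflows.engine.workflows import LiterateWorkflow

    wf = LiterateWorkflow(name='example_wf')
    wf.__desc__ = 'Data were preprocessed with an example step. '
    wf.__postdesc__ = 'Results were stored as derivatives.'
    print(wf.visit_desc())  # concatenated narrative, including nested workflows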
diff --git a/master/_modules/niworkflows/func/util.html b/master/_modules/niworkflows/func/util.html
index e01a2185b89..0276a314b8f 100644
--- a/master/_modules/niworkflows/func/util.html
+++ b/master/_modules/niworkflows/func/util.html
@@ -353,18 +353,23 @@ Source code for niworkflows.func.util
# https://www.nipreps.org/community/licensing/
#
"""Utility workflows."""
-from packaging.version import parse as parseversion, Version
+from nipype.interfaces import afni, fsl
+from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe
-from nipype.interfaces import utility as niu, fsl, afni
-
+from packaging.version import Version
+from packaging.version import parse as parseversion
from templateflow.api import get as get_template
from .. import data
from ..engine.workflows import LiterateWorkflow as Workflow
from ..interfaces.fixes import (
- FixHeaderRegistration as Registration,
FixHeaderApplyTransforms as ApplyTransforms,
+)
+from ..interfaces.fixes import (
+ FixHeaderRegistration as Registration,
+)
+from ..interfaces.fixes import (
FixN4BiasFieldCorrection as N4BiasFieldCorrection,
)
from ..interfaces.header import CopyHeader, CopyXForm, ValidateImage
@@ -372,7 +377,6 @@ Source code for niworkflows.func.util
from ..utils.connections import listify
from ..utils.misc import pass_dummy_scans as _pass_dummy_scans
-
DEFAULT_MEMORY_MIN_GB = 0.01
@@ -385,7 +389,7 @@ Source code for niworkflows.func.util
brainmask_thresh=0.85,
pre_mask=False,
multiecho=False,
- name="bold_reference_wf",
+ name='bold_reference_wf',
gen_report=False,
):
"""
@@ -474,9 +478,9 @@ Source code for niworkflows.func.util
* :py:func:`~niworkflows.func.util.init_enhance_and_skullstrip_wf`
"""
- from ..utils.connections import pop_file as _pop
from ..interfaces.bold import NonsteadyStatesDetector
from ..interfaces.images import RobustAverage
+ from ..utils.connections import pop_file as _pop
workflow = Workflow(name=name)
workflow.__desc__ = f"""\
@@ -486,27 +490,25 @@ Source code for niworkflows.func.util
"""
inputnode = pe.Node(
- niu.IdentityInterface(
- fields=["bold_file", "bold_mask", "dummy_scans", "sbref_file"]
- ),
- name="inputnode",
+ niu.IdentityInterface(fields=['bold_file', 'bold_mask', 'dummy_scans', 'sbref_file']),
+ name='inputnode',
)
outputnode = pe.Node(
niu.IdentityInterface(
fields=[
- "bold_file",
- "all_bold_files",
- "raw_ref_image",
- "skip_vols",
- "algo_dummy_scans",
- "ref_image",
- "ref_image_brain",
- "bold_mask",
- "validation_report",
- "mask_report",
+ 'bold_file',
+ 'all_bold_files',
+ 'raw_ref_image',
+ 'skip_vols',
+ 'algo_dummy_scans',
+ 'ref_image',
+ 'ref_image_brain',
+ 'bold_mask',
+ 'validation_report',
+ 'mask_report',
]
),
- name="outputnode",
+ name='outputnode',
)
# Simplify manually setting input image
@@ -515,13 +517,13 @@ Source code for niworkflows.func.util
val_bold = pe.MapNode(
ValidateImage(),
- name="val_bold",
+ name='val_bold',
mem_gb=DEFAULT_MEMORY_MIN_GB,
- iterfield=["in_file"],
+ iterfield=['in_file'],
)
- get_dummy = pe.Node(NonsteadyStatesDetector(), name="get_dummy")
- gen_avg = pe.Node(RobustAverage(), name="gen_avg", mem_gb=1)
+ get_dummy = pe.Node(NonsteadyStatesDetector(), name='get_dummy')
+ gen_avg = pe.Node(RobustAverage(), name='gen_avg', mem_gb=1)
enhance_and_skullstrip_bold_wf = init_enhance_and_skullstrip_bold_wf(
brainmask_thresh=brainmask_thresh,
@@ -530,41 +532,41 @@ Source code for niworkflows.func.util
)
calc_dummy_scans = pe.Node(
- niu.Function(function=_pass_dummy_scans, output_names=["skip_vols_num"]),
- name="calc_dummy_scans",
+ niu.Function(function=_pass_dummy_scans, output_names=['skip_vols_num']),
+ name='calc_dummy_scans',
run_without_submitting=True,
mem_gb=DEFAULT_MEMORY_MIN_GB,
)
# fmt: off
workflow.connect([
- (inputnode, val_bold, [(("bold_file", listify), "in_file")]),
- (inputnode, get_dummy, [(("bold_file", _pop), "in_file")]),
- (inputnode, enhance_and_skullstrip_bold_wf, [("bold_mask", "inputnode.pre_mask")]),
- (inputnode, calc_dummy_scans, [("dummy_scans", "dummy_scans")]),
- (gen_avg, enhance_and_skullstrip_bold_wf, [("out_file", "inputnode.in_file")]),
- (get_dummy, calc_dummy_scans, [("n_dummy", "algo_dummy_scans")]),
- (calc_dummy_scans, outputnode, [("skip_vols_num", "skip_vols")]),
- (gen_avg, outputnode, [("out_file", "raw_ref_image")]),
- (get_dummy, outputnode, [("n_dummy", "algo_dummy_scans")]),
- (val_bold, outputnode, [(("out_file", _pop), "bold_file"),
- ("out_file", "all_bold_files"),
- (("out_report", _pop), "validation_report")]),
+ (inputnode, val_bold, [(('bold_file', listify), 'in_file')]),
+ (inputnode, get_dummy, [(('bold_file', _pop), 'in_file')]),
+ (inputnode, enhance_and_skullstrip_bold_wf, [('bold_mask', 'inputnode.pre_mask')]),
+ (inputnode, calc_dummy_scans, [('dummy_scans', 'dummy_scans')]),
+ (gen_avg, enhance_and_skullstrip_bold_wf, [('out_file', 'inputnode.in_file')]),
+ (get_dummy, calc_dummy_scans, [('n_dummy', 'algo_dummy_scans')]),
+ (calc_dummy_scans, outputnode, [('skip_vols_num', 'skip_vols')]),
+ (gen_avg, outputnode, [('out_file', 'raw_ref_image')]),
+ (get_dummy, outputnode, [('n_dummy', 'algo_dummy_scans')]),
+ (val_bold, outputnode, [(('out_file', _pop), 'bold_file'),
+ ('out_file', 'all_bold_files'),
+ (('out_report', _pop), 'validation_report')]),
(enhance_and_skullstrip_bold_wf, outputnode, [
- ("outputnode.bias_corrected_file", "ref_image"),
- ("outputnode.mask_file", "bold_mask"),
- ("outputnode.skull_stripped_file", "ref_image_brain"),
+ ('outputnode.bias_corrected_file', 'ref_image'),
+ ('outputnode.mask_file', 'bold_mask'),
+ ('outputnode.skull_stripped_file', 'ref_image_brain'),
]),
])
# fmt: on
if gen_report:
- mask_reportlet = pe.Node(SimpleShowMaskRPT(), name="mask_reportlet")
+ mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
# fmt: off
workflow.connect([
(enhance_and_skullstrip_bold_wf, mask_reportlet, [
- ("outputnode.bias_corrected_file", "background_file"),
- ("outputnode.mask_file", "mask_file"),
+ ('outputnode.bias_corrected_file', 'background_file'),
+ ('outputnode.mask_file', 'mask_file'),
]),
])
# fmt: on
@@ -572,8 +574,8 @@ Source code for niworkflows.func.util
if not sbref_files:
# fmt: off
workflow.connect([
- (val_bold, gen_avg, [(("out_file", _pop), "in_file")]), # pop first echo of ME-EPI
- (get_dummy, gen_avg, [("t_mask", "t_mask")]),
+ (val_bold, gen_avg, [(('out_file', _pop), 'in_file')]), # pop first echo of ME-EPI
+ (get_dummy, gen_avg, [('t_mask', 't_mask')]),
])
# fmt: on
return workflow
@@ -588,17 +590,17 @@ Source code for niworkflows.func.util
val_sbref = pe.MapNode(
ValidateImage(),
- name="val_sbref",
+ name='val_sbref',
mem_gb=DEFAULT_MEMORY_MIN_GB,
- iterfield=["in_file"],
+ iterfield=['in_file'],
)
- merge_sbrefs = pe.Node(MergeSeries(), name="merge_sbrefs")
+ merge_sbrefs = pe.Node(MergeSeries(), name='merge_sbrefs')
# fmt: off
workflow.connect([
- (inputnode, val_sbref, [(("sbref_file", listify), "in_file")]),
- (val_sbref, merge_sbrefs, [("out_file", "in_files")]),
- (merge_sbrefs, gen_avg, [("out_file", "in_file")]),
+ (inputnode, val_sbref, [(('sbref_file', listify), 'in_file')]),
+ (val_sbref, merge_sbrefs, [('out_file', 'in_files')]),
+ (merge_sbrefs, gen_avg, [('out_file', 'in_file')]),
])
# fmt: on
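A minimal, hedged usage sketch of init_bold_reference_wf (paths are hypothetical); sbref_file may be left unset, in which case the reference is averaged from the BOLD series as wired above. The plugin arguments mirror main() in the boldref CLI:

    from niworkflows.func.util import init_bold_reference_wf

    wf = init_bold_reference_wf(omp_nthreads=4, gen_report=True)
    wf.inputs.inputnode.bold_file = '/data/sub-01_task-rest_bold.nii.gz'
    wf.base_dir = '/tmp/work'
    wf.run(plugin='MultiProc', plugin_args={'nprocs': 4})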
@@ -616,7 +618,7 @@ Source code for niworkflows.func.util
def init_enhance_and_skullstrip_bold_wf(
brainmask_thresh=0.5,
- name="enhance_and_skullstrip_bold_wf",
+ name='enhance_and_skullstrip_bold_wf',
omp_nthreads=1,
pre_mask=False,
):
@@ -703,157 +705,147 @@ Source code for niworkflows.func.util
from niworkflows.interfaces.nibabel import ApplyMask, BinaryDilation
workflow = Workflow(name=name)
- inputnode = pe.Node(
- niu.IdentityInterface(fields=["in_file", "pre_mask"]), name="inputnode"
- )
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_file', 'pre_mask']), name='inputnode')
outputnode = pe.Node(
- niu.IdentityInterface(
- fields=["mask_file", "skull_stripped_file", "bias_corrected_file"]
- ),
- name="outputnode",
+ niu.IdentityInterface(fields=['mask_file', 'skull_stripped_file', 'bias_corrected_file']),
+ name='outputnode',
)
# Run N4 normally, force num_threads=1 for stability (images are small, no need for >1)
n4_correct = pe.Node(
- N4BiasFieldCorrection(
- dimension=3, copy_header=True, bspline_fitting_distance=200
- ),
+ N4BiasFieldCorrection(dimension=3, copy_header=True, bspline_fitting_distance=200),
shrink_factor=2,
- name="n4_correct",
+ name='n4_correct',
n_procs=1,
)
n4_correct.inputs.rescale_intensities = True
# Create a generous BET mask out of the bias-corrected EPI
- skullstrip_first_pass = pe.Node(
- fsl.BET(frac=0.2, mask=True), name="skullstrip_first_pass"
- )
- first_dilate = pe.Node(BinaryDilation(radius=6), name="first_dilate")
- first_mask = pe.Node(ApplyMask(), name="first_mask")
+ skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True), name='skullstrip_first_pass')
+ first_dilate = pe.Node(BinaryDilation(radius=6), name='first_dilate')
+ first_mask = pe.Node(ApplyMask(), name='first_mask')
# Use AFNI's unifize for T2 contrast & fix header
unifize = pe.Node(
afni.Unifize(
t2=True,
- outputtype="NIFTI_GZ",
+ outputtype='NIFTI_GZ',
# Default -clfrac is 0.1, 0.4 was too conservative
# -rbt because I'm a Jedi AFNI Master (see 3dUnifize's documentation)
- args="-clfrac 0.2 -rbt 18.3 65.0 90.0",
- out_file="uni.nii.gz",
+ args='-clfrac 0.2 -rbt 18.3 65.0 90.0',
+ out_file='uni.nii.gz',
),
- name="unifize",
+ name='unifize',
)
- fixhdr_unifize = pe.Node(CopyXForm(), name="fixhdr_unifize", mem_gb=0.1)
+ fixhdr_unifize = pe.Node(CopyXForm(), name='fixhdr_unifize', mem_gb=0.1)
# Run AFNI's 3dAutomask to extract a refined brain mask
skullstrip_second_pass = pe.Node(
- afni.Automask(dilate=1, outputtype="NIFTI_GZ"), name="skullstrip_second_pass"
+ afni.Automask(dilate=1, outputtype='NIFTI_GZ'), name='skullstrip_second_pass'
)
- fixhdr_skullstrip2 = pe.Node(CopyXForm(), name="fixhdr_skullstrip2", mem_gb=0.1)
+ fixhdr_skullstrip2 = pe.Node(CopyXForm(), name='fixhdr_skullstrip2', mem_gb=0.1)
# Take intersection of both masks
- combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"), name="combine_masks")
+ combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'), name='combine_masks')
# Compute masked brain
- apply_mask = pe.Node(ApplyMask(), name="apply_mask")
+ apply_mask = pe.Node(ApplyMask(), name='apply_mask')
if not pre_mask:
from nipype.interfaces.ants.utils import AI
bold_template = get_template(
- "MNI152NLin2009cAsym", resolution=2, desc="fMRIPrep", suffix="boldref"
- )
- brain_mask = get_template(
- "MNI152NLin2009cAsym", resolution=2, desc="brain", suffix="mask"
+ 'MNI152NLin2009cAsym', resolution=2, desc='fMRIPrep', suffix='boldref'
)
+ brain_mask = get_template('MNI152NLin2009cAsym', resolution=2, desc='brain', suffix='mask')
# Initialize transforms with antsAI
init_aff = pe.Node(
AI(
fixed_image=str(bold_template),
fixed_image_mask=str(brain_mask),
- metric=("Mattes", 32, "Regular", 0.2),
- transform=("Affine", 0.1),
+ metric=('Mattes', 32, 'Regular', 0.2),
+ transform=('Affine', 0.1),
search_factor=(20, 0.12),
principal_axes=False,
convergence=(10, 1e-6, 10),
verbose=True,
),
- name="init_aff",
+ name='init_aff',
n_procs=omp_nthreads,
)
# Registration().version may be None
- if parseversion(Registration().version or "0.0.0") > Version("2.2.0"):
+ if parseversion(Registration().version or '0.0.0') > Version('2.2.0'):
init_aff.inputs.search_grid = (40, (0, 40, 40))
# Set up spatial normalization
norm = pe.Node(
- Registration(from_file=data.load("epi_atlasbased_brainmask.json")),
- name="norm",
+ Registration(from_file=data.load('epi_atlasbased_brainmask.json')),
+ name='norm',
n_procs=omp_nthreads,
)
norm.inputs.fixed_image = str(bold_template)
map_brainmask = pe.Node(
ApplyTransforms(
- interpolation="Linear",
+ interpolation='Linear',
# Use the higher resolution and probseg for numerical stability in rounding
input_image=str(
get_template(
- "MNI152NLin2009cAsym",
+ 'MNI152NLin2009cAsym',
resolution=1,
- label="brain",
- suffix="probseg",
+ label='brain',
+ suffix='probseg',
)
),
),
- name="map_brainmask",
+ name='map_brainmask',
)
# Ensure mask's header matches reference's
- fix_header = pe.Node(CopyHeader(), name="fix_header", run_without_submitting=True)
+ fix_header = pe.Node(CopyHeader(), name='fix_header', run_without_submitting=True)
# fmt: off
workflow.connect([
- (inputnode, fix_header, [("in_file", "hdr_file")]),
- (inputnode, init_aff, [("in_file", "moving_image")]),
- (inputnode, map_brainmask, [("in_file", "reference_image")]),
- (inputnode, norm, [("in_file", "moving_image")]),
- (init_aff, norm, [("output_transform", "initial_moving_transform")]),
+ (inputnode, fix_header, [('in_file', 'hdr_file')]),
+ (inputnode, init_aff, [('in_file', 'moving_image')]),
+ (inputnode, map_brainmask, [('in_file', 'reference_image')]),
+ (inputnode, norm, [('in_file', 'moving_image')]),
+ (init_aff, norm, [('output_transform', 'initial_moving_transform')]),
(norm, map_brainmask, [
- ("reverse_invert_flags", "invert_transform_flags"),
- ("reverse_transforms", "transforms"),
+ ('reverse_invert_flags', 'invert_transform_flags'),
+ ('reverse_transforms', 'transforms'),
]),
- (map_brainmask, fix_header, [("output_image", "in_file")]),
- (fix_header, n4_correct, [("out_file", "weight_image")]),
+ (map_brainmask, fix_header, [('output_image', 'in_file')]),
+ (fix_header, n4_correct, [('out_file', 'weight_image')]),
])
# fmt: on
else:
# fmt: off
workflow.connect([
- (inputnode, n4_correct, [("pre_mask", "weight_image")]),
+ (inputnode, n4_correct, [('pre_mask', 'weight_image')]),
])
# fmt: on
# fmt: off
workflow.connect([
- (inputnode, n4_correct, [("in_file", "input_image")]),
- (inputnode, fixhdr_unifize, [("in_file", "hdr_file")]),
- (inputnode, fixhdr_skullstrip2, [("in_file", "hdr_file")]),
- (n4_correct, skullstrip_first_pass, [("output_image", "in_file")]),
- (skullstrip_first_pass, first_dilate, [("mask_file", "in_file")]),
- (first_dilate, first_mask, [("out_file", "in_mask")]),
- (skullstrip_first_pass, first_mask, [("out_file", "in_file")]),
- (first_mask, unifize, [("out_file", "in_file")]),
- (unifize, fixhdr_unifize, [("out_file", "in_file")]),
- (fixhdr_unifize, skullstrip_second_pass, [("out_file", "in_file")]),
- (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
- (skullstrip_second_pass, fixhdr_skullstrip2, [("out_file", "in_file")]),
- (fixhdr_skullstrip2, combine_masks, [("out_file", "operand_file")]),
- (fixhdr_unifize, apply_mask, [("out_file", "in_file")]),
- (combine_masks, apply_mask, [("out_file", "in_mask")]),
- (combine_masks, outputnode, [("out_file", "mask_file")]),
- (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
- (n4_correct, outputnode, [("output_image", "bias_corrected_file")]),
+ (inputnode, n4_correct, [('in_file', 'input_image')]),
+ (inputnode, fixhdr_unifize, [('in_file', 'hdr_file')]),
+ (inputnode, fixhdr_skullstrip2, [('in_file', 'hdr_file')]),
+ (n4_correct, skullstrip_first_pass, [('output_image', 'in_file')]),
+ (skullstrip_first_pass, first_dilate, [('mask_file', 'in_file')]),
+ (first_dilate, first_mask, [('out_file', 'in_mask')]),
+ (skullstrip_first_pass, first_mask, [('out_file', 'in_file')]),
+ (first_mask, unifize, [('out_file', 'in_file')]),
+ (unifize, fixhdr_unifize, [('out_file', 'in_file')]),
+ (fixhdr_unifize, skullstrip_second_pass, [('out_file', 'in_file')]),
+ (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
+ (skullstrip_second_pass, fixhdr_skullstrip2, [('out_file', 'in_file')]),
+ (fixhdr_skullstrip2, combine_masks, [('out_file', 'operand_file')]),
+ (fixhdr_unifize, apply_mask, [('out_file', 'in_file')]),
+ (combine_masks, apply_mask, [('out_file', 'in_mask')]),
+ (combine_masks, outputnode, [('out_file', 'mask_file')]),
+ (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
+ (n4_correct, outputnode, [('output_image', 'bias_corrected_file')]),
])
# fmt: on
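A hedged instantiation sketch for the workflow above (hypothetical boldref path); with pre_mask=False, the template-driven branch shown earlier supplies the N4 weight image:

    from niworkflows.func.util import init_enhance_and_skullstrip_bold_wf

    wf = init_enhance_and_skullstrip_bold_wf(
        brainmask_thresh=0.5, omp_nthreads=2, pre_mask=False
    )
    wf.inputs.inputnode.in_file = '/data/sub-01_boldref.nii.gz'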
@@ -863,7 +855,7 @@ Source code for niworkflows.func.util
-def init_skullstrip_bold_wf(name="skullstrip_bold_wf"):
+def init_skullstrip_bold_wf(name='skullstrip_bold_wf'):
"""
Apply skull-stripping to a BOLD image.
@@ -898,38 +890,34 @@ Source code for niworkflows.func.util
from niworkflows.interfaces.nibabel import ApplyMask
workflow = Workflow(name=name)
- inputnode = pe.Node(niu.IdentityInterface(fields=["in_file"]), name="inputnode")
+ inputnode = pe.Node(niu.IdentityInterface(fields=['in_file']), name='inputnode')
outputnode = pe.Node(
- niu.IdentityInterface(
- fields=["mask_file", "skull_stripped_file", "out_report"]
- ),
- name="outputnode",
- )
- skullstrip_first_pass = pe.Node(
- fsl.BET(frac=0.2, mask=True), name="skullstrip_first_pass"
+ niu.IdentityInterface(fields=['mask_file', 'skull_stripped_file', 'out_report']),
+ name='outputnode',
)
+ skullstrip_first_pass = pe.Node(fsl.BET(frac=0.2, mask=True), name='skullstrip_first_pass')
skullstrip_second_pass = pe.Node(
- afni.Automask(dilate=1, outputtype="NIFTI_GZ"), name="skullstrip_second_pass"
+ afni.Automask(dilate=1, outputtype='NIFTI_GZ'), name='skullstrip_second_pass'
)
- combine_masks = pe.Node(fsl.BinaryMaths(operation="mul"), name="combine_masks")
- apply_mask = pe.Node(ApplyMask(), name="apply_mask")
- mask_reportlet = pe.Node(SimpleShowMaskRPT(), name="mask_reportlet")
+ combine_masks = pe.Node(fsl.BinaryMaths(operation='mul'), name='combine_masks')
+ apply_mask = pe.Node(ApplyMask(), name='apply_mask')
+ mask_reportlet = pe.Node(SimpleShowMaskRPT(), name='mask_reportlet')
# fmt: off
workflow.connect([
- (inputnode, skullstrip_first_pass, [("in_file", "in_file")]),
- (skullstrip_first_pass, skullstrip_second_pass, [("out_file", "in_file")]),
- (skullstrip_first_pass, combine_masks, [("mask_file", "in_file")]),
- (skullstrip_second_pass, combine_masks, [("out_file", "operand_file")]),
- (combine_masks, outputnode, [("out_file", "mask_file")]),
+ (inputnode, skullstrip_first_pass, [('in_file', 'in_file')]),
+ (skullstrip_first_pass, skullstrip_second_pass, [('out_file', 'in_file')]),
+ (skullstrip_first_pass, combine_masks, [('mask_file', 'in_file')]),
+ (skullstrip_second_pass, combine_masks, [('out_file', 'operand_file')]),
+ (combine_masks, outputnode, [('out_file', 'mask_file')]),
# Masked file
- (inputnode, apply_mask, [("in_file", "in_file")]),
- (combine_masks, apply_mask, [("out_file", "in_mask")]),
- (apply_mask, outputnode, [("out_file", "skull_stripped_file")]),
+ (inputnode, apply_mask, [('in_file', 'in_file')]),
+ (combine_masks, apply_mask, [('out_file', 'in_mask')]),
+ (apply_mask, outputnode, [('out_file', 'skull_stripped_file')]),
# Reportlet
- (inputnode, mask_reportlet, [("in_file", "background_file")]),
- (combine_masks, mask_reportlet, [("out_file", "mask_file")]),
- (mask_reportlet, outputnode, [("out_report", "out_report")]),
+ (inputnode, mask_reportlet, [('in_file', 'background_file')]),
+ (combine_masks, mask_reportlet, [('out_file', 'mask_file')]),
+ (mask_reportlet, outputnode, [('out_report', 'out_report')]),
])
# fmt: on
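And the same pattern for this simpler two-pass strip (hypothetical path):

    from niworkflows.func.util import init_skullstrip_bold_wf

    wf = init_skullstrip_bold_wf()
    wf.inputs.inputnode.in_file = '/data/sub-01_boldref.nii.gz'
    res = wf.run()  # outputs: mask_file, skull_stripped_file, out_report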
diff --git a/master/_modules/niworkflows/interfaces/bids.html b/master/_modules/niworkflows/interfaces/bids.html
index 19f71c6c15a..8ee4e32c803 100644
--- a/master/_modules/niworkflows/interfaces/bids.html
+++ b/master/_modules/niworkflows/interfaces/bids.html
@@ -353,56 +353,57 @@ Source code for niworkflows.interfaces.bids
# https://www.nipreps.org/community/licensing/
#
"""Interfaces for handling BIDS-like neuroimaging structures."""
+
+import os
+import re
+import shutil
+import sys
from collections import defaultdict
from contextlib import suppress
from json import dumps, loads
from pathlib import Path
-import shutil
-import os
-import re
-import sys
import nibabel as nb
import numpy as np
-
+import templateflow as tf
from nipype import logging
from nipype.interfaces.base import (
- traits,
- isdefined,
- Undefined,
- TraitedSpec,
BaseInterfaceInputSpec,
+ Directory,
DynamicTraitedSpec,
File,
- Directory,
InputMultiObject,
OutputMultiObject,
- Str,
SimpleInterface,
+ Str,
+ TraitedSpec,
+ Undefined,
+ isdefined,
+ traits,
)
from nipype.interfaces.io import add_traits
from nipype.utils.filemanip import hash_infile
-import templateflow as tf
+
from .. import data
from ..utils.bids import _init_layout, relative_to_root
from ..utils.images import set_consumables, unsafe_write_nifti_header_and_data
from ..utils.misc import _copy_any, unlink
-regz = re.compile(r"\.gz$")
-_pybids_spec = loads(data.load.readable("nipreps.json").read_text())
-BIDS_DERIV_ENTITIES = _pybids_spec["entities"]
-BIDS_DERIV_PATTERNS = tuple(_pybids_spec["default_path_patterns"])
+regz = re.compile(r'\.gz$')
+_pybids_spec = loads(data.load.readable('nipreps.json').read_text())
+BIDS_DERIV_ENTITIES = _pybids_spec['entities']
+BIDS_DERIV_PATTERNS = tuple(_pybids_spec['default_path_patterns'])
STANDARD_SPACES = tf.api.templates()
-LOGGER = logging.getLogger("nipype.interface")
+LOGGER = logging.getLogger('nipype.interface')
if sys.version_info < (3, 10): # PY39
builtin_zip = zip
- def zip(*args, strict=False):
+ def zip(*args, strict=False): # noqa: A001
if strict and any(len(args[0]) != len(arg) for arg in args):
- raise ValueError("strict_zip() requires all arguments to have the same length")
+ raise ValueError('strict_zip() requires all arguments to have the same length')
return builtin_zip(*args)
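A quick behavioral check of the PY39 backport above: with strict=True, length mismatches raise instead of silently truncating:

    list(zip([1, 2], ['a', 'b'], strict=True))     # [(1, 'a'), (2, 'b')]
    list(zip([1, 2, 3], ['a', 'b'], strict=True))  # raises ValueError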
@@ -414,24 +415,24 @@ Source code for niworkflows.interfaces.bids
DEFAULT_DTYPES = defaultdict(
_none,
(
- ("mask", "uint8"),
- ("dseg", "int16"),
- ("probseg", "float32"),
- ("boldref", "float32"),
+ ('mask', 'uint8'),
+ ('dseg', 'int16'),
+ ('probseg', 'float32'),
+ ('boldref', 'float32'),
),
)
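The defaultdict maps BIDS suffixes to on-disk dtypes and falls back to None for anything unlisted (via _none, defined just above in the source). A quick check:

    DEFAULT_DTYPES['mask']  # 'uint8'
    DEFAULT_DTYPES['bold']  # None -- not listed, so the source dtype is kept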
class _BIDSBaseInputSpec(BaseInterfaceInputSpec):
bids_dir = traits.Either(
- (None, Directory(exists=True)), usedefault=True, desc="optional bids directory"
+ (None, Directory(exists=True)), usedefault=True, desc='optional bids directory'
)
- bids_validate = traits.Bool(True, usedefault=True, desc="enable BIDS validator")
- index_db = Directory(exists=True, desc="a PyBIDS layout cache directory")
+ bids_validate = traits.Bool(True, usedefault=True, desc='enable BIDS validator')
+ index_db = Directory(exists=True, desc='a PyBIDS layout cache directory')
class _BIDSInfoInputSpec(_BIDSBaseInputSpec):
- in_file = File(mandatory=True, desc="input file, part of a BIDS tree")
+ in_file = File(mandatory=True, desc='input file, part of a BIDS tree')
class _BIDSInfoOutputSpec(DynamicTraitedSpec):
@@ -550,8 +551,7 @@ Source code for niworkflows.interfaces.bids
pass
params = parse_file_entities(in_file)
self._results = {
- key: params.get(key, Undefined)
- for key in _BIDSInfoOutputSpec().get().keys()
+ key: params.get(key, Undefined) for key in _BIDSInfoOutputSpec().get().keys()
}
return runtime
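A hedged sketch of BIDSInfo in isolation (output field names assumed from the dynamic output spec; the path is hypothetical, and validation is disabled so no full layout is required):

    from niworkflows.interfaces.bids import BIDSInfo

    info = BIDSInfo(bids_validate=False)
    info.inputs.in_file = '/data/sub-01/func/sub-01_task-rest_bold.nii.gz'
    res = info.run()
    print(res.outputs.subject, res.outputs.task)  # '01' 'rest'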
@@ -563,17 +563,17 @@ Source code for niworkflows.interfaces.bids
class _BIDSDataGrabberOutputSpec(TraitedSpec):
- out_dict = traits.Dict(desc="output data structure")
- fmap = OutputMultiObject(desc="output fieldmaps")
- bold = OutputMultiObject(desc="output functional images")
- sbref = OutputMultiObject(desc="output sbrefs")
- t1w = OutputMultiObject(desc="output T1w images")
- roi = OutputMultiObject(desc="output ROI images")
- t2w = OutputMultiObject(desc="output T2w images")
- flair = OutputMultiObject(desc="output FLAIR images")
- pet = OutputMultiObject(desc="output PET images")
- dwi = OutputMultiObject(desc="output DWI images")
- asl = OutputMultiObject(desc="output ASL images")
+ out_dict = traits.Dict(desc='output data structure')
+ fmap = OutputMultiObject(desc='output fieldmaps')
+ bold = OutputMultiObject(desc='output functional images')
+ sbref = OutputMultiObject(desc='output sbrefs')
+ t1w = OutputMultiObject(desc='output T1w images')
+ roi = OutputMultiObject(desc='output ROI images')
+ t2w = OutputMultiObject(desc='output T2w images')
+ flair = OutputMultiObject(desc='output FLAIR images')
+ pet = OutputMultiObject(desc='output PET images')
+ dwi = OutputMultiObject(desc='output DWI images')
+ asl = OutputMultiObject(desc='output ASL images')
@@ -602,8 +602,8 @@ Source code for niworkflows.interfaces.bids
_require_funcs = True
def __init__(self, *args, **kwargs):
- anat_only = kwargs.pop("anat_only")
- anat_derivatives = kwargs.pop("anat_derivatives", None)
+ anat_only = kwargs.pop('anat_only')
+ anat_derivatives = kwargs.pop('anat_derivatives', None)
super().__init__(*args, **kwargs)
if anat_only is not None:
self._require_funcs = not anat_only
@@ -612,60 +612,55 @@ Source code for niworkflows.interfaces.bids
def _run_interface(self, runtime):
bids_dict = self.inputs.subject_data
- self._results["out_dict"] = bids_dict
+ self._results['out_dict'] = bids_dict
self._results.update(bids_dict)
if self._require_t1w and not bids_dict['t1w']:
raise FileNotFoundError(
- "No T1w images found for subject sub-{}".format(self.inputs.subject_id)
+ f'No T1w images found for subject sub-{self.inputs.subject_id}'
)
- if self._require_funcs and not bids_dict["bold"]:
+ if self._require_funcs and not bids_dict['bold']:
raise FileNotFoundError(
- "No functional images found for subject sub-{}".format(
- self.inputs.subject_id
- )
+ f'No functional images found for subject sub-{self.inputs.subject_id}'
)
- for imtype in ["bold", "t2w", "flair", "fmap", "sbref", "roi", "pet", "asl"]:
+ for imtype in ['bold', 't2w', 'flair', 'fmap', 'sbref', 'roi', 'pet', 'asl']:
if not bids_dict[imtype]:
- LOGGER.info(
- 'No "%s" images found for sub-%s', imtype, self.inputs.subject_id
- )
+ LOGGER.info('No "%s" images found for sub-%s', imtype, self.inputs.subject_id)
return runtime
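A minimal sketch of the grabber (the constructor pops anat_only with no default, so it must be passed at construction; file names are hypothetical):

    from niworkflows.interfaces.bids import BIDSDataGrabber

    grabber = BIDSDataGrabber(anat_only=False)
    grabber.inputs.subject_id = '01'
    grabber.inputs.subject_data = {
        't1w': ['sub-01_T1w.nii.gz'], 'bold': ['sub-01_task-rest_bold.nii.gz'],
        't2w': [], 'flair': [], 'fmap': [], 'sbref': [], 'roi': [], 'pet': [], 'asl': [],
    }
    res = grabber.run()  # raises FileNotFoundError if t1w or bold is missing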
class _PrepareDerivativeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
- check_hdr = traits.Bool(True, usedefault=True, desc="fix headers of NIfTI outputs")
+ check_hdr = traits.Bool(True, usedefault=True, desc='fix headers of NIfTI outputs')
compress = InputMultiObject(
traits.Either(None, traits.Bool),
usedefault=True,
- desc="whether ``in_file`` should be compressed (True), uncompressed (False) "
- "or left unmodified (None, default).",
+ desc='whether ``in_file`` should be compressed (True), uncompressed (False) '
+ 'or left unmodified (None, default).',
)
data_dtype = Str(
- desc="NumPy datatype to coerce NIfTI data to, or `source` to match the input file dtype"
+ desc='NumPy datatype to coerce NIfTI data to, or `source` to match the input file dtype'
)
dismiss_entities = InputMultiObject(
traits.Either(None, Str),
usedefault=True,
- desc="a list entities that will not be propagated from the source file",
+ desc='a list of entities that will not be propagated from the source file',
)
- in_file = InputMultiObject(
- File(exists=True), mandatory=True, desc="the object to be saved"
- )
- meta_dict = traits.DictStrAny(desc="an input dictionary containing metadata")
+ in_file = InputMultiObject(File(exists=True), mandatory=True, desc='the object to be saved')
+ meta_dict = traits.DictStrAny(desc='an input dictionary containing metadata')
source_file = InputMultiObject(
- File(exists=False), mandatory=True, desc="the source file(s) to extract entities from")
+ File(exists=False), mandatory=True, desc='the source file(s) to extract entities from'
+ )
class _PrepareDerivativeOutputSpec(TraitedSpec):
- out_file = OutputMultiObject(File(exists=True), desc="derivative file path")
- out_meta = traits.DictStrAny(desc="derivative metadata")
- out_path = OutputMultiObject(Str, desc="relative path in target directory")
- fixed_hdr = traits.List(traits.Bool, desc="whether derivative header was fixed")
+ out_file = OutputMultiObject(File(exists=True), desc='derivative file path')
+ out_meta = traits.DictStrAny(desc='derivative metadata')
+ out_path = OutputMultiObject(Str, desc='relative path in target directory')
+ fixed_hdr = traits.List(traits.Bool, desc='whether derivative header was fixed')
@@ -835,7 +830,7 @@ Source code for niworkflows.interfaces.bids
input_spec = _PrepareDerivativeInputSpec
output_spec = _PrepareDerivativeOutputSpec
- _config_entities = frozenset({e["name"] for e in BIDS_DERIV_ENTITIES})
+ _config_entities = frozenset({e['name'] for e in BIDS_DERIV_ENTITIES})
_config_entities_dict = BIDS_DERIV_ENTITIES
_standard_spaces = STANDARD_SPACES
_file_patterns = BIDS_DERIV_PATTERNS
@@ -843,9 +838,7 @@ Source code for niworkflows.interfaces.bids
def __init__(self, allowed_entities=None, **inputs):
"""Initialize the SimpleInterface and extend inputs with custom entities."""
- self._allowed_entities = set(allowed_entities or []).union(
- set(self._config_entities)
- )
+ self._allowed_entities = set(allowed_entities or []).union(set(self._config_entities))
self._metadata = {}
self._static_traits = self.input_spec.class_editable_traits() + sorted(
@@ -862,7 +855,7 @@ Source code for niworkflows.interfaces.bids
setattr(self.inputs, k, inputs[k])
def _run_interface(self, runtime):
- from bids.layout import parse_file_entities, Config
+ from bids.layout import Config, parse_file_entities
from bids.layout.writing import build_path
from bids.utils import listify
@@ -873,47 +866,52 @@ Source code for niworkflows.interfaces.bids
# Middle precedence: metadata passed to constructor
**self._metadata,
# Highest precedence: metadata set as inputs
- **({
- k: getattr(self.inputs, k)
- for k in self.inputs.copyable_trait_names()
- if k not in self._static_traits
- })
+ **(
+ {
+ k: getattr(self.inputs, k)
+ for k in self.inputs.copyable_trait_names()
+ if k not in self._static_traits
+ }
+ ),
}
in_file = listify(self.inputs.in_file)
# Initialize entities with those from the source file.
custom_config = Config(
- name="custom",
+ name='custom',
entities=self._config_entities_dict,
default_path_patterns=self._file_patterns,
)
in_entities = [
parse_file_entities(
str(relative_to_root(source_file)),
- config=["bids", "derivatives", custom_config],
+ config=['bids', 'derivatives', custom_config],
)
for source_file in self.inputs.source_file
]
- out_entities = {k: v for k, v in in_entities[0].items()
- if all(ent.get(k) == v for ent in in_entities[1:])}
+ out_entities = {
+ k: v
+ for k, v in in_entities[0].items()
+ if all(ent.get(k) == v for ent in in_entities[1:])
+ }
for drop_entity in listify(self.inputs.dismiss_entities or []):
out_entities.pop(drop_entity, None)
# Override extension with that of the input file(s)
- out_entities["extension"] = [
+ out_entities['extension'] = [
# _splitext does not accept .surf.gii (for instance)
- "".join(Path(orig_file).suffixes).lstrip(".")
+ ''.join(Path(orig_file).suffixes).lstrip('.')
for orig_file in in_file
]
compress = listify(self.inputs.compress) or [None]
if len(compress) == 1:
compress = compress * len(in_file)
- for i, ext in enumerate(out_entities["extension"]):
+ for i, ext in enumerate(out_entities['extension']):
if compress[i] is not None:
- ext = regz.sub("", ext)
- out_entities["extension"][i] = f"{ext}.gz" if compress[i] else ext
+ ext = regz.sub('', ext)
+ out_entities['extension'][i] = f'{ext}.gz' if compress[i] else ext
# Override entities with those set as inputs
for key in self._allowed_entities:
@@ -922,52 +920,51 @@ Source code for niworkflows.interfaces.bids
out_entities[key] = value
# Clean up native resolution with space
- if out_entities.get("resolution") == "native" and out_entities.get("space"):
- out_entities.pop("resolution", None)
+ if out_entities.get('resolution') == 'native' and out_entities.get('space'):
+ out_entities.pop('resolution', None)
# Expand templateflow resolutions
- resolution = out_entities.get("resolution")
- space = out_entities.get("space")
+ resolution = out_entities.get('resolution')
+ space = out_entities.get('space')
if resolution:
# Standard spaces
if space in self._standard_spaces:
res = _get_tf_resolution(space, resolution)
else: # TODO: Nonstandard?
- res = "Unknown"
+ res = 'Unknown'
metadata['Resolution'] = res
- if len(set(out_entities["extension"])) == 1:
- out_entities["extension"] = out_entities["extension"][0]
+ if len(set(out_entities['extension'])) == 1:
+ out_entities['extension'] = out_entities['extension'][0]
# Insert custom (non-BIDS) entities from allowed_entities.
custom_entities = set(out_entities) - set(self._config_entities)
patterns = self._file_patterns
if custom_entities:
# Example: f"{key}-{{{key}}}" -> "task-{task}"
- custom_pat = "_".join(f"{key}-{{{key}}}" for key in sorted(custom_entities))
+ custom_pat = '_'.join(f'{key}-{{{key}}}' for key in sorted(custom_entities))
patterns = [
- pat.replace("_{suffix", "_".join(("", custom_pat, "{suffix")))
- for pat in patterns
+ pat.replace('_{suffix', '_'.join(('', custom_pat, '{suffix'))) for pat in patterns
]
# Build the output path(s)
dest_files = build_path(out_entities, path_patterns=patterns)
if not dest_files:
- raise ValueError(f"Could not build path with entities {out_entities}.")
+ raise ValueError(f'Could not build path with entities {out_entities}.')
# Make sure the interpolated values are embedded in a list, and check
dest_files = listify(dest_files)
if len(in_file) != len(dest_files):
raise ValueError(
- f"Input files ({len(in_file)}) not matched "
- f"by interpolated patterns ({len(dest_files)})."
+ f'Input files ({len(in_file)}) not matched '
+ f'by interpolated patterns ({len(dest_files)}).'
)
# Prepare SimpleInterface outputs object
- self._results["out_file"] = []
- self._results["fixed_hdr"] = [False] * len(in_file)
- self._results["out_path"] = dest_files
- self._results["out_meta"] = metadata
+ self._results['out_file'] = []
+ self._results['fixed_hdr'] = [False] * len(in_file)
+ self._results['out_path'] = dest_files
+ self._results['out_meta'] = metadata
for i, (orig_file, dest_file) in enumerate(zip(in_file, dest_files)):
# Set data and header iff changes need to be made. If these are
@@ -980,9 +977,9 @@ Source code for niworkflows.interfaces.bids
new_compression = False
if is_nifti:
- new_compression = (
- os.fspath(orig_file).endswith(".gz") ^ os.fspath(dest_file).endswith(".gz")
- )
+ new_compression = os.fspath(orig_file).endswith('.gz') ^ os.fspath(
+ dest_file
+ ).endswith('.gz')
data_dtype = self.inputs.data_dtype or self._default_dtypes[self.inputs.suffix]
if is_nifti and any((self.inputs.check_hdr, data_dtype)):
@@ -991,39 +988,37 @@ Source code for niworkflows.interfaces.bids
if self.inputs.check_hdr:
hdr = nii.header
curr_units = tuple(
- [None if u == "unknown" else u for u in hdr.get_xyzt_units()]
+ [None if u == 'unknown' else u for u in hdr.get_xyzt_units()]
)
- curr_codes = (int(hdr["qform_code"]), int(hdr["sform_code"]))
+ curr_codes = (int(hdr['qform_code']), int(hdr['sform_code']))
# Default to mm, use sec if data type is bold
units = (
- curr_units[0] or "mm",
- "sec" if out_entities["suffix"] == "bold" else None,
+ curr_units[0] or 'mm',
+ 'sec' if out_entities['suffix'] == 'bold' else None,
)
xcodes = (1, 1) # Derivative in its original scanner space
if self.inputs.space:
- xcodes = (
- (4, 4) if self.inputs.space in self._standard_spaces else (2, 2)
- )
+ xcodes = (4, 4) if self.inputs.space in self._standard_spaces else (2, 2)
curr_zooms = zooms = hdr.get_zooms()
- if "RepetitionTime" in self.inputs.get():
+ if 'RepetitionTime' in self.inputs.get():
zooms = curr_zooms[:3] + (self.inputs.RepetitionTime,)
if (curr_codes, curr_units, curr_zooms) != (xcodes, units, zooms):
- self._results["fixed_hdr"][i] = True
+ self._results['fixed_hdr'][i] = True
new_header = hdr.copy()
new_header.set_qform(nii.affine, xcodes[0])
new_header.set_sform(nii.affine, xcodes[1])
new_header.set_xyzt_units(*units)
new_header.set_zooms(zooms)
- if data_dtype == "source": # match source dtype
+ if data_dtype == 'source': # match source dtype
try:
data_dtype = nb.load(self.inputs.source_file[0]).get_data_dtype()
- except Exception:
+ except Exception: # noqa: BLE001
LOGGER.warning(
- f"Could not get data type of file {self.inputs.source_file[0]}"
+ f'Could not get data type of file {self.inputs.source_file[0]}'
)
data_dtype = None
@@ -1032,8 +1027,8 @@ Source code for niworkflows.interfaces.bids
orig_dtype = nii.get_data_dtype()
if orig_dtype != data_dtype:
LOGGER.warning(
- f"Changing {Path(dest_file).name} dtype "
- f"from {orig_dtype} to {data_dtype}"
+ f'Changing {Path(dest_file).name} dtype '
+ f'from {orig_dtype} to {data_dtype}'
)
# coerce dataobj to new data dtype
if np.issubdtype(data_dtype, np.integer):
@@ -1062,15 +1057,13 @@ Source code for niworkflows.interfaces.bids
else:
# Without this, we would be writing nans
# This is our punishment for hacking around nibabel defaults
- new_header.set_slope_inter(slope=1., inter=0.)
+ new_header.set_slope_inter(slope=1.0, inter=0.0)
unsafe_write_nifti_header_and_data(
- fname=out_file,
- header=new_header,
- data=new_data
+ fname=out_file, header=new_header, data=new_data
)
del orig_img
- self._results["out_file"].append(str(out_file))
+ self._results['out_file'].append(str(out_file))
return runtime
@@ -1078,20 +1071,18 @@ Source code for niworkflows.interfaces.bids
class _SaveDerivativeInputSpec(TraitedSpec):
base_directory = Directory(
- exists=True, mandatory=True, desc="Path to the base directory for storing data."
- )
- in_file = InputMultiObject(
- File(exists=True), mandatory=True, desc="the object to be saved"
+ exists=True, mandatory=True, desc='Path to the base directory for storing data.'
)
- metadata = traits.DictStrAny(desc="metadata to be saved alongside the file")
+ in_file = InputMultiObject(File(exists=True), mandatory=True, desc='the object to be saved')
+ metadata = traits.DictStrAny(desc='metadata to be saved alongside the file')
relative_path = InputMultiObject(
- traits.Str, desc="path to the file relative to the base directory"
+ traits.Str, desc='path to the file relative to the base directory'
)
class _SaveDerivativeOutputSpec(TraitedSpec):
- out_file = OutputMultiObject(File, desc="written file path")
- out_meta = OutputMultiObject(File, desc="written JSON sidecar path")
+ out_file = OutputMultiObject(File, desc='written file path')
+ out_meta = OutputMultiObject(File, desc='written JSON sidecar path')
@@ -1106,16 +1097,18 @@ Source code for niworkflows.interfaces.bids
This ensures that changes to the output directory metadata (e.g., mtime) do not
trigger unnecessary recomputations in the workflow.
"""
+
input_spec = _SaveDerivativeInputSpec
output_spec = _SaveDerivativeOutputSpec
_always_run = True
def _run_interface(self, runtime):
- self._results["out_file"] = []
- self._results["out_meta"] = []
+ self._results['out_file'] = []
+ self._results['out_meta'] = []
for in_file, relative_path in zip(
- self.inputs.in_file, self.inputs.relative_path,
+ self.inputs.in_file,
+ self.inputs.relative_path,
strict=True,
):
out_file = Path(self.inputs.base_directory) / relative_path
@@ -1128,50 +1121,46 @@ Source code for niworkflows.interfaces.bids
sidecar = out_file.parent / f"{out_file.name.split('.', 1)[0]}.json"
sidecar.unlink(missing_ok=True)
sidecar.write_text(dumps(self.inputs.metadata, indent=2))
- self._results["out_meta"].append(str(sidecar))
- self._results["out_file"].append(str(out_file))
+ self._results['out_meta'].append(str(sidecar))
+ self._results['out_file'].append(str(out_file))
return runtime
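``SaveDerivative`` thus boils down to a deterministic copy-into-place plus a JSON sidecar; splitting this off from path resolution is what lets workflow caching ignore output-directory mtimes. A hedged standalone sketch of the same save-plus-sidecar pattern (paths and metadata invented):

import shutil
from json import dumps
from pathlib import Path

def save_derivative(in_file, base_directory, relative_path, metadata=None):
    """Copy ``in_file`` under ``base_directory`` and write a JSON sidecar."""
    out_file = Path(base_directory) / relative_path
    out_file.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy(in_file, out_file)
    if metadata:
        # the sidecar shares the stem up to the first '.' (handles .nii.gz)
        sidecar = out_file.parent / f"{out_file.name.split('.', 1)[0]}.json"
        sidecar.unlink(missing_ok=True)
        sidecar.write_text(dumps(metadata, indent=2))
    return out_file

# e.g.: save_derivative('bold.nii.gz', '/out/derivs',
#                       'sub-01/func/sub-01_desc-preproc_bold.nii.gz',
#                       metadata={'RepetitionTime': 2.0})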
class _DerivativesDataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
- base_directory = traits.Directory(
- desc="Path to the base directory for storing data."
- )
- check_hdr = traits.Bool(True, usedefault=True, desc="fix headers of NIfTI outputs")
+ base_directory = traits.Directory(desc='Path to the base directory for storing data.')
+ check_hdr = traits.Bool(True, usedefault=True, desc='fix headers of NIfTI outputs')
compress = InputMultiObject(
traits.Either(None, traits.Bool),
usedefault=True,
- desc="whether ``in_file`` should be compressed (True), uncompressed (False) "
- "or left unmodified (None, default).",
+ desc='whether ``in_file`` should be compressed (True), uncompressed (False) '
+ 'or left unmodified (None, default).',
)
data_dtype = Str(
- desc="NumPy datatype to coerce NIfTI data to, or `source` to"
- "match the input file dtype"
+ desc='NumPy datatype to coerce NIfTI data to, or `source` to match the input file dtype'
)
dismiss_entities = InputMultiObject(
traits.Either(None, Str),
usedefault=True,
- desc="a list entities that will not be propagated from the source file",
+        desc='a list of entities that will not be propagated from the source file',
)
- in_file = InputMultiObject(
- File(exists=True), mandatory=True, desc="the object to be saved"
- )
- meta_dict = traits.DictStrAny(desc="an input dictionary containing metadata")
+ in_file = InputMultiObject(File(exists=True), mandatory=True, desc='the object to be saved')
+ meta_dict = traits.DictStrAny(desc='an input dictionary containing metadata')
source_file = InputMultiObject(
- File(exists=False), mandatory=True, desc="the source file(s) to extract entities from")
+ File(exists=False), mandatory=True, desc='the source file(s) to extract entities from'
+ )
class _DerivativesDataSinkOutputSpec(TraitedSpec):
- out_file = OutputMultiObject(File(exists=True, desc="written file path"))
- out_meta = OutputMultiObject(File(exists=True, desc="written JSON sidecar path"))
+ out_file = OutputMultiObject(File(exists=True, desc='written file path'))
+ out_meta = OutputMultiObject(File(exists=True, desc='written JSON sidecar path'))
compression = OutputMultiObject(
traits.Either(None, traits.Bool),
- desc="whether ``in_file`` should be compressed (True), uncompressed (False) "
- "or left unmodified (None).",
+ desc='whether ``in_file`` should be compressed (True), uncompressed (False) '
+ 'or left unmodified (None).',
)
- fixed_hdr = traits.List(traits.Bool, desc="whether derivative header was fixed")
+ fixed_hdr = traits.List(traits.Bool, desc='whether derivative header was fixed')
@@ -1350,9 +1339,9 @@ Source code for niworkflows.interfaces.bids
input_spec = _DerivativesDataSinkInputSpec
output_spec = _DerivativesDataSinkOutputSpec
- out_path_base = "niworkflows"
+ out_path_base = 'niworkflows'
_always_run = True
- _config_entities = frozenset({e["name"] for e in BIDS_DERIV_ENTITIES})
+ _config_entities = frozenset({e['name'] for e in BIDS_DERIV_ENTITIES})
_config_entities_dict = BIDS_DERIV_ENTITIES
_standard_spaces = STANDARD_SPACES
_file_patterns = BIDS_DERIV_PATTERNS
@@ -1360,9 +1349,7 @@ Source code for niworkflows.interfaces.bids
def __init__(self, allowed_entities=None, out_path_base=None, **inputs):
"""Initialize the SimpleInterface and extend inputs with custom entities."""
- self._allowed_entities = set(allowed_entities or []).union(
- set(self._config_entities)
- )
+ self._allowed_entities = set(allowed_entities or []).union(set(self._config_entities))
if out_path_base:
self.out_path_base = out_path_base
@@ -1381,7 +1368,7 @@ Source code for niworkflows.interfaces.bids
setattr(self.inputs, k, inputs[k])
def _run_interface(self, runtime):
- from bids.layout import parse_file_entities, Config
+ from bids.layout import Config, parse_file_entities
from bids.layout.writing import build_path
from bids.utils import listify
@@ -1405,36 +1392,39 @@ Source code for niworkflows.interfaces.bids
# Initialize entities with those from the source file.
custom_config = Config(
- name="custom",
+ name='custom',
entities=self._config_entities_dict,
default_path_patterns=self._file_patterns,
)
in_entities = [
parse_file_entities(
str(relative_to_root(source_file)),
- config=["bids", "derivatives", custom_config],
+ config=['bids', 'derivatives', custom_config],
)
for source_file in self.inputs.source_file
]
- out_entities = {k: v for k, v in in_entities[0].items()
- if all(ent.get(k) == v for ent in in_entities[1:])}
+ out_entities = {
+ k: v
+ for k, v in in_entities[0].items()
+ if all(ent.get(k) == v for ent in in_entities[1:])
+ }
for drop_entity in listify(self.inputs.dismiss_entities or []):
out_entities.pop(drop_entity, None)
# Override extension with that of the input file(s)
- out_entities["extension"] = [
+ out_entities['extension'] = [
# _splitext does not accept .surf.gii (for instance)
- "".join(Path(orig_file).suffixes).lstrip(".")
+ ''.join(Path(orig_file).suffixes).lstrip('.')
for orig_file in in_file
]
compress = listify(self.inputs.compress) or [None]
if len(compress) == 1:
compress = compress * len(in_file)
- for i, ext in enumerate(out_entities["extension"]):
+ for i, ext in enumerate(out_entities['extension']):
if compress[i] is not None:
- ext = regz.sub("", ext)
- out_entities["extension"][i] = f"{ext}.gz" if compress[i] else ext
+ ext = regz.sub('', ext)
+ out_entities['extension'][i] = f'{ext}.gz' if compress[i] else ext
# Override entities with those set as inputs
for key in self._allowed_entities:
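Note how the destination entities begin as the intersection across all source files, so the output name never asserts an entity on which the inputs disagree. A pybids sketch of that step, assuming pybids is installed (paths invented; pattern interpolation is sketched further below):

from bids.layout import parse_file_entities

sources = [
    'sub-01/func/sub-01_task-rest_run-1_bold.nii.gz',
    'sub-01/func/sub-01_task-rest_run-2_bold.nii.gz',
]
in_entities = [parse_file_entities(s) for s in sources]
out_entities = {
    k: v
    for k, v in in_entities[0].items()
    if all(ent.get(k) == v for ent in in_entities[1:])
}
print(out_entities)  # 'run' disagrees between the sources, so it is dropped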
@@ -1443,56 +1433,55 @@ Source code for niworkflows.interfaces.bids
out_entities[key] = value
# Clean up native resolution with space
- if out_entities.get("resolution") == "native" and out_entities.get("space"):
- out_entities.pop("resolution", None)
+ if out_entities.get('resolution') == 'native' and out_entities.get('space'):
+ out_entities.pop('resolution', None)
# Expand templateflow resolutions
- resolution = out_entities.get("resolution")
- space = out_entities.get("space")
+ resolution = out_entities.get('resolution')
+ space = out_entities.get('space')
if resolution:
# Standard spaces
if space in self._standard_spaces:
res = _get_tf_resolution(space, resolution)
else: # TODO: Nonstandard?
- res = "Unknown"
+ res = 'Unknown'
self._metadata['Resolution'] = res
- if len(set(out_entities["extension"])) == 1:
- out_entities["extension"] = out_entities["extension"][0]
+ if len(set(out_entities['extension'])) == 1:
+ out_entities['extension'] = out_entities['extension'][0]
# Insert custom (non-BIDS) entities from allowed_entities.
custom_entities = set(out_entities) - set(self._config_entities)
patterns = self._file_patterns
if custom_entities:
# Example: f"{key}-{{{key}}}" -> "task-{task}"
- custom_pat = "_".join(f"{key}-{{{key}}}" for key in sorted(custom_entities))
+ custom_pat = '_'.join(f'{key}-{{{key}}}' for key in sorted(custom_entities))
patterns = [
- pat.replace("_{suffix", "_".join(("", custom_pat, "{suffix")))
- for pat in patterns
+ pat.replace('_{suffix', '_'.join(('', custom_pat, '{suffix'))) for pat in patterns
]
# Prepare SimpleInterface outputs object
- self._results["out_file"] = []
- self._results["compression"] = []
- self._results["fixed_hdr"] = [False] * len(in_file)
+ self._results['out_file'] = []
+ self._results['compression'] = []
+ self._results['fixed_hdr'] = [False] * len(in_file)
dest_files = build_path(out_entities, path_patterns=patterns)
if not dest_files:
- raise ValueError(f"Could not build path with entities {out_entities}.")
+ raise ValueError(f'Could not build path with entities {out_entities}.')
    # Make sure the interpolated values are embedded in a list, and check
dest_files = listify(dest_files)
if len(in_file) != len(dest_files):
raise ValueError(
- f"Input files ({len(in_file)}) not matched "
- f"by interpolated patterns ({len(dest_files)})."
+ f'Input files ({len(in_file)}) not matched '
+ f'by interpolated patterns ({len(dest_files)}).'
)
for i, (orig_file, dest_file) in enumerate(zip(in_file, dest_files)):
out_file = out_path / dest_file
out_file.parent.mkdir(exist_ok=True, parents=True)
- self._results["out_file"].append(str(out_file))
- self._results["compression"].append(str(dest_file).endswith(".gz"))
+ self._results['out_file'].append(str(out_file))
+ self._results['compression'].append(str(dest_file).endswith('.gz'))
# An odd but possible case is that an input file is in the location of
# the output and we have made no changes to it.
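``build_path`` then interpolates the merged entities into the first matching pattern; the listify/length checks cover the case where a multi-valued ``extension`` fans out into several destination files. A minimal pybids interpolation example (pattern and entities illustrative):

from bids.layout.writing import build_path

entities = {'subject': '01', 'task': 'rest', 'suffix': 'bold', 'extension': '.nii.gz'}
pattern = 'sub-{subject}/func/sub-{subject}_task-{task}_{suffix}{extension}'
print(build_path(entities, path_patterns=[pattern]))
# sub-01/func/sub-01_task-rest_bold.nii.gz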
@@ -1522,39 +1511,37 @@ Source code for niworkflows.interfaces.bids
if self.inputs.check_hdr:
hdr = nii.header
curr_units = tuple(
- [None if u == "unknown" else u for u in hdr.get_xyzt_units()]
+ [None if u == 'unknown' else u for u in hdr.get_xyzt_units()]
)
- curr_codes = (int(hdr["qform_code"]), int(hdr["sform_code"]))
+ curr_codes = (int(hdr['qform_code']), int(hdr['sform_code']))
# Default to mm, use sec if data type is bold
units = (
- curr_units[0] or "mm",
- "sec" if out_entities["suffix"] == "bold" else None,
+ curr_units[0] or 'mm',
+ 'sec' if out_entities['suffix'] == 'bold' else None,
)
xcodes = (1, 1) # Derivative in its original scanner space
if self.inputs.space:
- xcodes = (
- (4, 4) if self.inputs.space in self._standard_spaces else (2, 2)
- )
+ xcodes = (4, 4) if self.inputs.space in self._standard_spaces else (2, 2)
curr_zooms = zooms = hdr.get_zooms()
- if "RepetitionTime" in self.inputs.get():
+ if 'RepetitionTime' in self.inputs.get():
zooms = curr_zooms[:3] + (self.inputs.RepetitionTime,)
if (curr_codes, curr_units, curr_zooms) != (xcodes, units, zooms):
- self._results["fixed_hdr"][i] = True
+ self._results['fixed_hdr'][i] = True
new_header = hdr.copy()
new_header.set_qform(nii.affine, xcodes[0])
new_header.set_sform(nii.affine, xcodes[1])
new_header.set_xyzt_units(*units)
new_header.set_zooms(zooms)
- if data_dtype == "source": # match source dtype
+ if data_dtype == 'source': # match source dtype
try:
data_dtype = nb.load(self.inputs.source_file[0]).get_data_dtype()
- except Exception:
+ except Exception: # noqa: BLE001
LOGGER.warning(
- f"Could not get data type of file {self.inputs.source_file[0]}"
+ f'Could not get data type of file {self.inputs.source_file[0]}'
)
data_dtype = None
@@ -1563,7 +1550,7 @@ Source code for niworkflows.interfaces.bids
orig_dtype = nii.get_data_dtype()
if orig_dtype != data_dtype:
LOGGER.warning(
- f"Changing {out_file} dtype from {orig_dtype} to {data_dtype}"
+ f'Changing {out_file} dtype from {orig_dtype} to {data_dtype}'
)
# coerce dataobj to new data dtype
if np.issubdtype(data_dtype, np.integer):
@@ -1587,34 +1574,28 @@ Source code for niworkflows.interfaces.bids
else:
# Without this, we would be writing nans
# This is our punishment for hacking around nibabel defaults
- new_header.set_slope_inter(slope=1., inter=0.)
+ new_header.set_slope_inter(slope=1.0, inter=0.0)
unsafe_write_nifti_header_and_data(
- fname=out_file,
- header=new_header,
- data=new_data
+ fname=out_file, header=new_header, data=new_data
)
del orig_img
- if len(self._results["out_file"]) == 1:
+ if len(self._results['out_file']) == 1:
meta_fields = self.inputs.copyable_trait_names()
self._metadata.update(
- {
- k: getattr(self.inputs, k)
- for k in meta_fields
- if k not in self._static_traits
- }
+ {k: getattr(self.inputs, k) for k in meta_fields if k not in self._static_traits}
)
if self._metadata:
sidecar = out_file.parent / f"{out_file.name.split('.', 1)[0]}.json"
unlink(sidecar, missing_ok=True)
sidecar.write_text(dumps(self._metadata, sort_keys=True, indent=2))
- self._results["out_meta"] = str(sidecar)
+ self._results['out_meta'] = str(sidecar)
return runtime
class _ReadSidecarJSONInputSpec(_BIDSBaseInputSpec):
- in_file = File(exists=True, mandatory=True, desc="the input nifti file")
+ in_file = File(exists=True, mandatory=True, desc='the input nifti file')
class _ReadSidecarJSONOutputSpec(_BIDSInfoOutputSpec):
@@ -1691,29 +1672,23 @@ Source code for niworkflows.interfaces.bids
self.inputs.in_file,
self.layout,
self.inputs.bids_validate,
- database_path=(
- self.inputs.index_db if isdefined(self.inputs.index_db)
- else None
- )
+ database_path=(self.inputs.index_db if isdefined(self.inputs.index_db) else None),
)
# Fill in BIDS entities of the output ("*_id")
output_keys = list(_BIDSInfoOutputSpec().get().keys())
params = self.layout.parse_file_entities(self.inputs.in_file)
- self._results = {
- key: params.get(key.split("_")[0], Undefined) for key in output_keys
- }
+ self._results = {key: params.get(key.split('_')[0], Undefined) for key in output_keys}
# Fill in metadata
metadata = self.layout.get_metadata(self.inputs.in_file)
- self._results["out_dict"] = metadata
+ self._results['out_dict'] = metadata
# Set dynamic outputs if fields input is present
for fname in self._fields:
if not self._undef_fields and fname not in metadata:
raise KeyError(
- 'Metadata field "%s" not found for file %s'
- % (fname, self.inputs.in_file)
+ f'Metadata field "{fname}" not found for file {self.inputs.in_file}'
)
self._results[fname] = metadata.get(fname, Undefined)
return runtime
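``layout.get_metadata`` merges JSON sidecars along the BIDS inheritance chain for the queried file, which is what lets the required-field check above fail loudly on a missing key. A hedged usage sketch against a hypothetical dataset root:

from bids.layout import BIDSLayout

layout = BIDSLayout('/data/bids')  # hypothetical dataset root
bold = layout.get(suffix='bold', extension='.nii.gz')[0]
metadata = layout.get_metadata(bold.path)  # merged across the inheritance chain
if 'RepetitionTime' not in metadata:
    raise KeyError(f'Metadata field "RepetitionTime" not found for file {bold.path}')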
@@ -1721,28 +1696,26 @@ Source code for niworkflows.interfaces.bids
class _BIDSFreeSurferDirInputSpec(BaseInterfaceInputSpec):
- derivatives = Directory(
- exists=True, mandatory=True, desc="BIDS derivatives directory"
- )
+ derivatives = Directory(exists=True, mandatory=True, desc='BIDS derivatives directory')
freesurfer_home = Directory(
- exists=True, mandatory=True, desc="FreeSurfer installation directory"
+ exists=True, mandatory=True, desc='FreeSurfer installation directory'
)
subjects_dir = traits.Either(
traits.Str(),
Directory(),
- default="freesurfer",
+ default='freesurfer',
usedefault=True,
- desc="Name of FreeSurfer subjects directory",
+ desc='Name of FreeSurfer subjects directory',
)
- spaces = traits.List(traits.Str, desc="Set of output spaces to prepare")
+ spaces = traits.List(traits.Str, desc='Set of output spaces to prepare')
overwrite_fsaverage = traits.Bool(
- False, usedefault=True, desc="Overwrite fsaverage directories, if present"
+ False, usedefault=True, desc='Overwrite fsaverage directories, if present'
)
- minimum_fs_version = traits.Enum("7.0.0", desc="Minimum FreeSurfer version for compatibility")
+ minimum_fs_version = traits.Enum('7.0.0', desc='Minimum FreeSurfer version for compatibility')
class _BIDSFreeSurferDirOutputSpec(TraitedSpec):
- subjects_dir = traits.Directory(exists=True, desc="FreeSurfer subjects directory")
+ subjects_dir = traits.Directory(exists=True, desc='FreeSurfer subjects directory')
@@ -1777,9 +1750,9 @@ Source code for niworkflows.interfaces.bids
if not subjects_dir.is_absolute():
subjects_dir = Path(self.inputs.derivatives) / subjects_dir
subjects_dir.mkdir(parents=True, exist_ok=True)
- self._results["subjects_dir"] = str(subjects_dir)
+ self._results['subjects_dir'] = str(subjects_dir)
- orig_subjects_dir = Path(self.inputs.freesurfer_home) / "subjects"
+ orig_subjects_dir = Path(self.inputs.freesurfer_home) / 'subjects'
# Source is target, so just quit
if subjects_dir == orig_subjects_dir:
@@ -1787,12 +1760,12 @@ Source code for niworkflows.interfaces.bids
spaces = list(self.inputs.spaces)
# Always copy fsaverage, for proper recon-all functionality
- if "fsaverage" not in spaces:
- spaces.append("fsaverage")
+ if 'fsaverage' not in spaces:
+ spaces.append('fsaverage')
for space in spaces:
# Skip non-freesurfer spaces and fsnative
- if not space.startswith("fsaverage"):
+ if not space.startswith('fsaverage'):
continue
source = orig_subjects_dir / space
dest = subjects_dir / space
@@ -1802,12 +1775,12 @@ Source code for niworkflows.interfaces.bids
if dest.exists():
continue
else:
- raise FileNotFoundError("Expected to find '%s' to copy" % source)
+ raise FileNotFoundError(f"Expected to find '{source}' to copy")
if (
space == 'fsaverage'
and dest.exists()
- and self.inputs.minimum_fs_version == "7.0.0"
+ and self.inputs.minimum_fs_version == '7.0.0'
):
label = dest / 'label' / 'rh.FG1.mpm.vpnl.label' # new in FS7
if not label.exists():
@@ -1827,8 +1800,8 @@ Source code for niworkflows.interfaces.bids
shutil.copytree(source, dest, copy_function=shutil.copy)
except FileExistsError:
LOGGER.warning(
- "%s exists; if multiple jobs are running in parallel"
- ", this can be safely ignored",
+ '%s exists; if multiple jobs are running in parallel'
+ ', this can be safely ignored',
dest,
)
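Catching ``FileExistsError`` makes the fsaverage copy safe under concurrency: if a parallel job created the destination first, this one simply moves on. The same pattern in isolation (names illustrative):

import shutil
from pathlib import Path

def copy_template_dir(source: Path, dest: Path) -> None:
    """Copy a FreeSurfer template directory, tolerating a concurrent writer."""
    try:
        # copy_function=shutil.copy drops file metadata, keeping re-runs cheap
        shutil.copytree(source, dest, copy_function=shutil.copy)
    except FileExistsError:
        print(f'{dest} exists; if jobs run in parallel this is safe to ignore')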
@@ -1859,11 +1832,11 @@ Source code for niworkflows.interfaces.bids
if r in resolutions:
res_meta = resolutions[r]
if res_meta is None:
- return "Unknown"
+ return 'Unknown'
def _fmt_xyz(coords: list) -> str:
- xyz = "x".join([str(c) for c in coords])
- return f"{xyz} mm^3"
+ xyz = 'x'.join([str(c) for c in coords])
+ return f'{xyz} mm^3'
return (
f"Template {space} ({_fmt_xyz(res_meta['zooms'])}),"
diff --git a/master/_modules/niworkflows/interfaces/bold.html b/master/_modules/niworkflows/interfaces/bold.html
index 1ef7b39c36d..cdee2c4f647 100644
--- a/master/_modules/niworkflows/interfaces/bold.html
+++ b/master/_modules/niworkflows/interfaces/bold.html
@@ -353,45 +353,47 @@ Source code for niworkflows.interfaces.bold
# https://www.nipreps.org/community/licensing/
#
"""Utilities for BOLD fMRI imaging."""
-import numpy as np
+
import nibabel as nb
+import numpy as np
from nipype import logging
from nipype.interfaces.base import (
- traits,
- TraitedSpec,
BaseInterfaceInputSpec,
- SimpleInterface,
File,
+ SimpleInterface,
+ TraitedSpec,
+ traits,
)
-LOGGER = logging.getLogger("nipype.interface")
+LOGGER = logging.getLogger('nipype.interface')
class _NonsteadyStatesDetectorInputSpec(BaseInterfaceInputSpec):
- in_file = File(exists=True, mandatory=True, desc="BOLD fMRI timeseries")
- nonnegative = traits.Bool(True, usedefault=True,
- desc="whether image voxels must be nonnegative")
+ in_file = File(exists=True, mandatory=True, desc='BOLD fMRI timeseries')
+ nonnegative = traits.Bool(
+ True, usedefault=True, desc='whether image voxels must be nonnegative'
+ )
n_volumes = traits.Range(
value=40,
low=10,
high=200,
usedefault=True,
- desc="drop volumes in 4D image beyond this timepoint",
+ desc='drop volumes in 4D image beyond this timepoint',
)
zero_dummy_masked = traits.Range(
value=20,
low=2,
high=40,
usedefault=True,
- desc="number of timepoints to average when the number of dummies is zero"
+ desc='number of timepoints to average when the number of dummies is zero',
)
class _NonsteadyStatesDetectorOutputSpec(TraitedSpec):
t_mask = traits.List(
- traits.Bool, desc="list of nonsteady-states (True) and stable (False) volumes"
+ traits.Bool, desc='list of nonsteady-states (True) and stable (False) volumes'
)
- n_dummy = traits.Int(desc="number of volumes identified as nonsteady states")
+ n_dummy = traits.Int(desc='number of volumes identified as nonsteady states')
@@ -409,29 +411,29 @@ Source code for niworkflows.interfaces.bold
t_mask = np.zeros((ntotal,), dtype=bool)
if ntotal == 1:
- self._results["t_mask"] = [True]
- self._results["n_dummy"] = 1
+ self._results['t_mask'] = [True]
+ self._results['n_dummy'] = 1
return runtime
from nipype.algorithms.confounds import is_outlier
- data = img.get_fdata(dtype="float32")[..., :self.inputs.n_volumes]
+ data = img.get_fdata(dtype='float32')[..., : self.inputs.n_volumes]
# Data can come with outliers showing very high numbers - preemptively prune
data = np.clip(
data,
a_min=0.0 if self.inputs.nonnegative else np.percentile(data, 0.2),
a_max=np.percentile(data, 99.8),
)
- self._results["n_dummy"] = is_outlier(np.mean(data, axis=(0, 1, 2)))
+ self._results['n_dummy'] = is_outlier(np.mean(data, axis=(0, 1, 2)))
start = 0
- stop = self._results["n_dummy"]
+ stop = self._results['n_dummy']
if stop < 2:
stop = min(ntotal, self.inputs.n_volumes)
start = max(0, stop - self.inputs.zero_dummy_masked)
t_mask[start:stop] = True
- self._results["t_mask"] = t_mask.tolist()
+ self._results['t_mask'] = t_mask.tolist()
return runtime
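The detector collapses the 4D series to a global mean timeseries and asks nipype's ``is_outlier`` how many initial volumes deviate; the clipping keeps extreme spikes from biasing that estimate. A condensed sketch of the same computation (file name hypothetical):

import nibabel as nb
import numpy as np
from nipype.algorithms.confounds import is_outlier

img = nb.load('sub-01_task-rest_bold.nii.gz')  # hypothetical 4D BOLD series
data = img.get_fdata(dtype='float32')[..., :40]  # cap the volumes considered
data = np.clip(data, a_min=0.0, a_max=np.percentile(data, 99.8))
n_dummy = is_outlier(np.mean(data, axis=(0, 1, 2)))  # nonsteady volumes at start
print(f'{n_dummy} nonsteady-state volume(s) detected')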
diff --git a/master/_modules/niworkflows/interfaces/cifti.html b/master/_modules/niworkflows/interfaces/cifti.html
index 0187ec55146..8715a1ccfed 100644
--- a/master/_modules/niworkflows/interfaces/cifti.html
+++ b/master/_modules/niworkflows/interfaces/cifti.html
@@ -353,81 +353,80 @@ Source code for niworkflows.interfaces.cifti
# https://www.nipreps.org/community/licensing/