From ab64f37c2d0cd3ab1160d99cfe4ba27874b69cc2 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 5 May 2024 13:12:32 +0200 Subject: [PATCH 01/34] STY: Apply ruff/flake8-implicit-str-concat rule ISC001 ISC001 Implicitly concatenated string literals on one line This rule is currently disabled because it conflicts with the formatter: https://github.com/astral-sh/ruff/issues/8272 --- nibabel/streamlines/__init__.py | 2 +- nibabel/volumeutils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nibabel/streamlines/__init__.py b/nibabel/streamlines/__init__.py index dd00a1e84..46b403b42 100644 --- a/nibabel/streamlines/__init__.py +++ b/nibabel/streamlines/__init__.py @@ -131,7 +131,7 @@ def save(tractogram, filename, **kwargs): warnings.warn(msg, ExtensionWarning) if kwargs: - msg = "A 'TractogramFile' object was provided, no need for" ' keyword arguments.' + msg = "A 'TractogramFile' object was provided, no need for keyword arguments." raise ValueError(msg) tractogram_file.save(filename) diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index cf2437e62..379d654a3 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -441,7 +441,7 @@ def array_from_file( True """ if mmap not in (True, False, 'c', 'r', 'r+'): - raise ValueError("mmap value should be one of True, False, 'c', " "'r', 'r+'") + raise ValueError("mmap value should be one of True, False, 'c', 'r', 'r+'") in_dtype = np.dtype(in_dtype) # Get file-like object from Opener instance infile = getattr(infile, 'fobj', infile) From 1bd8c262c8ac1adb17eeb313456232488f721d83 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 21 May 2024 18:16:51 -0400 Subject: [PATCH 02/34] MNT: Fix ruff arg in pre-commit config --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 354bd3da1..b348393a4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ repos: rev: v0.3.4 hooks: - id: ruff - args: [--fix, --show-fix, --exit-non-zero-on-fix] + args: [--fix, --show-fixes, --exit-non-zero-on-fix] exclude: = ["doc", "tools"] - id: ruff-format exclude: = ["doc", "tools"] From d571b92588447871fb8d869642d8053db44f1b74 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Tue, 21 May 2024 18:16:58 -0400 Subject: [PATCH 03/34] ENH: Add Nifti2 capabilities to nib-nifti-dx --- nibabel/cmdline/nifti_dx.py | 27 +++++++++++++++++++-------- nibabel/tests/test_scripts.py | 2 +- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/nibabel/cmdline/nifti_dx.py b/nibabel/cmdline/nifti_dx.py index 103bbf264..eb917a04b 100644 --- a/nibabel/cmdline/nifti_dx.py +++ b/nibabel/cmdline/nifti_dx.py @@ -9,8 +9,7 @@ ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## """Print nifti diagnostics for header files""" -import sys -from optparse import OptionParser +from argparse import ArgumentParser import nibabel as nib @@ -21,15 +20,27 @@ def main(args=None): """Go go team""" - parser = OptionParser( - usage=f'{sys.argv[0]} [FILE ...]\n\n' + __doc__, version='%prog ' + nib.__version__ + parser = ArgumentParser(description=__doc__) + parser.add_argument('--version', action='version', version=f'%(prog)s {nib.__version__}') + parser.add_argument( + '-1', + '--nifti1', + dest='header_class', + action='store_const', + const=nib.Nifti1Header, + default=nib.Nifti1Header, ) - (opts, files) = parser.parse_args(args=args) + 
parser.add_argument(
+        '-2', '--nifti2', dest='header_class', action='store_const', const=nib.Nifti2Header
+    )
+    parser.add_argument('files', nargs='*', metavar='FILE', help='Nifti file names')
+
+    args = parser.parse_args(args=args)
 
-    for fname in files:
+    for fname in args.files:
         with nib.openers.ImageOpener(fname) as fobj:
-            hdr = fobj.read(nib.nifti1.header_dtype.itemsize)
-            result = nib.Nifti1Header.diagnose_binaryblock(hdr)
+            hdr = fobj.read(args.header_class.template_dtype.itemsize)
+            result = args.header_class.diagnose_binaryblock(hdr)
         if len(result):
             print(f'Picky header check output for "{fname}"\n')
             print(result + '\n')
diff --git a/nibabel/tests/test_scripts.py b/nibabel/tests/test_scripts.py
index 455a994ae..d97c99d05 100644
--- a/nibabel/tests/test_scripts.py
+++ b/nibabel/tests/test_scripts.py
@@ -202,7 +202,7 @@ def test_help():
     code, stdout, stderr = run_command([cmd, '--help'])
     assert code == 0
     assert_re_in(f'.*{cmd}', stdout)
-    assert_re_in('.*Usage', stdout)
+    assert_re_in('.*[uU]sage', stdout)
     # Some third party modules might like to announce some Deprecation
     # etc warnings, see e.g. https://travis-ci.org/nipy/nibabel/jobs/370353602
     if 'warning' not in stderr.lower():

From 82c8588528d5a06fd0dfc99e3cbb83d5cc299e2b Mon Sep 17 00:00:00 2001
From: Sandro
Date: Wed, 29 May 2024 00:20:34 +0200
Subject: [PATCH 04/34] Replace deprecated setup() and teardown()

Those were compatibility functions for porting from nose. They are now
deprecated and have been removed from pytest. This will make all tests
compatible with pytest 8.x.
---
 nibabel/streamlines/tests/test_streamlines.py | 2 +-
 nibabel/tests/test_deprecated.py              | 4 ++--
 nibabel/tests/test_dft.py                     | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py
index f0bd9c7c4..53a43c393 100644
--- a/nibabel/streamlines/tests/test_streamlines.py
+++ b/nibabel/streamlines/tests/test_streamlines.py
@@ -20,7 +20,7 @@
 DATA = {}
 
 
-def setup():
+def setup_module():
     global DATA
     DATA['empty_filenames'] = [pjoin(data_path, 'empty' + ext) for ext in FORMATS.keys()]
     DATA['simple_filenames'] = [pjoin(data_path, 'simple' + ext) for ext in FORMATS.keys()]
diff --git a/nibabel/tests/test_deprecated.py b/nibabel/tests/test_deprecated.py
index f1c3d517c..01636632e 100644
--- a/nibabel/tests/test_deprecated.py
+++ b/nibabel/tests/test_deprecated.py
@@ -14,12 +14,12 @@
 from nibabel.tests.test_deprecator import TestDeprecatorFunc as _TestDF
 
 
-def setup():
+def setup_module():
     # Hack nibabel version string
     pkg_info.cmp_pkg_version.__defaults__ = ('2.0',)
 
 
-def teardown():
+def teardown_module():
     # Hack nibabel version string back again
     pkg_info.cmp_pkg_version.__defaults__ = (pkg_info.__version__,)
 
diff --git a/nibabel/tests/test_dft.py b/nibabel/tests/test_dft.py
index 654af9827..6c6695b16 100644
--- a/nibabel/tests/test_dft.py
+++ b/nibabel/tests/test_dft.py
@@ -26,7 +26,7 @@
 data_dir = pjoin(dirname(__file__), 'data')
 
 
-def setUpModule():
+def setup_module():
     if os.name == 'nt':
         raise unittest.SkipTest('FUSE not available for windows, skipping dft tests')
     if not have_dicom:

From 95e7c156e0d115c222f4a7e9545f27edd8f6dced Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?=
Date: Wed, 26 Jun 2024 19:43:01 -0400
Subject: [PATCH 05/34] RF: Prefer using `getlocale()` instead of `getdefaultlocale()`

Prefer using `getlocale()` instead of `getdefaultlocale()`.
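As a rough sketch of the replacement pattern (a standalone illustration, not
taken from the diff below):

```python
import locale

# Deprecated since Python 3.11 and slated for removal in 3.15:
#     encoding = locale.getdefaultlocale()[1]

# locale.getlocale() also returns a (language, encoding) pair,
# so the encoding is still the second element:
encoding = locale.getlocale()[1]
```

Note the two calls are not strictly equivalent: `getdefaultlocale()` derives
its answer from environment variables, while `getlocale()` reports the current
locale setting, so results can differ in some environments.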
Fixes:
```
/home/runner/work/nibabel/nibabel/nibabel/cmdline/dicomfs.py:40: DeprecationWarning: 'locale.getdefaultlocale' is deprecated and slated for removal in Python 3.15. Use setlocale(), getencoding() and getlocale() instead.
  encoding = locale.getdefaultlocale()[1]
```

raised for example at:
https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:164
---
 nibabel/cmdline/dicomfs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabel/cmdline/dicomfs.py b/nibabel/cmdline/dicomfs.py
index 66ffb8ade..552bb0931 100644
--- a/nibabel/cmdline/dicomfs.py
+++ b/nibabel/cmdline/dicomfs.py
@@ -37,7 +37,7 @@ class dummy_fuse:
 import nibabel as nib
 import nibabel.dft as dft
 
-encoding = locale.getdefaultlocale()[1]
+encoding = locale.getlocale()[1]
 
 fuse.fuse_python_api = (0, 2)
 

From 17809b067ddd22de438b9b49b116c2c496b7a752 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?=
Date: Wed, 26 Jun 2024 19:51:43 -0400
Subject: [PATCH 06/34] RF: Prefer using `np.vstack` instead of `np.row_stack`

Prefer using `np.vstack` instead of `np.row_stack`.

Fixes:
```
nibabel/ecat.py: 3 warnings
 /home/runner/work/nibabel/nibabel/nibabel/ecat.py:393: DeprecationWarning: `row_stack` alias is deprecated. Use `np.vstack` directly.
   return np.row_stack(mlists)
```

and similar warnings. Raised for example at:
https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:186

Documentation:
https://numpy.org/doc/1.26/reference/generated/numpy.row_stack.html

This helps prepare for full Numpy 2.0 compatibility.

Documentation:
https://numpy.org/doc/stable/numpy_2_0_migration_guide.html#main-namespace
---
 nibabel/brikhead.py | 2 +-
 nibabel/ecat.py     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/nibabel/brikhead.py b/nibabel/brikhead.py
index 3a3cfd087..da8692efd 100644
--- a/nibabel/brikhead.py
+++ b/nibabel/brikhead.py
@@ -391,7 +391,7 @@ def get_affine(self):
         # AFNI default is RAI- == LPS+ == DICOM order.  We need to flip RA sign
         # to align with nibabel RAS+ system
         affine = np.asarray(self.info['IJK_TO_DICOM_REAL']).reshape(3, 4)
-        affine = np.row_stack((affine * [[-1], [-1], [1]], [0, 0, 0, 1]))
+        affine = np.vstack((affine * [[-1], [-1], [1]], [0, 0, 0, 1]))
         return affine
 
     def get_data_scaling(self):
diff --git a/nibabel/ecat.py b/nibabel/ecat.py
index 03a4c72b9..34ff06323 100644
--- a/nibabel/ecat.py
+++ b/nibabel/ecat.py
@@ -390,7 +390,7 @@ def read_mlist(fileobj, endianness):
             mlist_index += n_rows
             if mlist_block_no <= 2:  # should block_no in (1, 2) be an error?
                 break
-    return np.row_stack(mlists)
+    return np.vstack(mlists)
 
 
 def get_frame_order(mlist):

From 94e3e83752c58b1ae20a50e97c5ea9eed21abacf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?=
Date: Wed, 26 Jun 2024 20:17:19 -0400
Subject: [PATCH 07/34] RF: Fix `ast` library type and attribute deprecation warnings

Fix `ast` library type and attribute deprecation warnings.
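As an illustrative sketch of the old and new spellings (not part of the diff
below):

```python
import ast

# 'x = 42' parses to an Assign node whose right-hand side is a constant
node = ast.parse('x = 42').body[0].value

# Deprecated (removal planned for Python 3.14):
#     isinstance(node, ast.Num)  -> payload in node.n
#     isinstance(node, ast.Str)  -> payload in node.s

# ast.Constant replaces both, with the payload in node.value:
if isinstance(node, ast.Constant):
    print(node.value)  # 42
```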
Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:177: DeprecationWarning: ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead if isinstance(value, ast.Num): /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:179: DeprecationWarning: ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead if isinstance(value, ast.Str): /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:180: DeprecationWarning: Attribute s is deprecated and will be removed in Python 3.14; use value instead return value.s /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:94: DeprecationWarning: Attribute n is deprecated and will be removed in Python 3.14; use value instead index = target.slice.n /home/runner/work/nibabel/nibabel/nibabel/nicom/ascconv.py:182: DeprecationWarning: Attribute n is deprecated and will be removed in Python 3.14; use value instead return -value.operand.n ``` raised for example in: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:207 Documentation: https://docs.python.org/3/library/ast.html --- nibabel/nicom/ascconv.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/nibabel/nicom/ascconv.py b/nibabel/nicom/ascconv.py index 0966de2a9..8ec72fb3e 100644 --- a/nibabel/nicom/ascconv.py +++ b/nibabel/nicom/ascconv.py @@ -91,7 +91,7 @@ def assign2atoms(assign_ast, default_class=int): prev_target_type = OrderedDict elif isinstance(target, ast.Subscript): if isinstance(target.slice, ast.Constant): # PY39 - index = target.slice.n + index = target.slice.value else: # PY38 index = target.slice.value.n atoms.append(Atom(target, prev_target_type, index)) @@ -174,12 +174,10 @@ def obj_from_atoms(atoms, namespace): def _get_value(assign): value = assign.value - if isinstance(value, ast.Num): - return value.n - if isinstance(value, ast.Str): - return value.s + if isinstance(value, ast.Constant): + return value.value if isinstance(value, ast.UnaryOp) and isinstance(value.op, ast.USub): - return -value.operand.n + return -value.operand.value raise AscconvParseError(f'Unexpected RHS of assignment: {value}') From d1235a6ef5ea31c5be784a6b5448b9e0d598014f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 20:04:02 -0400 Subject: [PATCH 08/34] RF: Remove unnecessary call to `asbytes` for `b`-prepended strings Remove unnecessary call to `asbytes` for `b`-prepended strings: strings prepended with `b` are already treated as bytes literals: - `TckFile.MAGIC_NUMBER` is b'mrtrix tracks' - `TrkFile.MAGIC_NUMBER` is b'TRACK' Documentation: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/streamlines/tests/test_streamlines.py:9: DeprecationWarning: `np.compat`, which was used during the Python 2 to 3 transition, is deprecated since 1.26.0, and will be removed from numpy.compat.py3k import asbytes ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:178 --- nibabel/streamlines/tests/test_streamlines.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/streamlines/tests/test_streamlines.py b/nibabel/streamlines/tests/test_streamlines.py index 53a43c393..857e64fec 100644 --- a/nibabel/streamlines/tests/test_streamlines.py +++ b/nibabel/streamlines/tests/test_streamlines.py @@ -6,7 +6,6 @@ import numpy as np import pytest -from numpy.compat.py3k import 
asbytes import nibabel as nib from nibabel.testing import clear_and_catch_warnings, data_path, error_warnings @@ -95,7 +94,7 @@ def test_is_supported_detect_format(tmp_path): # Valid file without extension for tfile_cls in FORMATS.values(): f = BytesIO() - f.write(asbytes(tfile_cls.MAGIC_NUMBER)) + f.write(tfile_cls.MAGIC_NUMBER) f.seek(0, os.SEEK_SET) assert nib.streamlines.is_supported(f) assert nib.streamlines.detect_format(f) is tfile_cls @@ -104,7 +103,7 @@ def test_is_supported_detect_format(tmp_path): for tfile_cls in FORMATS.values(): fpath = tmp_path / 'test.txt' with open(fpath, 'w+b') as f: - f.write(asbytes(tfile_cls.MAGIC_NUMBER)) + f.write(tfile_cls.MAGIC_NUMBER) f.seek(0, os.SEEK_SET) assert nib.streamlines.is_supported(f) assert nib.streamlines.detect_format(f) is tfile_cls From 447ef576316d814138f7af33cee97dc6e23e5337 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Wed, 26 Jun 2024 20:27:22 -0400 Subject: [PATCH 09/34] RF: Fix for `abc` library `Traversable` class module Fix for `abc` library `Traversable` class module: import from `importlib.resources.abc`. Fixes: ``` /home/runner/work/nibabel/nibabel/nibabel/testing/__init__.py:30: DeprecationWarning: 'importlib.abc.Traversable' is deprecated and slated for removal in Python 3.14 from importlib.abc import Traversable ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9637811213/job/26577586721#step:7:157 Documentation: https://docs.python.org/3/library/importlib.resources.abc.html#importlib.resources.abc.Traversable --- nibabel/testing/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index d335c9a8c..0ba82d6cb 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -27,8 +27,8 @@ from .np_features import memmap_after_ufunc try: - from importlib.abc import Traversable from importlib.resources import as_file, files + from importlib.resources.abc import Traversable except ImportError: # PY38 from importlib_resources import as_file, files from importlib_resources.abc import Traversable From 7caef99068f88bafbf25f61b0e75b10770e28df4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 27 Jun 2024 16:27:58 +0900 Subject: [PATCH 10/34] MNT: Update importlib_resources requirement to match 3.12 usage --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bf7b09903..4df5886d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ requires-python = ">=3.8" dependencies = [ "numpy >=1.20", "packaging >=17", - "importlib_resources >=1.3; python_version < '3.9'", + "importlib_resources >=5.12; python_version < '3.12'", ] classifiers = [ "Development Status :: 5 - Production/Stable", From 3a7cebaca9729b0b03c8dd4ba01ff1a62d39cb26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jon=20Haitz=20Legarreta=20Gorro=C3=B1o?= Date: Thu, 27 Jun 2024 18:35:51 -0400 Subject: [PATCH 11/34] RF: Use `numpy.lib.scimath` to demonstrate warning context manager Use `numpy.lib.scimath` instead of deprecated `numpy.core.fromnumeric` in `clear_and_catch_warnings` context manager doctests. Take advantage of the commit to add an actual case that would raise a warning. Fixes: ``` nibabel/testing/__init__.py::nibabel.testing.clear_and_catch_warnings :1: DeprecationWarning: numpy.core is deprecated and has been renamed to numpy._core. 
The numpy._core namespace contains private NumPy internals and its use is discouraged, as NumPy internals can change without warning in any release. In practice, most real-world usage of numpy.core is to access functionality in the public NumPy API. If that is the case, use the public NumPy API. If not, you are using NumPy internals. If you would still like to access an internal attribute, use numpy._core.fromnumeric. ``` raised for example at: https://github.com/nipy/nibabel/actions/runs/9692730430/job/26746686623#step:7:195 --- nibabel/testing/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nibabel/testing/__init__.py b/nibabel/testing/__init__.py index 0ba82d6cb..992ef2ead 100644 --- a/nibabel/testing/__init__.py +++ b/nibabel/testing/__init__.py @@ -150,9 +150,10 @@ class clear_and_catch_warnings(warnings.catch_warnings): Examples -------- >>> import warnings - >>> with clear_and_catch_warnings(modules=[np.core.fromnumeric]): + >>> with clear_and_catch_warnings(modules=[np.lib.scimath]): ... warnings.simplefilter('always') - ... # do something that raises a warning in np.core.fromnumeric + ... # do something that raises a warning in np.lib.scimath + ... _ = np.arccos(90) """ class_modules = () From 170b20c53a3c0c0bfae29ebd8c14638cfb9d192e Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Fri, 5 Jul 2024 21:10:41 -0400 Subject: [PATCH 12/34] FIX: Use legacy numpy printing during doc builds/tests --- doc/source/conf.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/source/conf.py b/doc/source/conf.py index f4ab16d2d..4255ff184 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -28,6 +28,10 @@ import tomli as tomllib # Check for external Sphinx extensions we depend on +try: + import numpy as np +except ImportError: + raise RuntimeError('Need to install "numpy" package for doc build') try: import numpydoc except ImportError: @@ -45,6 +49,11 @@ 'Need nibabel on Python PATH; consider "make htmldoc" from nibabel root directory' ) +from packaging.version import Version + +if Version(np.__version__) >= Version('1.22'): + np.set_printoptions(legacy='1.21') + # -- General configuration ---------------------------------------------------- # We load the nibabel release info into a dict by explicit execution From 65c3ca28a21b5aa15e0fac06e6b5a3faa0096857 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Sun, 7 Jul 2024 07:26:30 -0400 Subject: [PATCH 13/34] MNT: Update coverage config Remove ignored entry, add excludes for patterns that are unreachable or reasonable not to test. 
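For a concrete picture of what the new patterns match, consider this small
hypothetical module; under the `exclude_also` entries in the config below,
each marked line (and, for block openers and decorators, the block it
introduces) is excluded from coverage rather than reported as a miss:

```python
import abc
import typing as ty

if ty.TYPE_CHECKING:  # excluded: only evaluated by static type checkers
    from typing import Any

class Reader(abc.ABC):
    @abc.abstractmethod  # excluded: abstract stubs are never executed directly
    def read(self) -> bytes:
        raise NotImplementedError  # excluded: unreachable placeholder

    def __repr__(self) -> str:  # excluded: repr bodies are reasonable not to test
        return f'<{type(self).__name__}>'
```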
--- .coveragerc | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index bcf28e09c..8e218461f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,9 +1,19 @@ [run] branch = True source = nibabel -include = */nibabel/* omit = */externals/* */benchmarks/* */tests/* nibabel/_version.py + +[report] +exclude_also = + def __repr__ + if (ty\.|typing\.)?TYPE_CHECKING: + class .*\((ty\.|typing\.)Protocol\): + @(ty\.|typing\.)overload + if 0: + if __name__ == .__main__.: + @(abc\.)?abstractmethod + raise NotImplementedError From 2306616a1fb0bf1752b8cd3ad12b19156e64c295 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:29:52 -0400 Subject: [PATCH 14/34] MNT: Remove "pragma: no cover" from lines ignored by config --- nibabel/_compression.py | 2 +- nibabel/arrayproxy.py | 12 +++++------- nibabel/dataobj_images.py | 2 +- nibabel/deprecated.py | 2 +- nibabel/deprecator.py | 2 +- nibabel/filebasedimages.py | 14 +++++++------- nibabel/filename_parser.py | 2 +- nibabel/loadsave.py | 2 +- nibabel/onetime.py | 6 ++---- nibabel/openers.py | 7 +++---- nibabel/pointset.py | 8 +++----- nibabel/spatialimages.py | 15 ++++++--------- nibabel/volumeutils.py | 6 +++--- nibabel/xmlutils.py | 8 ++++---- 14 files changed, 39 insertions(+), 49 deletions(-) diff --git a/nibabel/_compression.py b/nibabel/_compression.py index eeb66f36b..f697fa54c 100644 --- a/nibabel/_compression.py +++ b/nibabel/_compression.py @@ -17,7 +17,7 @@ from .optpkg import optional_package -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import indexed_gzip # type: ignore[import] import pyzstd diff --git a/nibabel/arrayproxy.py b/nibabel/arrayproxy.py index 4bf5bd470..ed2310519 100644 --- a/nibabel/arrayproxy.py +++ b/nibabel/arrayproxy.py @@ -57,7 +57,7 @@ KEEP_FILE_OPEN_DEFAULT = False -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt from typing_extensions import Self # PY310 @@ -75,19 +75,17 @@ class ArrayLike(ty.Protocol): shape: tuple[int, ...] @property - def ndim(self) -> int: ... # pragma: no cover + def ndim(self) -> int: ... # If no dtype is passed, any dtype might be returned, depending on the array-like @ty.overload - def __array__( - self, dtype: None = ..., / - ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover + def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # Any dtype might be passed, and *that* dtype must be returned @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... - def __getitem__(self, key, /) -> npt.NDArray: ... # pragma: no cover + def __getitem__(self, key, /) -> npt.NDArray: ... 
class ArrayProxy(ArrayLike): diff --git a/nibabel/dataobj_images.py b/nibabel/dataobj_images.py index e84ac8567..685059901 100644 --- a/nibabel/dataobj_images.py +++ b/nibabel/dataobj_images.py @@ -19,7 +19,7 @@ from .filebasedimages import FileBasedHeader, FileBasedImage from .fileholders import FileMap -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt from .filename_parser import FileSpec diff --git a/nibabel/deprecated.py b/nibabel/deprecated.py index b8c378cee..15d3e5326 100644 --- a/nibabel/deprecated.py +++ b/nibabel/deprecated.py @@ -8,7 +8,7 @@ from .deprecator import Deprecator from .pkg_info import cmp_pkg_version -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: P = ty.ParamSpec('P') diff --git a/nibabel/deprecator.py b/nibabel/deprecator.py index 010b1be23..83118dd53 100644 --- a/nibabel/deprecator.py +++ b/nibabel/deprecator.py @@ -9,7 +9,7 @@ import warnings from textwrap import dedent -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: T = ty.TypeVar('T') P = ty.ParamSpec('P') diff --git a/nibabel/filebasedimages.py b/nibabel/filebasedimages.py index 4e0d06b64..c12644a2b 100644 --- a/nibabel/filebasedimages.py +++ b/nibabel/filebasedimages.py @@ -20,7 +20,7 @@ from .filename_parser import TypesFilenamesError, _stringify_path, splitext_addext, types_filenames from .openers import ImageOpener -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from .filename_parser import ExtensionSpec, FileSpec FileSniff = ty.Tuple[bytes, str] @@ -54,13 +54,13 @@ def from_header(klass: type[HdrT], header: FileBasedHeader | ty.Mapping | None = @classmethod def from_fileobj(klass: type[HdrT], fileobj: io.IOBase) -> HdrT: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def write_to(self, fileobj: io.IOBase) -> None: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def __eq__(self, other: object) -> bool: - raise NotImplementedError # pragma: no cover + raise NotImplementedError def __ne__(self, other: object) -> bool: return not self == other @@ -251,7 +251,7 @@ def from_filename(klass: type[ImgT], filename: FileSpec) -> ImgT: @classmethod def from_file_map(klass: type[ImgT], file_map: FileMap) -> ImgT: - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def filespec_to_file_map(klass, filespec: FileSpec) -> FileMap: @@ -308,7 +308,7 @@ def to_filename(self, filename: FileSpec, **kwargs) -> None: self.to_file_map(**kwargs) def to_file_map(self, file_map: FileMap | None = None, **kwargs) -> None: - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def make_file_map(klass, mapping: ty.Mapping[str, str | io.IOBase] | None = None) -> FileMap: @@ -373,7 +373,7 @@ def from_image(klass: type[ImgT], img: FileBasedImage) -> ImgT: img : ``FileBasedImage`` instance Image, of our own class """ - raise NotImplementedError # pragma: no cover + raise NotImplementedError @classmethod def _sniff_meta_for( diff --git a/nibabel/filename_parser.py b/nibabel/filename_parser.py index bdbca6a38..d2c23ae6e 100644 --- a/nibabel/filename_parser.py +++ b/nibabel/filename_parser.py @@ -14,7 +14,7 @@ import pathlib import typing as ty -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: FileSpec = str | os.PathLike[str] ExtensionSpec = tuple[str, str | None] diff --git a/nibabel/loadsave.py b/nibabel/loadsave.py index 159d9bae8..e39aeceba 100644 --- a/nibabel/loadsave.py +++ b/nibabel/loadsave.py @@ -26,7 +26,7 @@ 
_compressed_suffixes = ('.gz', '.bz2', '.zst') -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from .filebasedimages import FileBasedImage from .filename_parser import FileSpec diff --git a/nibabel/onetime.py b/nibabel/onetime.py index fa1b2f992..5018ba90c 100644 --- a/nibabel/onetime.py +++ b/nibabel/onetime.py @@ -137,12 +137,10 @@ def __init__(self, func: ty.Callable[[InstanceT], T]) -> None: @ty.overload def __get__( self, obj: None, objtype: type[InstanceT] | None = None - ) -> ty.Callable[[InstanceT], T]: ... # pragma: no cover + ) -> ty.Callable[[InstanceT], T]: ... @ty.overload - def __get__( - self, obj: InstanceT, objtype: type[InstanceT] | None = None - ) -> T: ... # pragma: no cover + def __get__(self, obj: InstanceT, objtype: type[InstanceT] | None = None) -> T: ... def __get__( self, obj: InstanceT | None, objtype: type[InstanceT] | None = None diff --git a/nibabel/openers.py b/nibabel/openers.py index f84ccb706..c3fa9a478 100644 --- a/nibabel/openers.py +++ b/nibabel/openers.py @@ -18,7 +18,7 @@ from ._compression import HAVE_INDEXED_GZIP, IndexedGzipFile, pyzstd -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from types import TracebackType from _typeshed import WriteableBuffer @@ -36,9 +36,8 @@ @ty.runtime_checkable class Fileish(ty.Protocol): - def read(self, size: int = -1, /) -> bytes: ... # pragma: no cover - - def write(self, b: bytes, /) -> int | None: ... # pragma: no cover + def read(self, size: int = -1, /) -> bytes: ... + def write(self, b: bytes, /) -> int | None: ... class DeterministicGzipFile(gzip.GzipFile): diff --git a/nibabel/pointset.py b/nibabel/pointset.py index e39a4d418..70a802480 100644 --- a/nibabel/pointset.py +++ b/nibabel/pointset.py @@ -30,7 +30,7 @@ from nibabel.fileslice import strided_scalar from nibabel.spatialimages import SpatialImage -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: from typing_extensions import Self _DType = ty.TypeVar('_DType', bound=np.dtype[ty.Any]) @@ -41,12 +41,10 @@ class CoordinateArray(ty.Protocol): shape: tuple[int, int] @ty.overload - def __array__( - self, dtype: None = ..., / - ) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... # pragma: no cover + def __array__(self, dtype: None = ..., /) -> np.ndarray[ty.Any, np.dtype[ty.Any]]: ... @ty.overload - def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... # pragma: no cover + def __array__(self, dtype: _DType, /) -> np.ndarray[ty.Any, _DType]: ... @dataclass diff --git a/nibabel/spatialimages.py b/nibabel/spatialimages.py index 185694cd7..96f8115a2 100644 --- a/nibabel/spatialimages.py +++ b/nibabel/spatialimages.py @@ -154,7 +154,7 @@ except ImportError: # PY38 from functools import lru_cache as cache -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt SpatialImgT = ty.TypeVar('SpatialImgT', bound='SpatialImage') @@ -162,18 +162,15 @@ class HasDtype(ty.Protocol): - def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - - def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... + def set_data_dtype(self, dtype: npt.DTypeLike) -> None: ... @ty.runtime_checkable class SpatialProtocol(ty.Protocol): - def get_data_dtype(self) -> np.dtype: ... # pragma: no cover - - def get_data_shape(self) -> ty.Tuple[int, ...]: ... # pragma: no cover - - def get_zooms(self) -> ty.Tuple[float, ...]: ... # pragma: no cover + def get_data_dtype(self) -> np.dtype: ... 
+ def get_data_shape(self) -> ty.Tuple[int, ...]: ... + def get_zooms(self) -> ty.Tuple[float, ...]: ... class HeaderDataError(Exception): diff --git a/nibabel/volumeutils.py b/nibabel/volumeutils.py index 379d654a3..29b954dbb 100644 --- a/nibabel/volumeutils.py +++ b/nibabel/volumeutils.py @@ -24,7 +24,7 @@ from .casting import OK_FLOATS, shared_range from .externals.oset import OrderedSet -if ty.TYPE_CHECKING: # pragma: no cover +if ty.TYPE_CHECKING: import numpy.typing as npt Scalar = np.number | float @@ -1191,13 +1191,13 @@ def _ftype4scaled_finite( @ty.overload def finite_range( arr: npt.ArrayLike, check_nan: ty.Literal[False] = False -) -> tuple[Scalar, Scalar]: ... # pragma: no cover +) -> tuple[Scalar, Scalar]: ... @ty.overload def finite_range( arr: npt.ArrayLike, check_nan: ty.Literal[True] -) -> tuple[Scalar, Scalar, bool]: ... # pragma: no cover +) -> tuple[Scalar, Scalar, bool]: ... def finite_range( diff --git a/nibabel/xmlutils.py b/nibabel/xmlutils.py index 5049a7641..5d079e117 100644 --- a/nibabel/xmlutils.py +++ b/nibabel/xmlutils.py @@ -20,7 +20,7 @@ class XmlSerializable: def _to_xml_element(self) -> Element: """Output should be a xml.etree.ElementTree.Element""" - raise NotImplementedError # pragma: no cover + raise NotImplementedError def to_xml(self, enc='utf-8', **kwargs) -> bytes: r"""Generate an XML bytestring with a given encoding. @@ -109,10 +109,10 @@ def parse(self, string=None, fname=None, fptr=None): parser.ParseFile(fptr) def StartElementHandler(self, name, attrs): - raise NotImplementedError # pragma: no cover + raise NotImplementedError def EndElementHandler(self, name): - raise NotImplementedError # pragma: no cover + raise NotImplementedError def CharacterDataHandler(self, data): - raise NotImplementedError # pragma: no cover + raise NotImplementedError From 043c431ef46c5f6cd301a087bda2173a7972ab75 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:40:54 -0400 Subject: [PATCH 15/34] MNT: Require coverage>=7.2 for exclude_also Remove outdated pytest version cap while we're here. 
--- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 4df5886d7..ff5168f9c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,11 +67,12 @@ doc = [ "tomli; python_version < '3.11'", ] test = [ - "pytest<8.1", # relax once pytest-doctestplus releases 1.2.0 + "pytest", "pytest-doctestplus", "pytest-cov", "pytest-httpserver", "pytest-xdist", + "coverage>=7.2", ] # Remaining: Simpler to centralize in tox dev = ["tox"] From ee1c9c43900dc42d511d08a4302d4486c9258250 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 8 Jul 2024 11:42:23 -0400 Subject: [PATCH 16/34] MNT: Stop excluding tests from coverage --- .coveragerc | 1 - 1 file changed, 1 deletion(-) diff --git a/.coveragerc b/.coveragerc index 8e218461f..f65ab1441 100644 --- a/.coveragerc +++ b/.coveragerc @@ -4,7 +4,6 @@ source = nibabel omit = */externals/* */benchmarks/* - */tests/* nibabel/_version.py [report] From 07db76b966020b26b636e5fd94b79b8b04b440ab Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:16:03 -0400 Subject: [PATCH 17/34] CI: Add 3.13-nogil build --- .github/workflows/test.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2b3d9f249..2b453e890 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -125,9 +125,9 @@ jobs: - os: ubuntu-latest python-version: 3.8 dependencies: 'min' - # NumPy 2.0 + # NoGIL - os: ubuntu-latest - python-version: '3.12' + python-version: '3.13-dev' dependencies: 'dev' exclude: # x86 for Windows + Python<3.12 @@ -168,11 +168,18 @@ jobs: submodules: recursive fetch-depth: 0 - name: Set up Python ${{ matrix.python-version }} + if: "!endsWith(matrix.python-version, '-dev')" uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} architecture: ${{ matrix.architecture }} allow-prereleases: true + - name: Set up Python ${{ matrix.python-version }} + if: endsWith(matrix.python-version, '-dev') + uses: deadsnakes/action@v3.1.0 + with: + python-version: ${{ matrix.python-version }} + nogil: true - name: Display Python version run: python -c "import sys; print(sys.version)" - name: Install tox From 6efd41a7279de2488aa857518e3ab30e8a8ff6d4 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:17:24 -0400 Subject: [PATCH 18/34] TOX: Add a Python 3.13 environment --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index b9ac9557c..02de7a7e0 100644 --- a/tox.ini +++ b/tox.ini @@ -16,7 +16,7 @@ envlist = # x64-only range py312-{full,pre}-x64 # Special environment for numpy 2.0-dev testing - py312-dev-x64 + py313-dev-x64 install doctest style @@ -31,6 +31,7 @@ python = 3.10: py310 3.11: py311 3.12: py312 + 3.13: py313 [gh-actions:env] DEPENDS = From cb73d1c6dfcd0e8ca93011125cf507c85987f1ad Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 21:26:42 -0400 Subject: [PATCH 19/34] TOX: Drop h5py and indexed_gzip dependencies for dev Allow pillow and matplotlib to be built from sdist in dev environments. 
--- tox.ini | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 02de7a7e0..5b4dcc017 100644 --- a/tox.ini +++ b/tox.ini @@ -51,7 +51,8 @@ description = Pytest with coverage labels = test install_command = python -I -m pip install -v \ - --only-binary numpy,scipy,h5py,pillow,matplotlib \ + dev: --only-binary numpy,scipy,h5py \ + !dev: --only-binary numpy,scipy,h5py,pillow,matplotlib \ pre,dev: --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \ {opts} {packages} pip_pre = @@ -90,15 +91,15 @@ deps = # Numpy 2.0 is a major breaking release; we cannot put much effort into # supporting until it's at least RC stable pre: numpy <2.0.dev0 - dev: numpy >=2.0.dev0 + dev: numpy >=2.1.dev0 # Scipy stopped producing win32 wheels at py310 py3{8,9}-full-x86,x64,arm64: scipy >=1.6 # Matplotlib depends on scipy, so cannot be built for py310 on x86 py3{8,9}-full-x86,x64,arm64: matplotlib >=3.4 # h5py stopped producing win32 wheels at py39 - py38-full-x86,x64,arm64: h5py >=2.10 + py38-full-x86,{full,pre}-{x64,arm64}: h5py >=2.10 full,pre,dev: pillow >=8.1 - full,pre,dev: indexed_gzip >=1.4 + full,pre: indexed_gzip >=1.4 full,pre,dev: pyzstd >=0.14.3 full,pre: pydicom >=2.1 dev: pydicom @ git+https://github.com/pydicom/pydicom.git@main From a14ead51ccb8ff3da9603e5ca0002857de18ae6d Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 22:01:49 -0400 Subject: [PATCH 20/34] CI: Run tox in debug to see what files are downloaded --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2b453e890..05718dc1f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -189,7 +189,7 @@ jobs: - name: Show tox config run: tox c - name: Run tox - run: tox -v --exit-and-dump-after 1200 + run: tox -vv --exit-and-dump-after 1200 - uses: codecov/codecov-action@v4 if: ${{ always() }} with: From 880e13e3dcd30b077762e1c8b46ce76496bd28b8 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Wed, 10 Jul 2024 22:08:41 -0400 Subject: [PATCH 21/34] TOX: Add PYTHON_GIL=0 to py313 environments --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 5b4dcc017..5df35c8d3 100644 --- a/tox.ini +++ b/tox.ini @@ -71,6 +71,8 @@ pass_env = NO_COLOR CLICOLOR CLICOLOR_FORCE +set_env = + py313: PYTHON_GIL=0 extras = test deps = # General minimum dependencies: pin based on API usage From e0e50df3e8fb7a48fba207098ec446abf9d2efed Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 11:34:36 -0400 Subject: [PATCH 22/34] RF: Replace OneTimeProperty/auto_attr with cached_property --- nibabel/onetime.py | 114 +++++++--------------------------- nibabel/tests/test_onetime.py | 40 ++++++++---- 2 files changed, 51 insertions(+), 103 deletions(-) diff --git a/nibabel/onetime.py b/nibabel/onetime.py index 5018ba90c..f6d3633af 100644 --- a/nibabel/onetime.py +++ b/nibabel/onetime.py @@ -1,9 +1,12 @@ """Descriptor support for NIPY -Utilities to support special Python descriptors [1,2], in particular the use of -a useful pattern for properties we call 'one time properties'. These are -object attributes which are declared as properties, but become regular -attributes once they've been read the first time. 
They can thus be evaluated +Utilities to support special Python descriptors [1,2], in particular +:func:`~functools.cached_property`, which has been available in the Python +standard library since Python 3.8. We currently maintain aliases from +earlier names for this descriptor, specifically `OneTimeProperty` and `auto_attr`. + +:func:`~functools.cached_property` creates properties that are computed once +and then stored as regular attributes. They can thus be evaluated later in the object's life cycle, but once evaluated they become normal, static attributes with no function call overhead on access or any other constraints. @@ -21,10 +24,7 @@ from __future__ import annotations -import typing as ty - -InstanceT = ty.TypeVar('InstanceT') -T = ty.TypeVar('T') +from functools import cached_property from nibabel.deprecated import deprecate_with_version @@ -34,22 +34,22 @@ class ResetMixin: - """A Mixin class to add a .reset() method to users of OneTimeProperty. + """A Mixin class to add a .reset() method to users of cached_property. - By default, auto attributes once computed, become static. If they happen + By default, cached properties, once computed, become static. If they happen to depend on other parts of an object and those parts change, their values may now be invalid. This class offers a .reset() method that users can call *explicitly* when they know the state of their objects may have changed and they want to ensure that *all* their special attributes should be invalidated. Once - reset() is called, all their auto attributes are reset to their - OneTimeProperty descriptors, and their accessor functions will be triggered - again. + reset() is called, all their cached properties are reset to their + :func:`~functools.cached_property` descriptors, + and their accessor functions will be triggered again. .. warning:: - If a class has a set of attributes that are OneTimeProperty, but that + If a class has a set of attributes that are cached_property, but that can be initialized from any one of them, do NOT use this mixin! For instance, UniformTimeSeries can be initialized with only sampling_rate and t0, sampling_interval and time are auto-computed. But if you were @@ -68,15 +68,15 @@ class ResetMixin: ... def __init__(self,x=1.0): ... self.x = x ... - ... @auto_attr + ... @cached_property ... def y(self): ... print('*** y computation executed ***') ... return self.x / 2.0 - ... >>> a = A(10) About to access y twice, the second time no computation is done: + >>> a.y *** y computation executed *** 5.0 @@ -84,17 +84,21 @@ class ResetMixin: 5.0 Changing x + >>> a.x = 20 a.y doesn't change to 10, since it is a static attribute: + >>> a.y 5.0 We now reset a, and this will then force all auto attributes to recompute the next time we access them: + >>> a.reset() About to access y twice again after reset(): + >>> a.y *** y computation executed *** 10.0 @@ -103,88 +107,18 @@ class ResetMixin: """ def reset(self) -> None: - """Reset all OneTimeProperty attributes that may have fired already.""" + """Reset all cached_property attributes that may have fired already.""" # To reset them, we simply remove them from the instance dict. At that # point, it's as if they had never been computed. On the next access, # the accessor function from the parent class will be called, simply # because that's how the python descriptor protocol works. 
for mname, mval in self.__class__.__dict__.items(): - if mname in self.__dict__ and isinstance(mval, OneTimeProperty): + if mname in self.__dict__ and isinstance(mval, cached_property): delattr(self, mname) -class OneTimeProperty(ty.Generic[T]): - """A descriptor to make special properties that become normal attributes. - - This is meant to be used mostly by the auto_attr decorator in this module. - """ - - def __init__(self, func: ty.Callable[[InstanceT], T]) -> None: - """Create a OneTimeProperty instance. - - Parameters - ---------- - func : method - - The method that will be called the first time to compute a value. - Afterwards, the method's name will be a standard attribute holding - the value of this computation. - """ - self.getter = func - self.name = func.__name__ - self.__doc__ = func.__doc__ - - @ty.overload - def __get__( - self, obj: None, objtype: type[InstanceT] | None = None - ) -> ty.Callable[[InstanceT], T]: ... - - @ty.overload - def __get__(self, obj: InstanceT, objtype: type[InstanceT] | None = None) -> T: ... - - def __get__( - self, obj: InstanceT | None, objtype: type[InstanceT] | None = None - ) -> T | ty.Callable[[InstanceT], T]: - """This will be called on attribute access on the class or instance.""" - if obj is None: - # Being called on the class, return the original function. This - # way, introspection works on the class. - return self.getter - - # Errors in the following line are errors in setting a OneTimeProperty - val = self.getter(obj) - - obj.__dict__[self.name] = val - return val - - -def auto_attr(func: ty.Callable[[InstanceT], T]) -> OneTimeProperty[T]: - """Decorator to create OneTimeProperty attributes. - - Parameters - ---------- - func : method - The method that will be called the first time to compute a value. - Afterwards, the method's name will be a standard attribute holding the - value of this computation. - - Examples - -------- - >>> class MagicProp: - ... @auto_attr - ... def a(self): - ... return 99 - ... - >>> x = MagicProp() - >>> 'a' in x.__dict__ - False - >>> x.a - 99 - >>> 'a' in x.__dict__ - True - """ - return OneTimeProperty(func) - +OneTimeProperty = cached_property +auto_attr = cached_property # ----------------------------------------------------------------------------- # Deprecated API diff --git a/nibabel/tests/test_onetime.py b/nibabel/tests/test_onetime.py index 4d7294927..d6b457953 100644 --- a/nibabel/tests/test_onetime.py +++ b/nibabel/tests/test_onetime.py @@ -1,7 +1,22 @@ -from nibabel.onetime import auto_attr, setattr_on_read +from functools import cached_property + +from nibabel.onetime import ResetMixin, setattr_on_read from nibabel.testing import deprecated_to, expires +class A(ResetMixin): + @cached_property + def y(self): + return self.x / 2.0 + + @cached_property + def z(self): + return self.x / 3.0 + + def __init__(self, x=1.0): + self.x = x + + @expires('5.0.0') def test_setattr_on_read(): with deprecated_to('5.0.0'): @@ -19,15 +34,14 @@ def a(self): assert x.a is obj -def test_auto_attr(): - class MagicProp: - @auto_attr - def a(self): - return object() - - x = MagicProp() - assert 'a' not in x.__dict__ - obj = x.a - assert 'a' in x.__dict__ - # Each call to object() produces a unique object. Verify we get the same one every time. 
- assert x.a is obj +def test_ResetMixin(): + a = A(10) + assert 'y' not in a.__dict__ + assert a.y == 5 + assert 'y' in a.__dict__ + a.x = 20 + assert a.y == 5 + # Call reset and no error should be raised even though z was never accessed + a.reset() + assert 'y' not in a.__dict__ + assert a.y == 10 From c7c98f7dae9733e38892b70bfcd190610e21c5d0 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 11:42:00 -0400 Subject: [PATCH 23/34] DOC: Use packaging.version.Version over LooseVersion --- doc/tools/build_modref_templates.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 0e82cf6bf..76cf9cdf3 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -9,7 +9,7 @@ import sys # version comparison -from distutils.version import LooseVersion as V +from packaging.version import Version as V from os.path import join as pjoin # local imports @@ -73,6 +73,8 @@ def abort(error): if re.match('^_version_(major|minor|micro|extra)', v) ] ) + + source_version = V(source_version) print('***', source_version) if source_version != installed_version: From b6eccc250cc56ddc1cb8a81b240f0bc0e3325436 Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Thu, 25 Jul 2024 12:04:29 -0400 Subject: [PATCH 24/34] RF: nibabel.onetime.auto_attr -> functools.cached_property --- nibabel/nicom/dicomwrappers.py | 46 +++++++++++++++++----------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index a5ea550d8..2270ed3f0 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -14,12 +14,12 @@ import operator import warnings +from functools import cached_property import numpy as np from nibabel.optpkg import optional_package -from ..onetime import auto_attr as one_time from ..openers import ImageOpener from . import csareader as csar from .dwiparams import B2q, nearest_pos_semi_def, q2bg @@ -140,7 +140,7 @@ def __init__(self, dcm_data): """ self.dcm_data = dcm_data - @one_time + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()``""" shape = (self.get('Rows'), self.get('Columns')) @@ -148,7 +148,7 @@ def image_shape(self): return None return shape - @one_time + @cached_property def image_orient_patient(self): """Note that this is _not_ LR flipped""" iop = self.get('ImageOrientationPatient') @@ -158,7 +158,7 @@ def image_orient_patient(self): iop = np.array(list(map(float, iop))) return np.array(iop).reshape(2, 3).T - @one_time + @cached_property def slice_normal(self): iop = self.image_orient_patient if iop is None: @@ -166,7 +166,7 @@ def slice_normal(self): # iop[:, 0] is column index cosine, iop[:, 1] is row index cosine return np.cross(iop[:, 1], iop[:, 0]) - @one_time + @cached_property def rotation_matrix(self): """Return rotation matrix between array indices and mm @@ -193,7 +193,7 @@ def rotation_matrix(self): raise WrapperPrecisionError('Rotation matrix not nearly orthogonal') return R - @one_time + @cached_property def voxel_sizes(self): """voxel sizes for array as returned by ``get_data()``""" # pix space gives (row_spacing, column_spacing). 
That is, the @@ -212,7 +212,7 @@ def voxel_sizes(self): pix_space = list(map(float, pix_space)) return tuple(pix_space + [zs]) - @one_time + @cached_property def image_position(self): """Return position of first voxel in data block @@ -231,7 +231,7 @@ def image_position(self): # Values are python Decimals in pydicom 0.9.7 return np.array(list(map(float, ipp))) - @one_time + @cached_property def slice_indicator(self): """A number that is higher for higher slices in Z @@ -246,12 +246,12 @@ def slice_indicator(self): return None return np.inner(ipp, s_norm) - @one_time + @cached_property def instance_number(self): """Just because we use this a lot for sorting""" return self.get('InstanceNumber') - @one_time + @cached_property def series_signature(self): """Signature for matching slices into series @@ -390,7 +390,7 @@ def _apply_scale_offset(self, data, scale, offset): return data + offset return data - @one_time + @cached_property def b_value(self): """Return b value for diffusion or None if not available""" q_vec = self.q_vector @@ -398,7 +398,7 @@ def b_value(self): return None return q2bg(q_vec)[0] - @one_time + @cached_property def b_vector(self): """Return b vector for diffusion or None if not available""" q_vec = self.q_vector @@ -469,7 +469,7 @@ def __init__(self, dcm_data): raise WrapperError('SharedFunctionalGroupsSequence is empty.') self._shape = None - @one_time + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()`` @@ -573,7 +573,7 @@ def image_shape(self): ) return tuple(shape) - @one_time + @cached_property def image_orient_patient(self): """ Note that this is _not_ LR flipped @@ -590,7 +590,7 @@ def image_orient_patient(self): iop = np.array(list(map(float, iop))) return np.array(iop).reshape(2, 3).T - @one_time + @cached_property def voxel_sizes(self): """Get i, j, k voxel sizes""" try: @@ -610,7 +610,7 @@ def voxel_sizes(self): # Ensure values are float rather than Decimal return tuple(map(float, list(pix_space) + [zs])) - @one_time + @cached_property def image_position(self): try: ipp = self.shared.PlanePositionSequence[0].ImagePositionPatient @@ -623,7 +623,7 @@ def image_position(self): return None return np.array(list(map(float, ipp))) - @one_time + @cached_property def series_signature(self): signature = {} eq = operator.eq @@ -696,7 +696,7 @@ def __init__(self, dcm_data, csa_header=None): csa_header = {} self.csa_header = csa_header - @one_time + @cached_property def slice_normal(self): # The std_slice_normal comes from the cross product of the directions # in the ImageOrientationPatient @@ -720,7 +720,7 @@ def slice_normal(self): else: return std_slice_normal - @one_time + @cached_property def series_signature(self): """Add ICE dims from CSA header to signature""" signature = super().series_signature @@ -730,7 +730,7 @@ def series_signature(self): signature['ICE_Dims'] = (ice, operator.eq) return signature - @one_time + @cached_property def b_matrix(self): """Get DWI B matrix referring to voxel space @@ -767,7 +767,7 @@ def b_matrix(self): # semi-definite. 
return nearest_pos_semi_def(B_vox) - @one_time + @cached_property def q_vector(self): """Get DWI q vector referring to voxel space @@ -840,7 +840,7 @@ def __init__(self, dcm_data, csa_header=None, n_mosaic=None): self.n_mosaic = n_mosaic self.mosaic_size = int(np.ceil(np.sqrt(n_mosaic))) - @one_time + @cached_property def image_shape(self): """Return image shape as returned by ``get_data()``""" # reshape pixel slice array back from mosaic @@ -850,7 +850,7 @@ def image_shape(self): return None return (rows // self.mosaic_size, cols // self.mosaic_size, self.n_mosaic) - @one_time + @cached_property def image_position(self): """Return position of first voxel in data block From c49dff290f6113327eaa62bbd8aff4da924dd54a Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Tue, 18 Jun 2024 16:44:03 -0700 Subject: [PATCH 25/34] BF: Fix for 'split' (concatenated?) multiframe DICOM Can't just use number of frame indices to determine shape of data, as the actual frames could still be split into different files. Also can't assume a multiframe file is more than a single slice. --- nibabel/nicom/dicomwrappers.py | 34 ++++++++++++++++------------------ 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 2270ed3f0..894a0ed21 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -554,23 +554,20 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) - # account for the 2 additional dimensions (row and column) not included - # in the indices - n_dim = frame_indices.shape[1] + 2 # Store frame indices self._frame_indices = frame_indices - if n_dim < 4: # 3D volume - return rows, cols, n_frames - # More than 3 dimensions + # Determine size of any extra-spatial dimensions ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - shape = (rows, cols) + tuple(ns_unique) - n_vols = np.prod(shape[3:]) - n_frames_calc = n_vols * shape[2] - if n_frames != n_frames_calc: - raise WrapperError( - f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' - f'of shape {shape} does not match NumberOfFrames {n_frames}.' - ) + shape = (rows, cols) + tuple(x for i, x in enumerate(ns_unique) if i == 0 or x != 1) + n_dim = len(shape) + if n_dim > 3: + n_vols = np.prod(shape[3:]) + n_frames_calc = n_vols * shape[2] + if n_frames != n_frames_calc: + raise WrapperError( + f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' + f'of shape {shape} does not match NumberOfFrames {n_frames}.' 
+ ) return tuple(shape) @cached_property @@ -640,10 +637,11 @@ def get_data(self): raise WrapperError('No valid information for image shape') data = self.get_pixel_array() # Roll frames axis to last - data = data.transpose((1, 2, 0)) - # Sort frames with first index changing fastest, last slowest - sorted_indices = np.lexsort(self._frame_indices.T) - data = data[..., sorted_indices] + if len(data.shape) > 2: + data = data.transpose((1, 2, 0)) + # Sort frames with first index changing fastest, last slowest + sorted_indices = np.lexsort(self._frame_indices.T) + data = data[..., sorted_indices] data = data.reshape(shape, order='F') return self._scale_data(data) From 4063114c2bde09f34d88c1193a5fd20adc8c1932 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 15:29:26 -0700 Subject: [PATCH 26/34] BF+TST: Test and fix a bunch of multiframe fixes Corrects issue where order of slice indices was assumed to match the order needed to move along the direction of the slice normal, which resulted in slice orientation flips. Ignores indices that don't evenly divide data, and at the end will try to combine those indices (if needed) into a single tuple index. --- nibabel/nicom/dicomwrappers.py | 124 +++++++++++++++----- nibabel/nicom/tests/test_dicomwrappers.py | 132 ++++++++++++++++++---- 2 files changed, 203 insertions(+), 53 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 894a0ed21..c3f484a00 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -467,6 +467,25 @@ def __init__(self, dcm_data): self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: raise WrapperError('SharedFunctionalGroupsSequence is empty.') + # Try to determine slice order and minimal image position patient + self._frame_slc_ord = self._ipp = None + try: + frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] + except AttributeError: + try: + frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] + except AttributeError: + frame_ipps = None + if frame_ipps is not None and all(ipp is not None for ipp in frame_ipps): + frame_ipps = [np.array(list(map(float, ipp))) for ipp in frame_ipps] + frame_slc_pos = [np.inner(ipp, self.slice_normal) for ipp in frame_ipps] + rnd_slc_pos = np.round(frame_slc_pos, 4) + uniq_slc_pos = np.unique(rnd_slc_pos) + pos_ord_map = { + val: order for val, order in zip(uniq_slc_pos, np.argsort(uniq_slc_pos)) + } + self._frame_slc_ord = [pos_ord_map[pos] for pos in rnd_slc_pos] + self._ipp = frame_ipps[np.argmin(frame_slc_pos)] self._shape = None @cached_property @@ -509,14 +528,16 @@ def image_shape(self): if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): # DWI image may include derived isotropic, ADC or trace volume try: - anisotropic = pydicom.Sequence( - frame - for frame in self.frames - if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC' - ) + aniso_frames = pydicom.Sequence() + aniso_slc_ord = [] + for slc_ord, frame in zip(self._frame_slc_ord, self.frames): + if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC': + aniso_frames.append(frame) + aniso_slc_ord.append(slc_ord) # Image contains DWI volumes followed by derived images; remove derived images - if len(anisotropic) != 0: - self.frames = anisotropic + if len(aniso_frames) != 0: + self.frames = aniso_frames + self._frame_slc_ord = aniso_slc_ord except IndexError: # Sequence tag is found but missing items! 
raise WrapperError('Diffusion file missing information') @@ -554,20 +575,70 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) + # Determine the shape and which indices to use + shape = [rows, cols] + curr_parts = n_frames + frames_per_part = 1 + del_indices = {} + for row_idx, row in enumerate(frame_indices.T): + if curr_parts == 1: + break + unique = np.unique(row) + count = len(unique) + if count == 1: + continue + # Replace slice indices with order determined from slice positions along normal + if len(shape) == 2: + row = self._frame_slc_ord + frame_indices.T[row_idx, :] = row + unique = np.unique(row) + if len(unique) != count: + raise WrapperError("Number of slice indices and positions don't match") + new_parts, leftover = divmod(curr_parts, count) + allowed_val_counts = [new_parts * frames_per_part] + if len(shape) > 2: + # Except for the slice dim, having a unique value for each frame is valid + allowed_val_counts.append(n_frames) + if leftover != 0 or any( + np.count_nonzero(row == val) not in allowed_val_counts for val in unique + ): + if len(shape) == 2: + raise WrapperError('Missing slices from multiframe') + del_indices[row_idx] = count + continue + frames_per_part *= count + shape.append(count) + curr_parts = new_parts + if del_indices: + if curr_parts > 1: + ns_failed = [k for k, v in del_indices.items() if v != 1] + if len(ns_failed) > 1: + # If some indices weren't used yet but we still have unaccounted for + # partitions, try combining indices into single tuple and using that + tup_dtype = np.dtype(','.join(['I'] * len(ns_failed))) + row = [tuple(x for x in vals) for vals in frame_indices[:, ns_failed]] + row = np.array(row, dtype=tup_dtype) + frame_indices = np.delete(frame_indices, np.array(list(del_indices.keys())), axis=1) + if curr_parts > 1 and len(ns_failed) > 1: + unique = np.unique(row, axis=0) + count = len(unique) + new_parts, rem = divmod(curr_parts, count) + allowed_val_counts = [new_parts * frames_per_part, n_frames] + if rem == 0 and all( + np.count_nonzero(row == val) in allowed_val_counts for val in unique + ): + shape.append(count) + curr_parts = new_parts + ord_vals = np.argsort(unique) + order = {tuple(unique[i]): ord_vals[i] for i in range(count)} + ord_row = np.array([order[tuple(v)] for v in row]) + frame_indices = np.hstack( + [frame_indices, np.array(ord_row).reshape((n_frames, 1))] + ) + if curr_parts > 1: + raise WrapperError('Unable to determine sorting of final dimension(s)') # Store frame indices self._frame_indices = frame_indices - # Determine size of any extra-spatial dimensions - ns_unique = [len(np.unique(row)) for row in self._frame_indices.T] - shape = (rows, cols) + tuple(x for i, x in enumerate(ns_unique) if i == 0 or x != 1) - n_dim = len(shape) - if n_dim > 3: - n_vols = np.prod(shape[3:]) - n_frames_calc = n_vols * shape[2] - if n_frames != n_frames_calc: - raise WrapperError( - f'Calculated # of frames ({n_frames_calc}={n_vols}*{shape[2]}) ' - f'of shape {shape} does not match NumberOfFrames {n_frames}.' 
- ) return tuple(shape) @cached_property @@ -607,18 +678,11 @@ def voxel_sizes(self): # Ensure values are float rather than Decimal return tuple(map(float, list(pix_space) + [zs])) - @cached_property + @property def image_position(self): - try: - ipp = self.shared.PlanePositionSequence[0].ImagePositionPatient - except AttributeError: - try: - ipp = self.frames[0].PlanePositionSequence[0].ImagePositionPatient - except AttributeError: - raise WrapperError('Cannot get image position from dicom') - if ipp is None: - return None - return np.array(list(map(float, ipp))) + if self._ipp is None: + raise WrapperError('Not enough information for image_position_patient') + return self._ipp @cached_property def series_signature(self): diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index d14c35dcd..25a58d70e 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -364,7 +364,7 @@ def test_decimal_rescale(): assert dw.get_data().dtype != np.dtype(object) -def fake_frames(seq_name, field_name, value_seq): +def fake_frames(seq_name, field_name, value_seq, frame_seq=None): """Make fake frames for multiframe testing Parameters @@ -375,6 +375,8 @@ def fake_frames(seq_name, field_name, value_seq): name of field within sequence value_seq : length N sequence sequence of values + frame_seq : length N list + previous result from this function to update Returns ------- @@ -386,19 +388,28 @@ def fake_frames(seq_name, field_name, value_seq): class Fake: pass - frames = [] - for value in value_seq: - fake_frame = Fake() + if frame_seq == None: + frame_seq = [Fake() for _ in range(len(value_seq))] + for value, fake_frame in zip(value_seq, frame_seq): fake_element = Fake() setattr(fake_element, field_name, value) setattr(fake_frame, seq_name, [fake_element]) - frames.append(fake_frame) - return frames + return frame_seq -def fake_shape_dependents(div_seq, sid_seq=None, sid_dim=None): +def fake_shape_dependents( + div_seq, + sid_seq=None, + sid_dim=None, + ipp_seq=None, + slice_dim=None, + flip_ipp_idx_corr=False, +): """Make a fake dictionary of data that ``image_shape`` is dependent on. + If you are providing the ``ipp_seq`` argument, they should be generated using + a slice normal aligned with the z-axis (i.e. iop == (0, 1, 0, 1, 0, 0)). + Parameters ---------- div_seq : list of tuples @@ -407,39 +418,86 @@ def fake_shape_dependents(div_seq, sid_seq=None, sid_dim=None): list of values to use for the `StackID` of each frame. 
sid_dim : int the index of the column in 'div_seq' to use as 'sid_seq' + ipp_seq : list of tuples + list of values to use for `ImagePositionPatient` for each frame + slice_dim : int + the index of the column in 'div_seq' corresponding to slices + flip_ipp_idx_corr : bool + generate ipp values so slice location is negatively correlated with slice index """ - class DimIdxSeqElem: + class PrintBase: + def __repr__(self): + attr_strs = [] + for attr in dir(self): + if attr[0].isupper(): + attr_strs.append(f'{attr}={getattr(self, attr)}') + return f"{self.__class__.__name__}({', '.join(attr_strs)})" + + class DimIdxSeqElem(PrintBase): def __init__(self, dip=(0, 0), fgp=None): self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem: + class FrmContSeqElem(PrintBase): def __init__(self, div, sid): self.DimensionIndexValues = div self.StackID = sid - class PerFrmFuncGrpSeqElem: - def __init__(self, div, sid): + class PlnPosSeqElem(PrintBase): + def __init__(self, ipp): + self.ImagePositionPatient = ipp + + class PlnOrientSeqElem(PrintBase): + def __init__(self, iop): + self.ImageOrientationPatient = iop + + class PerFrmFuncGrpSeqElem(PrintBase): + def __init__(self, div, sid, ipp, iop): self.FrameContentSequence = [FrmContSeqElem(div, sid)] + self.PlanePositionSequence = [PlnPosSeqElem(ipp)] + self.PlaneOrientationSequence = [PlnOrientSeqElem(iop)] # if no StackID values passed in then use the values at index 'sid_dim' in # the value for DimensionIndexValues for it + n_indices = len(div_seq[0]) if sid_seq is None: if sid_dim is None: sid_dim = 0 sid_seq = [div[sid_dim] for div in div_seq] - # create the DimensionIndexSequence + # Determine slice_dim and create per-slice ipp information + if slice_dim is None: + slice_dim = 1 if sid_dim == 0 else 0 num_of_frames = len(div_seq) - dim_idx_seq = [DimIdxSeqElem()] * num_of_frames + frame_slc_indices = np.array(div_seq)[:, slice_dim] + uniq_slc_indices = np.unique(frame_slc_indices) + n_slices = len(uniq_slc_indices) + assert num_of_frames % n_slices == 0 + iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] + if ipp_seq is None: + slc_locs = np.linspace(-1.0, 1.0, n_slices) + if flip_ipp_idx_corr: + slc_locs = slc_locs[::-1] + slc_idx_loc = { + div_idx: slc_locs[arr_idx] for arr_idx, div_idx in enumerate(np.sort(uniq_slc_indices)) + } + ipp_seq = [(-1.0, -1.0, slc_idx_loc[idx]) for idx in frame_slc_indices] + else: + assert flip_ipp_idx_corr is False # caller can flip it themselves + assert len(ipp_seq) == num_of_frames + # create the DimensionIndexSequence + dim_idx_seq = [DimIdxSeqElem()] * n_indices # add an entry for StackID into the DimensionIndexSequence if sid_dim is not None: sid_tag = pydicom.datadict.tag_for_keyword('StackID') fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence') dim_idx_seq[sid_dim] = DimIdxSeqElem(sid_tag, fcs_tag) # create the PerFrameFunctionalGroupsSequence - frames = [PerFrmFuncGrpSeqElem(div, sid) for div, sid in zip(div_seq, sid_seq)] + frames = [ + PerFrmFuncGrpSeqElem(div, sid, ipp, iop) + for div, sid, ipp, iop in zip(div_seq, sid_seq, ipp_seq, iop_seq) + ] return { 'NumberOfFrames': num_of_frames, 'DimensionIndexSequence': dim_idx_seq, @@ -480,7 +538,15 @@ def test_shape(self): # PerFrameFunctionalGroupsSequence does not match NumberOfFrames with pytest.raises(AssertionError): dw.image_shape - # check 3D shape when StackID index is 0 + # check 2D shape with StackID index is 0 + div_seq = ((1, 1),) + 
fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64) + # Check 2D shape with extraneous extra indices + div_seq = ((1, 1, 2),) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64) + # Check 3D shape when StackID index is 0 div_seq = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 4) @@ -541,6 +607,18 @@ def test_shape(self): div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + # Test with combo indices, here with the last two needing to be combined into + # a single index corresponding to [(1, 1), (1, 1), (2, 1), (2, 1), (2, 2), (2, 2)] + div_seq = ( + (1, 1, 1, 1), + (1, 2, 1, 1), + (1, 1, 2, 1), + (1, 2, 2, 1), + (1, 1, 2, 2), + (1, 2, 2, 2), + ) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 2, 3) def test_iop(self): # Test Image orient patient for multiframe @@ -608,22 +686,30 @@ def test_image_position(self): with pytest.raises(didw.WrapperError): dw.image_position # Make a fake frame - fake_frame = fake_frames( - 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]] - )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + iop = [0, 1, 0, 1, 0, 0] + frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop]) + frames = fake_frames( + 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]], frames + ) + fake_mf['SharedFunctionalGroupsSequence'] = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) fake_mf['SharedFunctionalGroupsSequence'] = [None] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf['PerFrameFunctionalGroupsSequence'] = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) # Check lists of Decimals work - fake_frame.PlanePositionSequence[0].ImagePositionPatient = [ + frames[0].PlanePositionSequence[0].ImagePositionPatient = [ Decimal(str(v)) for v in [-2, 3, 7] ] assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) assert MFW(fake_mf).image_position.dtype == float + # We should get minimum along slice normal with multiple frames + frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop] * 2) + ipps = [[-2.0, 3.0, 7], [-2.0, 3.0, 6]] + frames = fake_frames('PlanePositionSequence', 'ImagePositionPatient', ipps, frames) + fake_mf['PerFrameFunctionalGroupsSequence'] = frames + assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 6]) @dicom_test @pytest.mark.xfail(reason='Not packaged in install', raises=FileNotFoundError) @@ -644,7 +730,7 @@ def test_data_real(self): if endian_codes[data.dtype.byteorder] == '>': data = data.byteswap() dat_str = data.tobytes() - assert sha1(dat_str).hexdigest() == '149323269b0af92baa7508e19ca315240f77fa8c' + assert sha1(dat_str).hexdigest() == 'dc011bb49682fb78f3cebacf965cb65cc9daba7d' @dicom_test def test_slicethickness_fallback(self): @@ -665,7 +751,7 @@ def test_data_derived_shape(self): def test_data_trace(self): # Test that a standalone trace volume is found and not dropped dw = didw.wrapper_from_file(DATA_FILE_SIEMENS_TRACE) - assert dw.image_shape == (72, 72, 39, 1) + assert dw.image_shape == (72, 72, 39) @dicom_test @needs_nibabel_data('nitest-dicom') From 
14c24ef7fc156d2a0bb760304e482cfde4694bc3 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 16:34:11 -0700 Subject: [PATCH 27/34] BF: Trim unneeded trailing indices from _frame_indices --- nibabel/nicom/dicomwrappers.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index c3f484a00..eab0471ec 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -581,11 +581,10 @@ def image_shape(self): frames_per_part = 1 del_indices = {} for row_idx, row in enumerate(frame_indices.T): - if curr_parts == 1: - break unique = np.unique(row) count = len(unique) - if count == 1: + if count == 1 or curr_parts == 1: + del_indices[row_idx] = count continue # Replace slice indices with order determined from slice positions along normal if len(shape) == 2: From 019f448c9924e352ed5503aae384b59918bb1d95 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Wed, 24 Jul 2024 17:09:09 -0700 Subject: [PATCH 28/34] BF+TST: Fix 2D plus time case Explicitly use `InStackPositionNumber` to identify the slice dim, produce correct output for 2D + time data. --- nibabel/nicom/dicomwrappers.py | 17 +++++++++++------ nibabel/nicom/tests/test_dicomwrappers.py | 9 ++++++++- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index eab0471ec..14041e631 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -470,10 +470,10 @@ def __init__(self, dcm_data): # Try to determine slice order and minimal image position patient self._frame_slc_ord = self._ipp = None try: - frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] + frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] except AttributeError: try: - frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] + frame_ipps = [self.shared.PlanePositionSequence[0].ImagePositionPatient] except AttributeError: frame_ipps = None if frame_ipps is not None and all(ipp is not None for ipp in frame_ipps): @@ -575,19 +575,24 @@ def image_shape(self): raise WrapperError('Missing information, cannot remove indices with confidence.') derived_dim_idx = dim_seq.index(derived_tag) frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1) + dim_seq.pop(derived_dim_idx) # Determine the shape and which indices to use shape = [rows, cols] curr_parts = n_frames frames_per_part = 1 del_indices = {} + stackpos_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber') + slice_dim_idx = dim_seq.index(stackpos_tag) for row_idx, row in enumerate(frame_indices.T): unique = np.unique(row) count = len(unique) - if count == 1 or curr_parts == 1: + if curr_parts == 1 or (count == 1 and row_idx != slice_dim_idx): del_indices[row_idx] = count continue # Replace slice indices with order determined from slice positions along normal - if len(shape) == 2: + if row_idx == slice_dim_idx: + if len(shape) > 2: + raise WrapperError('Non-singular index precedes the slice index') row = self._frame_slc_ord frame_indices.T[row_idx, :] = row unique = np.unique(row) @@ -595,13 +600,13 @@ def image_shape(self): raise WrapperError("Number of slice indices and positions don't match") new_parts, leftover = divmod(curr_parts, count) allowed_val_counts = [new_parts * frames_per_part] - if len(shape) > 2: + if row_idx != slice_dim_idx: # Except for the slice dim, having a unique value for each frame is valid 
                allowed_val_counts.append(n_frames)
             if leftover != 0 or any(
                 np.count_nonzero(row == val) not in allowed_val_counts for val in unique
             ):
-                if len(shape) == 2:
+                if row_idx == slice_dim_idx:
                     raise WrapperError('Missing slices from multiframe')
                 del_indices[row_idx] = count
                 continue
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 25a58d70e..040242162 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -488,10 +488,13 @@ def __init__(self, div, sid, ipp, iop):
         assert len(ipp_seq) == num_of_frames
     # create the DimensionIndexSequence
     dim_idx_seq = [DimIdxSeqElem()] * n_indices
+    # Add entry for InStackPositionNumber to DimensionIndexSequence
+    fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence')
+    isp_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber')
+    dim_idx_seq[slice_dim] = DimIdxSeqElem(isp_tag, fcs_tag)
     # add an entry for StackID into the DimensionIndexSequence
     if sid_dim is not None:
         sid_tag = pydicom.datadict.tag_for_keyword('StackID')
-        fcs_tag = pydicom.datadict.tag_for_keyword('FrameContentSequence')
         dim_idx_seq[sid_dim] = DimIdxSeqElem(sid_tag, fcs_tag)
     # create the PerFrameFunctionalGroupsSequence
     frames = [
@@ -546,6 +549,10 @@ def test_shape(self):
         div_seq = ((1, 1, 2),)
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64)
+        # Check 2D plus time
+        div_seq = ((1, 1, 1), (1, 1, 2), (1, 1, 3))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        assert MFW(fake_mf).image_shape == (32, 64, 1, 3)
         # Check 3D shape when StackID index is 0
         div_seq = ((1, 1), (1, 2), (1, 3), (1, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))

From 0215ce5db008f32e6001335f2b4d4f39d5a0a346 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Wed, 24 Jul 2024 18:33:56 -0700
Subject: [PATCH 29/34] BF+TST: Handle case with extra-spatial index that is
 unique per frame

Not sure if this ever actually happens in real multiframe data, but it
does in non-multiframe and I can imagine if a DimensionIndexSequence
element references a per-frame AcquisitionTime then this could happen.
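To make the new behavior concrete, here is a minimal standalone sketch
(not the nibabel implementation; the index values are invented) of how a
dimension index with a unique value per frame gets its size deferred and
then resolved from the leftover frame partitions:

    import numpy as np

    # Hypothetical (slice, time) DimensionIndexValues for 6 frames; the
    # second column is unique per frame, so its size is ambiguous and is
    # deferred as 'remaining' until the other dimensions are counted.
    frame_indices = np.array([(1, 1), (2, 2), (1, 3), (2, 4), (1, 5), (2, 6)])
    n_frames = len(frame_indices)
    shape = [32, 32]  # rows, cols (made up)
    curr_parts = n_frames  # frames not yet accounted for by a sized dimension
    for col in frame_indices.T:
        count = len(np.unique(col))
        if count == n_frames:  # unique value per frame: defer this dimension
            shape.append('remaining')
            continue
        new_parts, leftover = divmod(curr_parts, count)
        assert leftover == 0, 'index does not evenly divide the frames'
        shape.append(count)
        curr_parts = new_parts
    if 'remaining' in shape:  # resolve the deferred dim from leftover partitions
        shape[shape.index('remaining')] = curr_parts
    print(tuple(shape))  # -> (32, 32, 2, 3)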
---
 nibabel/nicom/dicomwrappers.py            | 25 +++++++++++++------
 nibabel/nicom/tests/test_dicomwrappers.py | 29 +++++++++++++++++++++++
 2 files changed, 47 insertions(+), 7 deletions(-)

diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py
index 14041e631..374387870 100755
--- a/nibabel/nicom/dicomwrappers.py
+++ b/nibabel/nicom/dicomwrappers.py
@@ -598,21 +598,32 @@ def image_shape(self):
             unique = np.unique(row)
             if len(unique) != count:
                 raise WrapperError("Number of slice indices and positions don't match")
+            elif count == n_frames:
+                if shape[-1] == 'remaining':
+                    raise WrapperError('At most one index can have ambiguous size')
+                shape.append('remaining')
+                continue
             new_parts, leftover = divmod(curr_parts, count)
-            allowed_val_counts = [new_parts * frames_per_part]
-            if row_idx != slice_dim_idx:
-                # Except for the slice dim, having a unique value for each frame is valid
-                allowed_val_counts.append(n_frames)
-            if leftover != 0 or any(
-                np.count_nonzero(row == val) not in allowed_val_counts for val in unique
-            ):
+            expected = new_parts * frames_per_part
+            if leftover != 0 or any(np.count_nonzero(row == val) != expected for val in unique):
                 if row_idx == slice_dim_idx:
                     raise WrapperError('Missing slices from multiframe')
                 del_indices[row_idx] = count
                 continue
+            if shape[-1] == 'remaining':
+                shape[-1] = new_parts
+                frames_per_part *= shape[-1]
+                new_parts = 1
             frames_per_part *= count
             shape.append(count)
             curr_parts = new_parts
+        if shape[-1] == 'remaining':
+            if curr_parts > 1:
+                shape[-1] = curr_parts
+                curr_parts = 1
+            else:
+                del_indices[len(shape)] = 1
+                shape = shape[:-1]
         if del_indices:
             if curr_parts > 1:
                 ns_failed = [k for k, v in del_indices.items() if v != 1]
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 040242162..b50535a4b 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -626,6 +626,35 @@ def test_shape(self):
         )
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64, 2, 3)
+        # Test invalid 4D indices
+        div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 4))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        with pytest.raises(didw.WrapperError):
+            MFW(fake_mf).image_shape
+        div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 2))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        with pytest.raises(didw.WrapperError):
+            MFW(fake_mf).image_shape
+        # Time index that is unique to each frame
+        div_seq = ((1, 1, 1), (1, 2, 2), (1, 1, 3), (1, 2, 4), (1, 1, 5), (1, 2, 6))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        assert MFW(fake_mf).image_shape == (32, 64, 2, 3)
+        div_seq = (
+            (1, 1, 1, 1),
+            (1, 2, 2, 1),
+            (1, 1, 3, 1),
+            (1, 2, 4, 1),
+            (1, 1, 5, 1),
+            (1, 2, 6, 1),
+            (1, 1, 7, 2),
+            (1, 2, 8, 2),
+            (1, 1, 9, 2),
+            (1, 2, 10, 2),
+            (1, 1, 11, 2),
+            (1, 2, 12, 2),
+        )
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        assert MFW(fake_mf).image_shape == (32, 64, 2, 3, 2)

     def test_iop(self):
         # Test Image orient patient for multiframe

From 259483f1f5412e4e8deb919b800b72abddccd439 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Wed, 24 Jul 2024 23:24:53 -0700
Subject: [PATCH 30/34] TST: Expand test coverage for multiframe dicom shape
 determination

---
 nibabel/nicom/tests/test_dicomwrappers.py | 33 ++++++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git 
a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index b50535a4b..2168476bb 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -473,7 +473,6 @@ def __init__(self, div, sid, ipp, iop): frame_slc_indices = np.array(div_seq)[:, slice_dim] uniq_slc_indices = np.unique(frame_slc_indices) n_slices = len(uniq_slc_indices) - assert num_of_frames % n_slices == 0 iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] if ipp_seq is None: slc_locs = np.linspace(-1.0, 1.0, n_slices) @@ -579,6 +578,17 @@ def test_shape(self): div_seq = ((1, 1, 0), (1, 2, 0), (1, 1, 3), (1, 2, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 2, 2) + # Check number of IPP vals match the number of slices or we raise + frames = fake_mf['PerFrameFunctionalGroupsSequence'] + for frame in frames[1:]: + frame.PlanePositionSequence = frames[0].PlanePositionSequence[:] + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + # Check we raise on missing slices + div_seq = ((1, 1, 0), (1, 2, 0), (1, 1, 1)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 3D shape when there is no StackID index div_seq = ((1,), (2,), (3,), (4,)) sid_seq = (1, 1, 1, 1) @@ -614,6 +624,11 @@ def test_shape(self): div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3) + # Check non-singular dimension preceding slice dim raises + div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 3)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0, slice_dim=2)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # Test with combo indices, here with the last two needing to be combined into # a single index corresponding to [(1, 1), (1, 1), (2, 1), (2, 1), (2, 2), (2, 2)] div_seq = ( @@ -655,6 +670,22 @@ def test_shape(self): ) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 2, 3, 2) + # Check we only allow one extra spatial dimension with unique val per frame + div_seq = ( + (1, 1, 1, 6), + (1, 2, 2, 5), + (1, 1, 3, 4), + (1, 2, 4, 3), + (1, 1, 5, 2), + (1, 2, 6, 1), + ) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape + # Check that having unique value per frame works with single volume + div_seq = ((1, 1, 1), (1, 2, 2), (1, 3, 3)) + fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) + assert MFW(fake_mf).image_shape == (32, 64, 3) def test_iop(self): # Test Image orient patient for multiframe From 52c31052e4f22ff7f0a01883129584c6091e9ac9 Mon Sep 17 00:00:00 2001 From: Brendan Moloney Date: Thu, 25 Jul 2024 09:58:19 -0700 Subject: [PATCH 31/34] TST+CLN: More slice ordering testing, minor cleanup --- nibabel/nicom/tests/test_dicomwrappers.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py index 2168476bb..e01759c86 100755 --- a/nibabel/nicom/tests/test_dicomwrappers.py +++ b/nibabel/nicom/tests/test_dicomwrappers.py @@ -388,7 +388,7 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None): class Fake: pass - if frame_seq == None: + if frame_seq is None: frame_seq = [Fake() for 
_ in range(len(value_seq))]
     for value, fake_frame in zip(value_seq, frame_seq):
         fake_element = Fake()
@@ -868,6 +868,11 @@ def test_data_fake(self):
         sorted_data = data[..., [3, 1, 2, 0]]
         fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2)
         assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1)
+        # Check slice sorting with negative index / IPP correlation
+        fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0, flip_ipp_idx_corr=True))
+        sorted_data = data[..., [0, 2, 1, 3]]
+        fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2)
+        assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1)
         # 5D!
         dim_idxs = [
             [1, 4, 2, 1],

From 629dbb52e14e813203d1f9c355de95399fd70dda Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Thu, 25 Jul 2024 10:05:32 -0700
Subject: [PATCH 32/34] DOC: Add some notes to the changelog

---
 Changelog | 27 +++++++++++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/Changelog b/Changelog
index 689295125..24e89095f 100644
--- a/Changelog
+++ b/Changelog
@@ -25,6 +25,33 @@ Eric Larson (EL), Demian Wassermann, Stephan Gerhard and Ross Markello (RM).

 References like "pr/298" refer to github pull request numbers.

+Upcoming release (To be determined)
+===================================
+
+New features
+------------
+
+Enhancements
+------------
+* Ability to read data from many multiframe DICOM files that previously generated errors
+
+Bug fixes
+---------
+* Fixed multiframe DICOM issue where data could be flipped along slice dimension relative to the
+  affine
+* Fixed multiframe DICOM issue where ``image_position`` and the translation component in the
+  ``affine`` could be incorrect
+
+Documentation
+-------------
+
+Maintenance
+-----------
+
+API changes and deprecations
+----------------------------
+
+
 5.2.1 (Monday 26 February 2024)
 ===============================

From fd56bf4abe195da9d351d64345381231ce7f7038 Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Mon, 12 Aug 2024 15:08:26 -0700
Subject: [PATCH 33/34] BF+ENH: Fixes to DICOM scaling, make frame filtering
 explicit

Fixes how we handle DICOM scaling, particularly for Philips and multi-frame
files. For Philips data, scale factors without defined units should be
avoided, and instead a private tag should be used to make image intensities
comparable across series.

For multi-frame DICOM, it is possible to have different scale factors
(potentially coming from different tags) per frame. We also prefer scale
factors from a RealWorldValueMapping provided they have defined units.

The base Wrapper class now has a few new attributes and methods to support
this functionality. In particular, an attribute `scale_factors` that
provides an array of slope/intercept pairs, and a method `get_unscaled_data`
that will return the reordered/reshaped data but without the scaling
applied. A `vendor` attribute was also added to better support
vendor-specific implementation details.

For the MultiframeWrapper I also added an attribute `frame_order`, which
exposes the order used to sort the frames, and I use this to return the
`scale_factors` in sorted order.

While implementing this I kept bumping into issues due to the (implicit)
frame filtering that was happening in the `image_shape` property, so I made
this filtering explicit and configurable and moved it into the class
initialization.
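As a rough illustration of the scale factor precedence described above,
here is a self-contained sketch (`pick_scale` and the example dataset are
hypothetical, not nibabel's API): real world value mappings with defined
units win, then the Modality LUT rescale, which for Philips is only trusted
when RescaleType names a real unit.

    from pydicom.dataset import Dataset

    def pick_scale(ds, is_philips=False):
        # 1) Prefer a RealWorldValueMapping with defined units
        for rwv in getattr(ds, 'RealWorldValueMappingSequence', []):
            try:
                units = rwv.MeasurementUnitsCodeSequence[0].CodeMeaning
            except (AttributeError, IndexError):
                continue
            if units not in ('', 'no units', 'UNDEFINED'):
                return float(rwv.RealWorldValueSlope), float(rwv.RealWorldValueIntercept)
        # 2) Fall back to the Modality LUT rescale; for Philips only trust it
        #    when RescaleType marks real units rather than 'US'/'normalized'
        if 'RescaleSlope' in ds or 'RescaleIntercept' in ds:
            if not is_philips or ds.get('RescaleType', 'US') not in ('', 'US', 'normalized'):
                return float(ds.get('RescaleSlope', 1)), float(ds.get('RescaleIntercept', 0))
        # 3) Nothing trustworthy found (the real code would try the Philips
        #    private scale factor here before falling back to identity)
        return 1.0, 0.0

    ds = Dataset()
    ds.RescaleSlope, ds.RescaleIntercept = 2.0, -1024.0
    print(pick_scale(ds))                   # (2.0, -1024.0)
    print(pick_scale(ds, is_philips=True))  # (1.0, 0.0): unitless Philips rescale ignored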
--- nibabel/nicom/dicomwrappers.py | 410 +++++++++++++++++----- nibabel/nicom/tests/test_dicomwrappers.py | 363 ++++++++++++++----- nibabel/nicom/utils.py | 54 +++ 3 files changed, 636 insertions(+), 191 deletions(-) diff --git a/nibabel/nicom/dicomwrappers.py b/nibabel/nicom/dicomwrappers.py index 374387870..3842248fd 100755 --- a/nibabel/nicom/dicomwrappers.py +++ b/nibabel/nicom/dicomwrappers.py @@ -13,6 +13,7 @@ """ import operator +import re import warnings from functools import cached_property @@ -23,6 +24,7 @@ from ..openers import ImageOpener from . import csareader as csar from .dwiparams import B2q, nearest_pos_semi_def, q2bg +from .utils import Vendor, find_private_section, vendor_from_private pydicom = optional_package('pydicom')[0] @@ -59,7 +61,7 @@ def wrapper_from_file(file_like, *args, **kwargs): return wrapper_from_data(dcm_data) -def wrapper_from_data(dcm_data): +def wrapper_from_data(dcm_data, frame_filters=None): """Create DICOM wrapper from DICOM data object Parameters @@ -68,6 +70,9 @@ def wrapper_from_data(dcm_data): Object allowing attribute access, with DICOM attributes. Probably a dataset as read by ``pydicom``. + frame_filters + Optionally override the `frame_filters` used to create a `MultiFrameWrapper` + Returns ------- dcm_w : ``dicomwrappers.Wrapper`` or subclass @@ -76,9 +81,8 @@ def wrapper_from_data(dcm_data): sop_class = dcm_data.get('SOPClassUID') # try to detect what type of dicom object to wrap if sop_class == '1.2.840.10008.5.1.4.1.1.4.1': # Enhanced MR Image Storage - # currently only Philips is using Enhanced Multiframe DICOM - return MultiframeWrapper(dcm_data) - # Check for Siemens DICOM format types + return MultiframeWrapper(dcm_data, frame_filters) + # Check for non-enhanced (legacy) Siemens DICOM format types # Only Siemens will have data for the CSA header try: csa = csar.get_csa_header(dcm_data) @@ -103,6 +107,7 @@ class Wrapper: Methods: * get_data() + * get_unscaled_data() * get_pixel_array() * is_same_series(other) * __getitem__ : return attributes from `dcm_data` @@ -120,6 +125,8 @@ class Wrapper: * image_position : sequence length 3 * slice_indicator : float * series_signature : tuple + * scale_factors : (N, 2) array + * vendor : Vendor """ is_csa = False @@ -136,10 +143,34 @@ def __init__(self, dcm_data): dcm_data : object object should allow 'get' and '__getitem__' access. Usually this will be a ``dicom.dataset.Dataset`` object resulting from reading a - DICOM file, but a dictionary should also work. + DICOM file. 
""" self.dcm_data = dcm_data + @cached_property + def vendor(self): + """The vendor of the instrument that produced the DICOM""" + # Look at manufacturer tag first + mfgr = self.get('Manufacturer') + if mfgr: + if re.search('Siemens', mfgr, re.IGNORECASE): + return Vendor.SIEMENS + if re.search('Philips', mfgr, re.IGNORECASE): + return Vendor.PHILIPS + if re.search('GE Medical', mfgr, re.IGNORECASE): + return Vendor.GE + # Next look at UID prefixes + for uid_src in ('StudyInstanceUID', 'SeriesInstanceUID', 'SOPInstanceUID'): + uid = str(self.get(uid_src)) + if uid.startswith(('1.3.12.2.1007.', '1.3.12.2.1107.')): + return Vendor.SIEMENS + if uid.startswith(('1.3.46', '1.3.12.2.1017')): + return Vendor.PHILIPS + if uid.startswith('1.2.840.113619'): + return Vendor.GE + # Finally look for vendor specific private blocks + return vendor_from_private(self.dcm_data) + @cached_property def image_shape(self): """The array shape as it will be returned by ``get_data()``""" @@ -315,14 +346,30 @@ def affine(self): return aff def get_pixel_array(self): - """Return unscaled pixel array from DICOM""" + """Return raw pixel array without reshaping or scaling + + Returns + ------- + data : array + array with raw pixel data from DICOM + """ data = self.dcm_data.get('pixel_array') if data is None: raise WrapperError('Cannot find data in DICOM') return data + def get_unscaled_data(self): + """Return pixel array that is potentially reshaped, but without any scaling + + Returns + ------- + data : array + array with raw pixel data from DICOM + """ + return self.get_pixel_array() + def get_data(self): - """Get scaled image data from DICOMs + """Get potentially scaled and reshaped image data from DICOMs We return the data as DICOM understands it, first dimension is rows, second dimension is columns @@ -333,7 +380,7 @@ def get_data(self): array with data as scaled from any scaling in the DICOM fields. 
""" - return self._scale_data(self.get_pixel_array()) + return self._scale_data(self.get_unscaled_data()) def is_same_series(self, other): """Return True if `other` appears to be in same series @@ -372,11 +419,86 @@ def is_same_series(self, other): return False return True + @cached_property + def scale_factors(self): + """Return (2, N) array of slope/intercept pairs""" + scaling = self._get_best_scale_factor(self.dcm_data) + if scaling is None: + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + scaling = (1, 0) + return np.array((scaling,)) + + def _get_rwv_scale_factor(self, dcm_data): + """Return the first set of 'real world' scale factors with defined units""" + rw_seq = dcm_data.get('RealWorldValueMappingSequence') + if rw_seq: + for rw_map in rw_seq: + try: + units = rw_map.MeasurementUnitsCodeSequence[0].CodeMeaning + except (AttributeError, IndexError): + continue + if units not in ('', 'no units', 'UNDEFINED'): + return ( + rw_map.get('RealWorldValueSlope', 1), + rw_map.get('RealWorldValueIntercept', 0), + ) + + def _get_legacy_scale_factor(self, dcm_data): + """Return scale factors from older 'Modality LUT' macro + + For Philips data we require RescaleType is defined and not set to 'normalized' + """ + pix_trans_seq = dcm_data.get('PixelValueTransformationSequence') + if pix_trans_seq is not None: + pix_trans = pix_trans_seq[0] + if self.vendor != Vendor.PHILIPS or pix_trans.get('RescaleType', 'US') not in ( + '', + 'US', + 'normalized', + ): + return (pix_trans.get('RescaleSlope', 1), pix_trans.get('RescaleIntercept', 0)) + if ( + dcm_data.get('RescaleSlope') is not None + or dcm_data.get('RescaleIntercept') is not None + ): + if self.vendor != Vendor.PHILIPS or dcm_data.get('RescaleType', 'US') not in ( + '', + 'US', + 'normalized', + ): + return (dcm_data.get('RescaleSlope', 1), dcm_data.get('RescaleIntercept', 0)) + + def _get_philips_scale_factor(self, dcm_data): + """Return scale factors from Philips private element + + If we don't have any other scale factors that are tied to real world units, then + this is the best scaling to use to enable cross-series comparisons + """ + offset = find_private_section(dcm_data, 0x2005, 'Philips MR Imaging DD 001') + priv_scale = None if offset is None else dcm_data.get((0x2005, offset + 0xE)) + if priv_scale is not None: + return (priv_scale.value, 0.0) + + def _get_best_scale_factor(self, dcm_data): + """Return the most appropriate scale factor found or None""" + scaling = self._get_rwv_scale_factor(dcm_data) + if scaling is not None: + return scaling + scaling = self._get_legacy_scale_factor(dcm_data) + if scaling is not None: + return scaling + if self.vendor == Vendor.PHILIPS: + scaling = self._get_philips_scale_factor(dcm_data) + if scaling is not None: + return scaling + def _scale_data(self, data): # depending on pydicom and dicom files, values might need casting from # Decimal to float - scale = float(self.get('RescaleSlope', 1)) - offset = float(self.get('RescaleIntercept', 0)) + scale, offset = self.scale_factors[0] return self._apply_scale_offset(data, scale, offset) def _apply_scale_offset(self, data, scale, offset): @@ -407,6 +529,71 @@ def b_vector(self): return q2bg(q_vec)[1] +class FrameFilter: + """Base class for defining how to filter out (ignore) frames from a multiframe file + + It is guaranteed that the `applies` method will on a dataset before the `keep` method + is called on any of the frames inside. 
+    """
+
+    def applies(self, dcm_wrp) -> bool:
+        """Returns true if the filter should be applied to a dataset"""
+        return True
+
+    def keep(self, frame_data) -> bool:
+        """Return true if the frame should be kept"""
+        raise NotImplementedError
+
+
+class FilterMultiStack(FrameFilter):
+    """Filter out all but one `StackID`"""
+
+    def __init__(self, keep_id=None):
+        self._keep_id = keep_id
+
+    def applies(self, dcm_wrp) -> bool:
+        first_fcs = dcm_wrp.frames[0].get('FrameContentSequence', (None,))[0]
+        if first_fcs is None or not hasattr(first_fcs, 'StackID'):
+            return False
+        stack_ids = {frame.FrameContentSequence[0].StackID for frame in dcm_wrp.frames}
+        if self._keep_id is not None:
+            if self._keep_id not in stack_ids:
+                raise WrapperError('Explicitly requested StackID not found')
+            self._selected = self._keep_id
+        if len(stack_ids) > 1:
+            if self._keep_id is None:
+                warnings.warn(
+                    'A multi-stack file was passed without an explicit filter, just using lowest StackID'
+                )
+                self._selected = sorted(stack_ids)[0]
+            return True
+        return False
+
+    def keep(self, frame) -> bool:
+        return frame.FrameContentSequence[0].StackID == self._selected
+
+
+class FilterDwiIso(FrameFilter):
+    """Filter out derived ISOTROPIC frames from DWI series"""
+
+    def applies(self, dcm_wrp) -> bool:
+        if not hasattr(dcm_wrp.frames[0], 'MRDiffusionSequence'):
+            return False
+        diff_dirs = {
+            f.MRDiffusionSequence[0].get('DiffusionDirectionality') for f in dcm_wrp.frames
+        }
+        if len(diff_dirs) > 1 and 'ISOTROPIC' in diff_dirs:
+            warnings.warn('Derived images found and removed')
+            return True
+        return False
+
+    def keep(self, frame) -> bool:
+        return frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC'
+
+
+DEFAULT_FRAME_FILTERS = (FilterMultiStack(), FilterDwiIso())
+
+
 class MultiframeWrapper(Wrapper):
     """Wrapper for Enhanced MR Storage SOP Class

@@ -436,17 +623,20 @@ class MultiframeWrapper(Wrapper):

     Methods
     -------
+    vendor(self)
+    frame_order(self)
     image_shape(self)
     image_orient_patient(self)
     voxel_sizes(self)
     image_position(self)
     series_signature(self)
+    scale_factors(self)
     get_data(self)
     """

     is_multiframe = True

-    def __init__(self, dcm_data):
+    def __init__(self, dcm_data, frame_filters=None):
         """Initializes MultiframeWrapper

         Parameters
         ----------
         dcm_data : object
            object should allow 'get' and '__getitem__' access.  Usually this
            will be a ``dicom.dataset.Dataset`` object resulting from reading a
-           DICOM file, but a dictionary should also work.
+           DICOM file.
+
+        frame_filters : Iterable of FrameFilter
+            defines which frames inside the dataset should be ignored. If None then
+            `dicomwrappers.DEFAULT_FRAME_FILTERS` will be used. 
""" Wrapper.__init__(self, dcm_data) - self.dcm_data = dcm_data self.frames = dcm_data.get('PerFrameFunctionalGroupsSequence') try: self.frames[0] @@ -467,8 +660,19 @@ def __init__(self, dcm_data): self.shared = dcm_data.get('SharedFunctionalGroupsSequence')[0] except TypeError: raise WrapperError('SharedFunctionalGroupsSequence is empty.') + # Apply frame filters one at a time in the order provided + if frame_filters is None: + frame_filters = DEFUALT_FRAME_FILTERS + frame_filters = [filt for filt in frame_filters if filt.applies(self)] + for filt in frame_filters: + self.frames = [f for f in self.frames if filt.keep(f)] + # Make sure there is only one StackID remaining + first_fcs = self.frames[0].get('FrameContentSequence', (None,))[0] + if first_fcs is not None and hasattr(first_fcs, 'StackID'): + if len({frame.FrameContentSequence[0].StackID for frame in self.frames}) > 1: + raise WrapperError('More than one StackID remains after filtering') # Try to determine slice order and minimal image position patient - self._frame_slc_ord = self._ipp = None + self._frame_slc_ord = self._ipp = self._slice_spacing = None try: frame_ipps = [f.PlanePositionSequence[0].ImagePositionPatient for f in self.frames] except AttributeError: @@ -485,8 +689,29 @@ def __init__(self, dcm_data): val: order for val, order in zip(uniq_slc_pos, np.argsort(uniq_slc_pos)) } self._frame_slc_ord = [pos_ord_map[pos] for pos in rnd_slc_pos] + if len(self._frame_slc_ord) > 1: + self._slice_spacing = ( + frame_slc_pos[self._frame_slc_ord[1]] - frame_slc_pos[self._frame_slc_ord[0]] + ) self._ipp = frame_ipps[np.argmin(frame_slc_pos)] - self._shape = None + self._frame_indices = None + + @cached_property + def vendor(self): + """The vendor of the instrument that produced the DICOM""" + vendor = super().vendor + if vendor is not None: + return vendor + vendor = vendor_from_private(self.shared) + if vendor is not None: + return vendor + return vendor_from_private(self.frames[0]) + + @cached_property + def frame_order(self): + if self._frame_indices is None: + _ = self.image_shape + return np.lexsort(self._frame_indices.T) @cached_property def image_shape(self): @@ -519,68 +744,20 @@ def image_shape(self): rows, cols = self.get('Rows'), self.get('Columns') if None in (rows, cols): raise WrapperError('Rows and/or Columns are empty.') - - # Check number of frames - first_frame = self.frames[0] - n_frames = self.get('NumberOfFrames') - # some Philips may have derived images appended - has_derived = False - if hasattr(first_frame, 'get') and first_frame.get([0x18, 0x9117]): - # DWI image may include derived isotropic, ADC or trace volume - try: - aniso_frames = pydicom.Sequence() - aniso_slc_ord = [] - for slc_ord, frame in zip(self._frame_slc_ord, self.frames): - if frame.MRDiffusionSequence[0].DiffusionDirectionality != 'ISOTROPIC': - aniso_frames.append(frame) - aniso_slc_ord.append(slc_ord) - # Image contains DWI volumes followed by derived images; remove derived images - if len(aniso_frames) != 0: - self.frames = aniso_frames - self._frame_slc_ord = aniso_slc_ord - except IndexError: - # Sequence tag is found but missing items! 
-                raise WrapperError('Diffusion file missing information')
-            except AttributeError:
-                # DiffusionDirectionality tag is not required
-                pass
-            else:
-                if n_frames != len(self.frames):
-                    warnings.warn('Derived images found and removed')
-                    n_frames = len(self.frames)
-                    has_derived = True
-
-        assert len(self.frames) == n_frames
-        frame_indices = np.array(
-            [frame.FrameContentSequence[0].DimensionIndexValues for frame in self.frames]
-        )
-        # Check that there is only one multiframe stack index
-        stack_ids = {frame.FrameContentSequence[0].StackID for frame in self.frames}
-        if len(stack_ids) > 1:
-            raise WrapperError(
-                'File contains more than one StackID. Cannot handle multi-stack files'
+        # Check number of frames, initialize array of frame indices
+        n_frames = len(self.frames)
+        try:
+            frame_indices = np.array(
+                [frame.FrameContentSequence[0].DimensionIndexValues for frame in self.frames]
             )
-        # Determine if one of the dimension indices refers to the stack id
-        dim_seq = [dim.DimensionIndexPointer for dim in self.get('DimensionIndexSequence')]
-        stackid_tag = pydicom.datadict.tag_for_keyword('StackID')
-        # remove the stack id axis if present
-        if stackid_tag in dim_seq:
-            stackid_dim_idx = dim_seq.index(stackid_tag)
-            frame_indices = np.delete(frame_indices, stackid_dim_idx, axis=1)
-            dim_seq.pop(stackid_dim_idx)
-        if has_derived:
-            # derived volume is included
-            derived_tag = pydicom.datadict.tag_for_keyword('DiffusionBValue')
-            if derived_tag not in dim_seq:
-                raise WrapperError('Missing information, cannot remove indices with confidence.')
-            derived_dim_idx = dim_seq.index(derived_tag)
-            frame_indices = np.delete(frame_indices, derived_dim_idx, axis=1)
-            dim_seq.pop(derived_dim_idx)
+        except AttributeError:
+            raise WrapperError("Can't find frame 'DimensionIndexValues'")
         # Determine the shape and which indices to use
         shape = [rows, cols]
         curr_parts = n_frames
         frames_per_part = 1
         del_indices = {}
+        dim_seq = [dim.DimensionIndexPointer for dim in self.get('DimensionIndexSequence')]
         stackpos_tag = pydicom.datadict.tag_for_keyword('InStackPositionNumber')
         slice_dim_idx = dim_seq.index(stackpos_tag)
         for row_idx, row in enumerate(frame_indices.T):
@@ -684,12 +861,15 @@ def voxel_sizes(self):
         except AttributeError:
             raise WrapperError('Not enough data for pixel spacing')
         pix_space = pix_measures.PixelSpacing
-        try:
-            zs = pix_measures.SliceThickness
-        except AttributeError:
-            zs = self.get('SpacingBetweenSlices')
-            if zs is None:
-                raise WrapperError('Not enough data for slice thickness')
+        if self._slice_spacing is not None:
+            zs = self._slice_spacing
+        else:
+            try:
+                zs = pix_measures.SliceThickness
+            except AttributeError:
+                zs = self.get('SpacingBetweenSlices')
+                if zs is None:
+                    raise WrapperError('Not enough data for slice thickness')
         # Ensure values are float rather than Decimal
         return tuple(map(float, list(pix_space) + [zs]))

@@ -710,27 +890,63 @@ def series_signature(self):
         signature['vox'] = (self.voxel_sizes, none_or_close)
         return signature

-    def get_data(self):
+    @cached_property
+    def scale_factors(self):
+        """Return `(N, 2)` array of slope/intercept pairs
+
+        If there is a single global scale factor then `N` will be one, otherwise it will
+        be the number of frames
+        """
+        # Look for shared / global RWV scale factor first
+        shared_scale = self._get_rwv_scale_factor(self.shared)
+        if shared_scale is not None:
+            return np.array([shared_scale])
+        shared_scale = self._get_rwv_scale_factor(self.dcm_data)
+        if shared_scale is not None:
+            return np.array([shared_scale])
+        # Try pulling out 
best scale factors from each individual frame + frame_scales = [self._get_best_scale_factor(f) for f in self.frames] + if any(s is not None for s in frame_scales): + if any(s is None for s in frame_scales): + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + frame_scales = [s if s is not None else (1, 0) for s in frame_scales] + if all(s == frame_scales[0] for s in frame_scales[1:]): + return np.array([frame_scales[0]]) + return np.array(frame_scales)[self.frame_order] + # Finally look for shared non-RWV scale factors + shared_scale = self._get_best_scale_factor(self.shared) + if shared_scale is not None: + return np.array([shared_scale]) + shared_scale = self._get_best_scale_factor(self.dcm_data) + if shared_scale is None: + if self.vendor == Vendor.PHILIPS: + warnings.warn( + 'Unable to find Philips private scale factor, cross-series comparisons may be invalid' + ) + shared_scale = (1, 0) + return np.array([shared_scale]) + + def get_unscaled_data(self): shape = self.image_shape if shape is None: raise WrapperError('No valid information for image shape') data = self.get_pixel_array() - # Roll frames axis to last + # Roll frames axis to last and reorder if len(data.shape) > 2: - data = data.transpose((1, 2, 0)) - # Sort frames with first index changing fastest, last slowest - sorted_indices = np.lexsort(self._frame_indices.T) - data = data[..., sorted_indices] - data = data.reshape(shape, order='F') - return self._scale_data(data) + data = data.transpose((1, 2, 0))[..., self.frame_order] + return data.reshape(shape, order='F') def _scale_data(self, data): - pix_trans = getattr(self.frames[0], 'PixelValueTransformationSequence', None) - if pix_trans is None: - return super()._scale_data(data) - scale = float(pix_trans[0].RescaleSlope) - offset = float(pix_trans[0].RescaleIntercept) - return self._apply_scale_offset(data, scale, offset) + scale_factors = self.scale_factors + if scale_factors.shape[0] == 1: + scale, offset = scale_factors[0] + return self._apply_scale_offset(data, scale, offset) + orig_shape = data.shape + data = data.reshape(data.shape[:2] + (len(self.frames),)) + return (data * scale_factors[:, 0] + scale_factors[:, 1]).reshape(orig_shape) class SiemensWrapper(Wrapper): @@ -757,7 +973,7 @@ def __init__(self, dcm_data, csa_header=None): object should allow 'get' and '__getitem__' access. If `csa_header` is None, it should also be possible to extract a CSA header from `dcm_data`. Usually this will be a ``dicom.dataset.Dataset`` object - resulting from reading a DICOM file. A dict should also work. + resulting from reading a DICOM file. csa_header : None or mapping, optional mapping giving values for Siemens CSA image sub-header. If None, we try and read the CSA information from `dcm_data`. 
@@ -773,6 +989,11 @@ def __init__(self, dcm_data, csa_header=None):
             csa_header = {}
         self.csa_header = csa_header

+    @cached_property
+    def vendor(self):
+        """The vendor of the instrument that produced the DICOM"""
+        return Vendor.SIEMENS
+
     @cached_property
     def slice_normal(self):
         # The std_slice_normal comes from the cross product of the directions
@@ -964,7 +1185,7 @@ def image_position(self):
         Q = np.fliplr(iop) * pix_spacing
         return ipp + np.dot(Q, vox_trans_fixes[:, None]).ravel()

-    def get_data(self):
+    def get_unscaled_data(self):
         """Get scaled image data from DICOMs

         Resorts data block from mosaic to 3D
@@ -1007,8 +1228,7 @@ def get_data(self):
         # pool mosaic-generated dims
         v3 = v4.reshape((n_slice_rows, n_slice_cols, n_blocks))
         # delete any padding slices
-        v3 = v3[..., :n_mosaic]
-        return self._scale_data(v3)
+        return v3[..., :n_mosaic]
diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index e01759c86..0556fc63c 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -1,7 +1,7 @@
 """Testing DICOM wrappers"""

 import gzip
-from copy import copy
+from copy import deepcopy
 from decimal import Decimal
 from hashlib import sha1
 from os.path import dirname
@@ -11,6 +11,7 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_array_almost_equal, assert_array_equal
+from pydicom.dataset import Dataset

 from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data
 from ...volumeutils import endian_codes
@@ -63,8 +64,8 @@ def test_wrappers():
     # test direct wrapper calls
     # first with empty or minimal data
     multi_minimal = {
-        'PerFrameFunctionalGroupsSequence': [None],
-        'SharedFunctionalGroupsSequence': [None],
+        'PerFrameFunctionalGroupsSequence': [Dataset()],
+        'SharedFunctionalGroupsSequence': [Dataset()],
     }
     for maker, args in (
         (didw.Wrapper, ({},)),
@@ -163,10 +164,10 @@ def test_wrapper_from_data():
     fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.1'
     with pytest.raises(didw.WrapperError):
         didw.wrapper_from_data(fake_data)
-    fake_data['PerFrameFunctionalGroupsSequence'] = [None]
+    fake_data['PerFrameFunctionalGroupsSequence'] = [Dataset()]
     with pytest.raises(didw.WrapperError):
         didw.wrapper_from_data(fake_data)
-    fake_data['SharedFunctionalGroupsSequence'] = [None]
+    fake_data['SharedFunctionalGroupsSequence'] = [Dataset()]
     # minimal set should now be met
     dw = didw.wrapper_from_data(fake_data)
     assert dw.is_multiframe
@@ -384,16 +385,17 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None):
         each element in list is obj.<seq_name>[0].<field_name> 
= value_seq[n] for n in range(N) """ - - class Fake: - pass - if frame_seq is None: - frame_seq = [Fake() for _ in range(len(value_seq))] + frame_seq = [Dataset() for _ in range(len(value_seq))] for value, fake_frame in zip(value_seq, frame_seq): - fake_element = Fake() + if value is None: + continue + if hasattr(fake_frame, seq_name): + fake_element = getattr(fake_frame, seq_name)[0] + else: + fake_element = Dataset() + setattr(fake_frame, seq_name, [fake_element]) setattr(fake_element, field_name, value) - setattr(fake_frame, seq_name, [fake_element]) return frame_seq @@ -434,27 +436,32 @@ def __repr__(self): attr_strs.append(f'{attr}={getattr(self, attr)}') return f"{self.__class__.__name__}({', '.join(attr_strs)})" - class DimIdxSeqElem(PrintBase): + class DimIdxSeqElem(Dataset): def __init__(self, dip=(0, 0), fgp=None): + super().__init__() self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem(PrintBase): + class FrmContSeqElem(Dataset): def __init__(self, div, sid): + super().__init__() self.DimensionIndexValues = div self.StackID = sid - class PlnPosSeqElem(PrintBase): + class PlnPosSeqElem(Dataset): def __init__(self, ipp): + super().__init__() self.ImagePositionPatient = ipp - class PlnOrientSeqElem(PrintBase): + class PlnOrientSeqElem(Dataset): def __init__(self, iop): + super().__init__() self.ImageOrientationPatient = iop - class PerFrmFuncGrpSeqElem(PrintBase): + class PerFrmFuncGrpSeqElem(Dataset): def __init__(self, div, sid, ipp, iop): + super().__init__() self.FrameContentSequence = [FrmContSeqElem(div, sid)] self.PlanePositionSequence = [PlnPosSeqElem(ipp)] self.PlaneOrientationSequence = [PlnOrientSeqElem(iop)] @@ -473,7 +480,7 @@ def __init__(self, div, sid, ipp, iop): frame_slc_indices = np.array(div_seq)[:, slice_dim] uniq_slc_indices = np.unique(frame_slc_indices) n_slices = len(uniq_slc_indices) - iop_seq = [(0.0, 1.0, 0.0, 1.0, 0.0, 0.0) for _ in range(num_of_frames)] + iop_seq = [[0.0, 1.0, 0.0, 1.0, 0.0, 0.0] for _ in range(num_of_frames)] if ipp_seq is None: slc_locs = np.linspace(-1.0, 1.0, n_slices) if flip_ipp_idx_corr: @@ -481,7 +488,7 @@ def __init__(self, div, sid, ipp, iop): slc_idx_loc = { div_idx: slc_locs[arr_idx] for arr_idx, div_idx in enumerate(np.sort(uniq_slc_indices)) } - ipp_seq = [(-1.0, -1.0, slc_idx_loc[idx]) for idx in frame_slc_indices] + ipp_seq = [[-1.0, -1.0, slc_idx_loc[idx]] for idx in frame_slc_indices] else: assert flip_ipp_idx_corr is False # caller can flip it themselves assert len(ipp_seq) == num_of_frames @@ -507,38 +514,37 @@ def __init__(self, div, sid, ipp, iop): } +class FakeDataset(Dataset): + pixel_array = None + + class TestMultiFrameWrapper(TestCase): # Test MultiframeWrapper - MINIMAL_MF = { - # Minimal contents of dcm_data for this wrapper - 'PerFrameFunctionalGroupsSequence': [None], - 'SharedFunctionalGroupsSequence': [None], - } + # Minimal contents of dcm_data for this wrapper + MINIMAL_MF = FakeDataset() + MINIMAL_MF.PerFrameFunctionalGroupsSequence = [Dataset()] + MINIMAL_MF.SharedFunctionalGroupsSequence = [Dataset()] WRAPCLASS = didw.MultiframeWrapper @dicom_test def test_shape(self): # Check the shape algorithm - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) # No rows, cols, raise WrapperError with pytest.raises(didw.WrapperError): dw.image_shape - fake_mf['Rows'] = 64 + fake_mf.Rows = 64 with pytest.raises(didw.WrapperError): dw.image_shape fake_mf.pop('Rows') - fake_mf['Columns'] = 64 + 
fake_mf.Columns = 64
         with pytest.raises(didw.WrapperError):
             dw.image_shape
-        fake_mf['Rows'] = 32
-        # Missing frame data, raise AssertionError
-        with pytest.raises(AssertionError):
-            dw.image_shape
-        fake_mf['NumberOfFrames'] = 4
-        # PerFrameFunctionalGroupsSequence does not match NumberOfFrames
-        with pytest.raises(AssertionError):
+        fake_mf.Rows = 32
+        # No frame data raises WrapperError
+        with pytest.raises(didw.WrapperError):
             dw.image_shape
         # check 2D shape with StackID index is 0
         div_seq = ((1, 1),)
@@ -556,11 +562,32 @@ def test_shape(self):
         div_seq = ((1, 1), (1, 2), (1, 3), (1, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64, 4)
-        # Check stack number matching when StackID index is 0
+        # Check for warning when implicitly dropping stacks
         div_seq = ((1, 1), (1, 2), (1, 3), (2, 4))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            assert MFW(fake_mf).image_shape == (32, 64, 3)
+        # No warning if we explicitly select that StackID to keep
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(1),)).image_shape == (32, 64, 3)
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(2),)).image_shape == (32, 64)
+        # Stack filtering is the same when StackID is not an index
+        div_seq = ((1,), (2,), (3,), (4,))
+        sid_seq = (1, 1, 1, 2)
+        fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq))
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            assert MFW(fake_mf).image_shape == (32, 64, 3)
+        # No warning if we explicitly select that StackID to keep
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(1),)).image_shape == (32, 64, 3)
+        assert MFW(fake_mf, frame_filters=(didw.FilterMultiStack(2),)).image_shape == (32, 64)
+        # Check for error when explicitly requested StackID is missing
         with pytest.raises(didw.WrapperError):
-            MFW(fake_mf).image_shape
+            MFW(fake_mf, frame_filters=(didw.FilterMultiStack(3),))
         # Make some fake frame data for 4D when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (1, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
@@ -568,8 +595,12 @@ def test_shape(self):
         # Check stack number matching for 4D when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2), (1, 1, 3), (2, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
-        with pytest.raises(didw.WrapperError):
-            MFW(fake_mf).image_shape
+        with pytest.warns(
+            UserWarning,
+            match='A multi-stack file was passed without an explicit filter, just using lowest StackID',
+        ):
+            with pytest.raises(didw.WrapperError):
+                MFW(fake_mf).image_shape
         # Check indices can be non-contiguous when StackID index is 0
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 3), (1, 2, 3))
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
@@ -579,7 +610,7 @@ def test_shape(self):
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         assert MFW(fake_mf).image_shape == (32, 64, 2, 2)
         # Check number of IPP vals match the number of slices or we raise
-        frames = fake_mf['PerFrameFunctionalGroupsSequence']
+        frames = fake_mf.PerFrameFunctionalGroupsSequence
         for frame in frames[1:]:
             frame.PlanePositionSequence = frames[0].PlanePositionSequence[:]
         with pytest.raises(didw.WrapperError):
@@ -594,12 +625,6 @@ def test_shape(self):
         sid_seq = (1, 1, 1, 1) 
fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) assert MFW(fake_mf).image_shape == (32, 64, 4) - # check 3D stack number matching when there is no StackID index - div_seq = ((1,), (2,), (3,), (4,)) - sid_seq = (1, 1, 1, 2) - fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape # check 4D shape when there is no StackID index div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 1) @@ -609,8 +634,12 @@ def test_shape(self): div_seq = ((1, 1), (2, 1), (1, 2), (2, 2), (1, 3), (2, 3)) sid_seq = (1, 1, 1, 1, 1, 2) fake_mf.update(fake_shape_dependents(div_seq, sid_seq=sid_seq)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape + with pytest.warns( + UserWarning, + match='A multi-stack file was passed without an explicit filter, just using lowest StackID', + ): + with pytest.raises(didw.WrapperError): + MFW(fake_mf).image_shape # check 3D shape when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 1), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) @@ -618,8 +647,11 @@ def test_shape(self): # Check stack number matching when StackID index is 1 div_seq = ((1, 1), (2, 1), (3, 2), (4, 1)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) - with pytest.raises(didw.WrapperError): - MFW(fake_mf).image_shape + with pytest.warns( + UserWarning, + match='A multi-stack file was passed without an explicit filter, just using lowest StackID', + ): + assert MFW(fake_mf).image_shape == (32, 64, 3) # Make some fake frame data for 4D when StackID index is 1 div_seq = ((1, 1, 1), (2, 1, 1), (1, 1, 2), (2, 1, 2), (1, 1, 3), (2, 1, 3)) fake_mf.update(fake_shape_dependents(div_seq, sid_dim=1)) @@ -689,7 +721,7 @@ def test_shape(self): def test_iop(self): # Test Image orient patient for multiframe - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): @@ -698,56 +730,56 @@ def test_iop(self): fake_frame = fake_frames( 'PlaneOrientationSequence', 'ImageOrientationPatient', [[0, 1, 0, 1, 0, 0]] )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_orient_patient - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) def test_voxel_sizes(self): # Test voxel size calculation - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): dw.voxel_sizes # Make a fake frame fake_frame = fake_frames('PixelMeasuresSequence', 'PixelSpacing', [[2.1, 3.2]])[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] # Still not enough, we lack information for slice distances with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # This can come from SpacingBetweenSlices or frame SliceThickness - fake_mf['SpacingBetweenSlices'] = 4.3 + fake_mf.SpacingBetweenSlices = 4.3 assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 4.3]) # If both, prefer SliceThickness 
fake_frame.PixelMeasuresSequence[0].SliceThickness = 5.4 assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Just SliceThickness is OK - del fake_mf['SpacingBetweenSlices'] + del fake_mf.SpacingBetweenSlices assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Removing shared leads to error again - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # Restoring to frames makes it work again - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] + fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Decimals in any field are OK fake_frame = fake_frames( 'PixelMeasuresSequence', 'PixelSpacing', [[Decimal('2.1'), Decimal('3.2')]] )[0] - fake_mf['SharedFunctionalGroupsSequence'] = [fake_frame] - fake_mf['SpacingBetweenSlices'] = Decimal('4.3') + fake_mf.SharedFunctionalGroupsSequence = [fake_frame] + fake_mf.SpacingBetweenSlices = Decimal('4.3') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 4.3]) fake_frame.PixelMeasuresSequence[0].SliceThickness = Decimal('5.4') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) def test_image_position(self): # Test image_position property for multiframe - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) with pytest.raises(didw.WrapperError): @@ -758,12 +790,12 @@ def test_image_position(self): frames = fake_frames( 'PlanePositionSequence', 'ImagePositionPatient', [[-2.0, 3.0, 7]], frames ) - fake_mf['SharedFunctionalGroupsSequence'] = frames + fake_mf.SharedFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) - fake_mf['SharedFunctionalGroupsSequence'] = [None] + fake_mf.SharedFunctionalGroupsSequence = [Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position - fake_mf['PerFrameFunctionalGroupsSequence'] = frames + fake_mf.PerFrameFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) # Check lists of Decimals work frames[0].PlanePositionSequence[0].ImagePositionPatient = [ @@ -775,7 +807,7 @@ def test_image_position(self): frames = fake_frames('PlaneOrientationSequence', 'ImageOrientationPatient', [iop] * 2) ipps = [[-2.0, 3.0, 7], [-2.0, 3.0, 6]] frames = fake_frames('PlanePositionSequence', 'ImagePositionPatient', ipps, frames) - fake_mf['PerFrameFunctionalGroupsSequence'] = frames + fake_mf.PerFrameFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 6]) @dicom_test @@ -809,9 +841,9 @@ def test_slicethickness_fallback(self): def test_data_derived_shape(self): # Test 4D diffusion data with an additional trace volume included # Excludes the trace volume and generates the correct shape - dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) with pytest.warns(UserWarning, match='Derived images found and removed'): - assert dw.image_shape == (96, 96, 60, 33) + dw = didw.wrapper_from_file(DATA_FILE_4D_DERIVED) + assert dw.image_shape == (96, 96, 60, 33) @dicom_test @needs_nibabel_data('dcm_qa_xa30') @@ -831,7 +863,7 @@ def test_data_unreadable_private_headers(self): @dicom_test def test_data_fake(self): # Test algorithm for get_data - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) MFW = self.WRAPCLASS dw = MFW(fake_mf) # Fails - no shape @@ -843,8 +875,8 @@ def test_data_fake(self): with 
pytest.raises(didw.WrapperError): dw.get_data() # Make shape and indices - fake_mf['Rows'] = 2 - fake_mf['Columns'] = 3 + fake_mf.Rows = 2 + fake_mf.Columns = 3 dim_idxs = ((1, 1), (1, 2), (1, 3), (1, 4)) fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0)) assert MFW(fake_mf).image_shape == (2, 3, 4) @@ -854,24 +886,24 @@ def test_data_fake(self): # Add data - 3D data = np.arange(24).reshape((2, 3, 4)) # Frames dim is first for some reason - fake_mf['pixel_array'] = np.rollaxis(data, 2) + object.__setattr__(fake_mf, 'pixel_array', np.rollaxis(data, 2)) # Now it should work dw = MFW(fake_mf) assert_array_equal(dw.get_data(), data) # Test scaling works - fake_mf['RescaleSlope'] = 2.0 - fake_mf['RescaleIntercept'] = -1 + fake_mf.RescaleSlope = 2.0 + fake_mf.RescaleIntercept = -1 assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # Check slice sorting dim_idxs = ((1, 4), (1, 2), (1, 3), (1, 1)) fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0)) sorted_data = data[..., [3, 1, 2, 0]] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # Check slice sorting with negative index / IPP correlation fake_mf.update(fake_shape_dependents(dim_idxs, sid_dim=0, flip_ipp_idx_corr=True)) sorted_data = data[..., [0, 2, 1, 3]] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) # 5D! dim_idxs = [ @@ -898,28 +930,167 @@ def test_data_fake(self): sorted_data = data.reshape(shape[:2] + (-1,), order='F') order = [11, 9, 10, 8, 3, 1, 2, 0, 15, 13, 14, 12, 7, 5, 6, 4] sorted_data = sorted_data[..., np.argsort(order)] - fake_mf['pixel_array'] = np.rollaxis(sorted_data, 2) + fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) - def test__scale_data(self): + def test_scale_data(self): # Test data scaling - fake_mf = copy(self.MINIMAL_MF) + fake_mf = deepcopy(self.MINIMAL_MF) + fake_mf.Rows = 2 + fake_mf.Columns = 3 + fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] MFW = self.WRAPCLASS - dw = MFW(fake_mf) - data = np.arange(24).reshape((2, 3, 4)) - assert_array_equal(data, dw._scale_data(data)) - fake_mf['RescaleSlope'] = 2.0 - fake_mf['RescaleIntercept'] = -1.0 - assert_array_equal(data * 2 - 1, dw._scale_data(data)) - fake_frame = fake_frames('PixelValueTransformationSequence', 'RescaleSlope', [3.0])[0] - fake_mf['PerFrameFunctionalGroupsSequence'] = [fake_frame] - # Lacking RescaleIntercept -> Error - dw = MFW(fake_mf) - with pytest.raises(AttributeError): - dw._scale_data(data) - fake_frame.PixelValueTransformationSequence[0].RescaleIntercept = -2 - assert_array_equal(data * 3 - 2, dw._scale_data(data)) + data = np.arange(24).reshape((2, 3, 4), order='F') + assert_array_equal(data, MFW(fake_mf)._scale_data(data)) + # Test legacy top-level slope/intercept + fake_mf.RescaleSlope = 2.0 + fake_mf.RescaleIntercept = -1.0 + assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) + # RealWorldValueMapping takes precedence, but only with defined units + fake_mf.RealWorldValueMappingSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 + fake_mf.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 + assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = 
[Dataset()]
+        fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+        assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data))
+        fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[
+            0
+        ].CodeMeaning = 'no units'
+        assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data))
+        # Possible to have more than one RealWorldValueMapping, use first one with defined units
+        fake_mf.RealWorldValueMappingSequence.append(Dataset())
+        fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueSlope = 15.0
+        fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueIntercept = -3.0
+        fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [Dataset()]
+        fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+        assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data))
+        # A global RWV scale takes precedence over per-frame PixelValueTransformation
+        div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        frames = fake_frames(
+            'PixelValueTransformationSequence',
+            'RescaleSlope',
+            [3.0, 3.0, 3.0, 3.0],
+            fake_mf.PerFrameFunctionalGroupsSequence,
+        )
+        assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data))
+        # The per-frame PixelValueTransformation takes precedence over plain top-level slope / intercept
+        delattr(fake_mf, 'RealWorldValueMappingSequence')
+        assert_array_equal(data * 3, MFW(fake_mf)._scale_data(data))
+        for frame in frames:
+            frame.PixelValueTransformationSequence[0].RescaleIntercept = -2
+        assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data))
         # Decimals are OK
-        fake_frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal('3')
-        fake_frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal('-2')
-        assert_array_equal(data * 3 - 2, dw._scale_data(data))
+        for frame in frames:
+            frame.PixelValueTransformationSequence[0].RescaleSlope = Decimal('3')
+            frame.PixelValueTransformationSequence[0].RescaleIntercept = Decimal('-2')
+        assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data))
+        # A per-frame RWV scaling takes precedence over per-frame PixelValueTransformation
+        for frame in frames:
+            frame.RealWorldValueMappingSequence = [Dataset()]
+            frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0
+            frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0
+            frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()]
+            frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[
+                0
+            ].CodeMeaning = '%'
+        assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data))
+        # Test varying per-frame scale factors
+        for frame_idx, frame in enumerate(frames):
+            frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 2 * (frame_idx + 1)
+            frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -1 * (frame_idx + 1)
+        assert_array_equal(
+            data * np.array([2, 4, 6, 8]) + np.array([-1, -2, -3, -4]),
+            MFW(fake_mf)._scale_data(data),
+        )
+
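Read together, the assertions above pin down an ordering among the scale factors a multiframe file can carry. The sketch below is an illustrative reduction of that ordering, not nibabel's `_scale_data`: the `frame_scale_factors` name is invented, the treatment of missing attributes is an assumption, and the relative priority of per-frame versus shared RWV mappings is not distinguished by these tests.

```python
import pydicom


def frame_scale_factors(dcm, frame):
    """Choose (slope, intercept) in the precedence order the tests exercise."""

    def rwv_with_units(owner):
        # Only RealWorldValueMappings with meaningfully defined units count
        for rwv in getattr(owner, 'RealWorldValueMappingSequence', []):
            units = getattr(rwv, 'MeasurementUnitsCodeSequence', None)
            if units and units[0].CodeMeaning not in ('', 'no units'):
                return float(rwv.RealWorldValueSlope), float(rwv.RealWorldValueIntercept)
        return None

    # A per-frame RWV mapping with units, then a shared/global one
    for owner in (frame, dcm):
        scale = rwv_with_units(owner)
        if scale is not None:
            return scale
    # ... then the per-frame PixelValueTransformation
    pvt = getattr(frame, 'PixelValueTransformationSequence', None)
    if pvt:
        return float(pvt[0].RescaleSlope), float(pvt[0].RescaleIntercept)
    # ... and finally the legacy top-level attributes, defaulting to identity
    return float(getattr(dcm, 'RescaleSlope', 1.0)), float(getattr(dcm, 'RescaleIntercept', 0.0))


# Tiny usage check: only top-level factors are present here
top, frame = pydicom.Dataset(), pydicom.Dataset()
top.RescaleSlope = 2.0
top.RescaleIntercept = -1.0
print(frame_scale_factors(top, frame))  # -> (2.0, -1.0)
```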
+    def test_philips_scale_data(self):
+        fake_mf = deepcopy(self.MINIMAL_MF)
+        fake_mf.Manufacturer = 'Philips'
+        fake_mf.Rows = 2
+        fake_mf.Columns = 3
+        fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)]
+        MFW = self.WRAPCLASS
+        data = np.arange(24).reshape((2, 3, 4), order='F')
+        # Unlike other manufacturers, public scale factors from Philips without defined
+        # units should not be used. In lieu of this the private scale factor should be
+        # used, which should always be available (modulo deidentification). If we can't
+        # find any of these scale factors a warning is issued.
+        with pytest.warns(
+            UserWarning,
+            match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+        ):
+            assert_array_equal(data, MFW(fake_mf)._scale_data(data))
+        fake_mf.RescaleSlope = 2.0
+        fake_mf.RescaleIntercept = -1.0
+        for rescale_type in (None, '', 'US', 'normalized'):
+            if rescale_type is not None:
+                fake_mf.RescaleType = rescale_type
+            with pytest.warns(
+                UserWarning,
+                match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+            ):
+                assert_array_equal(data, MFW(fake_mf)._scale_data(data))
+        # Falling back to private scaling doesn't generate an error
+        priv_block = fake_mf.private_block(0x2005, 'Philips MR Imaging DD 001', create=True)
+        priv_block.add_new(0xE, 'FL', 3.0)
+        assert_array_equal(data * 3.0, MFW(fake_mf)._scale_data(data))
+        # If the units are defined they take precedence over private scaling
+        fake_mf.RescaleType = 'mrad'
+        assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data))
+        # A RWV scale factor with defined units takes precedence
+        shared = Dataset()
+        fake_mf.SharedFunctionalGroupsSequence = [shared]
+        rwv_map = Dataset()
+        rwv_map.RealWorldValueSlope = 10.0
+        rwv_map.RealWorldValueIntercept = -5.0
+        rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+        rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+        shared.RealWorldValueMappingSequence = [rwv_map]
+        assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data))
+        # Get rid of valid top-level scale factors, test per-frame scale factors
+        delattr(shared, 'RealWorldValueMappingSequence')
+        delattr(fake_mf, 'RescaleType')
+        del fake_mf[priv_block.get_tag(0xE)]
+        div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2))
+        fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
+        # Simplest case is all frames having the same (valid) scale factor
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence:
+            pix_trans = Dataset()
+            pix_trans.RescaleSlope = 2.5
+            pix_trans.RescaleIntercept = -4
+            pix_trans.RescaleType = 'mrad'
+            frame.PixelValueTransformationSequence = [pix_trans]
+        assert_array_equal(data * 2.5 - 4, MFW(fake_mf)._scale_data(data))
+        # If some frames are missing valid scale factors we should get a warning
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence[2:]:
+            delattr(frame.PixelValueTransformationSequence[0], 'RescaleType')
+        with pytest.warns(
+            UserWarning,
+            match='Unable to find Philips private scale factor, cross-series comparisons may be invalid',
+        ):
+            assert_array_equal(
+                data * np.array([2.5, 2.5, 1, 1]) + np.array([-4, -4, 0, 0]),
+                MFW(fake_mf)._scale_data(data),
+            )
+        # We can fall back to the private scale factor on a frame-by-frame basis
+        for frame in fake_mf.PerFrameFunctionalGroupsSequence:
+            priv_block = frame.private_block(0x2005, 'Philips MR Imaging DD 001', create=True)
+            priv_block.add_new(0xE, 'FL', 7.0)
+        assert_array_equal(
+            data * np.array([2.5, 2.5, 7, 7]) + np.array([-4, -4, 0, 0]),
+            MFW(fake_mf)._scale_data(data),
+        )
+        # Again RWV scale factors take precedence
+        for frame_idx, frame in enumerate(fake_mf.PerFrameFunctionalGroupsSequence):
+            rwv_map = Dataset()
+            rwv_map.RealWorldValueSlope = 14.0 - frame_idx
+            rwv_map.RealWorldValueIntercept = 5.0
+            rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+            rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
+            frame.RealWorldValueMappingSequence = [rwv_map]
+        assert_array_equal(
+            data * np.array([14, 13, 12, 11]) + np.array([5, 5, 5, 5]),
+            MFW(fake_mf)._scale_data(data),
+        )
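The private scale factor exercised above lives at offset 0x0E in the 'Philips MR Imaging DD 001' private block of group 0x2005. pydicom's `Dataset.private_block` handles the private-creator indirection, so tests (and readers) never hard-code the reserved block number. A small sketch of writing and defensively reading such an element (the values here are made up):

```python
import pydicom

ds = pydicom.Dataset()

# Writing: create=True reserves a block for this private creator if needed
block = ds.private_block(0x2005, 'Philips MR Imaging DD 001', create=True)
block.add_new(0x0E, 'FL', 3.0)  # 'FL' is the 32-bit float VR

# Reading: with create=False (the default), a missing private creator raises
# KeyError, as does a missing element offset within the block
try:
    block = ds.private_block(0x2005, 'Philips MR Imaging DD 001')
    slope = block[0x0E].value
except KeyError:
    slope = None  # deidentified or non-Philips data

print(slope)  # -> 3.0
```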
diff --git a/nibabel/nicom/utils.py b/nibabel/nicom/utils.py
index 24f4afc2f..2c01c9d16 100644
--- a/nibabel/nicom/utils.py
+++ b/nibabel/nicom/utils.py
@@ -1,5 +1,7 @@
 """Utilities for working with DICOM datasets"""
 
+from enum import Enum
+
 
 def find_private_section(dcm_data, group_no, creator):
     """Return start element in group `group_no` given creator name `creator`
@@ -45,3 +47,53 @@ def find_private_section(dcm_data, group_no, creator):
         if match_func(val):
             return elno * 0x100
     return None
+
+
+class Vendor(Enum):
+    SIEMENS = 1
+    GE = 2
+    PHILIPS = 3
+
+
+vendor_priv_sections = {
+    Vendor.SIEMENS: [
+        (0x9, 'SIEMENS SYNGO INDEX SERVICE'),
+        (0x19, 'SIEMENS MR HEADER'),
+        (0x21, 'SIEMENS MR SDR 01'),
+        (0x21, 'SIEMENS MR SDS 01'),
+        (0x21, 'SIEMENS MR SDI 02'),
+        (0x29, 'SIEMENS CSA HEADER'),
+        (0x29, 'SIEMENS MEDCOM HEADER2'),
+        (0x51, 'SIEMENS MR HEADER'),
+    ],
+    Vendor.PHILIPS: [
+        (0x2001, 'Philips Imaging DD 001'),
+        (0x2001, 'Philips Imaging DD 002'),
+        (0x2001, 'Philips Imaging DD 129'),
+        (0x2005, 'Philips MR Imaging DD 001'),
+        (0x2005, 'Philips MR Imaging DD 002'),
+        (0x2005, 'Philips MR Imaging DD 003'),
+        (0x2005, 'Philips MR Imaging DD 004'),
+        (0x2005, 'Philips MR Imaging DD 005'),
+        (0x2005, 'Philips MR Imaging DD 006'),
+        (0x2005, 'Philips MR Imaging DD 007'),
+    ],
+    Vendor.GE: [
+        (0x9, 'GEMS_IDEN_01'),
+        (0x19, 'GEMS_ACQU_01'),
+        (0x21, 'GEMS_RELA_01'),
+        (0x23, 'GEMS_STDY_01'),
+        (0x25, 'GEMS_SERS_01'),
+        (0x27, 'GEMS_IMAG_01'),
+        (0x29, 'GEMS_IMPS_01'),
+        (0x43, 'GEMS_PARM_01'),
+    ],
+}
+
+
+def vendor_from_private(dcm_data):
+    """Try to determine the vendor by looking for specific private tags"""
+    for vendor, priv_sections in vendor_priv_sections.items():
+        for priv_group, priv_creator in priv_sections:
+            if find_private_section(dcm_data, priv_group, priv_creator) is not None:
+                return vendor

From f0264abbb295e063ea8b66be36d56319a30b2ecb Mon Sep 17 00:00:00 2001
From: Brendan Moloney
Date: Mon, 12 Aug 2024 17:14:04 -0700
Subject: [PATCH 34/34] TST: Don't assume pydicom installed in
 test_dicomwrappers

---
 nibabel/nicom/tests/test_dicomwrappers.py | 84 +++++++++++++----------
 1 file changed, 48 insertions(+), 36 deletions(-)

diff --git a/nibabel/nicom/tests/test_dicomwrappers.py b/nibabel/nicom/tests/test_dicomwrappers.py
index 0556fc63c..55c27df50 100755
--- a/nibabel/nicom/tests/test_dicomwrappers.py
+++ b/nibabel/nicom/tests/test_dicomwrappers.py
@@ -11,7 +11,6 @@
 import numpy as np
 import pytest
 from numpy.testing import assert_array_almost_equal, assert_array_equal
-from pydicom.dataset import Dataset
 
 from ...tests.nibabel_data import get_nibabel_data, needs_nibabel_data
 from ...volumeutils import endian_codes
@@ -64,8 +63,8 @@ def test_wrappers():
     # test direct wrapper calls
     # first with empty or minimal data
     multi_minimal = {
-        'PerFrameFunctionalGroupsSequence': [Dataset()],
-        'SharedFunctionalGroupsSequence': [Dataset()],
+        'PerFrameFunctionalGroupsSequence': [pydicom.Dataset()],
+        'SharedFunctionalGroupsSequence': [pydicom.Dataset()],
     }
     for maker, args in (
         (didw.Wrapper, ({},)),
@@ -164,10 +163,10 @@ def test_wrapper_from_data():
     fake_data['SOPClassUID'] = '1.2.840.10008.5.1.4.1.1.4.1'
     with pytest.raises(didw.WrapperError):
         didw.wrapper_from_data(fake_data)
-    fake_data['PerFrameFunctionalGroupsSequence'] = [Dataset()]
+
fake_data['PerFrameFunctionalGroupsSequence'] = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): didw.wrapper_from_data(fake_data) - fake_data['SharedFunctionalGroupsSequence'] = [Dataset()] + fake_data['SharedFunctionalGroupsSequence'] = [pydicom.Dataset()] # minimal set should now be met dw = didw.wrapper_from_data(fake_data) assert dw.is_multiframe @@ -386,14 +385,14 @@ def fake_frames(seq_name, field_name, value_seq, frame_seq=None): value_seq[n] for n in range(N) """ if frame_seq is None: - frame_seq = [Dataset() for _ in range(len(value_seq))] + frame_seq = [pydicom.Dataset() for _ in range(len(value_seq))] for value, fake_frame in zip(value_seq, frame_seq): if value is None: continue if hasattr(fake_frame, seq_name): fake_element = getattr(fake_frame, seq_name)[0] else: - fake_element = Dataset() + fake_element = pydicom.Dataset() setattr(fake_frame, seq_name, [fake_element]) setattr(fake_element, field_name, value) return frame_seq @@ -436,30 +435,30 @@ def __repr__(self): attr_strs.append(f'{attr}={getattr(self, attr)}') return f"{self.__class__.__name__}({', '.join(attr_strs)})" - class DimIdxSeqElem(Dataset): + class DimIdxSeqElem(pydicom.Dataset): def __init__(self, dip=(0, 0), fgp=None): super().__init__() self.DimensionIndexPointer = dip if fgp is not None: self.FunctionalGroupPointer = fgp - class FrmContSeqElem(Dataset): + class FrmContSeqElem(pydicom.Dataset): def __init__(self, div, sid): super().__init__() self.DimensionIndexValues = div self.StackID = sid - class PlnPosSeqElem(Dataset): + class PlnPosSeqElem(pydicom.Dataset): def __init__(self, ipp): super().__init__() self.ImagePositionPatient = ipp - class PlnOrientSeqElem(Dataset): + class PlnOrientSeqElem(pydicom.Dataset): def __init__(self, iop): super().__init__() self.ImageOrientationPatient = iop - class PerFrmFuncGrpSeqElem(Dataset): + class PerFrmFuncGrpSeqElem(pydicom.Dataset): def __init__(self, div, sid, ipp, iop): super().__init__() self.FrameContentSequence = [FrmContSeqElem(div, sid)] @@ -514,17 +513,21 @@ def __init__(self, div, sid, ipp, iop): } -class FakeDataset(Dataset): - pixel_array = None +if have_dicom: + + class FakeDataset(pydicom.Dataset): + pixel_array = None class TestMultiFrameWrapper(TestCase): # Test MultiframeWrapper - # Minimal contents of dcm_data for this wrapper - MINIMAL_MF = FakeDataset() - MINIMAL_MF.PerFrameFunctionalGroupsSequence = [Dataset()] - MINIMAL_MF.SharedFunctionalGroupsSequence = [Dataset()] - WRAPCLASS = didw.MultiframeWrapper + + if have_dicom: + # Minimal contents of dcm_data for this wrapper + MINIMAL_MF = FakeDataset() + MINIMAL_MF.PerFrameFunctionalGroupsSequence = [pydicom.Dataset()] + MINIMAL_MF.SharedFunctionalGroupsSequence = [pydicom.Dataset()] + WRAPCLASS = didw.MultiframeWrapper @dicom_test def test_shape(self): @@ -719,6 +722,7 @@ def test_shape(self): fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0)) assert MFW(fake_mf).image_shape == (32, 64, 3) + @dicom_test def test_iop(self): # Test Image orient patient for multiframe fake_mf = deepcopy(self.MINIMAL_MF) @@ -732,12 +736,13 @@ def test_iop(self): )[0] fake_mf.SharedFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 1], [1, 0], [0, 0]]) - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_orient_patient fake_mf.PerFrameFunctionalGroupsSequence = [fake_frame] assert_array_equal(MFW(fake_mf).image_orient_patient, [[0, 
1], [1, 0], [0, 0]]) + @dicom_test def test_voxel_sizes(self): # Test voxel size calculation fake_mf = deepcopy(self.MINIMAL_MF) @@ -761,7 +766,7 @@ def test_voxel_sizes(self): del fake_mf.SpacingBetweenSlices assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) # Removing shared leads to error again - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).voxel_sizes # Restoring to frames makes it work again @@ -777,6 +782,7 @@ def test_voxel_sizes(self): fake_frame.PixelMeasuresSequence[0].SliceThickness = Decimal('5.4') assert_array_equal(MFW(fake_mf).voxel_sizes, [2.1, 3.2, 5.4]) + @dicom_test def test_image_position(self): # Test image_position property for multiframe fake_mf = deepcopy(self.MINIMAL_MF) @@ -792,7 +798,7 @@ def test_image_position(self): ) fake_mf.SharedFunctionalGroupsSequence = frames assert_array_equal(MFW(fake_mf).image_position, [-2, 3, 7]) - fake_mf.SharedFunctionalGroupsSequence = [Dataset()] + fake_mf.SharedFunctionalGroupsSequence = [pydicom.Dataset()] with pytest.raises(didw.WrapperError): MFW(fake_mf).image_position fake_mf.PerFrameFunctionalGroupsSequence = frames @@ -933,12 +939,13 @@ def test_data_fake(self): fake_mf.pixel_array = np.rollaxis(sorted_data, 2) assert_array_equal(MFW(fake_mf).get_data(), data * 2.0 - 1) + @dicom_test def test_scale_data(self): # Test data scaling fake_mf = deepcopy(self.MINIMAL_MF) fake_mf.Rows = 2 fake_mf.Columns = 3 - fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)] + fake_mf.PerFrameFunctionalGroupsSequence = [pydicom.Dataset() for _ in range(4)] MFW = self.WRAPCLASS data = np.arange(24).reshape((2, 3, 4), order='F') assert_array_equal(data, MFW(fake_mf)._scale_data(data)) @@ -947,11 +954,11 @@ def test_scale_data(self): fake_mf.RescaleIntercept = -1.0 assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) # RealWorldValueMapping takes precedence, but only with defined units - fake_mf.RealWorldValueMappingSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence = [pydicom.Dataset()] fake_mf.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0 fake_mf.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0 assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) - fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [pydicom.Dataset()] fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data)) fake_mf.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[ @@ -959,10 +966,12 @@ def test_scale_data(self): ].CodeMeaning = 'no units' assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data)) # Possible to have more than one RealWorldValueMapping, use first one with defined units - fake_mf.RealWorldValueMappingSequence.append(Dataset()) + fake_mf.RealWorldValueMappingSequence.append(pydicom.Dataset()) fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueSlope = 15.0 fake_mf.RealWorldValueMappingSequence[-1].RealWorldValueIntercept = -3.0 - fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [Dataset()] + fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence = [ + pydicom.Dataset() + ] fake_mf.RealWorldValueMappingSequence[-1].MeasurementUnitsCodeSequence[0].CodeMeaning = '%' 
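The `pydicom.Dataset` spellings throughout this patch work because the module is bound through nibabel's optional-import machinery rather than a top-level `import pydicom`, which is what lets the test module import cleanly when pydicom is absent. A sketch of that pattern (the `FakeDataset` guard mirrors the hunks above; the skip decorator is written from memory of nibabel's `optional_package` helper and may differ in detail):

```python
import unittest

from nibabel.optpkg import optional_package

# When pydicom is missing, `pydicom` is a tripwire object that only raises
# on attribute access, so importing this module still succeeds
pydicom, have_dicom, setup_module = optional_package('pydicom')

dicom_test = unittest.skipUnless(have_dicom, 'could not import pydicom')

if have_dicom:
    # Subclassing pydicom.Dataset touches the module at class-creation time,
    # so the class definition must sit behind the have_dicom guard
    class FakeDataset(pydicom.Dataset):
        pixel_array = None


@dicom_test
def test_needs_pydicom():
    assert isinstance(FakeDataset(), pydicom.Dataset)
```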
assert_array_equal(data * 15 - 3, MFW(fake_mf)._scale_data(data))
         # A global RWV scale takes precedence over per-frame PixelValueTransformation
         div_seq = ((1, 1, 1), (1, 2, 1), (1, 1, 2), (1, 2, 2))
@@ -988,10 +997,12 @@ def test_scale_data(self):
         assert_array_equal(data * 3 - 2, MFW(fake_mf)._scale_data(data))
         # A per-frame RWV scaling takes precedence over per-frame PixelValueTransformation
         for frame in frames:
-            frame.RealWorldValueMappingSequence = [Dataset()]
+            frame.RealWorldValueMappingSequence = [pydicom.Dataset()]
             frame.RealWorldValueMappingSequence[0].RealWorldValueSlope = 10.0
             frame.RealWorldValueMappingSequence[0].RealWorldValueIntercept = -5.0
-            frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [Dataset()]
+            frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence = [
+                pydicom.Dataset()
+            ]
             frame.RealWorldValueMappingSequence[0].MeasurementUnitsCodeSequence[
                 0
             ].CodeMeaning = '%'
@@ -1005,12 +1016,13 @@ def test_scale_data(self):
             MFW(fake_mf)._scale_data(data),
         )
 
+    @dicom_test
     def test_philips_scale_data(self):
         fake_mf = deepcopy(self.MINIMAL_MF)
         fake_mf.Manufacturer = 'Philips'
         fake_mf.Rows = 2
         fake_mf.Columns = 3
-        fake_mf.PerFrameFunctionalGroupsSequence = [Dataset() for _ in range(4)]
+        fake_mf.PerFrameFunctionalGroupsSequence = [pydicom.Dataset() for _ in range(4)]
         MFW = self.WRAPCLASS
         data = np.arange(24).reshape((2, 3, 4), order='F')
         # Unlike other manufacturers, public scale factors from Philips without defined
@@ -1040,12 +1052,12 @@ def test_philips_scale_data(self):
         fake_mf.RescaleType = 'mrad'
         assert_array_equal(data * 2 - 1, MFW(fake_mf)._scale_data(data))
         # A RWV scale factor with defined units takes precedence
-        shared = Dataset()
+        shared = pydicom.Dataset()
         fake_mf.SharedFunctionalGroupsSequence = [shared]
-        rwv_map = Dataset()
+        rwv_map = pydicom.Dataset()
         rwv_map.RealWorldValueSlope = 10.0
         rwv_map.RealWorldValueIntercept = -5.0
-        rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+        rwv_map.MeasurementUnitsCodeSequence = [pydicom.Dataset()]
         rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
         shared.RealWorldValueMappingSequence = [rwv_map]
         assert_array_equal(data * 10 - 5, MFW(fake_mf)._scale_data(data))
@@ -1057,7 +1069,7 @@ def test_philips_scale_data(self):
         fake_mf.update(fake_shape_dependents(div_seq, sid_dim=0))
         # Simplest case is all frames having the same (valid) scale factor
         for frame in fake_mf.PerFrameFunctionalGroupsSequence:
-            pix_trans = Dataset()
+            pix_trans = pydicom.Dataset()
             pix_trans.RescaleSlope = 2.5
             pix_trans.RescaleIntercept = -4
             pix_trans.RescaleType = 'mrad'
@@ -1084,10 +1096,10 @@ def test_philips_scale_data(self):
         )
         # Again RWV scale factors take precedence
         for frame_idx, frame in enumerate(fake_mf.PerFrameFunctionalGroupsSequence):
-            rwv_map = Dataset()
+            rwv_map = pydicom.Dataset()
             rwv_map.RealWorldValueSlope = 14.0 - frame_idx
             rwv_map.RealWorldValueIntercept = 5.0
-            rwv_map.MeasurementUnitsCodeSequence = [Dataset()]
+            rwv_map.MeasurementUnitsCodeSequence = [pydicom.Dataset()]
             rwv_map.MeasurementUnitsCodeSequence[0].CodeMeaning = '%'
             frame.RealWorldValueMappingSequence = [rwv_map]
         assert_array_equal(