From 636d8017dd79c6129afb766e6b025ed1185f93c0 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 2 Sep 2020 15:14:35 +0200 Subject: [PATCH 001/103] Initial commit --- .gitignore | 129 +++++++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE | 29 ++++++++++++ README.md | 1 + 3 files changed, 159 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..b6e47617 --- /dev/null +++ b/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. 
+#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..563a14d7 --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2020, BrainGlobe +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md new file mode 100644 index 00000000..5d156d9e --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# bg-atlas \ No newline at end of file From f06c2d590208b022def4b4a8870c5008a4f11164 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 2 Sep 2020 15:16:24 +0200 Subject: [PATCH 002/103] Update README.md --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5d156d9e..bac9c95c 100644 --- a/README.md +++ b/README.md @@ -1 +1,3 @@ -# bg-atlas \ No newline at end of file +# BG-AtlasGen + +Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi` module. 
From c42e115a42e0e2c0c6f4c4fdf69d7a1210aaca9d Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 2 Sep 2020 16:54:25 +0200 Subject: [PATCH 003/103] First commit --- .idea/.gitignore | 3 + .idea/bg-atlasgen.iml | 8 + .../inspectionProfiles/profiles_settings.xml | 6 + .idea/misc.xml | 4 + .idea/modules.xml | 8 + .idea/vcs.xml | 6 + .pre-commit-config.yaml | 13 + atlas_gen/__init__.py | 1 + atlas_gen/atlas_scripts/__init__.py | 0 atlas_gen/atlas_scripts/allen_mouse.py | 113 +++++ atlas_gen/atlas_scripts/example_mouse.py | 115 +++++ atlas_gen/atlas_scripts/humanatlas.py | 431 ++++++++++++++++++ atlas_gen/atlas_scripts/kim_unified_atlas.py | 259 +++++++++++ atlas_gen/atlas_scripts/mpin_zfish.py | 206 +++++++++ atlas_gen/atlas_scripts/ratatlas.py | 364 +++++++++++++++ atlas_gen/git_script.py | 72 +++ atlas_gen/mesh_utils.py | 252 ++++++++++ atlas_gen/metadata_utils.py | 138 ++++++ atlas_gen/run_all.py | 48 ++ atlas_gen/stacks.py | 62 +++ atlas_gen/structure_json_to_csv.py | 83 ++++ atlas_gen/structures.py | 115 +++++ atlas_gen/volume_utils.py | 102 +++++ atlas_gen/wrapup.py | 205 +++++++++ pyproject.toml | 21 + requirements.txt | 24 + setup.cfg | 18 + setup.py | 31 ++ 28 files changed, 2708 insertions(+) create mode 100644 .idea/.gitignore create mode 100644 .idea/bg-atlasgen.iml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 .pre-commit-config.yaml create mode 100644 atlas_gen/__init__.py create mode 100644 atlas_gen/atlas_scripts/__init__.py create mode 100644 atlas_gen/atlas_scripts/allen_mouse.py create mode 100644 atlas_gen/atlas_scripts/example_mouse.py create mode 100644 atlas_gen/atlas_scripts/humanatlas.py create mode 100644 atlas_gen/atlas_scripts/kim_unified_atlas.py create mode 100644 atlas_gen/atlas_scripts/mpin_zfish.py create mode 100644 atlas_gen/atlas_scripts/ratatlas.py create mode 100644 
atlas_gen/git_script.py create mode 100644 atlas_gen/mesh_utils.py create mode 100644 atlas_gen/metadata_utils.py create mode 100644 atlas_gen/run_all.py create mode 100644 atlas_gen/stacks.py create mode 100644 atlas_gen/structure_json_to_csv.py create mode 100644 atlas_gen/structures.py create mode 100644 atlas_gen/volume_utils.py create mode 100644 atlas_gen/wrapup.py create mode 100644 pyproject.toml create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 00000000..26d33521 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,3 @@ +# Default ignored files +/shelf/ +/workspace.xml diff --git a/.idea/bg-atlasgen.iml b/.idea/bg-atlasgen.iml new file mode 100644 index 00000000..d0876a78 --- /dev/null +++ b/.idea/bg-atlasgen.iml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 00000000..105ce2da --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..26abc6bc --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..694fd4bf --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..94a25f7f --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..73b679a1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,13 @@ +repos: +- repo: https://github.com/python/black + rev: 19.10b0 + hooks: + - id: black + 
pass_filenames: true +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.7.9 + hooks: + - id: flake8 + pass_filenames: true + # this seems to need to be here in addition to setup.cfg + exclude: __init__.py \ No newline at end of file diff --git a/atlas_gen/__init__.py b/atlas_gen/__init__.py new file mode 100644 index 00000000..a53a3a5a --- /dev/null +++ b/atlas_gen/__init__.py @@ -0,0 +1 @@ +__version__ = "0" diff --git a/atlas_gen/atlas_scripts/__init__.py b/atlas_gen/atlas_scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/atlas_gen/atlas_scripts/allen_mouse.py b/atlas_gen/atlas_scripts/allen_mouse.py new file mode 100644 index 00000000..f7eb58a5 --- /dev/null +++ b/atlas_gen/atlas_scripts/allen_mouse.py @@ -0,0 +1,113 @@ +__version__ = "3" + +from allensdk.api.queries.ontologies_api import OntologiesApi +from allensdk.api.queries.reference_space_api import ReferenceSpaceApi +from allensdk.core.reference_space_cache import ReferenceSpaceCache + +from requests import exceptions +from pathlib import Path +from tqdm import tqdm + +from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasapi import descriptors + + +def create_atlas(version, res_um, bg_root_dir): + # Specify information about the atlas: + ATLAS_NAME = "allen_mouse" + SPECIES = "Mus musculus" + ATLAS_LINK = "http://www.brain-map.org.com" + CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" + ORIENTATION = "asr" + + # Temporary folder for nrrd files download: + download_dir_path = bg_root_dir / "downloading_path" + download_dir_path.mkdir(exist_ok=True) + + # Download annotated and template volume: + ######################################### + spacecache = ReferenceSpaceCache( + manifest=download_dir_path / "manifest.json", + # downloaded files are stored relative to here + resolution=res_um, + reference_space_key="annotation/ccf_2017" + # use the latest version of the CCF + ) + + # Download + annotated_volume, _ = 
spacecache.get_annotation_volume() + template_volume, _ = spacecache.get_template_volume() + print("Download completed...") + + # Download structures tree and meshes: + ###################################### + oapi = OntologiesApi() # ontologies + struct_tree = spacecache.get_structure_tree() # structures tree + + # Find id of set of regions with mesh: + select_set = ( + "Structures whose surfaces are represented by a precomputed mesh" + ) + + mesh_set_ids = [ + s["id"] + for s in oapi.get_structure_sets() + if s["description"] == select_set + ] + + structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids) + + # Directory for mesh saving: + meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME + + space = ReferenceSpaceApi() + meshes_dict = dict() + for s in tqdm(structs_with_mesh): + name = s["id"] + filename = meshes_dir / f"{name}.obj" + try: + space.download_structure_mesh( + structure_id=s["id"], + ccf_version="annotation/ccf_2017", + file_name=filename, + ) + meshes_dict[name] = filename + except (exceptions.HTTPError, ConnectionError): + print(s) + + # Loop over structures, remove entries not used: + for struct in structs_with_mesh: + [ + struct.pop(k) + for k in ["graph_id", "structure_set_ids", "graph_order"] + ] + + # Wrap up, compress, and remove file:0 + print(f"Finalising atlas") + wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=version, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(res_um,) * 3, + orientation=ORIENTATION, + root_id=997, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structs_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + ) + + +if __name__ == "__main__": + RES_UM = 25 + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_mouse" + bg_root_dir.mkdir(exist_ok=True) + + create_atlas(__version__, RES_UM, 
bg_root_dir) diff --git a/atlas_gen/atlas_scripts/example_mouse.py b/atlas_gen/atlas_scripts/example_mouse.py new file mode 100644 index 00000000..7e07ac39 --- /dev/null +++ b/atlas_gen/atlas_scripts/example_mouse.py @@ -0,0 +1,115 @@ +__version__ = "4" + +from allensdk.api.queries.ontologies_api import OntologiesApi +from allensdk.api.queries.reference_space_api import ReferenceSpaceApi +from allensdk.core.reference_space_cache import ReferenceSpaceCache + +from requests import exceptions +from pathlib import Path +from tqdm import tqdm + +from atlas_gen.wrapup import wrapup_atlas_from_data + + +def create_atlas(bg_root_dir): + + # Specify information about the atlas: + RES_UM = 100 + ATLAS_NAME = "example_mouse" + SPECIES = "Mus musculus" + ATLAS_LINK = "http://www.brain-map.org.com" + CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" + ORIENTATION = "asr" + + # Temporary folder for nrrd files download: + download_dir_path = bg_root_dir / "downloading_path" + download_dir_path.mkdir(exist_ok=True) + + # Download annotated and template volume: + ######################################### + spacecache = ReferenceSpaceCache( + manifest=download_dir_path / "manifest.json", + # downloaded files are stored relative to here + resolution=RES_UM, + reference_space_key="annotation/ccf_2017" + # use the latest version of the CCF + ) + + # Download + annotated_volume, _ = spacecache.get_annotation_volume() + template_volume, _ = spacecache.get_template_volume() + print("Download completed...") + + # Download structures tree and meshes: + ###################################### + oapi = OntologiesApi() # ontologies + struct_tree = spacecache.get_structure_tree() # structures tree + + # Find id of set of regions with mesh: + select_set = ( + "Structures whose surfaces are represented by a precomputed mesh" + ) + + mesh_set_ids = [ + s["id"] + for s in oapi.get_structure_sets() + if s["description"] == select_set + ] + + structs_with_mesh = 
struct_tree.get_structures_by_set_id(mesh_set_ids)[:3] + + # Directory for mesh saving: + meshes_dir = bg_root_dir / "mesh_temp_download" + + space = ReferenceSpaceApi() + meshes_dict = dict() + for s in tqdm(structs_with_mesh): + name = s["id"] + filename = meshes_dir / f"{name}.obj" + try: + space.download_structure_mesh( + structure_id=s["id"], + ccf_version="annotation/ccf_2017", + file_name=filename, + ) + meshes_dict[name] = filename + except (exceptions.HTTPError, ConnectionError): + print(s) + + # Loop over structures, remove entries not used: + for struct in structs_with_mesh: + [ + struct.pop(k) + for k in ["graph_id", "structure_set_ids", "graph_order"] + ] + + # Wrap up, compress, and remove file: + print(f"Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(RES_UM,) * 3, + orientation=ORIENTATION, + root_id=997, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structs_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + ) + + return Path() + + +if __name__ == "__main__": + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "example" + bg_root_dir.mkdir(exist_ok=True) + + create_atlas(bg_root_dir) diff --git a/atlas_gen/atlas_scripts/humanatlas.py b/atlas_gen/atlas_scripts/humanatlas.py new file mode 100644 index 00000000..7d9a4134 --- /dev/null +++ b/atlas_gen/atlas_scripts/humanatlas.py @@ -0,0 +1,431 @@ +import json +from rich.progress import track +import pandas as pd +import numpy as np +import time +import multiprocessing as mp +from pathlib import Path +import treelib +from brainio import brainio +import urllib3 +from allensdk.core.structure_tree import StructureTree + +# import sys + +# sys.path.append("./") +from atlas_gen.mesh_utils import 
create_region_mesh, Region +from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + + +def prune_tree(tree): + nodes = tree.nodes.copy() + for key, node in nodes.items(): + if node.tag == "root": + continue + if node.data.has_label: + try: + children = tree.children(node.identifier) + except treelib.exceptions.NodeIDAbsentError: + continue + + if children: + for child in children: + try: + tree.remove_node(child.identifier) + except treelib.exceptions.NodeIDAbsentError: + pass + else: + # Remove if none of the children has mesh + try: + subtree = tree.subtree(node.identifier) + except treelib.exceptions.NodeIDAbsentError: + continue + else: + if not np.any( + [c.data.has_label for _, c in subtree.nodes.items()] + ): + tree.remove_node(node.identifier) + return tree + + +if __name__ == "__main__": + PARALLEL = False # disable parallel mesh extraction for easier debugging + + # ---------------------------------------------------------------------------- # + # PREP METADATA # + # ---------------------------------------------------------------------------- # + RES_UM = 500 + VERSION = 1 + ATLAS_NAME = "allen_human" + SPECIES = "Homo sapiens" + ATLAS_LINK = "http://download.alleninstitute.org/informatics-archive/allen_human_reference_atlas_3d_2020/version_1/" + CITATION = "Ding et al 2016, https://doi.org/10.1002/cne.24080" + ORIENTATION = "ipr" + + # ---------------------------------------------------------------------------- # + # PREP FILEPATHS # + # ---------------------------------------------------------------------------- # + + data_fld = Path( + r"D:\Dropbox (UCL - SWC)\Rotation_vte\Anatomy\Atlases\atlasesforbrainrender\AllenHuman" + ) + + annotations_image = data_fld / "annotation.nii" + anatomy_image = ( + data_fld + / "mni_icbm152_nlin_sym_09b" + / "mni_icbm152_pd_tal_nlin_sym_09b_hires.nii" + ) + + # Generated atlas path: + bg_root_dir = Path.home() / ".brainglobe" + 
bg_root_dir.mkdir(exist_ok=True) + + # Temporary folder for nrrd files download: + temp_path = Path(r"C:\Users\Federico\.brainglobe\humanev") + temp_path.mkdir(exist_ok=True) + + # Temporary folder for files before compressing: + uncompr_atlas_path = temp_path / ATLAS_NAME + uncompr_atlas_path.mkdir(exist_ok=True) + + # ---------------------------------------------------------------------------- # + # GET TEMPLATE # + # ---------------------------------------------------------------------------- # + annotation = brainio.load_any(annotations_image) # shape (394, 466, 378) + anatomy = brainio.load_any(anatomy_image) # shape (394, 466, 378) + + # Remove weird artefact + annotation = annotation[:200, :, :] + anatomy = anatomy[:200, :, :] + + # show(Volume(root_annotation), axes=1) + + # ---------------------------------------------------------------------------- # + # STRUCTURES HIERARCHY # + # ---------------------------------------------------------------------------- # + # Download structure tree + ######################### + + # RMA query to fetch structures for the structure graph + query_url = "http://api.brain-map.org/api/v2/data/query.json?criteria=model::Structure" + query_url += ",rma::criteria,[graph_id$eq%d]" % 16 + query_url += ( + ",rma::options[order$eq'structures.graph_order'][num_rows$eqall]" + ) + + http = urllib3.PoolManager() + r = http.request("GET", query_url) + data = json.loads(r.data.decode("utf-8"))["msg"] + structures = pd.read_json(json.dumps(data)) + + # Create empty list and collect all regions traversing the regions hierarchy: + regions_list = [] + + for i, region in structures.iterrows(): + if i == 0: + acronym = "root" + else: + acronym = region["acronym"] + + regions_list.append( + { + "name": region["name"], + "acronym": acronym, + "id": region["id"], + "rgb_triplet": StructureTree.hex_to_rgb( + region["color_hex_triplet"] + ), + "structure_id_path": StructureTree.path_to_list( + region["structure_id_path"] + ), + } + ) + ROOT_ID = 
regions_list[0]["id"] + + # ---------------------------------------------------------------------------- # + # CREATE MESHES # + # ---------------------------------------------------------------------------- # + print(f"Saving atlas data at {uncompr_atlas_path}") + meshes_dir_path = uncompr_atlas_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(regions_list) + print( + f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + ) + + # Mark which tree elements are in the annotation volume + labels = np.unique(annotation).astype(np.int32) + + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # tree.show(data_property='has_label') + + # Remove nodes for which no mesh can be created + tree = prune_tree(tree) + print( + f"After pruning: # of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + ) + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + print("Starting mesh creation in parallel") + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotation, + ROOT_ID, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with returning results from pool.map but we don't care + else: + print("Starting mesh creation") + + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + if node.tag == "root": + volume = annotation.copy() + volume[volume > 0] = node.identifier + else: + volume = annotation + + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + volume, + ROOT_ID, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in 
regions_list: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + # print(f"No mesh file exists for: {s['name']}") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + # print(f"obj file for {s['name']} is too small.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # ---------------------------------------------------------------------------- # + # WRAP UP # + # ---------------------------------------------------------------------------- # + + # Wrap up, compress, and remove file: + print("Finalising atlas") + wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=VERSION, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(RES_UM,) * 3, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=anatomy, + annotation_stack=annotation, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + ) + + +# ---------------------------------------------------------------------------- # +# OLD CODE # +# ---------------------------------------------------------------------------- # + +# # Create meshes +# ############### +# meshes_dir = uncompr_atlas_path / descriptors.MESHES_DIRNAME +# meshes_dir.mkdir(exist_ok=True) + +# unique_values, unique_counts = np.unique( +# annotation_whole, return_counts=True +# ) +# voxel_counts = dict(zip(unique_values, unique_counts)) +# if 0 in voxel_counts: +# del voxel_counts[0] +# structures.set_index("id", inplace=True) + +# # Create root first +# root = [s for s in regions_list if s["acronym"] == "root"][0] +# root_idx = root["id"] +# root_volume = volume_utils.create_masked_array( +# annotation_whole, 0, greater_than=True +# ) +# savepath = 
meshes_dir / f'{root["id"]}.obj' +# if not savepath.exists(): +# root_mesh = mesh_utils.extract_mesh_from_mask( +# root_volume, savepath, smooth=False, decimate=True +# ) +# else: +# root_mesh = load(str(savepath)) + +# # Asses mesh extraction quality +# # mesh_utils.compare_mesh_and_volume(root_mesh, root_volume) + +# # ? Create meshes for leaf nodes +# start = time.time() +# pool = mp.Pool(mp.cpu_count() - 2) +# try: +# pool.map( +# create_structure_mesh, +# [ +# (structures, annotation_whole, meshes_dir, a) +# for a in voxel_counts +# ], +# ) +# except mp.pool.MaybeEncodingError: +# pass # error with returning results from pool.map but we don't care +# print( +# f"Creating meshes for {len(voxel_counts)} structures took: {round(time.time() - start, 3)}s" +# ) + +# # Show which regions were represented in the annotated volume +# regions_with_mesh = [structures.loc[a, "acronym"] for a in voxel_counts] + +# tree = StructureTree(regions_list).get_structures_tree() + +# for key, node in tree.nodes.items(): +# if node.tag in regions_with_mesh: +# has_mesh = True +# else: +# has_mesh = False +# node.data = Region(has_mesh) + +# # Remove regions that are children to the ones that which +# # were represented in the volume or were +# # at least some of their children had a mesh +# tree = prune_tree(tree) + +# # ? 
extract meshes for non leaf regions +# id_to_acronym_map = {s["id"]: s["acronym"] for s in regions_list} +# voxel_to_acro = {a: structures.loc[a, "acronym"] for a in voxel_counts} +# acronym_to_voxel = {v: k for k, v in voxel_to_acro.items()} +# non_leaf_nodes = [ +# s +# for s in regions_list +# if s["acronym"] != "root" and s["id"] not in voxel_counts +# ] + +# start = time.time() +# pool = mp.Pool(mp.cpu_count() - 2) +# try: +# pool.map( +# create_nonleaf_structure_mesh, +# [ +# ( +# nonleaf, +# meshes_dir, +# regions_list, +# id_to_acronym_map, +# acronym_to_voxel, +# annotation_whole, +# ) +# for nonleaf in non_leaf_nodes +# ], +# ) +# except mp.pool.MaybeEncodingError: +# pass # error with returning results from pool.map but we don't care +# print( +# f"Creating meshes for {len(non_leaf_nodes)} structures took: {round(time.time() - start, 3)}s" +# ) + +# # ? Fill in more of the regions that don't have mesh yet +# for repeat in range(4): +# for idx, node in tree.nodes.items(): +# savepath = meshes_dir / f"{idx}.obj" +# if not savepath.exists(): +# region = [r for r in regions_list if r["id"] == idx][0] +# args = ( +# region, +# meshes_dir, +# regions_list, +# id_to_acronym_map, +# acronym_to_voxel, +# annotation_whole, +# ) +# create_nonleaf_structure_mesh(args) + +# # Update tree and check that everyone got a mesh +# for idx, node in tree.nodes.items(): +# savepath = meshes_dir / f"{idx}.obj" +# if savepath.exists(): +# node.data.has_mesh = True + +# tree.show(data_property="has_mesh") + +# print( +# f"\n\nTotal number of structures left in tree: {tree.size()} - max depth: {tree.depth()}" +# ) + +# tree_regions = [node.identifier for k, node in tree.nodes.items()] +# pruned_regions_list = [r for r in regions_list if r["id"] in tree_regions] + +# # save regions list json: +# with open(uncompr_atlas_path / descriptors.STRUCTURES_FILENAME, "w") as f: +# json.dump(pruned_regions_list, f) + +# # Wrap up, compress, and remove file: +# 
##################################### +# wrapup_atlas_from_dir( +# uncompr_atlas_path, +# CITATION, +# ATLAS_LINK, +# SPECIES, +# (RES_UM,) * 3, +# cleanup_files=False, +# compress=True, +# root=root_idx, +# ) diff --git a/atlas_gen/atlas_scripts/kim_unified_atlas.py b/atlas_gen/atlas_scripts/kim_unified_atlas.py new file mode 100644 index 00000000..e347e2f9 --- /dev/null +++ b/atlas_gen/atlas_scripts/kim_unified_atlas.py @@ -0,0 +1,259 @@ +import json +from rich.progress import track +import pandas as pd +import numpy as np +import time +import multiprocessing as mp +from pathlib import Path +from brainio.brainio import load_any +from allensdk.core.reference_space_cache import ReferenceSpaceCache + +# import sys + +# sys.path.append("./") +from atlas_gen.mesh_utils import create_region_mesh, Region +from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + + +if __name__ == "__main__": + PARALLEL = True # disable parallel mesh extraction for easier debugging + + # ---------------------------------------------------------------------------- # + # PREP METADATA # + # ---------------------------------------------------------------------------- # + RES_UM = 25 + VERSION = 1 + ATLAS_NAME = "kim_unified" + SPECIES = "Mus musculus" + ATLAS_LINK = "https://kimlab.io/brain-map/atlas/" + CITATION = "Chon et al. 
2019, https://doi.org/10.1038/s41467-019-13057-w" + ORIENTATION = "als" + ROOT_ID = 997 + + # ---------------------------------------------------------------------------- # + # PREP FILEPATHS # + # ---------------------------------------------------------------------------- # + + paxinos_allen_directory = Path( + r"C:\Users\Federico\Downloads\kim_atlas_materials.tar\kim_atlas_materials" + ) + annotations_image = paxinos_allen_directory / "annotations_coronal.tif" + structures_file = paxinos_allen_directory / "structures.csv" + + # assume isotropic + ANNOTATIONS_RES_UM = 10 + + version = "0.1" + + # Generated atlas path: + bg_root_dir = Path.home() / ".brainglobe" + bg_root_dir.mkdir(exist_ok=True) + + # Temporary folder for nrrd files download: + temp_path = Path(r"C:\Users\Federico\.brainglobe\kimdev") + temp_path.mkdir(exist_ok=True) + downloading_path = temp_path / "downloading_path" + downloading_path.mkdir(exist_ok=True) + + # Temporary folder for files before compressing: + uncompr_atlas_path = temp_path / ATLAS_NAME + uncompr_atlas_path.mkdir(exist_ok=True) + + # ---------------------------------------------------------------------------- # + # GET TEMPLATE # + # ---------------------------------------------------------------------------- # + + # Load (and possibly downsample) annotated volume: + ######################################### + scaling_factor = ANNOTATIONS_RES_UM / RES_UM + print( + f"Loading: {annotations_image.name} and downscaling by: {scaling_factor}" + ) + annotated_volume = load_any( + annotations_image, + x_scaling_factor=scaling_factor, + y_scaling_factor=scaling_factor, + z_scaling_factor=scaling_factor, + anti_aliasing=False, + ) + + # Download template volume: + ######################################### + spacecache = ReferenceSpaceCache( + manifest=downloading_path / "manifest.json", + # downloaded files are stored relative to here + resolution=RES_UM, + reference_space_key="annotation/ccf_2017" + # use the latest version of the CCF + 
) + + # Download + print("Downloading template file") + template_volume, _ = spacecache.get_template_volume() + print("Download completed...") + + # ---------------------------------------------------------------------------- # + # STRUCTURES HIERARCHY # + # ---------------------------------------------------------------------------- # + + # Parse region names & hierarchy + # ###################################### + df = pd.read_csv(structures_file) + df = df.drop(columns=["Unnamed: 0", "parent_id", "parent_acronym"]) + + # split by "/" and convert list of strings to list of ints + df["structure_id_path"] = ( + df["structure_id_path"] + .str.split(pat="/") + .map(lambda x: [int(i) for i in x]) + ) + + structures = df.to_dict("records") + + for structure in structures: + structure.update({"rgb_triplet": [255, 255, 255]}) + # root doesn't have a parent + if structure["id"] != 997: + structure["structure_id_path"].append(structure["id"]) + + # save regions list json: + with open(uncompr_atlas_path / "structures.json", "w") as f: + json.dump(structures, f) + + # ---------------------------------------------------------------------------- # + # CREATE MESHESH # + # ---------------------------------------------------------------------------- # + print(f"Saving atlas data at {uncompr_atlas_path}") + meshes_dir_path = uncompr_atlas_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + # Create and prune structures tree + tree = get_structures_tree(structures) + drop_from_tree = [ + "fiber_tracts", + "VentSys", + "bas", + ] # stuff we don't need meshes for + for drop in drop_from_tree: + print("Dropping from structures tree: ", drop) + dropped = tree.remove_subtree( + [nid for nid, n in tree.nodes.items() if n.tag == drop][0] + ) + + print( + f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + ) + + # Create a tree marking which brain regions are shown in the annotation + labels = np.unique(annotated_volume).astype(np.int32) + + for key, node in 
tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # tree.show(data_property='has_label') + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + print("Starting mesh creation in parallel") + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with returning results from pool.map but we don't care + else: + print("Starting mesh creation") + + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. 
not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # ---------------------------------------------------------------------------- # + # WRAP UP # + # ---------------------------------------------------------------------------- # + + # Wrap up, compress, and remove file: + print("Finalising atlas") + wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=VERSION, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(RES_UM,) * 3, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=bg_root_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True, + ) diff --git a/atlas_gen/atlas_scripts/mpin_zfish.py b/atlas_gen/atlas_scripts/mpin_zfish.py new file mode 100644 index 00000000..338fb89f --- /dev/null +++ b/atlas_gen/atlas_scripts/mpin_zfish.py @@ -0,0 +1,206 @@ +__version__ = "4" + +from pathlib import Path +import warnings +import zipfile +import requests +import tarfile +import tifffile +from tifffile import imread + +from allensdk.core.structure_tree import StructureTree +from atlas_gen.wrapup import wrapup_atlas_from_data + +from bg_atlasapi.utils import retrieve_over_http + +BASE_URL = r"https://fishatlas.neuro.mpg.de" + + +def download_line_stack(tg_line_name): + """Utility function to download a line from its name. 
def add_path_inplace(parent):
    """Walk the region hierarchy depth-first and write, for every child
    region, its full "structure_id_path" (the parent's path extended with
    the child's own id).

    Parameters
    ----------
    parent : dict
        Node parsed from the fishatlas website; must contain
        "structure_id_path" and "sub_regions" keys. Children are
        modified in place; the parent's own path is left untouched.
    """
    parent_path = parent["structure_id_path"]
    for child in parent["sub_regions"]:
        # Child path = parent path + child id, as a fresh list so
        # siblings do not share the same list object.
        child["structure_id_path"] = [*parent_path, child["id"]]
        add_path_inplace(child)
def collect_all_inplace(
    node, traversing_list, download_path, mesh_dict,
):
    """ Recursively traverse a region hierarchy, download meshes, and append
    regions to a list inplace.

    Parameters
    ----------
    node : dict
        current region node of the fishatlas hierarchy; must carry "name",
        "id", "color", "structure_id_path", "files" and "sub_regions" keys
    traversing_list : list
        flat list of brainglobe-standard structure dicts, appended to in
        place (one entry per region whose mesh download succeeds)
    download_path : pathlib.Path
        folder where the downloaded .stl mesh files are stored
    mesh_dict : dict
        maps region id -> mesh file path; filled in place

    """

    # Append clean dictionary with brainglobe standard info:
    traversing_list.append(
        {
            "name": node["name"],
            # NOTE(review): acronym is set to the full name — presumably
            # because the fishatlas hierarchy provides no acronym; confirm.
            "acronym": node["name"],
            "id": node["id"],
            "rgb_triplet": StructureTree.hex_to_rgb(node["color"]),
            "structure_id_path": node["structure_id_path"],
        }
    )

    # Url for the mesh: the website stores the 3D file path with Windows
    # separators and a different extension; normalise separators and swap
    # the last 4 characters (".txt"-style suffix) for ".stl".
    mesh_url = (
        BASE_URL + node["files"]["file_3D"][:-4].replace("\\", "/") + ".stl"
    )

    # Try download, if mesh does not exist region is removed:
    try:
        filename = download_path / "{}.stl".format(node["id"])
        retrieve_over_http(mesh_url, filename)

        mesh_dict[node["id"]] = filename
    except requests.exceptions.ConnectionError:
        # Pop region from list: the dict appended above is discarded, so
        # regions without a downloadable mesh never reach the final
        # structure list.
        message = "No mesh found for {}".format(traversing_list.pop()["name"])
        warnings.warn(message)

    # Recurse into children regardless of whether this region had a mesh.
    for region in node["sub_regions"]:
        collect_all_inplace(region, traversing_list, download_path, mesh_dict)
def create_atlas(version, bg_root_dir):
    """Build and package the MPIN zebrafish atlas.

    Downloads the reference and secondary-reference line stacks from the
    fishatlas website, the annotation/hemisphere stacks from the GIN repo,
    and the region hierarchy + meshes, then hands everything to
    wrapup_atlas_from_data for packaging.

    Parameters
    ----------
    version : int or str
        minor version number forwarded as atlas_minor_version
    bg_root_dir : pathlib.Path
        working directory where downloads and the final atlas are written
    """
    # Specify fixed information about the atlas:
    RES_UM = 1
    ATLAS_NAME = "mpin_zfish"
    SPECIES = "Danio rerio"
    ATLAS_LINK = "http://fishatlas.neuro.mpg.de"
    CITATION = "Kunst et al 2019, https://doi.org/10.1016/j.neuron.2019.04.034"
    ORIENTATION = "lai"

    # Download reference:
    #####################
    reference_stack = download_line_stack("HuCGCaMP5G")

    # Download accessory references (other transgenic lines):
    ################################
    additional_references = dict()
    for line in ["H2BGCaMP", "GAD1b"]:
        additional_references[line] = download_line_stack(line)

    # Download annotation and hemispheres from GIN repo:
    gin_url = "https://gin.g-node.org/brainglobe/mpin_zfish/raw/master/mpin_zfish_annotations.tar.gz"
    compressed_zip_path = bg_root_dir / "annotations.tar"
    retrieve_over_http(gin_url, compressed_zip_path)

    # NOTE(review): extractall on a downloaded archive trusts its member
    # paths (tar path-traversal); acceptable for a curated GIN repo, but
    # worth a filter if the source ever changes.
    tar = tarfile.open(compressed_zip_path)
    tar.extractall(path=bg_root_dir)

    extracted_dir = bg_root_dir / "mpin_zfish_annotations"

    annotation_stack = tifffile.imread(
        str(extracted_dir / "mpin_zfish_annotation.tif")
    )

    hemispheres_stack = tifffile.imread(
        str(extracted_dir / "mpin_zfish_hemispheres.tif")
    )

    # meshes from the website and stacks do not have the same orientation.
    # Therefore, flip axes of the stacks so that bg-space reorientation is
    # used on the meshes:
    annotation_stack = annotation_stack.swapaxes(0, 2)
    hemispheres_stack = hemispheres_stack.swapaxes(0, 2)
    reference_stack = reference_stack.swapaxes(0, 2)
    additional_references = {
        k: v.swapaxes(0, 2) for k, v in additional_references.items()
    }

    # Download structures tree and meshes:
    ######################################
    regions_url = f"{BASE_URL}/neurons/get_brain_regions"

    meshes_dir_path = bg_root_dir / "meshes_temp_download"
    meshes_dir_path.mkdir(exist_ok=True)

    # Download structures hierarchy:
    structures = requests.get(regions_url).json()["brain_regions"]

    # Initiate dictionary with root info (the website hierarchy has no
    # explicit root node, so one is synthesised here with id 0):
    structures_dict = {
        "name": "root",
        "id": 0,
        # shallow copy: nested region dicts are shared and mutated in
        # place by add_path_inplace below — intentional.
        "sub_regions": structures.copy(),
        "structure_id_path": [0],
        "acronym": "root",
        "files": {
            "file_3D": "/media/Neurons_database/Brain_and_regions/Brains/Outline/Outline_new.txt"
        },
        "color": "#ffffff",
    }

    # Go through the regions hierarchy and create the structure path entry:
    add_path_inplace(structures_dict)

    # Create empty list and collect all regions traversing the regions
    # hierarchy, downloading each region's mesh along the way:
    structures_list = []
    meshes_dict = {}
    collect_all_inplace(
        structures_dict, structures_list, meshes_dir_path, meshes_dict
    )

    # Wrap up, compress, and remove file:
    print(f"Finalising atlas")
    wrapup_atlas_from_data(
        atlas_name=ATLAS_NAME,
        atlas_minor_version=version,
        citation=CITATION,
        atlas_link=ATLAS_LINK,
        species=SPECIES,
        resolution=(RES_UM,) * 3,
        orientation=ORIENTATION,
        root_id=0,
        reference_stack=reference_stack,
        annotation_stack=annotation_stack,
        structures_list=structures_list,
        meshes_dict=meshes_dict,
        working_dir=bg_root_dir,
        hemispheres_stack=hemispheres_stack,
        cleanup_files=False,
        compress=True,
        additional_references=additional_references,
    )


if __name__ == "__main__":
    # Generated atlas path:
    bg_root_dir = Path.home() / "brainglobe_workingdir" / "fish"
    bg_root_dir.mkdir(exist_ok=True, parents=True)

    create_atlas(__version__, bg_root_dir)
"annotated"]: + loaded = load_any( + str(base_data_fld / f"{name}.tif") + ) # shape (186, 160, 160) + tifffile.imsave(str(uncompr_atlas_path / f"{name}.tiff"), loaded) + + +# ---------------------------------------------------------------------------- # +# LOAD/PARSE HIERARCHICAL DATA # +# ---------------------------------------------------------------------------- # + +""" + Hierarchy is organized: + + /major/submajor/minor + + hierarchy dataframe maps region names to voxel value in annotated.tiff (minor column) + major and submajors map major/submajor values in hierarchy to the corresponding name +""" + +hierarchy = pd.read_excel( + str(base_data_fld / "SwansonAtlasCategories-Mar_2_2005.xls"), + header=1, + usecols=["Abbreviation", "Name of Area", "Major", "Sub_Major", "Minor"], + nrows=1276, +) + +majors = pd.read_excel( + str(base_data_fld / "SwansonAtlasCategories-Mar_2_2005.xls"), + header=3, + usecols=[13, 14], + nrows=20, +) + +submajors = pd.read_excel( + str(base_data_fld / "SwansonAtlasCategories-Mar_2_2005.xls"), + header=3, + usecols=[15, 16], + nrows=89, +) + + +clean_hierarchy = dict( + abbreviation=[], + name=[], + major=[], + majornum=[], + submajor=[], + submajornum=[], + minor=[], +) +for i, region in hierarchy.iterrows(): + clean_hierarchy["abbreviation"].append(region.Abbreviation) + clean_hierarchy["name"].append(region["Name of Area"]) + clean_hierarchy["major"].append( + majors.loc[majors.ANC == region.Major]["ANC Name"].values[0] + ) + clean_hierarchy["majornum"].append( + majors.loc[majors.ANC == region.Major]["ANC"].values[0] + ) + clean_hierarchy["minor"].append(region["Minor"]) + try: + clean_hierarchy["submajor"].append( + submajors.loc[submajors.SubANC == region.Sub_Major][ + "SubANC Name" + ].values[0] + ) + clean_hierarchy["submajornum"].append( + int( + submajors.loc[submajors.SubANC == region.Sub_Major][ + "SubANC" + ].values[0] + ) + ) + except Exception as e: + print(e) + clean_hierarchy["submajor"].append(None) + 
clean_hierarchy["submajornum"].append(None) + + +clean_hierarchy = pd.DataFrame(clean_hierarchy) + +# ------------------------ Organize hierarchy metadata ----------------------- # + +idn = 0 + +""" + Given that the way the matadata is organised, not every region has a unique + numerical ID value associated with it (e.g. a region might have a minor 1, but + a submajor region's numerical value is also 1), here we reassign a numerical id + to each brain structure. number increase from root > majors > submajors > minors. +""" + +structures = [ + { + "acronym": "root", + "id": idn, + "name": "root", + "structure_id_path": [0], + "rgb_triplet": [255, 255, 255], + } +] + + +for i, major in majors.iterrows(): + if not isinstance(major["ANC Name"], str): + continue + + idn += 1 + structures.append( + { + "acronym": major["ANC Name"].replace(" ", "-"), + "id": idn, + "name": major["ANC Name"], + "structure_id_path": [0, idn], + "rgb_triplet": [255, 255, 255], + } + ) + + +for i, submajor in submajors.iterrows(): + # Get an entry in clean hierarchy with this submajor + try: + entry = clean_hierarchy.loc[ + clean_hierarchy.submajornum == submajor["SubANC"] + ].iloc[0] + except Exception as e: + print(e) + pass + + # Get path + idn += 1 + path = [0, int(entry.majornum), idn] + + # Append + structures.append( + { + "acronym": submajor["SubANC Name"].replace(" ", "-"), + "id": idn, + "name": submajor["SubANC Name"], + "structure_id_path": path, + "rgb_triplet": [255, 255, 255], + } + ) + + +for i, region in clean_hierarchy.iterrows(): + idn += 1 + if np.isnan(region.submajornum): + path = [0, region.majornum, idn] + + else: + path = [0, int(region.majornum), int(region.submajornum), idn] + + structures.append( + { + "acronym": region.abbreviation, + "id": idn, + "name": region.name, + "structure_id_path": path, + "rgb_triplet": [255, 255, 255], + } + ) + +# save regions list json: +with open(uncompr_atlas_path / "structures.json", "w") as f: + json.dump(structures, f) + + +# 
---------------------------------------------------------------------------- # +# Create MESEHS # +# ---------------------------------------------------------------------------- # +print(f"Saving atlas data at {uncompr_atlas_path}") +meshes_dir_path = uncompr_atlas_path / "meshes" +meshes_dir_path.mkdir(exist_ok=True) + +volume = load_labelled_volume(load_any(str(base_data_fld / "annotated.tif"))) + +root = extract_volume_surface(volume) + +write(root, str(meshes_dir_path / "0.obj")) + +# First create a mesh for every minor region +volume_data = load_any(str(base_data_fld / "annotated.tif")) +for i, region in tqdm(clean_hierarchy.iterrows()): + structure = [ + s for s in structures if s["acronym"] == region["abbreviation"] + ][0] + savepath = str( + meshes_dir_path + / f'{structure["id"]}.obj'.replace("/", "-").replace("\\", "-") + ) + if os.path.isfile(savepath): + continue + + vol = np.zeros_like(volume_data) + + if not np.isin(np.float(region.minor), volume_data): + # print(f'{region.abbreviation} doesnt seem to appear in annotated dataset') + continue + + vol[volume_data == np.float32(region.minor)] = 1 + if np.max(vol) < 1: + raise ValueError + + write(extract_volume_surface(Volume(vol)), savepath) + + +# Create a mesh for every submajor and major region +for i, submajor in tqdm(submajors.iterrows()): + structure = [ + s + for s in structures + if s["acronym"] == submajor["SubANC Name"].replace(" ", "-") + ][0] + savepath = str( + meshes_dir_path + / f'{structure["id"]}.obj'.replace(" ", "-") + .replace("/", "-") + .replace("\\", "-") + ) + if os.path.isfile(savepath): + continue + + regions = list( + clean_hierarchy.loc[ + clean_hierarchy.submajor == submajor["SubANC Name"] + ].minor.values + ) + if not regions: + continue + + vol = np.zeros_like(volume_data) + + for region in regions: + vol[volume_data == region] = 1 + + if np.max(vol) < 1: + continue + + write(extract_volume_surface(Volume(vol)), savepath) + + +for i, major in tqdm(majors.iterrows()): + if 
not isinstance(major["ANC Name"], str): + continue + structure = [ + s + for s in structures + if s["acronym"] == major["ANC Name"].replace(" ", "-") + ][0] + savepath = str( + meshes_dir_path + / f'{structure["id"]}.obj'.replace(" ", "-") + .replace("/", "-") + .replace("\\", "-") + ) + if os.path.isfile(savepath): + continue + + regions = list( + clean_hierarchy.loc[ + clean_hierarchy.major == major["ANC Name"] + ].minor.values + ) + if not regions: + continue + + vol = np.zeros_like(volume_data) + + for region in regions: + vol[volume_data == region] = 1 + + if np.max(vol) < 1: + continue + + write(extract_volume_surface(Volume(vol)), savepath) + + +# ---------------------------------------------------------------------------- # +# FINAL METADATA AND SAVE # +# ---------------------------------------------------------------------------- # + +metadata_dict = { + "name": ATLAS_NAME, + "species": "Rattus Norvegicus", + "citation": "Swanson 2018, https://pubmed.ncbi.nlm.nih.gov/29277900/", + "atlas_link": "", + "symmetric": False, + "resolution": (1.25, 1.25, 1.25), + "shape": loaded.shape, +} + +with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f: + json.dump(metadata_dict, f) + + +# Create human readable files +create_metadata_files(uncompr_atlas_path, metadata_dict, structures) + + +# Compress folder: +output_filename = bg_root_dir / f"{uncompr_atlas_path.name}.tar.gz" +print(f"Saving compressed at {output_filename}") + +with tarfile.open(output_filename, "w:gz") as tar: + tar.add(uncompr_atlas_path, arcname=uncompr_atlas_path.name) diff --git a/atlas_gen/git_script.py b/atlas_gen/git_script.py new file mode 100644 index 00000000..9cd5a79d --- /dev/null +++ b/atlas_gen/git_script.py @@ -0,0 +1,72 @@ +from git import Repo +from pathlib import Path +import tempfile +from bg_atlasapi.config import read_config +import configparser +import atlas_gen +from importlib import import_module + +from atlas_gen.atlas_scripts.example_mouse import create_atlas + + 
def get_atlas_repr(name):
    """Parse a versioned atlas name into its components.

    Parameters
    ----------
    name : str
        Atlas name such as "example_mouse_100um_v0.4", or
        "example_mouse_100um" when no version tag is present.

    Returns
    -------
    dict
        Keys: "name" (str), "major_vers" / "minor_vers" (str, or None
        when the name carries no version tag), "resolution" (str, the
        micron value with the trailing "um" stripped).
    """
    parts = name.split("_")
    # The last token is a version tag ("vX.Y") unless the name ends with
    # the resolution token ("...um"), i.e. carries no version:
    version_str = parts.pop() if not parts[-1].endswith("um") else None
    resolution_str = parts.pop()

    atlas_name = "_".join(parts)
    if version_str:
        # BUGFIX: was version_str[2:], which for a tag like "v0.4"
        # dropped the major-version digit and produced ("", "4").
        # Strip only the leading "v" before splitting on ".".
        major_vers, minor_vers = version_str[1:].split(".")
    else:
        major_vers, minor_vers = None, None
    return dict(
        name=atlas_name,
        major_vers=major_vers,
        minor_vers=minor_vers,
        resolution=resolution_str[:-2],
    )
def extract_mesh_from_mask(
    volume,
    obj_filepath=None,
    threshold=0.5,
    smooth=False,
    mcubes_smooth=False,
    closing_n_iters=8,
    decimate=True,
    tol=0.0005,
    use_marching_cubes=False,
):
    """
    Returns a vtkplotter mesh actor with just the outer surface of a
    binary mask volume. The default vtkplotter isosurface path is the
    faster one; the optional PyMCubes path (use_marching_cubes=True) is
    slower and, per the original notes, less accurate.


    Parameters
    ----------
    obj_filepath: str or Path object
        path to where the .obj mesh file will be saved; if None the mesh
        is returned without being written to disk
    volume: 3d np.ndarray
        binary mask, must contain at least one 0 and one 1 voxel
    threshold: float
        min value to threshold the volume for isosurface extraction
    smooth: bool
        if True the surface mesh is smoothed (Laplacian)
    use_marching_cubes: bool:
        if true PyMCubes is used to extract the volume's surface
        it's slower and less accurate than vtkplotter though.
    mcubes_smooth: bool,
        if True mcubes.smooth is used before applying marching cubes
    closing_n_iters: int
        number of iterations of closing morphological operation.
        set to None to avoid applying morphological operations
    decimate: bool
        If True the number of vertices is reduced through decimation
    tol: float
        parameter for decimation, larger values correspond to more
        aggressive decimation

    Returns
    -------
    vtkplotter Mesh
        largest connected region of the extracted surface
    """
    # check savepath argument
    if obj_filepath is not None:
        if isinstance(obj_filepath, str):
            obj_filepath = Path(obj_filepath)

        # NOTE(review): FileExistsError is raised for a *missing* parent
        # folder — FileNotFoundError would be the conventional type; kept
        # as-is since callers may catch this exact exception.
        if not obj_filepath.parents[0].exists():
            raise FileExistsError(
                "The folder where the .obj file is to be saved doesn't exist"
                + f"\n {str(obj_filepath)}"
            )

    # Check volume argument: reject masks with no 0s (min > 0) or no 1s
    # (max < 1), i.e. anything that is not a populated binary mask.
    if np.min(volume) > 0 or np.max(volume) < 1:
        raise ValueError(
            "Argument volume should be a binary mask with only 0s and 1s when passing a np.ndarray"
        )

    # Apply morphological transformations (fill internal holes, then
    # close small gaps) before surface extraction:
    if closing_n_iters is not None:
        volume = scipy.ndimage.morphology.binary_fill_holes(volume)
        volume = scipy.ndimage.morphology.binary_closing(
            volume, iterations=closing_n_iters
        )

    if not use_marching_cubes:
        # Use faster algorithm (vtkplotter isosurface + cap)
        volume = Volume(volume)
        mesh = volume.clone().isosurface(threshold=threshold).cap()
    else:
        print(
            "The marching cubes algorithm might be rotated compared to your volume data"
        )
        # Apply marching cubes and save to .obj
        if mcubes_smooth:
            # NOTE(review): this rebinding shadows the boolean `smooth`
            # flag, so the Laplacian smoothing below always runs on this
            # path — confirm whether that is intended.
            smooth = mcubes.smooth(volume)
            vertices, triangles = mcubes.marching_cubes(smooth, 0)
        else:
            vertices, triangles = mcubes.marching_cubes(volume, 0.5)
        # create mesh
        mesh = Mesh((vertices, triangles))

    # Cleanup and save
    if smooth:
        mesh.smoothLaplacian()

    if decimate:
        mesh.clean(tol=tol)

    # Keep only the largest connected surface component:
    mesh = mesh.extractLargestRegion()

    if obj_filepath is not None:
        write(mesh, str(obj_filepath))

    return mesh
def create_region_mesh(args):
    """
    Automates the creation of a region's mesh. Given a volume of annotations
    and a structures tree, it takes the volume's region corresponding to the
    region of interest and all of its children's labels and creates a mesh.
    It takes a single tuple of arguments to facilitate parallel processing
    with multiprocessing.pool.map

    Note, by default it avoids overwriting a structure's mesh if the
    .obj file exists already.

    Parameters
    ----------
    args : tuple
        (meshes_dir_path, node, tree, labels, annotated_volume, ROOT_ID,
        closing_n_iters), where:
        meshes_dir_path: pathlib Path object with folder where meshes are saved
        node: tree's node corresponding to the region whose mesh is being created
        tree: treelib.Tree with hierarchical structures information
        labels: list of unique label annotations in annotated volume
            (list(np.unique(annotated_volume)))
        annotated_volume: 3d numpy array with annotated volume
        ROOT_ID: int, id of root structure (the root mesh skips the custom
            closing iterations and uses the extraction defaults)
        closing_n_iters: int, closing iterations for non-root regions
    """
    # Split arguments
    (
        meshes_dir_path,
        node,
        tree,
        labels,
        annotated_volume,
        ROOT_ID,
        closing_n_iters,
    ) = args

    # Avoid overwriting existing mesh
    savepath = meshes_dir_path / f"{node.identifier}.obj"
    if savepath.exists():
        return

    # Get labels for region and its children
    stree = tree.subtree(node.identifier)
    ids = list(stree.nodes.keys())

    # Keep only labels that are in the annotation volume
    matched_labels = [i for i in ids if i in labels]

    if (
        not matched_labels
    ):  # it fails if the region and all of its children are not in annotation
        print(f"No labels found for {node.tag}")
        return
    else:
        # Create mask and extract mesh. NOTE(review): the mask is built
        # from the full subtree id list `ids`, not `matched_labels` —
        # presumably ids absent from the volume simply contribute nothing;
        # confirm against create_masked_array.
        mask = create_masked_array(annotated_volume, ids)

        if not np.max(mask):
            print(f"Empty mask for {node.tag}")
        else:
            if node.identifier == ROOT_ID:
                # Root: rely on extract_mesh_from_mask's default closing
                # iterations rather than the caller-provided value.
                extract_mesh_from_mask(
                    mask, obj_filepath=savepath, smooth=True
                )
            else:
                extract_mesh_from_mask(
                    mask,
                    obj_filepath=savepath,
                    smooth=True,
                    closing_n_iters=closing_n_iters,
                )
class Region:
    """
    Per-node metadata attached to a treelib.Tree during atlas creation:
    records whether the region's id actually occurs in the annotation
    volume, so tree nodes can later be filtered on that flag.
    """

    def __init__(self, has_label):
        # True when the region id appears in the annotated volume.
        self.has_label = has_label
def generate_metadata_dict(
    name,
    citation,
    atlas_link,
    species,
    symmetric,
    resolution,
    orientation,
    version,
    shape,
    transformation_mat,
    additional_references,
):
    """Validate atlas metadata and assemble it into a JSON-serializable dict.

    Parameters
    ----------
    name : str
        atlas name; must contain at least two underscore-separated
        tokens (author_species convention)
    citation : str
        either "unpublished" or a citation string containing a DOI
    atlas_link : str
        URL of the source atlas; fetched once to check it is reachable
    species : str
        species name
    symmetric : bool
        whether the atlas is symmetric across hemispheres
    resolution : sequence of 3 numbers
        voxel size; converted to a tuple of floats
    orientation : str
        bg-space orientation string
    version : str
        "major.minor" version string
    shape : sequence of 3 ints
        stack shape; converted to a tuple of ints
    transformation_mat : matrix-like
        transformation to brainglobe space; stored as nested tuples
    additional_references : list
        names of secondary reference stacks

    Returns
    -------
    dict
        metadata dictionary ready for JSON serialization

    Raises
    ------
    AssertionError
        on any failed validation. NOTE(review): asserts are stripped
        under `python -O`; raising ValueError would be more robust.
    InvalidURL
        when atlas_link cannot be fetched
    """

    # Name should be author_species
    assert len(name.split("_")) >= 2

    # Control version formatting:
    assert re.match("[0-9]+\\.[0-9]+", version)

    # We ask for DOI and correct link only if atlas is published:
    if citation != "unpublished":
        assert "doi" in citation

        # Test url:
        try:
            _ = requests.get(atlas_link)
        except (MissingSchema, InvalidURL, ConnectionError):
            raise InvalidURL(
                "Ensure that the url is valid and formatted correctly!"
            )

    # Enforce correct format for symmetric, resolution and shape:
    assert type(symmetric) == bool
    assert len(resolution) == 3
    assert len(shape) == 3

    resolution = tuple([float(v) for v in resolution])
    shape = tuple(int(v) for v in shape)

    assert type(additional_references) == list

    return dict(
        name=name,
        citation=citation,
        atlas_link=atlas_link,
        species=species,
        symmetric=symmetric,
        resolution=resolution,
        orientation=orientation,
        version=version,
        shape=shape,
        # NOTE(review): key is spelled "trasform_to_bg" (missing "n");
        # kept as-is because downstream readers presumably look up this
        # exact key — confirm before renaming.
        trasform_to_bg=tuple([tuple(m) for m in transformation_mat]),
        additional_references=additional_references,
    )
"w") as out: + out.write("-- BRAINGLOBE ATLAS --\n") + + now = datetime.now() + out.write("Generated on: " + now.strftime("%d/%m/%Y") + "\n\n") + + out.write("------------------------------\n\n\n") + + for key, value in metadata_dict.items(): + out.write(f" {key}: {value}\n") + + out.write("\n\n\n") + out.write("------------------------------\n\n\n") + out.write("\n\n\n") + + out.write("-- BRAIN STRUCTURES TREE --\n") + + out.write(tree) + + +def create_structures_csv(uncompr_atlas_path, root): + """ + Converts an atlas structure json dictionary to csv. For cellfinder + compatibility and ease of browsing. + + Parameters + ---------- + uncompr_atlas_path : str or Path object + path to uncompressed atlas folder + """ + convert_structure_json_to_csv( + uncompr_atlas_path / "structures.json", root=root + ) + + +def create_metadata_files(dest_dir, metadata_dict, structures, root_id): + """ + Automatic creation of + . structures.csv + . README.txt + from an atlas files. All Files are saved in the uncompressed atlas folder + awaiting compression and upload to GIN. 
def create_metadata_files(dest_dir, metadata_dict, structures, root_id):
    """
    Automatic creation of
        . structures.csv
        . README.txt
    from an atlas files. All Files are saved in the uncompressed atlas folder
    awaiting compression and upload to GIN.

    :param dest_dir: path to the uncompressed atlas folder
    :param metadata_dict: dict with atlas metadata
    :param structures: list of dictionaries with structures hierarchical info
    :param root_id: id of the root structure, forwarded to the csv converter

    NOTE(review): ratatlas.py calls this function with only three
    positional arguments (no root_id), which would raise TypeError —
    confirm and update that call site.
    """
    # write metadata dict:
    with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f:
        json.dump(metadata_dict, f)

    create_structures_csv(dest_dir, root_id)
    create_readme(dest_dir, metadata_dict, structures)
# Here we can map multiple resolutions for each script.
# It could be expanded to multiplex more params.
resolutions_dict = dict(allen_mouse_atlas=[25, 100])

# Loop over modules in the atlas_scripts folder and run every atlas
# script that exposes a create_atlas entry point:
for (_, module_name, _) in pkgutil.iter_modules(atlas_scripts.__path__):
    print(module_name)
    # Import the module:
    module = import_module(f"atlas_gen.atlas_scripts.{module_name}")

    # If create function is available:
    if "create_atlas" in dir(module):

        # If multiple resolutions are required:
        if module_name in resolutions_dict.keys():
            for res_um in resolutions_dict[module_name]:
                # Make working directory for this atlas/resolution:
                bg_root_dir = temp_root_dir / f"{module_name}_{res_um}um"
                bg_root_dir.mkdir(exist_ok=True)

                module.create_atlas(
                    version=4, res_um=res_um, bg_root_dir=bg_root_dir
                )

                # Move the packaged atlas to the destination folder:
                compressed_file = next(
                    bg_root_dir.glob("*_*_[0-9]*um_*.*.tar.gz")
                )
                shutil.move(str(compressed_file), str(dest_root_dir))
        else:
            # BUGFIX: bg_root_dir was never assigned on this branch, so
            # the first single-resolution module raised NameError (or a
            # stale directory from a previous iteration was reused).
            # Give every module its own working directory:
            bg_root_dir = temp_root_dir / module_name
            bg_root_dir.mkdir(exist_ok=True)

            module.create_atlas(version=4, bg_root_dir=bg_root_dir)

            compressed_file = next(
                bg_root_dir.glob("*_*_[0-9]*um_*.*.tar.gz")
            )
            shutil.move(str(compressed_file), str(dest_root_dir))
Parameters + ---------- + stack + output_dir + """ + if stack.dtype != descriptors.ANNOTATION_DTYPE: + stack = stack.astype(descriptors.ANNOTATION_DTYPE) + write_stack(stack, output_dir / descriptors.ANNOTATION_FILENAME) + + +def save_hemispheres(stack, output_dir): + """ + Parameters + ---------- + stack + output_dir + """ + if stack.dtype != descriptors.HEMISPHERES_DTYPE: + stack = stack.astype(descriptors.HEMISPHERES_DTYPE) + write_stack(stack, output_dir / descriptors.HEMISPHERES_FILENAME) diff --git a/atlas_gen/structure_json_to_csv.py b/atlas_gen/structure_json_to_csv.py new file mode 100644 index 00000000..04c9468c --- /dev/null +++ b/atlas_gen/structure_json_to_csv.py @@ -0,0 +1,83 @@ +from pathlib import Path +import pandas as pd + + +def structure_id_path_to_string(structure_id_path): + """ + Given a path (as a list of structure ids) to a specific structure, + return as a string of "/" separated structure ids + Parameters + ---------- + structure_id_path : list + list of ints defining the path to a region (which is the last element) + + Returns + ------- + str: + "/" separated string of structure ids + + """ + + path_string = "/" + for element in structure_id_path: + path_string = path_string + str(element) + "/" + return path_string + + +def get_parent_id(structure_id_path, root=997): + """ + Given a path (as a list of structure ids) to a specific structure, + return the id of the parent structure + + Parameters + ---------- + structure_id_path : list + list of ints defining the path to a region (which is the last element) + + root : int (optional) + Value for the root (whole brain) structure that has no parent. + + Returns + ------- + int or None : + id of the parent structure (or None if no parent) + """ + + if structure_id_path == [root]: + return None + else: + return int(structure_id_path[-2]) + + +def convert_structure_json_to_csv( + structure_json_path, destination_path=None, root=997 +): + """ + Converts an atlas structure json dictionary to csv. 
For cellfinder + compatibility and ease of browsing. + + Parameters + ---------- + structure_json_path : str or Path object + path to the json file + destination_path : str or Path object (optional) + Where to save the resulting csv file. Defaults to the same directory + as the json file. + """ + + structure_json_path = Path(structure_json_path) + + df = pd.read_json(structure_json_path) + df = df.drop(columns=["rgb_triplet"]) + df["parent_structure_id"] = df["structure_id_path"].apply( + get_parent_id, root=root + ) + df["structure_id_path"] = df["structure_id_path"].apply( + structure_id_path_to_string + ) + df = df.sort_values("name") + + if destination_path is None: + destination_path = structure_json_path.with_suffix(".csv") + + df.to_csv(destination_path, index=False) diff --git a/atlas_gen/structures.py b/atlas_gen/structures.py new file mode 100644 index 00000000..573cfa9c --- /dev/null +++ b/atlas_gen/structures.py @@ -0,0 +1,115 @@ +from bg_atlasapi.descriptors import STRUCTURE_TEMPLATE as STEMPLATE +from bg_atlasapi.structure_tree_util import get_structures_tree + + +def check_struct_consistency(structures): + """Ensures internal consistency of the structures list + Parameters + ---------- + structures + + Returns + ------- + + """ + assert type(structures) == list + assert type(structures[0]) == dict + + # Check that all structures have the correct keys and value types: + for struct in structures: + try: + assert struct.keys() == STEMPLATE.keys() + assert [ + isinstance(v, type(STEMPLATE[k])) for k, v in struct.items() + ] + except AssertionError: + raise AssertionError( + f"Inconsistencies found for structure {struct}" + ) + + +def get_structure_children(structures, region, use_tree=False): + """ + Given a list of dictionaries with structures data, + and a structure from the list, this function returns + the structures in the list that are children of + the given structure (region). + If use_tree is true it creates a StructureTree and uses that. 
+ """ + if not isinstance(structures, list): + raise ValueError("structures should be a list") + if not isinstance(structures[0], dict): + raise ValueError("structures should be a list of dictionaries") + if not isinstance(region, dict): + raise ValueError( + "region should be a dictionary with a structures data" + ) + + if "id" not in region.keys() or "structure_id_path" not in region.keys(): + raise ValueError( + 'Incomplete structures dicts, need both "id" and "structure_id_path"' + ) + + if not use_tree: + sub_region_ids = [] + for subregion in structures: + if region["id"] in subregion["structure_id_path"]: + sub_region_ids.append(subregion["id"]) + else: + tree = get_structures_tree(structures) + sub_region_ids = [ + n.identifier for k, n in tree.subtree(region["id"]).nodes.items() + ] + + if sub_region_ids == []: + print(f'{region["acronym"]} doesnt seem to contain any other regions') + return None + else: + return sub_region_ids + + +def get_structure_terminal_nodes(structures, region): + """ + Given a list of dictionaries with structures data, + and a structure from the list, this function returns + the structures in the list that are children of + the given structure (region) that are leafs of the + struture tree + """ + + tree = get_structures_tree(structures) + + sub_region_ids = [ + n.identifier for n in tree.subtree(region["id"]).leaves() + ] + + if not sub_region_ids: + print(f'{region["acronym"]} doesnt seem to contain any other regions') + return None + else: + return sub_region_ids + + +# Used by show_which_structures_have_mesh +class Region(object): + def __init__(self, has_mesh): + self.has_mesh = has_mesh + + +def show_which_structures_have_mesh(structures, meshes_dir): + """ + It prints out a tree visualisation with + True for the regions that a mesh and false for the others + + """ + tree = get_structures_tree(structures) + + for idx, node in tree.nodes.items(): + savepath = meshes_dir / f"{idx}.obj" + if savepath.exists(): + has_mesh = True 
+ else: + has_mesh = False + node.data = Region(has_mesh) + + tree.show(data_property="has_mesh") diff --git a/atlas_gen/volume_utils.py b/atlas_gen/volume_utils.py new file mode 100644 index 00000000..ae54fdca --- /dev/null +++ b/atlas_gen/volume_utils.py @@ -0,0 +1,102 @@ +""" + Code useful for dealing with volumetric data (e.g. allen annotation volume for the mouse atlas) + extracting surfaces from volumetric data .... +""" +try: + from vtkplotter import Volume +except ModuleNotFoundError: + raise ModuleNotFoundError( + "Mesh generation with these utils requires vtkplotter\n" + + ' please install with "pip install vtkplotter -U"' + ) + +from brainio import brainio + +import os +import numpy as np + + +def create_masked_array(volume, label, greater_than=False): + """ + Given a 2d o 3d numpy array and a + label value, creates a masked binary + array which is 1 when volume == label + and 0 otherwise + + Parameters + ---------- + volume: np.ndarray + (2d or 3d array) + label: int, float or list of int. 
+ the masked array will be 1 where volume == label + greater_than: bool + if True, all voxels with value > label will be set to 1 + """ + if not isinstance(volume, np.ndarray): + raise ValueError( + f"Argument volume should be a numpy array not {type(volume)}" + ) + + arr = np.zeros_like(volume) + + if not isinstance(label, list) and not np.all(np.isin(label, volume)): + print(f"Label {label} is not in the array, returning empty mask") + return arr + # elif isinstance(label, list): + # if not np.any(np.isin(volume, label)): + # print(f"Label is not in the array, returning empty mask") + # return arr + + if not greater_than: + if not isinstance(label, list): + arr[volume == label] = 1 + else: + arr[np.isin(volume, label)] = 1 + else: + arr[volume > label] = 1 + return arr + + +# ----------------------------- vtkplotter utils ----------------------------- # +# This stuff is outdated, use the functions in mesh_utils.py +# to extract meshes from volumes + + +def load_labelled_volume(data, vmin=0, alpha=1, **kwargs): + """ + Load volume image from .nrrd file. + It assume that voxels with value = 0 are empty while voxels with values > 0 + are labelles (e.g. 
to indicate the location of a brain region in a reference atlas) + + :param data: str, path to file with volume data or 3d numpy array + :param vmin: float, values below this numner will be assigned an alpha=0 and not be visualized + :param **kwargs: kwargs to pass to the Volume class from vtkplotter + :param alpha: float in range [0, 1], transparency [for the part of volume with value > vmin] + """ + # Load/check volumetric data + if isinstance(data, str): # load from file + if not os.path.isfile(data): + raise FileNotFoundError(f"Volume data file {data} not found") + + try: + data = brainio.load_any(data) + except Exception as e: + raise ValueError( + f"Could not load volume data from file: {data} - {e}" + ) + + elif not isinstance(data, np.ndarray): + raise ValueError( + f"Data should be a filepath or np array, not: {data.__type__}" + ) + + # Create volume and set transparency range + vol = Volume(data, alpha=alpha, **kwargs) + + otf = vol.GetProperty().GetScalarOpacity() + otf.RemoveAllPoints() + otf.AddPoint(vmin, 0) # set to transparent + otf.AddPoint(vmin + 0.1, alpha) # set to opaque + otf.AddPoint(data.max(), alpha) + + return vol diff --git a/atlas_gen/wrapup.py b/atlas_gen/wrapup.py new file mode 100644 index 00000000..68a253d0 --- /dev/null +++ b/atlas_gen/wrapup.py @@ -0,0 +1,205 @@ +import json +import tarfile +import shutil +from pathlib import Path + +import tifffile +import bg_space as bgs +import meshio as mio + +from atlas_gen.metadata_utils import ( + create_metadata_files, + generate_metadata_dict, +) +from atlas_gen.stacks import ( + save_reference, + save_annotation, + save_hemispheres, + save_secondary_reference, +) +from atlas_gen.structures import check_struct_consistency + +from bg_atlasapi import descriptors + + +# This should be changed every time we make changes in the atlas +# structure: +ATLAS_VERSION = descriptors.ATLAS_MAJOR_V + + +def wrapup_atlas_from_data( + atlas_name, + atlas_minor_version, + citation, + atlas_link, + species, 
+ resolution, + orientation, + root_id, + reference_stack, + annotation_stack, + structures_list, + meshes_dict, + working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=False, + additional_references=dict(), +): + """ + Finalise an atlas with truly consistent format from all the data. + + Parameters + ---------- + atlas_name : str + Atlas name in the form author_species. + atlas_minor_version : int or str + Minor version number for this particular atlas. + citation : str + Citation for the atlas, if unpublished specify "unpublished". + atlas_link : str + Valid URL for the atlas. + species : str + Species name formatted as "CommonName (Genus species)". + resolution : tuple + Three elements tuple, resolution on three axes + orientation : + Orientation of the original atlas (tuple describing origin for BGSpace). + root_id : + Id of the root element of the atlas. + reference_stack : str or Path or numpy array + Reference stack for the atlas. If str or Path, will be read with tifffile. + annotation_stack : str or Path or numpy array + Annotation stack for the atlas. If str or Path, will be read with tifffile. + structures_list : list of dict + List of valid dictionary for structures. + meshes_dict : dict + dict of meshio-compatible mesh file paths in the form {sruct_id: meshpath} + working_dir : str or Path obj + Path where the atlas folder and compressed file will be generated. + hemispheres_stack : str or Path or numpy array, optional + Hemisphere stack for the atlas. If str or Path, will be read with tifffile. + If none is provided, atlas is assumed to be symmetric + cleanup_files : bool, optional + (Default value = False) + compress : bool, optional + (Default value = True) + scale_meshes: bool, optional + (Default values = False). If True the meshes points are scaled by the resolution + to ensure that they are specified in microns, regardless of the atlas resolution. 
+ additional_references: dict, optional + (Default value = empty dict). Dictionary with secondary reference stacks. + """ + + version = f"{ATLAS_VERSION}.{atlas_minor_version}" + + # If no hemisphere file is given, assume the atlas is symmetric: + symmetric = hemispheres_stack is None + + # Instantiate BGSpace obj, using original stack size in um as meshes + # are un um: + original_shape = reference_stack.shape + volume_shape = tuple(res * s for res, s in zip(resolution, original_shape)) + space_convention = bgs.SpaceConvention(orientation, shape=volume_shape) + + # Check consistency of structures .json file: + check_struct_consistency(structures_list) + + atlas_dir_name = f"{atlas_name}_{resolution[0]}um_v{version}" + dest_dir = Path(working_dir) / atlas_dir_name + + # exist_ok would be more permissive but error-prone here as there might + # be old files + dest_dir.mkdir() + + stack_list = [reference_stack, annotation_stack] + saving_fun_list = [save_reference, save_annotation] + + # If the atlas is not symmetric, we are also providing an hemisphere stack: + if not symmetric: + stack_list += [ + hemispheres_stack, + ] + saving_fun_list += [ + save_hemispheres, + ] + + # write tiff stacks: + for stack, saving_function in zip(stack_list, saving_fun_list): + + if isinstance(stack, str) or isinstance(stack, Path): + stack = tifffile.imread(stack) + + # Reorient stacks if required: + stack = space_convention.map_stack_to( + descriptors.ATLAS_ORIENTATION, stack, copy=False + ) + shape = stack.shape + + saving_function(stack, dest_dir) + + for k, stack in additional_references.items(): + stack = space_convention.map_stack_to( + descriptors.ATLAS_ORIENTATION, stack, copy=False + ) + save_secondary_reference(stack, k, output_dir=dest_dir) + + # Reorient vertices of the mesh. 
+ mesh_dest_dir = dest_dir / descriptors.MESHES_DIRNAME + mesh_dest_dir.mkdir() + + for mesh_id, meshfile in meshes_dict.items(): + mesh = mio.read(meshfile) + + # Reorient points: + mesh.points = space_convention.map_points_to( + descriptors.ATLAS_ORIENTATION, mesh.points + ) + + # Scale the mesh to be in microns, if necessary: + if scale_meshes: + mesh.points *= resolution + + # Save in meshes dir: + mio.write(mesh_dest_dir / f"{mesh_id}.obj", mesh) + + transformation_mat = space_convention.transformation_matrix_to( + descriptors.ATLAS_ORIENTATION + ) + + # save regions list json: + with open(dest_dir / descriptors.STRUCTURES_FILENAME, "w") as f: + json.dump(structures_list, f) + + # Finalize metadata dictionary: + metadata_dict = generate_metadata_dict( + name=atlas_name, + citation=citation, + atlas_link=atlas_link, + species=species, + symmetric=symmetric, + resolution=resolution, + orientation=descriptors.ATLAS_ORIENTATION, + version=version, + shape=shape, + transformation_mat=transformation_mat, + additional_references=[k for k in additional_references.keys()], + ) + + # Create human readable .csv and .txt files: + create_metadata_files(dest_dir, metadata_dict, structures_list, root_id) + + # Compress if required: + if compress: + output_filename = dest_dir.parent / f"{dest_dir.name}.tar.gz" + print(f"Saving compressed atlas data at: {output_filename}") + with tarfile.open(output_filename, "w:gz") as tar: + tar.add(dest_dir, arcname=dest_dir.name) + + # Cleanup if required: + if cleanup_files: + # Clean temporary directory and remove it: + shutil.rmtree(dest_dir) + + return output_filename diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..159ded91 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,21 @@ +[tool.black] +target-version = ['py36', 'py37', 'py38'] +skip-string-normalization = false +line-length = 79 +exclude = ''' +( + /( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | 
build + | dist + | examples + )/ +) +''' \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..9b1ff66a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,24 @@ +numpy +tifffile +treelib +pandas +requests +meshio +click +rich +tqdm>=4.46.1 +bg-space +allensdk +sphinx +brainio>=0.0.16 +vtkplotter +recommonmark +sphinx_rtd_theme +pydoc-markdown +black +pytest-cov +pytest +gitpython +coverage +pre-commit +PyMCubes \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..58690141 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,18 @@ +[bumpversion] +current_version = 0.0.1 +commit = True +tag = True + +[bumpversion:file:setup.py] +search = version="{current_version}" +replace = version="{new_version}" + +[bumpversion:file:bg_atlasgen/__init__.py] +search = __version__ = "{current_version}" +replace = __version__ = "{new_version}" + +[flake8] +ignore = E203,W503,E501,E731,C901,W291,W293,E741 +max-line-length = 79 +max-complexity = 18 +exclude = __init__.py diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..6090a431 --- /dev/null +++ b/setup.py @@ -0,0 +1,31 @@ +from setuptools import setup, find_namespace_packages + +with open("requirements.txt") as f: + requirements = f.read().splitlines() + +setup( + name="bg-atlasgen", + version="0.0.1", + description="Scripts generation atlases and utilities for BrainGlobe", + install_requires=requirements, + python_requires=">=3.6", + entry_points={"console_scripts": []}, + packages=find_namespace_packages(exclude=("docs", "tests*")), + include_package_data=True, + url="https://github.com/brainglobe/bg-atlasgen", + author="Luigi Petrucco, Federico Claudi, Adam Tyson", + author_email="adam.tyson@ucl.ac.uk", + classifiers=[ + "Development Status :: 3 - Alpha", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows :: Windows 10", + "Operating System :: MacOS :: MacOS X", + "Programming Language 
:: Python", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + ], + zip_safe=False, +) From 8173167d737e151da0bc3de6c887dfddd66306eb Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 2 Sep 2020 16:55:41 +0200 Subject: [PATCH 004/103] gitignore --- .gitignore | 8 ++++++++ .idea/.gitignore | 3 --- .idea/bg-atlasgen.iml | 8 -------- .idea/inspectionProfiles/profiles_settings.xml | 6 ------ .idea/misc.xml | 4 ---- .idea/modules.xml | 8 -------- .idea/vcs.xml | 6 ------ 7 files changed, 8 insertions(+), 35 deletions(-) delete mode 100644 .idea/.gitignore delete mode 100644 .idea/bg-atlasgen.iml delete mode 100644 .idea/inspectionProfiles/profiles_settings.xml delete mode 100644 .idea/misc.xml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/vcs.xml diff --git a/.gitignore b/.gitignore index b6e47617..2bb9b60d 100644 --- a/.gitignore +++ b/.gitignore @@ -127,3 +127,11 @@ dmypy.json # Pyre type checker .pyre/ + +.idea/ +.vs/ +*.~lock.* + + +# macOS +*.DS_Store diff --git a/.idea/.gitignore b/.idea/.gitignore deleted file mode 100644 index 26d33521..00000000 --- a/.idea/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -# Default ignored files -/shelf/ -/workspace.xml diff --git a/.idea/bg-atlasgen.iml b/.idea/bg-atlasgen.iml deleted file mode 100644 index d0876a78..00000000 --- a/.idea/bg-atlasgen.iml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml deleted file mode 100644 index 105ce2da..00000000 --- a/.idea/inspectionProfiles/profiles_settings.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 26abc6bc..00000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No 
newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index 694fd4bf..00000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7f..00000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file From 921c007a4f669fae6e410edea0abcf18b6f28c9f Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 00:43:03 +0200 Subject: [PATCH 005/103] Almost at complete atlas autogeneration --- atlas_gen/atlas_scripts/example_mouse.py | 6 +- atlas_gen/git_script.py | 70 +++++++++++------------- atlas_gen/test_git.py | 21 +++++++ atlas_gen/wrapup.py | 8 +-- 4 files changed, 61 insertions(+), 44 deletions(-) create mode 100644 atlas_gen/test_git.py diff --git a/atlas_gen/atlas_scripts/example_mouse.py b/atlas_gen/atlas_scripts/example_mouse.py index 7e07ac39..db79dd98 100644 --- a/atlas_gen/atlas_scripts/example_mouse.py +++ b/atlas_gen/atlas_scripts/example_mouse.py @@ -11,10 +11,10 @@ from atlas_gen.wrapup import wrapup_atlas_from_data -def create_atlas(bg_root_dir): +def create_atlas(bg_root_dir, resolution): # Specify information about the atlas: - RES_UM = 100 + RES_UM = resolution # 100 ATLAS_NAME = "example_mouse" SPECIES = "Mus musculus" ATLAS_LINK = "http://www.brain-map.org.com" @@ -104,7 +104,7 @@ def create_atlas(bg_root_dir): compress=True, ) - return Path() + return output_filename if __name__ == "__main__": diff --git a/atlas_gen/git_script.py b/atlas_gen/git_script.py index 9cd5a79d..475bbdbc 100644 --- a/atlas_gen/git_script.py +++ b/atlas_gen/git_script.py @@ -5,29 +5,10 @@ import configparser import atlas_gen from importlib import import_module +import shutil +from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name -from atlas_gen.atlas_scripts.example_mouse import create_atlas - - -generation_dict = dict(example_mouse=[100]) - - 
-def get_atlas_repr(name): - parts = name.split("_") - # if atlas name with no version: - version_str = parts.pop() if not parts[-1].endswith("um") else None - resolution_str = parts.pop() - - atlas_name = "_".join(parts) - if version_str: - major_vers, minor_vers = version_str[2:].split(".") - else: - major_vers, minor_vers = None, None - return dict(name=atlas_name, - major_vers=major_vers, - minor_vers=minor_vers, - resolution=resolution_str[:-2]) - +GENERATION_DICT = dict(example_mouse=[100]) cwd = Path.home() / "bg_auto" @@ -42,31 +23,46 @@ def get_atlas_repr(name): # pw = input("GIN-GNode password: ") # Python 3 # print(us, pw) + # Read last versions from conf file: conf = configparser.ConfigParser() conf.read(repo_path / "last_versions.conf") - atlases_status = dict() - for k in conf["atlases"].keys(): - repr = get_atlas_repr(k) + # Find all atlases representation given the names in the conf: + atlases_repr = dict() + for k in conf["atlases"].keys(): + repr = atlas_repr_from_name(k) # Read versions from conf: - major_vers, minor_vers = conf["atlases"][k].split(".") - repr["major_vers"] = major_vers - repr["minor_vers"] = minor_vers - atlases_status[repr.pop("name")] =repr + repr["major_vers"], repr["minor_vers"] = conf["atlases"][k].split(".") + # Add as entries in a dict: + atlases_repr[repr.pop("name")] = repr - bg_atlas_version = atlas_gen.__version__ + # Major version is given by version of the atlas_gen module: + bg_atlasgen_version = atlas_gen.__version__ + # Path to the scripts to generate the atlases: scripts_path = atlas_gen_path / "atlas_scripts" - for n, res in generation_dict.items(): - # print(next(scripts_path.glob(f"{n}.py"))) - # print(n) - status = atlases_status[n] - mod = import_module(f"atlas_gen.atlas_scripts.{n}") - script_version = mod.__version__ + # Loop over the entries from the GENERATION_DICT configuration dict + for name, resolutions in GENERATION_DICT.items(): + status = atlases_repr[name] + module = 
import_module(f"atlas_gen.atlas_scripts.{name}") + script_version = module.__version__ + if bg_atlas_version >= status["major_vers"] and \ script_version > status["minor_vers"]: - print(n, mod.create_atlas) + print(name, module.create_atlas) + + for resolution in resolutions: + temp_dir = cwd / "tempdir" + temp_dir.mkdir(exist_ok=True) + + output_filename = module.create_atlas(temp_dir, resolution) + shutil.move(str(output_filename), repo_path) + shutil.rmtree(temp_dir) + k = atlas_name_from_repr(name, resolution) + conf["atlases"][k] = str(f"{bg_atlasgen_version}.{script_version}") + with open(repo_path / "last_versions.conf", "w") as f: + conf.write(f) diff --git a/atlas_gen/test_git.py b/atlas_gen/test_git.py new file mode 100644 index 00000000..5f678f0a --- /dev/null +++ b/atlas_gen/test_git.py @@ -0,0 +1,21 @@ +from git import Repo +from pathlib import Path + +GENERATION_DICT = dict(example_mouse=[100]) + + +cwd = Path.home() / "bg_auto" +cwd.mkdir(exist_ok=True) + + +if __name__ == "__main__": + repo_path = cwd / "atlas_repo" + atlas_gen_path = Path(__file__).parent + + repo = Repo(repo_path) + + #repo.git.add(".") + #repo.git.commit('-m', 'test commit', author='luigi.petrucco@gmail.com') + repo.git.pull() + repo.git.push() + diff --git a/atlas_gen/wrapup.py b/atlas_gen/wrapup.py index 68a253d0..228504c4 100644 --- a/atlas_gen/wrapup.py +++ b/atlas_gen/wrapup.py @@ -20,6 +20,7 @@ from atlas_gen.structures import check_struct_consistency from bg_atlasapi import descriptors +from bg_atlasapi.utils import atlas_name_from_repr # This should be changed every time we make changes in the atlas @@ -92,8 +93,6 @@ def wrapup_atlas_from_data( (Default value = empty dict). Dictionary with secondary reference stacks. 
""" - version = f"{ATLAS_VERSION}.{atlas_minor_version}" - # If no hemisphere file is given, assume the atlas is symmetric: symmetric = hemispheres_stack is None @@ -106,7 +105,8 @@ def wrapup_atlas_from_data( # Check consistency of structures .json file: check_struct_consistency(structures_list) - atlas_dir_name = f"{atlas_name}_{resolution[0]}um_v{version}" + atlas_dir_name = atlas_name_from_repr(atlas_name, resolution[0], + ATLAS_VERSION, atlas_minor_version) dest_dir = Path(working_dir) / atlas_dir_name # exist_ok would be more permissive but error-prone here as there might @@ -181,7 +181,7 @@ def wrapup_atlas_from_data( symmetric=symmetric, resolution=resolution, orientation=descriptors.ATLAS_ORIENTATION, - version=version, + version=f"{ATLAS_VERSION}.{atlas_minor_version}", shape=shape, transformation_mat=transformation_mat, additional_references=[k for k in additional_references.keys()], From 6ca9c7e0cf4abf10ef07785d4300e27c42972ad7 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 09:50:40 +0200 Subject: [PATCH 006/103] Finalized script for automatic atlas generation --- {atlas_gen => bg_atlasgen}/__init__.py | 0 .../atlas_scripts/__init__.py | 0 .../atlas_scripts/allen_mouse.py | 2 +- .../atlas_scripts/example_mouse.py | 2 +- .../atlas_scripts/humanatlas.py | 4 +- .../atlas_scripts/kim_unified_atlas.py | 4 +- .../atlas_scripts/mpin_zfish.py | 2 +- .../atlas_scripts/ratatlas.py | 4 +- .../main_script.py | 43 +++++++++++++------ {atlas_gen => bg_atlasgen}/mesh_utils.py | 2 +- {atlas_gen => bg_atlasgen}/metadata_utils.py | 2 +- {atlas_gen => bg_atlasgen}/run_all.py | 2 +- {atlas_gen => bg_atlasgen}/stacks.py | 0 .../structure_json_to_csv.py | 0 {atlas_gen => bg_atlasgen}/structures.py | 0 {atlas_gen => bg_atlasgen}/test_git.py | 0 {atlas_gen => bg_atlasgen}/volume_utils.py | 0 {atlas_gen => bg_atlasgen}/wrapup.py | 6 +-- 18 files changed, 46 insertions(+), 27 deletions(-) rename {atlas_gen => bg_atlasgen}/__init__.py (100%) rename {atlas_gen 
=> bg_atlasgen}/atlas_scripts/__init__.py (100%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/allen_mouse.py (98%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/example_mouse.py (98%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/humanatlas.py (99%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/kim_unified_atlas.py (98%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/mpin_zfish.py (99%) rename {atlas_gen => bg_atlasgen}/atlas_scripts/ratatlas.py (98%) rename atlas_gen/git_script.py => bg_atlasgen/main_script.py (65%) rename {atlas_gen => bg_atlasgen}/mesh_utils.py (99%) rename {atlas_gen => bg_atlasgen}/metadata_utils.py (98%) rename {atlas_gen => bg_atlasgen}/run_all.py (97%) rename {atlas_gen => bg_atlasgen}/stacks.py (100%) rename {atlas_gen => bg_atlasgen}/structure_json_to_csv.py (100%) rename {atlas_gen => bg_atlasgen}/structures.py (100%) rename {atlas_gen => bg_atlasgen}/test_git.py (100%) rename {atlas_gen => bg_atlasgen}/volume_utils.py (100%) rename {atlas_gen => bg_atlasgen}/wrapup.py (98%) diff --git a/atlas_gen/__init__.py b/bg_atlasgen/__init__.py similarity index 100% rename from atlas_gen/__init__.py rename to bg_atlasgen/__init__.py diff --git a/atlas_gen/atlas_scripts/__init__.py b/bg_atlasgen/atlas_scripts/__init__.py similarity index 100% rename from atlas_gen/atlas_scripts/__init__.py rename to bg_atlasgen/atlas_scripts/__init__.py diff --git a/atlas_gen/atlas_scripts/allen_mouse.py b/bg_atlasgen/atlas_scripts/allen_mouse.py similarity index 98% rename from atlas_gen/atlas_scripts/allen_mouse.py rename to bg_atlasgen/atlas_scripts/allen_mouse.py index f7eb58a5..8f7ade1b 100644 --- a/atlas_gen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -8,7 +8,7 @@ from pathlib import Path from tqdm import tqdm -from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi import descriptors diff --git a/atlas_gen/atlas_scripts/example_mouse.py 
b/bg_atlasgen/atlas_scripts/example_mouse.py similarity index 98% rename from atlas_gen/atlas_scripts/example_mouse.py rename to bg_atlasgen/atlas_scripts/example_mouse.py index db79dd98..c0ca0d82 100644 --- a/atlas_gen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -8,7 +8,7 @@ from pathlib import Path from tqdm import tqdm -from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasgen.wrapup import wrapup_atlas_from_data def create_atlas(bg_root_dir, resolution): diff --git a/atlas_gen/atlas_scripts/humanatlas.py b/bg_atlasgen/atlas_scripts/humanatlas.py similarity index 99% rename from atlas_gen/atlas_scripts/humanatlas.py rename to bg_atlasgen/atlas_scripts/humanatlas.py index 7d9a4134..2aa96d26 100644 --- a/atlas_gen/atlas_scripts/humanatlas.py +++ b/bg_atlasgen/atlas_scripts/humanatlas.py @@ -13,8 +13,8 @@ # import sys # sys.path.append("./") -from atlas_gen.mesh_utils import create_region_mesh, Region -from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree diff --git a/atlas_gen/atlas_scripts/kim_unified_atlas.py b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py similarity index 98% rename from atlas_gen/atlas_scripts/kim_unified_atlas.py rename to bg_atlasgen/atlas_scripts/kim_unified_atlas.py index e347e2f9..610d2bbe 100644 --- a/atlas_gen/atlas_scripts/kim_unified_atlas.py +++ b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py @@ -11,8 +11,8 @@ # import sys # sys.path.append("./") -from atlas_gen.mesh_utils import create_region_mesh, Region -from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree diff --git a/atlas_gen/atlas_scripts/mpin_zfish.py 
b/bg_atlasgen/atlas_scripts/mpin_zfish.py similarity index 99% rename from atlas_gen/atlas_scripts/mpin_zfish.py rename to bg_atlasgen/atlas_scripts/mpin_zfish.py index 338fb89f..6d401685 100644 --- a/atlas_gen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -9,7 +9,7 @@ from tifffile import imread from allensdk.core.structure_tree import StructureTree -from atlas_gen.wrapup import wrapup_atlas_from_data +from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.utils import retrieve_over_http diff --git a/atlas_gen/atlas_scripts/ratatlas.py b/bg_atlasgen/atlas_scripts/ratatlas.py similarity index 98% rename from atlas_gen/atlas_scripts/ratatlas.py rename to bg_atlasgen/atlas_scripts/ratatlas.py index 6c59f81a..f827df4a 100644 --- a/atlas_gen/atlas_scripts/ratatlas.py +++ b/bg_atlasgen/atlas_scripts/ratatlas.py @@ -1,8 +1,8 @@ -from atlas_gen.volume_utils import ( +from bg_atlasgen.volume_utils import ( extract_volume_surface, load_labelled_volume, ) -from atlas_gen.metadata_utils import create_metadata_files +from bg_atlasgen.metadata_utils import create_metadata_files from brainio.brainio import load_any diff --git a/atlas_gen/git_script.py b/bg_atlasgen/main_script.py similarity index 65% rename from atlas_gen/git_script.py rename to bg_atlasgen/main_script.py index 475bbdbc..7a4fff2e 100644 --- a/atlas_gen/git_script.py +++ b/bg_atlasgen/main_script.py @@ -1,13 +1,13 @@ from git import Repo from pathlib import Path -import tempfile -from bg_atlasapi.config import read_config import configparser -import atlas_gen +import bg_atlasgen from importlib import import_module import shutil from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name +# Main dictionary specifying which atlases to generate +# and with which resolutions: GENERATION_DICT = dict(example_mouse=[100]) @@ -17,11 +17,12 @@ if __name__ == "__main__": repo_path = cwd / "atlas_repo" - atlas_gen_path = Path(__file__).parent - # 
Repo.clone_from("https://gin.g-node.org/vigji/bg_test", repo_path) - # us = input("GIN-GNode user: ") # Python 3 - # pw = input("GIN-GNode password: ") # Python 3 - # print(us, pw) + repo_path.mkdir(exist_ok=True) + + print("Cloning atlases repo...") + repo = Repo.clone_from("https://gin.g-node.org/vigji/bg_test", repo_path) + # us = input("GIN-GNode user: ") + # pw = input("GIN-GNode password: ") # Read last versions from conf file: conf = configparser.ConfigParser() @@ -37,32 +38,50 @@ atlases_repr[repr.pop("name")] = repr # Major version is given by version of the atlas_gen module: - bg_atlasgen_version = atlas_gen.__version__ + bg_atlasgen_version = bg_atlasgen.__version__ # Path to the scripts to generate the atlases: + atlas_gen_path = Path(__file__).parent scripts_path = atlas_gen_path / "atlas_scripts" # Loop over the entries from the GENERATION_DICT configuration dict + commit_log = "Updated: " for name, resolutions in GENERATION_DICT.items(): status = atlases_repr[name] module = import_module(f"atlas_gen.atlas_scripts.{name}") script_version = module.__version__ - if bg_atlas_version >= status["major_vers"] and \ + if bg_atlasgen_version >= status["major_vers"] and \ script_version > status["minor_vers"]: - print(name, module.create_atlas) + # Loop over all resolutions: for resolution in resolutions: + print(f"Generating {name}, {resolution} um...") + + # Make working directory for atlas generation: temp_dir = cwd / "tempdir" temp_dir.mkdir(exist_ok=True) + # Create and compress atlas: output_filename = module.create_atlas(temp_dir, resolution) + # Move atlas to repo: shutil.move(str(output_filename), repo_path) shutil.rmtree(temp_dir) + # Update config file with new version: k = atlas_name_from_repr(name, resolution) conf["atlases"][k] = str(f"{bg_atlasgen_version}.{script_version}") - with open(repo_path / "last_versions.conf", "w") as f: conf.write(f) + + # Update log for commit message: + commit_log += f"{output_filename}.name, " + + # Commit and 
push: + repo.git.add(".") + repo.git.commit('-m', commit_log) + repo.git.push() + + # Clear folder: + shutil.rmtree(repo_path) diff --git a/atlas_gen/mesh_utils.py b/bg_atlasgen/mesh_utils.py similarity index 99% rename from atlas_gen/mesh_utils.py rename to bg_atlasgen/mesh_utils.py index 4b3f5cd0..d00cbf4e 100644 --- a/atlas_gen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -19,7 +19,7 @@ import numpy as np from pathlib import Path import scipy -from atlas_gen.volume_utils import create_masked_array +from bg_atlasgen.volume_utils import create_masked_array # ---------------------------------------------------------------------------- # # MESH CREATION # diff --git a/atlas_gen/metadata_utils.py b/bg_atlasgen/metadata_utils.py similarity index 98% rename from atlas_gen/metadata_utils.py rename to bg_atlasgen/metadata_utils.py index 57d90ba5..30a8747b 100644 --- a/atlas_gen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -11,7 +11,7 @@ import requests from requests.exceptions import MissingSchema, InvalidURL, ConnectionError -from atlas_gen.structure_json_to_csv import convert_structure_json_to_csv +from bg_atlasgen.structure_json_to_csv import convert_structure_json_to_csv from bg_atlasapi.structure_tree_util import get_structures_tree diff --git a/atlas_gen/run_all.py b/bg_atlasgen/run_all.py similarity index 97% rename from atlas_gen/run_all.py rename to bg_atlasgen/run_all.py index 77817501..49d2772c 100644 --- a/atlas_gen/run_all.py +++ b/bg_atlasgen/run_all.py @@ -1,4 +1,4 @@ -import atlas_gen.atlas_scripts as atlas_scripts +import bg_atlasgen.atlas_scripts as atlas_scripts import pkgutil from importlib import import_module from pathlib import Path diff --git a/atlas_gen/stacks.py b/bg_atlasgen/stacks.py similarity index 100% rename from atlas_gen/stacks.py rename to bg_atlasgen/stacks.py diff --git a/atlas_gen/structure_json_to_csv.py b/bg_atlasgen/structure_json_to_csv.py similarity index 100% rename from atlas_gen/structure_json_to_csv.py rename 
to bg_atlasgen/structure_json_to_csv.py diff --git a/atlas_gen/structures.py b/bg_atlasgen/structures.py similarity index 100% rename from atlas_gen/structures.py rename to bg_atlasgen/structures.py diff --git a/atlas_gen/test_git.py b/bg_atlasgen/test_git.py similarity index 100% rename from atlas_gen/test_git.py rename to bg_atlasgen/test_git.py diff --git a/atlas_gen/volume_utils.py b/bg_atlasgen/volume_utils.py similarity index 100% rename from atlas_gen/volume_utils.py rename to bg_atlasgen/volume_utils.py diff --git a/atlas_gen/wrapup.py b/bg_atlasgen/wrapup.py similarity index 98% rename from atlas_gen/wrapup.py rename to bg_atlasgen/wrapup.py index 228504c4..8065d7f6 100644 --- a/atlas_gen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -7,17 +7,17 @@ import bg_space as bgs import meshio as mio -from atlas_gen.metadata_utils import ( +from bg_atlasgen.metadata_utils import ( create_metadata_files, generate_metadata_dict, ) -from atlas_gen.stacks import ( +from bg_atlasgen.stacks import ( save_reference, save_annotation, save_hemispheres, save_secondary_reference, ) -from atlas_gen.structures import check_struct_consistency +from bg_atlasgen.structures import check_struct_consistency from bg_atlasapi import descriptors from bg_atlasapi.utils import atlas_name_from_repr From 461820f8a7f1d1f390b46835b75f70ef799fd5b8 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 10:26:22 +0200 Subject: [PATCH 007/103] Added template script for atlas generation --- bg_atlasgen/__init__.py | 2 +- bg_atlasgen/atlas_scripts/allen_mouse.py | 20 +++--- bg_atlasgen/atlas_scripts/example_mouse.py | 10 +-- bg_atlasgen/atlas_scripts/mpin_zfish.py | 22 +++--- bg_atlasgen/atlas_scripts/template_script.py | 71 ++++++++++++++++++++ bg_atlasgen/run_all.py | 4 +- bg_atlasgen/test_git.py | 4 +- 7 files changed, 104 insertions(+), 29 deletions(-) create mode 100644 bg_atlasgen/atlas_scripts/template_script.py diff --git a/bg_atlasgen/__init__.py b/bg_atlasgen/__init__.py index 
a53a3a5a..f754d2ee 100644 --- a/bg_atlasgen/__init__.py +++ b/bg_atlasgen/__init__.py @@ -1 +1 @@ -__version__ = "0" +__version__ = "0" # will set major version of all atlases diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py b/bg_atlasgen/atlas_scripts/allen_mouse.py index 8f7ade1b..064a70d6 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -12,7 +12,7 @@ from bg_atlasapi import descriptors -def create_atlas(version, res_um, bg_root_dir): +def create_atlas(working_dir, resolution): # Specify information about the atlas: ATLAS_NAME = "allen_mouse" SPECIES = "Mus musculus" @@ -21,7 +21,7 @@ def create_atlas(version, res_um, bg_root_dir): ORIENTATION = "asr" # Temporary folder for nrrd files download: - download_dir_path = bg_root_dir / "downloading_path" + download_dir_path = working_dir / "downloading_path" download_dir_path.mkdir(exist_ok=True) # Download annotated and template volume: @@ -29,7 +29,7 @@ def create_atlas(version, res_um, bg_root_dir): spacecache = ReferenceSpaceCache( manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here - resolution=res_um, + resolution=resolution, reference_space_key="annotation/ccf_2017" # use the latest version of the CCF ) @@ -58,7 +58,7 @@ def create_atlas(version, res_um, bg_root_dir): structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids) # Directory for mesh saving: - meshes_dir = bg_root_dir / descriptors.MESHES_DIRNAME + meshes_dir = working_dir / descriptors.MESHES_DIRNAME space = ReferenceSpaceApi() meshes_dict = dict() @@ -84,25 +84,27 @@ def create_atlas(version, res_um, bg_root_dir): # Wrap up, compress, and remove file:0 print(f"Finalising atlas") - wrapup_atlas_from_data( + output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, - atlas_minor_version=version, + atlas_minor_version=__version__, citation=CITATION, atlas_link=ATLAS_LINK, species=SPECIES, - resolution=(res_um,) * 3, + 
resolution=(resolution,) * 3, orientation=ORIENTATION, root_id=997, reference_stack=template_volume, annotation_stack=annotated_volume, structures_list=structs_with_mesh, meshes_dict=meshes_dict, - working_dir=bg_root_dir, + working_dir=working_dir, hemispheres_stack=None, cleanup_files=False, compress=True, ) + return output_filename + if __name__ == "__main__": RES_UM = 25 @@ -110,4 +112,4 @@ def create_atlas(version, res_um, bg_root_dir): bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_mouse" bg_root_dir.mkdir(exist_ok=True) - create_atlas(__version__, RES_UM, bg_root_dir) + create_atlas(bg_root_dir, RES_UM) diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index c0ca0d82..041befe5 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -11,7 +11,7 @@ from bg_atlasgen.wrapup import wrapup_atlas_from_data -def create_atlas(bg_root_dir, resolution): +def create_atlas(working_dir, resolution): # Specify information about the atlas: RES_UM = resolution # 100 @@ -22,7 +22,7 @@ def create_atlas(bg_root_dir, resolution): ORIENTATION = "asr" # Temporary folder for nrrd files download: - download_dir_path = bg_root_dir / "downloading_path" + download_dir_path = working_dir / "downloading_path" download_dir_path.mkdir(exist_ok=True) # Download annotated and template volume: @@ -59,7 +59,7 @@ def create_atlas(bg_root_dir, resolution): structs_with_mesh = struct_tree.get_structures_by_set_id(mesh_set_ids)[:3] # Directory for mesh saving: - meshes_dir = bg_root_dir / "mesh_temp_download" + meshes_dir = working_dir / "mesh_temp_download" space = ReferenceSpaceApi() meshes_dict = dict() @@ -98,7 +98,7 @@ def create_atlas(bg_root_dir, resolution): annotation_stack=annotated_volume, structures_list=structs_with_mesh, meshes_dict=meshes_dict, - working_dir=bg_root_dir, + working_dir=working_dir, hemispheres_stack=None, cleanup_files=False, compress=True, @@ -112,4 
+112,4 @@ def create_atlas(bg_root_dir, resolution): bg_root_dir = Path.home() / "brainglobe_workingdir" / "example" bg_root_dir.mkdir(exist_ok=True) - create_atlas(bg_root_dir) + create_atlas(working_dir, 100) diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index 6d401685..b7176034 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -95,9 +95,9 @@ def collect_all_inplace( collect_all_inplace(region, traversing_list, download_path, mesh_dict) -def create_atlas(version, bg_root_dir): +def create_atlas(working_dir, resolution): # Specify fixed information about the atlas: - RES_UM = 1 + RES_UM = resolution ATLAS_NAME = "mpin_zfish" SPECIES = "Danio rerio" ATLAS_LINK = "http://fishatlas.neuro.mpg.de" @@ -116,13 +116,13 @@ def create_atlas(version, bg_root_dir): # Download annotation and hemispheres from GIN repo: gin_url = "https://gin.g-node.org/brainglobe/mpin_zfish/raw/master/mpin_zfish_annotations.tar.gz" - compressed_zip_path = bg_root_dir / "annotations.tar" + compressed_zip_path = working_dir / "annotations.tar" retrieve_over_http(gin_url, compressed_zip_path) tar = tarfile.open(compressed_zip_path) - tar.extractall(path=bg_root_dir) + tar.extractall(path=working_dir) - extracted_dir = bg_root_dir / "mpin_zfish_annotations" + extracted_dir = working_dir / "mpin_zfish_annotations" annotation_stack = tifffile.imread( str(extracted_dir / "mpin_zfish_annotation.tif") @@ -146,7 +146,7 @@ def create_atlas(version, bg_root_dir): ###################################### regions_url = f"{BASE_URL}/neurons/get_brain_regions" - meshes_dir_path = bg_root_dir / "meshes_temp_download" + meshes_dir_path = working_dir / "meshes_temp_download" meshes_dir_path.mkdir(exist_ok=True) # Download structures hierarchy: @@ -177,9 +177,9 @@ def create_atlas(version, bg_root_dir): # Wrap up, compress, and remove file:0 print(f"Finalising atlas") - wrapup_atlas_from_data( + output_filename = 
wrapup_atlas_from_data( atlas_name=ATLAS_NAME, - atlas_minor_version=version, + atlas_minor_version=__version__, citation=CITATION, atlas_link=ATLAS_LINK, species=SPECIES, @@ -190,17 +190,19 @@ def create_atlas(version, bg_root_dir): annotation_stack=annotation_stack, structures_list=structures_list, meshes_dict=meshes_dict, - working_dir=bg_root_dir, + working_dir=working_dir, hemispheres_stack=hemispheres_stack, cleanup_files=False, compress=True, additional_references=additional_references, ) + return output_filename + if __name__ == "__main__": # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "fish" bg_root_dir.mkdir(exist_ok=True, parents=True) - create_atlas(__version__, bg_root_dir) + create_atlas(bg_root_dir, 1) diff --git a/bg_atlasgen/atlas_scripts/template_script.py b/bg_atlasgen/atlas_scripts/template_script.py new file mode 100644 index 00000000..6930fdd1 --- /dev/null +++ b/bg_atlasgen/atlas_scripts/template_script.py @@ -0,0 +1,71 @@ +__version__ = "0" # will be used to set minor version of the atlas + +from bg_atlasgen.wrapup import wrapup_atlas_from_data + + +def create_atlas(working_dir, resolution): + """Function to generate source data for an atlas. + + Parameters + ---------- + working_dir : Path object + Path where atlas will be created. + resolution : + Resolution of the atlas, in um. + + Returns + ------- + Path object + Path to the final compressed atlas file. 
+ + """ + + ATLAS_NAME = "" + SPECIES = "" + ATLAS_LINK = "" + CITATION = "" + ORIENTATION = "" + + # do stuff to create the atlas + template_volume = # volume with reference + annotated_volume = # volume with structures annotations + structures_list = # list of valid structure dictionaries + meshes_dict = # dictionary of files with region meshes + root_id = # id of the root structure + + # Put here additional reference stacks + # (different genotypes, filtered volumes, etc) + additional_references = dict() + + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(resolution,) * 3, # if isotropic - highly recommended + orientation=ORIENTATION, + root_id=root_id, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_list, + meshes_dict=meshes_dict, + working_dir=working_dir, + additional_references=additional_references, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + ) + + return output_filename + + +# To test stuff locally: +if __name__ == "__main__": + resolution = 100 # some resolution, in microns + + # Generated atlas path: + bg_root_dir = "/path/to/some/dir" + bg_root_dir.mkdir(exist_ok=True) + + create_atlas(bg_root_dir, resolution) \ No newline at end of file diff --git a/bg_atlasgen/run_all.py b/bg_atlasgen/run_all.py index 49d2772c..94a5b7ac 100644 --- a/bg_atlasgen/run_all.py +++ b/bg_atlasgen/run_all.py @@ -34,7 +34,7 @@ bg_root_dir.mkdir(exist_ok=True) module.create_atlas( - version=4, res_um=res_um, bg_root_dir=bg_root_dir + version=4, res_um=res_um, working_dir=bg_root_dir ) compressed_file = next( @@ -42,7 +42,7 @@ ) shutil.move(str(compressed_file), str(dest_root_dir)) else: - module.create_atlas(version=4, bg_root_dir=bg_root_dir) + module.create_atlas(version=4, working_dir=bg_root_dir) compressed_file = next(bg_root_dir.glob("*_*_[0-9]*um_*.*.tar.gz")) 
shutil.move(str(compressed_file), str(dest_root_dir)) diff --git a/bg_atlasgen/test_git.py b/bg_atlasgen/test_git.py index 5f678f0a..ee327829 100644 --- a/bg_atlasgen/test_git.py +++ b/bg_atlasgen/test_git.py @@ -14,8 +14,8 @@ repo = Repo(repo_path) - #repo.git.add(".") - #repo.git.commit('-m', 'test commit', author='luigi.petrucco@gmail.com') + # repo.git.add(".") + # repo.git.commit('-m', 'test commit', author='luigi.petrucco@gmail.com') repo.git.pull() repo.git.push() From b42ea884254873c988e9a9285f94e0990d55939e Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 12:53:17 +0200 Subject: [PATCH 008/103] Template description --- bg_atlasgen/atlas_scripts/template_script.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bg_atlasgen/atlas_scripts/template_script.py b/bg_atlasgen/atlas_scripts/template_script.py index 6930fdd1..197d05be 100644 --- a/bg_atlasgen/atlas_scripts/template_script.py +++ b/bg_atlasgen/atlas_scripts/template_script.py @@ -1,3 +1,7 @@ +"""Template script for the generation of an atlas. Note that the script +has to be renamed to match the name of the atlas (e.g. 
allen_mouse.py) +""" + __version__ = "0" # will be used to set minor version of the atlas from bg_atlasgen.wrapup import wrapup_atlas_from_data From e6c7ed670ce1ccb926849fd9ea58c671068a88ff Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 18:00:04 +0200 Subject: [PATCH 009/103] First working version of script for auto deployment --- bg_atlasgen/__init__.py | 2 +- bg_atlasgen/atlas_scripts/allen_mouse.py | 2 +- bg_atlasgen/atlas_scripts/example_mouse.py | 4 +- .../atlas_scripts/kim_unified_atlas.py | 56 ++++++------------- bg_atlasgen/atlas_scripts/mpin_zfish.py | 8 +-- bg_atlasgen/main_script.py | 28 +++++++--- bg_atlasgen/run_all.py | 48 ---------------- bg_atlasgen/wrapup.py | 6 +- 8 files changed, 47 insertions(+), 107 deletions(-) delete mode 100644 bg_atlasgen/run_all.py diff --git a/bg_atlasgen/__init__.py b/bg_atlasgen/__init__.py index f754d2ee..32ab782e 100644 --- a/bg_atlasgen/__init__.py +++ b/bg_atlasgen/__init__.py @@ -1 +1 @@ -__version__ = "0" # will set major version of all atlases +__version__ = "1" # will set major version of all atlases diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py b/bg_atlasgen/atlas_scripts/allen_mouse.py index 064a70d6..703b6cdb 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -1,4 +1,4 @@ -__version__ = "3" +__version__ = "0" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index 041befe5..ec187dc8 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -1,4 +1,4 @@ -__version__ = "4" +__version__ = "0" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi @@ -84,7 +84,7 @@ def create_atlas(working_dir, resolution): ] # Wrap up, 
compress, and remove file: - print(f"Finalising atlas") + print("Finalising atlas") output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, diff --git a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py index 610d2bbe..ae69d797 100644 --- a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py +++ b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py @@ -1,3 +1,4 @@ +__version__ = "0" import json from rich.progress import track import pandas as pd @@ -15,26 +16,16 @@ from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree +PARALLEL = True # disable parallel mesh extraction for easier debugging -if __name__ == "__main__": - PARALLEL = True # disable parallel mesh extraction for easier debugging - # ---------------------------------------------------------------------------- # - # PREP METADATA # - # ---------------------------------------------------------------------------- # - RES_UM = 25 - VERSION = 1 +def create_atlas(working_dir, resolution=10): ATLAS_NAME = "kim_unified" SPECIES = "Mus musculus" ATLAS_LINK = "https://kimlab.io/brain-map/atlas/" CITATION = "Chon et al. 
2019, https://doi.org/10.1038/s41467-019-13057-w" - ORIENTATION = "als" + ORIENTATION = "asr" ROOT_ID = 997 - - # ---------------------------------------------------------------------------- # - # PREP FILEPATHS # - # ---------------------------------------------------------------------------- # - paxinos_allen_directory = Path( r"C:\Users\Federico\Downloads\kim_atlas_materials.tar\kim_atlas_materials" ) @@ -44,7 +35,6 @@ # assume isotropic ANNOTATIONS_RES_UM = 10 - version = "0.1" # Generated atlas path: bg_root_dir = Path.home() / ".brainglobe" @@ -66,30 +56,20 @@ # Load (and possibly downsample) annotated volume: ######################################### - scaling_factor = ANNOTATIONS_RES_UM / RES_UM - print( - f"Loading: {annotations_image.name} and downscaling by: {scaling_factor}" - ) - annotated_volume = load_any( - annotations_image, - x_scaling_factor=scaling_factor, - y_scaling_factor=scaling_factor, - z_scaling_factor=scaling_factor, - anti_aliasing=False, - ) + ### Load annotation from Kim - # Download template volume: + # Download annotated and template volume: ######################################### spacecache = ReferenceSpaceCache( - manifest=downloading_path / "manifest.json", + manifest=working_dir / "manifest.json", # downloaded files are stored relative to here - resolution=RES_UM, + resolution=resolution, reference_space_key="annotation/ccf_2017" # use the latest version of the CCF ) # Download - print("Downloading template file") + annotated_volume, _ = spacecache.get_annotation_volume() template_volume, _ = spacecache.get_template_volume() print("Download completed...") @@ -98,7 +78,7 @@ # ---------------------------------------------------------------------------- # # Parse region names & hierarchy - # ###################################### + # ############################## df = pd.read_csv(structures_file) df = df.drop(columns=["Unnamed: 0", "parent_id", "parent_acronym"]) @@ -121,9 +101,7 @@ with open(uncompr_atlas_path / 
"structures.json", "w") as f: json.dump(structures, f) - # ---------------------------------------------------------------------------- # - # CREATE MESHESH # - # ---------------------------------------------------------------------------- # + # Create meshes: print(f"Saving atlas data at {uncompr_atlas_path}") meshes_dir_path = uncompr_atlas_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) @@ -156,13 +134,11 @@ node.data = Region(is_label) - # tree.show(data_property='has_label') # Mesh creation closing_n_iters = 2 start = time.time() if PARALLEL: - print("Starting mesh creation in parallel") pool = mp.Pool(mp.cpu_count() - 2) @@ -185,8 +161,6 @@ except mp.pool.MaybeEncodingError: pass # error with returning results from pool.map but we don't care else: - print("Starting mesh creation") - for node in track( tree.nodes.values(), total=tree.size(), @@ -238,13 +212,13 @@ # Wrap up, compress, and remove file: print("Finalising atlas") - wrapup_atlas_from_data( + output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, - atlas_minor_version=VERSION, + atlas_minor_version=__version__, citation=CITATION, atlas_link=ATLAS_LINK, species=SPECIES, - resolution=(RES_UM,) * 3, + resolution=(resolution,) * 3, orientation=ORIENTATION, root_id=ROOT_ID, reference_stack=template_volume, @@ -257,3 +231,5 @@ compress=True, scale_meshes=True, ) + + return output_filename diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index b7176034..325a2cc2 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -1,4 +1,4 @@ -__version__ = "4" +__version__ = "0" from pathlib import Path import warnings @@ -16,7 +16,7 @@ BASE_URL = r"https://fishatlas.neuro.mpg.de" -def download_line_stack(tg_line_name): +def download_line_stack(bg_root_dir, tg_line_name): """Utility function to download a line from its name. 
""" reference_url = f"{BASE_URL}/media/brain_browser/Lines/{tg_line_name}/AverageData/Tiff_File/Average_{tg_line_name}.zip" @@ -106,13 +106,13 @@ def create_atlas(working_dir, resolution): # Download reference: ##################### - reference_stack = download_line_stack("HuCGCaMP5G") + reference_stack = download_line_stack(working_dir, "HuCGCaMP5G") # Download accessory references: ################################ additional_references = dict() for line in ["H2BGCaMP", "GAD1b"]: - additional_references[line] = download_line_stack(line) + additional_references[line] = download_line_stack(working_dir, line) # Download annotation and hemispheres from GIN repo: gin_url = "https://gin.g-node.org/brainglobe/mpin_zfish/raw/master/mpin_zfish_annotations.tar.gz" diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index 7a4fff2e..e4312646 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -1,4 +1,5 @@ from git import Repo +from git.exc import GitCommandError from pathlib import Path import configparser import bg_atlasgen @@ -8,7 +9,9 @@ # Main dictionary specifying which atlases to generate # and with which resolutions: -GENERATION_DICT = dict(example_mouse=[100]) +GENERATION_DICT = dict(mpin_zfish=[1], + allen_mouse=[100], + example_mouse=[100],) cwd = Path.home() / "bg_auto" @@ -17,10 +20,13 @@ if __name__ == "__main__": repo_path = cwd / "atlas_repo" - repo_path.mkdir(exist_ok=True) + if repo_path.exists(): + # Clear folder: + shutil.rmtree(repo_path) + repo_path.mkdir() print("Cloning atlases repo...") - repo = Repo.clone_from("https://gin.g-node.org/vigji/bg_test", repo_path) + repo = Repo.clone_from("https://gin.g-node.org/brainglobe/atlases", repo_path) # us = input("GIN-GNode user: ") # pw = input("GIN-GNode password: ") @@ -48,11 +54,12 @@ commit_log = "Updated: " for name, resolutions in GENERATION_DICT.items(): status = atlases_repr[name] - module = import_module(f"atlas_gen.atlas_scripts.{name}") + module = 
import_module(f"bg_atlasgen.atlas_scripts.{name}") script_version = module.__version__ - if bg_atlasgen_version >= status["major_vers"] and \ - script_version > status["minor_vers"]: + if bg_atlasgen_version > status["major_vers"] or \ + (bg_atlasgen_version > status["major_vers"] and + script_version > status["minor_vers"]): # Loop over all resolutions: for resolution in resolutions: @@ -79,9 +86,12 @@ commit_log += f"{output_filename}.name, " # Commit and push: - repo.git.add(".") - repo.git.commit('-m', commit_log) - repo.git.push() + try: + repo.git.add(".") + repo.git.commit('-m', commit_log) + repo.git.push() + except GitCommandError: + pass # Clear folder: shutil.rmtree(repo_path) diff --git a/bg_atlasgen/run_all.py b/bg_atlasgen/run_all.py deleted file mode 100644 index 94a5b7ac..00000000 --- a/bg_atlasgen/run_all.py +++ /dev/null @@ -1,48 +0,0 @@ -import bg_atlasgen.atlas_scripts as atlas_scripts -import pkgutil -from importlib import import_module -from pathlib import Path -import shutil - - -# A global working directory: -temp_root_dir = Path.home() / "temp_brainglobe_workingdir" -temp_root_dir.mkdir(exist_ok=True) - -# Directory where final atlases will be stored (and synch remotely): -dest_root_dir = Path.home() / "final_brainglobe_workingdir" -dest_root_dir.mkdir(exist_ok=True) - -# Here we can map multiple resolutions for each script. -# It could be expanded to multiplex more params. 
-resolutions_dict = dict(allen_mouse_atlas=[25, 100]) - -# List over modules in the atlas_scripts folder: -for (_, module_name, _) in pkgutil.iter_modules(atlas_scripts.__path__): - print(module_name) - # Import the module: - module = import_module(f"atlas_gen.atlas_scripts.{module_name}") - - # If create function is available: - if "create_atlas" in dir(module): - - # If multiple resolutions are required: - if module_name in resolutions_dict.keys(): - for res_um in resolutions_dict[module_name]: - # Make working directory for this atlas: - bg_root_dir = temp_root_dir / f"{module_name}_{res_um}um" - bg_root_dir.mkdir(exist_ok=True) - - module.create_atlas( - version=4, res_um=res_um, working_dir=bg_root_dir - ) - - compressed_file = next( - bg_root_dir.glob("*_*_[0-9]*um_*.*.tar.gz") - ) - shutil.move(str(compressed_file), str(dest_root_dir)) - else: - module.create_atlas(version=4, working_dir=bg_root_dir) - - compressed_file = next(bg_root_dir.glob("*_*_[0-9]*um_*.*.tar.gz")) - shutil.move(str(compressed_file), str(dest_root_dir)) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 8065d7f6..52e47118 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -17,6 +17,8 @@ save_hemispheres, save_secondary_reference, ) + +import bg_atlasgen from bg_atlasgen.structures import check_struct_consistency from bg_atlasapi import descriptors @@ -25,7 +27,7 @@ # This should be changed every time we make changes in the atlas # structure: -ATLAS_VERSION = descriptors.ATLAS_MAJOR_V +ATLAS_VERSION = bg_atlasgen.__version__ def wrapup_atlas_from_data( @@ -100,7 +102,7 @@ def wrapup_atlas_from_data( # are un um: original_shape = reference_stack.shape volume_shape = tuple(res * s for res, s in zip(resolution, original_shape)) - space_convention = bgs.SpaceConvention(orientation, shape=volume_shape) + space_convention = bgs.AnatomicalSpace(orientation, shape=volume_shape) # Check consistency of structures .json file: check_struct_consistency(structures_list) 
From 252f8067b314d4da8e2c186e1651cd2a15f6d40b Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Thu, 3 Sep 2020 18:09:51 +0200 Subject: [PATCH 010/103] Fixed metadata link --- bg_atlasgen/atlas_scripts/allen_mouse.py | 4 ++-- bg_atlasgen/atlas_scripts/example_mouse.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py b/bg_atlasgen/atlas_scripts/allen_mouse.py index 703b6cdb..c13dda2a 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -1,4 +1,4 @@ -__version__ = "0" +__version__ = "1" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi @@ -16,7 +16,7 @@ def create_atlas(working_dir, resolution): # Specify information about the atlas: ATLAS_NAME = "allen_mouse" SPECIES = "Mus musculus" - ATLAS_LINK = "http://www.brain-map.org.com" + ATLAS_LINK = "http://www.brain-map.com" CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" ORIENTATION = "asr" diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index ec187dc8..2519650d 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -1,4 +1,4 @@ -__version__ = "0" +__version__ = "1" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi @@ -17,7 +17,7 @@ def create_atlas(working_dir, resolution): RES_UM = resolution # 100 ATLAS_NAME = "example_mouse" SPECIES = "Mus musculus" - ATLAS_LINK = "http://www.brain-map.org.com" + ATLAS_LINK = "http://www.brain-map.com" CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" ORIENTATION = "asr" From 24c9e8afd7271aa3d1038240bbcb46fcd0d488a2 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Fri, 4 Sep 2020 10:38:34 +0200 Subject: [PATCH 011/103] Trying to fix windows permission 
error when deleting folders --- bg_atlasgen/main_script.py | 46 ++++++++++++++++++++++++++------------ 1 file changed, 32 insertions(+), 14 deletions(-) diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index e4312646..b45d8dc1 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -4,29 +4,45 @@ import configparser import bg_atlasgen from importlib import import_module -import shutil from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name +import errno, os, stat, shutil + + +def handleRemoveReadonly(func, path, exc): + excvalue = exc[1] + if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: + os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 + func(path) + else: + raise + + +def delete_folder(path): + shutil.rmtree(path, ignore_errors=False, onerror=handleRemoveReadonly) + + # Main dictionary specifying which atlases to generate # and with which resolutions: GENERATION_DICT = dict(mpin_zfish=[1], - allen_mouse=[100], + allen_mouse=[10, 25, 100], example_mouse=[100],) -cwd = Path.home() / "bg_auto" +cwd = Path(r"D:\bg_auto")#Path.home() / "bg_auto" cwd.mkdir(exist_ok=True) if __name__ == "__main__": repo_path = cwd / "atlas_repo" if repo_path.exists(): - # Clear folder: - shutil.rmtree(repo_path) - repo_path.mkdir() + repo = Repo(repo_path) + repo.git.pull() + else: + repo_path.mkdir(exist_ok=True) - print("Cloning atlases repo...") - repo = Repo.clone_from("https://gin.g-node.org/brainglobe/atlases", repo_path) + print("Cloning atlases repo...") + repo = Repo.clone_from("https://gin.g-node.org/brainglobe/atlases", repo_path) # us = input("GIN-GNode user: ") # pw = input("GIN-GNode password: ") @@ -58,7 +74,7 @@ script_version = module.__version__ if bg_atlasgen_version > status["major_vers"] or \ - (bg_atlasgen_version > status["major_vers"] and + (bg_atlasgen_version == status["major_vers"] and script_version > status["minor_vers"]): # Loop over all resolutions: @@ -66,7 +82,7 
@@ print(f"Generating {name}, {resolution} um...") # Make working directory for atlas generation: - temp_dir = cwd / "tempdir" + temp_dir = cwd / f"tempdir_{name}_{resolution}" temp_dir.mkdir(exist_ok=True) # Create and compress atlas: @@ -74,7 +90,7 @@ # Move atlas to repo: shutil.move(str(output_filename), repo_path) - shutil.rmtree(temp_dir) + # delete_folder(temp_dir) # Update config file with new version: k = atlas_name_from_repr(name, resolution) @@ -83,15 +99,17 @@ conf.write(f) # Update log for commit message: - commit_log += f"{output_filename}.name, " + commit_log += f"{output_filename.stem}, " # Commit and push: try: repo.git.add(".") repo.git.commit('-m', commit_log) - repo.git.push() except GitCommandError: pass + repo.git.push() + # Clear folder: - shutil.rmtree(repo_path) + repo.close() + # delete_folder(repo_path) From 2fcf04f22210d89e349b51fa4f8dc90f85fc673f Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 4 Sep 2020 13:48:41 +0100 Subject: [PATCH 012/103] add 50um to allen mouse --- bg_atlasgen/main_script.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index b45d8dc1..0c724c24 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -25,7 +25,7 @@ def delete_folder(path): # Main dictionary specifying which atlases to generate # and with which resolutions: GENERATION_DICT = dict(mpin_zfish=[1], - allen_mouse=[10, 25, 100], + allen_mouse=[10, 25, 50, 100], example_mouse=[100],) From 97e4d1c0011125d6689f508dccb9a5c8cbf260f6 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 21 Oct 2020 10:00:33 +0200 Subject: [PATCH 013/103] v1.2 for allen mouse --- bg_atlasgen/atlas_scripts/allen_mouse.py | 4 ++-- bg_atlasgen/atlas_scripts/example_mouse.py | 4 ++-- bg_atlasgen/main_script.py | 23 +++++++++++----------- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py 
b/bg_atlasgen/atlas_scripts/allen_mouse.py index c13dda2a..3d0eac53 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -1,4 +1,4 @@ -__version__ = "1" +__version__ = "2" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi @@ -16,7 +16,7 @@ def create_atlas(working_dir, resolution): # Specify information about the atlas: ATLAS_NAME = "allen_mouse" SPECIES = "Mus musculus" - ATLAS_LINK = "http://www.brain-map.com" + ATLAS_LINK = "http://www.brain-map.org" CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" ORIENTATION = "asr" diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index 2519650d..9486d981 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -1,4 +1,4 @@ -__version__ = "1" +__version__ = "2" from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi @@ -17,7 +17,7 @@ def create_atlas(working_dir, resolution): RES_UM = resolution # 100 ATLAS_NAME = "example_mouse" SPECIES = "Mus musculus" - ATLAS_LINK = "http://www.brain-map.com" + ATLAS_LINK = "http://www.brain-map.org" CITATION = "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007" ORIENTATION = "asr" diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index 0c724c24..6fb15080 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -8,6 +8,15 @@ import errno, os, stat, shutil +# Main dictionary specifying which atlases to generate +# and with which resolutions: +GENERATION_DICT = dict(mpin_zfish=[1], + allen_mouse=[10, 25, 50, 100], + example_mouse=[100],) + + +CWD = Path.home() / "bg_auto" + def handleRemoveReadonly(func, path, exc): excvalue = exc[1] @@ -22,19 +31,11 @@ def delete_folder(path): shutil.rmtree(path, 
ignore_errors=False, onerror=handleRemoveReadonly) -# Main dictionary specifying which atlases to generate -# and with which resolutions: -GENERATION_DICT = dict(mpin_zfish=[1], - allen_mouse=[10, 25, 50, 100], - example_mouse=[100],) - - -cwd = Path(r"D:\bg_auto")#Path.home() / "bg_auto" -cwd.mkdir(exist_ok=True) +CWD.mkdir(exist_ok=True) if __name__ == "__main__": - repo_path = cwd / "atlas_repo" + repo_path = CWD / "atlas_repo" if repo_path.exists(): repo = Repo(repo_path) repo.git.pull() @@ -82,7 +83,7 @@ def delete_folder(path): print(f"Generating {name}, {resolution} um...") # Make working directory for atlas generation: - temp_dir = cwd / f"tempdir_{name}_{resolution}" + temp_dir = CWD / f"tempdir_{name}_{resolution}" temp_dir.mkdir(exist_ok=True) # Create and compress atlas: From 375a03c5a342da16c0cc880d69d6039e108c3670 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 21 Oct 2020 10:10:25 +0200 Subject: [PATCH 014/103] blacked --- bg_atlasgen/atlas_scripts/example_mouse.py | 2 +- .../atlas_scripts/kim_unified_atlas.py | 11 +- bg_atlasgen/atlas_scripts/mpin_zfish.py | 7 +- bg_atlasgen/atlas_scripts/template_script.py | 12 +- bg_atlasgen/main_script.py | 30 +++-- bg_atlasgen/mesh_utils.py | 120 +++++++++--------- bg_atlasgen/metadata_utils.py | 18 +-- bg_atlasgen/structures.py | 24 ++-- bg_atlasgen/test_git.py | 1 - bg_atlasgen/volume_utils.py | 42 +++--- bg_atlasgen/wrapup.py | 5 +- 11 files changed, 138 insertions(+), 134 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index 9486d981..75a71d20 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -112,4 +112,4 @@ def create_atlas(working_dir, resolution): bg_root_dir = Path.home() / "brainglobe_workingdir" / "example" bg_root_dir.mkdir(exist_ok=True) - create_atlas(working_dir, 100) + # create_atlas(working_dir, 100) diff --git a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py 
b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py index ae69d797..f0154db2 100644 --- a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py +++ b/bg_atlasgen/atlas_scripts/kim_unified_atlas.py @@ -6,7 +6,6 @@ import time import multiprocessing as mp from pathlib import Path -from brainio.brainio import load_any from allensdk.core.reference_space_cache import ReferenceSpaceCache # import sys @@ -29,12 +28,11 @@ def create_atlas(working_dir, resolution=10): paxinos_allen_directory = Path( r"C:\Users\Federico\Downloads\kim_atlas_materials.tar\kim_atlas_materials" ) - annotations_image = paxinos_allen_directory / "annotations_coronal.tif" + # annotations_image = paxinos_allen_directory / "annotations_coronal.tif" structures_file = paxinos_allen_directory / "structures.csv" # assume isotropic - ANNOTATIONS_RES_UM = 10 - + # ANNOTATIONS_RES_UM = 10 # Generated atlas path: bg_root_dir = Path.home() / ".brainglobe" @@ -56,7 +54,7 @@ def create_atlas(working_dir, resolution=10): # Load (and possibly downsample) annotated volume: ######################################### - ### Load annotation from Kim + # Load annotation from Kim # Download annotated and template volume: ######################################### @@ -115,7 +113,7 @@ def create_atlas(working_dir, resolution=10): ] # stuff we don't need meshes for for drop in drop_from_tree: print("Dropping from structures tree: ", drop) - dropped = tree.remove_subtree( + tree.remove_subtree( [nid for nid, n in tree.nodes.items() if n.tag == drop][0] ) @@ -134,7 +132,6 @@ def create_atlas(working_dir, resolution=10): node.data = Region(is_label) - # Mesh creation closing_n_iters = 2 start = time.time() diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index 325a2cc2..e240c071 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -17,8 +17,7 @@ def download_line_stack(bg_root_dir, tg_line_name): - """Utility function to download a line from 
its name. - """ + """Utility function to download a line from its name.""" reference_url = f"{BASE_URL}/media/brain_browser/Lines/{tg_line_name}/AverageData/Tiff_File/Average_{tg_line_name}.zip" out_file_path = bg_root_dir / f"{tg_line_name}.zip" retrieve_over_http(reference_url, out_file_path) @@ -29,7 +28,7 @@ def download_line_stack(bg_root_dir, tg_line_name): def add_path_inplace(parent): - """ Recursively traverse hierarchy of regions and append for each region + """Recursively traverse hierarchy of regions and append for each region the full path of substructures in brainglobe standard list. Parameters @@ -51,7 +50,7 @@ def add_path_inplace(parent): def collect_all_inplace( node, traversing_list, download_path, mesh_dict, ): - """ Recursively traverse a region hierarchy, download meshes, and append + """Recursively traverse a region hierarchy, download meshes, and append regions to a list inplace. Parameters diff --git a/bg_atlasgen/atlas_scripts/template_script.py b/bg_atlasgen/atlas_scripts/template_script.py index 197d05be..3a967ff7 100644 --- a/bg_atlasgen/atlas_scripts/template_script.py +++ b/bg_atlasgen/atlas_scripts/template_script.py @@ -31,11 +31,11 @@ def create_atlas(working_dir, resolution): ORIENTATION = "" # do stuff to create the atlas - template_volume = # volume with reference - annotated_volume = # volume with structures annotations - structures_list = # list of valid structure dictionaries - meshes_dict = # dictionary of files with region meshes - root_id = # id of the root structure + template_volume = None # volume with reference + annotated_volume = None # volume with structures annotations + structures_list = None # list of valid structure dictionaries + meshes_dict = None # dictionary of files with region meshes + root_id = None # id of the root structure # Put here additional reference stacks # (different genotypes, filtered volumes, etc) @@ -72,4 +72,4 @@ def create_atlas(working_dir, resolution): bg_root_dir = "/path/to/some/dir" 
bg_root_dir.mkdir(exist_ok=True) - create_atlas(bg_root_dir, resolution) \ No newline at end of file + create_atlas(bg_root_dir, resolution) diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index 6fb15080..207ac97b 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -6,13 +6,16 @@ from importlib import import_module from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name -import errno, os, stat, shutil +import errno +import os +import stat +import shutil # Main dictionary specifying which atlases to generate # and with which resolutions: -GENERATION_DICT = dict(mpin_zfish=[1], - allen_mouse=[10, 25, 50, 100], - example_mouse=[100],) +GENERATION_DICT = dict( + mpin_zfish=[1], allen_mouse=[10, 25, 50, 100], example_mouse=[100], +) CWD = Path.home() / "bg_auto" @@ -21,7 +24,7 @@ def handleRemoveReadonly(func, path, exc): excvalue = exc[1] if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: - os.chmod(path, stat.S_IRWXU| stat.S_IRWXG| stat.S_IRWXO) # 0777 + os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777 func(path) else: raise @@ -43,7 +46,9 @@ def delete_folder(path): repo_path.mkdir(exist_ok=True) print("Cloning atlases repo...") - repo = Repo.clone_from("https://gin.g-node.org/brainglobe/atlases", repo_path) + repo = Repo.clone_from( + "https://gin.g-node.org/brainglobe/atlases", repo_path + ) # us = input("GIN-GNode user: ") # pw = input("GIN-GNode password: ") @@ -74,9 +79,10 @@ def delete_folder(path): module = import_module(f"bg_atlasgen.atlas_scripts.{name}") script_version = module.__version__ - if bg_atlasgen_version > status["major_vers"] or \ - (bg_atlasgen_version == status["major_vers"] and - script_version > status["minor_vers"]): + if bg_atlasgen_version > status["major_vers"] or ( + bg_atlasgen_version == status["major_vers"] + and script_version > status["minor_vers"] + ): # Loop over all resolutions: for resolution in resolutions: @@ -95,7 +101,9 @@ def 
delete_folder(path): # Update config file with new version: k = atlas_name_from_repr(name, resolution) - conf["atlases"][k] = str(f"{bg_atlasgen_version}.{script_version}") + conf["atlases"][k] = str( + f"{bg_atlasgen_version}.{script_version}" + ) with open(repo_path / "last_versions.conf", "w") as f: conf.write(f) @@ -105,7 +113,7 @@ def delete_folder(path): # Commit and push: try: repo.git.add(".") - repo.git.commit('-m', commit_log) + repo.git.commit("-m", commit_log) except GitCommandError: pass diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index d00cbf4e..6669ea00 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -37,33 +37,33 @@ def extract_mesh_from_mask( tol=0.0005, use_marching_cubes=False, ): - """ - Returns a vtkplotter mesh actor with just the outer surface of a - binary mask volume. It's faster though less accurate than - extract_mesh_from_mask - - - Parameters - ---------- - obj_filepath: str or Path object - path to where the .obj mesh file will be saved - volume: 3d np.ndarray - threshold: float - min value to threshold the volume for isosurface extraction - smooth: bool - if True the surface mesh is smoothed - use_marching_cubes: bool: - if true PyMCubes is used to extract the volume's surface - it's slower and less accurate than vtkplotter though. - mcubes_smooth: bool, - if True mcubes.smooth is used before applying marching cubes - closing_n_iters: int - number of iterations of closing morphological operation. - set to None to avoid applying morphological operations - decimate: bool - If True the number of vertices is reduced through decimation - tol: float - parameter for decimation, larger values correspond to more aggressive decimation + """ + Returns a vtkplotter mesh actor with just the outer surface of a + binary mask volume. 
It's faster though less accurate than + extract_mesh_from_mask + + + Parameters + ---------- + obj_filepath: str or Path object + path to where the .obj mesh file will be saved + volume: 3d np.ndarray + threshold: float + min value to threshold the volume for isosurface extraction + smooth: bool + if True the surface mesh is smoothed + use_marching_cubes: bool: + if true PyMCubes is used to extract the volume's surface + it's slower and less accurate than vtkplotter though. + mcubes_smooth: bool, + if True mcubes.smooth is used before applying marching cubes + closing_n_iters: int + number of iterations of closing morphological operation. + set to None to avoid applying morphological operations + decimate: bool + If True the number of vertices is reduced through decimation + tol: float + parameter for decimation, larger values correspond to more aggressive decimation """ # check savepath argument @@ -124,23 +124,23 @@ def extract_mesh_from_mask( def create_region_mesh(args): """ - Automates the creation of a region's mesh. Given a volume of annotations - and a structures tree, it takes the volume's region corresponding to the - region of interest and all of it's children's labels and creates a mesh. - It takes a tuple of arguments to facilitaed parallel processing with - multiprocessing.pool.map - - Note, by default it avoids overwriting a structure's mesh if the - .obj file exists already. - - Parameters - ---------- - meshes_dir_path: pathlib Path object with folder where meshes are saved - tree: treelib.Tree with hierarchical structures information - node: tree's node corresponding to the region who's mesh is being created - labels: list of unique label annotations in annotated volume (list(np.unique(annotated_volume))) - annotaed_volume: 3d numpy array with annotaed volume - ROOT_ID: int, id of root structure (mesh creation is a bit more refined for that) + Automates the creation of a region's mesh. 
Given a volume of annotations + and a structures tree, it takes the volume's region corresponding to the + region of interest and all of it's children's labels and creates a mesh. + It takes a tuple of arguments to facilitaed parallel processing with + multiprocessing.pool.map + + Note, by default it avoids overwriting a structure's mesh if the + .obj file exists already. + + Parameters + ---------- + meshes_dir_path: pathlib Path object with folder where meshes are saved + tree: treelib.Tree with hierarchical structures information + node: tree's node corresponding to the region who's mesh is being created + labels: list of unique label annotations in annotated volume (list(np.unique(annotated_volume))) + annotaed_volume: 3d numpy array with annotaed volume + ROOT_ID: int, id of root structure (mesh creation is a bit more refined for that) """ # Split arguments ( @@ -192,8 +192,8 @@ def create_region_mesh(args): class Region(object): """ - Class used to add metadata to treelib.Tree during atlas creation. Using this - means that you can then filter tree nodes depending on wether or not they have a mesh/label + Class used to add metadata to treelib.Tree during atlas creation. Using this + means that you can then filter tree nodes depending on wether or not they have a mesh/label """ def __init__(self, has_label): @@ -205,16 +205,16 @@ def __init__(self, has_label): # ---------------------------------------------------------------------------- # def compare_mesh_and_volume(mesh, volume): """ - Creates and interactive vtkplotter - visualisation to look at a reference volume - and a mesh at the same time. Can be used to - assess the quality of the mesh extraction. + Creates and interactive vtkplotter + visualisation to look at a reference volume + and a mesh at the same time. Can be used to + assess the quality of the mesh extraction. 
- Parameters: - ----------- + Parameters: + ----------- - mesh: vtkplotter Mesh - volume: np.array or vtkplotter Volume + mesh: vtkplotter Mesh + volume: np.array or vtkplotter Volume """ if isinstance(volume, np.ndarray): volume = Volume(volume) @@ -226,13 +226,13 @@ def compare_mesh_and_volume(mesh, volume): def inspect_meshses_folder(folder): """ - Used to create an interactive vtkplotter visualisation - to scroll through all .obj files saved in a folder + Used to create an interactive vtkplotter visualisation + to scroll through all .obj files saved in a folder - Parameters - ---------- - folder: str or Path object - path to folder with .obj files + Parameters + ---------- + folder: str or Path object + path to folder with .obj files """ if isinstance(folder, str): diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index 30a8747b..d2f1cbe7 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -120,15 +120,15 @@ def create_structures_csv(uncompr_atlas_path, root): def create_metadata_files(dest_dir, metadata_dict, structures, root_id): """ - Automatic creation of - . structures.csv - . README.txt - from an atlas files. All Files are saved in the uncompressed atlas folder - awaiting compression and upload to GIN. - - :param uncompr_atlas_path: path to uncompressed atlas folder - :param metadata_dict: dict with atlas metadata - :param structures: list of dictionaries with structures hierarchical info + Automatic creation of + . structures.csv + . README.txt + from an atlas files. All Files are saved in the uncompressed atlas folder + awaiting compression and upload to GIN. 
+ + :param uncompr_atlas_path: path to uncompressed atlas folder + :param metadata_dict: dict with atlas metadata + :param structures: list of dictionaries with structures hierarchical info """ # write metadata dict: with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f: diff --git a/bg_atlasgen/structures.py b/bg_atlasgen/structures.py index 573cfa9c..c8c90f81 100644 --- a/bg_atlasgen/structures.py +++ b/bg_atlasgen/structures.py @@ -30,11 +30,11 @@ def check_struct_consistency(structures): def get_structure_children(structures, region, use_tree=False): """ - Given a list of dictionaries with structures data, - and a structure from the list, this function returns - the structures in the list that are children of - the given structure (region). - If use_tree is true it creates a StructureTree and uses that. + Given a list of dictionaries with structures data, + and a structure from the list, this function returns + the structures in the list that are children of + the given structure (region). + If use_tree is true it creates a StructureTree and uses that. 
""" if not isinstance(structures, list): raise ValueError("structures should be a list") @@ -70,11 +70,11 @@ def get_structure_children(structures, region, use_tree=False): def get_structure_terminal_nodes(structures, region): """ - Given a list of dictionaries with structures data, - and a structure from the list, this function returns - the structures in the list that are children of - the given structure (region) that are leafs of the - struture tree + Given a list of dictionaries with structures data, + and a structure from the list, this function returns + the structures in the list that are children of + the given structure (region) that are leafs of the + struture tree """ tree = get_structures_tree(structures) @@ -98,8 +98,8 @@ def __init__(self, has_mesh): def show_which_structures_have_mesh(structures, meshes_dir): """ - It prints out a tree visualisation with - True for the regions that a mesh and false for the others + It prints out a tree visualisation with + True for the regions that a mesh and false for the others """ tree = get_structures_tree(structures) diff --git a/bg_atlasgen/test_git.py b/bg_atlasgen/test_git.py index ee327829..05b2ff40 100644 --- a/bg_atlasgen/test_git.py +++ b/bg_atlasgen/test_git.py @@ -18,4 +18,3 @@ # repo.git.commit('-m', 'test commit', author='luigi.petrucco@gmail.com') repo.git.pull() repo.git.push() - diff --git a/bg_atlasgen/volume_utils.py b/bg_atlasgen/volume_utils.py index ae54fdca..8dc3db7e 100644 --- a/bg_atlasgen/volume_utils.py +++ b/bg_atlasgen/volume_utils.py @@ -18,19 +18,19 @@ def create_masked_array(volume, label, greater_than=False): """ - Given a 2d o 3d numpy array and a - label value, creates a masked binary - array which is 1 when volume == label - and 0 otherwise - - Parameters - ---------- - volume: np.ndarray - (2d or 3d array) - label: int, float or list of int. 
- the masked array will be 1 where volume == label - greater_than: bool - if True, all voxels with value > label will be set to 1 + Given a 2d o 3d numpy array and a + label value, creates a masked binary + array which is 1 when volume == label + and 0 otherwise + + Parameters + ---------- + volume: np.ndarray + (2d or 3d array) + label: int, float or list of int. + the masked array will be 1 where volume == label + greater_than: bool + if True, all voxels with value > label will be set to 1 """ if not isinstance(volume, np.ndarray): raise ValueError( @@ -64,14 +64,14 @@ def create_masked_array(volume, label, greater_than=False): def load_labelled_volume(data, vmin=0, alpha=1, **kwargs): """ - Load volume image from .nrrd file. - It assume that voxels with value = 0 are empty while voxels with values > 0 - are labelles (e.g. to indicate the location of a brain region in a reference atlas) - - :param data: str, path to file with volume data or 3d numpy array - :param vmin: float, values below this numner will be assigned an alpha=0 and not be visualized - :param **kwargs: kwargs to pass to the Volume class from vtkplotter - :param alpha: float in range [0, 1], transparency [for the part of volume with value > vmin] + Load volume image from .nrrd file. + It assume that voxels with value = 0 are empty while voxels with values > 0 + are labelles (e.g. 
to indicate the location of a brain region in a reference atlas) + + :param data: str, path to file with volume data or 3d numpy array + :param vmin: float, values below this numner will be assigned an alpha=0 and not be visualized + :param **kwargs: kwargs to pass to the Volume class from vtkplotter + :param alpha: float in range [0, 1], transparency [for the part of volume with value > vmin] """ # Load/check volumetric data if isinstance(data, str): # load from file diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 52e47118..0b30a89e 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -107,8 +107,9 @@ def wrapup_atlas_from_data( # Check consistency of structures .json file: check_struct_consistency(structures_list) - atlas_dir_name = atlas_name_from_repr(atlas_name, resolution[0], - ATLAS_VERSION, atlas_minor_version) + atlas_dir_name = atlas_name_from_repr( + atlas_name, resolution[0], ATLAS_VERSION, atlas_minor_version + ) dest_dir = Path(working_dir) / atlas_dir_name # exist_ok would be more permissive but error-prone here as there might From bc32f7f218660730a9a205789c8acbb4dc56369c Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 15:03:06 +0100 Subject: [PATCH 015/103] Update requirements --- requirements.txt | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9b1ff66a..6976e29d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,8 +10,8 @@ tqdm>=4.46.1 bg-space allensdk sphinx -brainio>=0.0.16 -vtkplotter +imio +vedo recommonmark sphinx_rtd_theme pydoc-markdown @@ -21,4 +21,5 @@ pytest gitpython coverage pre-commit -PyMCubes \ No newline at end of file +PyMCubes +bg_atlasapi \ No newline at end of file From 61325753192a30081b50a7bdf4ae37e4cb74496b Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 15:03:48 +0100 Subject: [PATCH 016/103] vtkplotter -> vedo --- bg_atlasgen/atlas_scripts/ratatlas.py | 2 +- bg_atlasgen/mesh_utils.py | 
28 +++++++++++++-------------- bg_atlasgen/volume_utils.py | 14 +++++++------- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/ratatlas.py b/bg_atlasgen/atlas_scripts/ratatlas.py index f827df4a..b15f3903 100644 --- a/bg_atlasgen/atlas_scripts/ratatlas.py +++ b/bg_atlasgen/atlas_scripts/ratatlas.py @@ -18,7 +18,7 @@ from tqdm import tqdm -from vtkplotter import write, Volume +from vedo import write, Volume import sys diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 6669ea00..6ca5c547 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,10 +1,10 @@ try: - from vtkplotter import Mesh, write, load, show, Volume - from vtkplotter.applications import Browser, Slicer + from vedo import Mesh, write, load, show, Volume + from vedo.applications import Browser, Slicer except ModuleNotFoundError: raise ModuleNotFoundError( - "Mesh generation with these utils requires vtkplotter\n" - + ' please install with "pip install vtkplotter -U"' + "Mesh generation with these utils requires vedo\n" + + ' please install with "pip install vedo -U"' ) try: @@ -38,7 +38,7 @@ def extract_mesh_from_mask( use_marching_cubes=False, ): """ - Returns a vtkplotter mesh actor with just the outer surface of a + Returns a vedo mesh actor with just the outer surface of a binary mask volume. It's faster though less accurate than extract_mesh_from_mask @@ -54,7 +54,7 @@ def extract_mesh_from_mask( if True the surface mesh is smoothed use_marching_cubes: bool: if true PyMCubes is used to extract the volume's surface - it's slower and less accurate than vtkplotter though. + it's slower and less accurate than vedo though. 
mcubes_smooth: bool, if True mcubes.smooth is used before applying marching cubes closing_n_iters: int @@ -139,7 +139,7 @@ def create_region_mesh(args): tree: treelib.Tree with hierarchical structures information node: tree's node corresponding to the region who's mesh is being created labels: list of unique label annotations in annotated volume (list(np.unique(annotated_volume))) - annotaed_volume: 3d numpy array with annotaed volume + annotated_volume: 3d numpy array with annotaed volume ROOT_ID: int, id of root structure (mesh creation is a bit more refined for that) """ # Split arguments @@ -153,7 +153,7 @@ def create_region_mesh(args): closing_n_iters, ) = args - # Avoid ovewriting existing mesh + # Avoid overwriting existing mesh savepath = meshes_dir_path / f"{node.identifier}.obj" if savepath.exists(): return @@ -205,7 +205,7 @@ def __init__(self, has_label): # ---------------------------------------------------------------------------- # def compare_mesh_and_volume(mesh, volume): """ - Creates and interactive vtkplotter + Creates and interactive vedo visualisation to look at a reference volume and a mesh at the same time. Can be used to assess the quality of the mesh extraction. 
@@ -213,8 +213,8 @@ def compare_mesh_and_volume(mesh, volume): Parameters: ----------- - mesh: vtkplotter Mesh - volume: np.array or vtkplotter Volume + mesh: vedo Mesh + volume: np.array or vtkvedoplotter Volume """ if isinstance(volume, np.ndarray): volume = Volume(volume) @@ -224,9 +224,9 @@ def compare_mesh_and_volume(mesh, volume): vp.show() -def inspect_meshses_folder(folder): +def inspect_meshes_folder(folder): """ - Used to create an interactive vtkplotter visualisation + Used to create an interactive vedo visualisation to scroll through all .obj files saved in a folder Parameters @@ -249,4 +249,4 @@ def inspect_meshses_folder(folder): folder = ( r"C:\Users\Federico\.brainglobe\temp\allen_human_500um_v0.1\meshes" ) - inspect_meshses_folder(folder) + inspect_meshes_folder(folder) diff --git a/bg_atlasgen/volume_utils.py b/bg_atlasgen/volume_utils.py index 8dc3db7e..7de0ccf9 100644 --- a/bg_atlasgen/volume_utils.py +++ b/bg_atlasgen/volume_utils.py @@ -3,14 +3,14 @@ extracting surfaces from volumetric data .... 
""" try: - from vtkplotter import Volume + from vedo import Volume except ModuleNotFoundError: raise ModuleNotFoundError( - "Mesh generation with these utils requires vtkplotter\n" - + ' please install with "pip install vtkplotter -U"' + "Mesh generation with these utils requires vedo\n" + + ' please install with "pip install vedo -U"' ) -from brainio import brainio +import imio import os import numpy as np @@ -57,7 +57,7 @@ def create_masked_array(volume, label, greater_than=False): return arr -# ----------------------------- vtkplotter utils ----------------------------- # +# ----------------------------- vedo utils ----------------------------- # # This stuff is outdated, use the functions in mesh_utils.py # to extract meshes from volumes @@ -70,7 +70,7 @@ def load_labelled_volume(data, vmin=0, alpha=1, **kwargs): :param data: str, path to file with volume data or 3d numpy array :param vmin: float, values below this numner will be assigned an alpha=0 and not be visualized - :param **kwargs: kwargs to pass to the Volume class from vtkplotter + :param **kwargs: kwargs to pass to the Volume class from vedo :param alpha: float in range [0, 1], transparency [for the part of volume with value > vmin] """ # Load/check volumetric data @@ -79,7 +79,7 @@ def load_labelled_volume(data, vmin=0, alpha=1, **kwargs): raise FileNotFoundError(f"Volume data file {data} not found") try: - data = brainio.load_any(data) + data = imio.load_any(data) except Exception as e: raise ValueError( f"Could not load volume data from file: {data} - {e}" From 2bcd6f3a575a4ef2bb0b08fd27ab87361951d2b0 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 15:04:06 +0100 Subject: [PATCH 017/103] Format with black --- bg_atlasgen/atlas_scripts/mpin_zfish.py | 5 ++++- bg_atlasgen/main_script.py | 4 +++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index e240c071..b8280191 100644 --- 
a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -48,7 +48,10 @@ def add_path_inplace(parent): def collect_all_inplace( - node, traversing_list, download_path, mesh_dict, + node, + traversing_list, + download_path, + mesh_dict, ): """Recursively traverse a region hierarchy, download meshes, and append regions to a list inplace. diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index 207ac97b..bebbcfa1 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -14,7 +14,9 @@ # Main dictionary specifying which atlases to generate # and with which resolutions: GENERATION_DICT = dict( - mpin_zfish=[1], allen_mouse=[10, 25, 50, 100], example_mouse=[100], + mpin_zfish=[1], + allen_mouse=[10, 25, 50, 100], + example_mouse=[100], ) From dfcadc8b990d02a80694663b68096d62a1d34c77 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 15:04:29 +0100 Subject: [PATCH 018/103] Update kim mouse atlas generation --- .../{kim_unified_atlas.py => kim_mouse.py} | 91 +++++++++---------- 1 file changed, 44 insertions(+), 47 deletions(-) rename bg_atlasgen/atlas_scripts/{kim_unified_atlas.py => kim_mouse.py} (75%) diff --git a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py b/bg_atlasgen/atlas_scripts/kim_mouse.py similarity index 75% rename from bg_atlasgen/atlas_scripts/kim_unified_atlas.py rename to bg_atlasgen/atlas_scripts/kim_mouse.py index f0154db2..5e7d552b 100644 --- a/bg_atlasgen/atlas_scripts/kim_unified_atlas.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -1,65 +1,70 @@ __version__ = "0" import json +import imio from rich.progress import track import pandas as pd + import numpy as np import time +import tarfile import multiprocessing as mp + from pathlib import Path from allensdk.core.reference_space_cache import ReferenceSpaceCache +from bg_atlasapi import utils -# import sys -# sys.path.append("./") from bg_atlasgen.mesh_utils import create_region_mesh, Region from bg_atlasgen.wrapup 
import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree -PARALLEL = True # disable parallel mesh extraction for easier debugging +PARALLEL = False # disable parallel mesh extraction for easier debugging -def create_atlas(working_dir, resolution=10): - ATLAS_NAME = "kim_unified" +def create_atlas(working_dir, resolution): + ATLAS_NAME = "kim_mouse" SPECIES = "Mus musculus" ATLAS_LINK = "https://kimlab.io/brain-map/atlas/" CITATION = "Chon et al. 2019, https://doi.org/10.1038/s41467-019-13057-w" ORIENTATION = "asr" ROOT_ID = 997 - paxinos_allen_directory = Path( - r"C:\Users\Federico\Downloads\kim_atlas_materials.tar\kim_atlas_materials" - ) - # annotations_image = paxinos_allen_directory / "annotations_coronal.tif" - structures_file = paxinos_allen_directory / "structures.csv" + ANNOTATIONS_RES_UM = 10 + ATLAS_FILE_URL = "https://gin.g-node.org/brainglobe/kim_atlas_materials/raw/master/kim_atlas_materials.tar.gz" - # assume isotropic - # ANNOTATIONS_RES_UM = 10 + # Temporary folder for download: + download_dir_path = working_dir / "downloading_path" + download_dir_path.mkdir(exist_ok=True) + atlas_files_dir = download_dir_path / "atlas_files" - # Generated atlas path: - bg_root_dir = Path.home() / ".brainglobe" - bg_root_dir.mkdir(exist_ok=True) + ## Download atlas_file + utils.check_internet_connection() - # Temporary folder for nrrd files download: - temp_path = Path(r"C:\Users\Federico\.brainglobe\kimdev") - temp_path.mkdir(exist_ok=True) - downloading_path = temp_path / "downloading_path" - downloading_path.mkdir(exist_ok=True) + destination_path = download_dir_path / "atlas_download" + utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) - # Temporary folder for files before compressing: - uncompr_atlas_path = temp_path / ATLAS_NAME - uncompr_atlas_path.mkdir(exist_ok=True) + tar = tarfile.open(destination_path) + tar.extractall(path=atlas_files_dir) + tar.close() + + destination_path.unlink() + + structures_file = 
atlas_files_dir / "kim_atlas" / "structures.csv" + annotations_file = atlas_files_dir / "kim_atlas" / "annotation.tiff" # ---------------------------------------------------------------------------- # # GET TEMPLATE # # ---------------------------------------------------------------------------- # # Load (and possibly downsample) annotated volume: - ######################################### - # Load annotation from Kim + scaling = ANNOTATIONS_RES_UM / resolution + annotated_volume = imio.load_img_stack( + annotations_file, scaling, scaling, scaling, anti_aliasing=False + ) # Download annotated and template volume: ######################################### spacecache = ReferenceSpaceCache( - manifest=working_dir / "manifest.json", + manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=resolution, reference_space_key="annotation/ccf_2017" @@ -67,7 +72,6 @@ def create_atlas(working_dir, resolution=10): ) # Download - annotated_volume, _ = spacecache.get_annotation_volume() template_volume, _ = spacecache.get_template_volume() print("Download completed...") @@ -96,32 +100,16 @@ def create_atlas(working_dir, resolution=10): structure["structure_id_path"].append(structure["id"]) # save regions list json: - with open(uncompr_atlas_path / "structures.json", "w") as f: + with open(download_dir_path / "structures.json", "w") as f: json.dump(structures, f) # Create meshes: - print(f"Saving atlas data at {uncompr_atlas_path}") - meshes_dir_path = uncompr_atlas_path / "meshes" + print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = download_dir_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) - # Create and prune structures tree tree = get_structures_tree(structures) - drop_from_tree = [ - "fiber_tracts", - "VentSys", - "bas", - ] # stuff we don't need meshes for - for drop in drop_from_tree: - print("Dropping from structures tree: ", drop) - tree.remove_subtree( - [nid for nid, n in tree.nodes.items() if 
n.tag == drop][0] - ) - print( - f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" - ) - - # Create a tree marking which brain regions are shown in the annotation labels = np.unique(annotated_volume).astype(np.int32) for key, node in tree.nodes.items(): @@ -222,7 +210,7 @@ def create_atlas(working_dir, resolution=10): annotation_stack=annotated_volume, structures_list=structures_with_mesh, meshes_dict=meshes_dict, - working_dir=bg_root_dir, + working_dir=working_dir, hemispheres_stack=None, cleanup_files=False, compress=True, @@ -230,3 +218,12 @@ def create_atlas(working_dir, resolution=10): ) return output_filename + + +if __name__ == "__main__": + resolution = 100 # some resolution, in microns + + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" + bg_root_dir.mkdir(exist_ok=True, parents=True) + create_atlas(bg_root_dir, resolution) From c0cbf69c8e13ed1719ffaf137b592fd393dfede1 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 16:21:44 +0100 Subject: [PATCH 019/103] Improve downsampling --- bg_atlasgen/atlas_scripts/kim_mouse.py | 28 ++++++++++++++++---------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/bg_atlasgen/atlas_scripts/kim_mouse.py index 5e7d552b..cccc77c8 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -1,19 +1,20 @@ __version__ = "0" -import json -import imio -from rich.progress import track -import pandas as pd -import numpy as np +import json import time import tarfile +import tifffile + +import pandas as pd +import numpy as np import multiprocessing as mp +from rich.progress import track from pathlib import Path +from scipy.ndimage import zoom from allensdk.core.reference_space_cache import ReferenceSpaceCache from bg_atlasapi import utils - from bg_atlasgen.mesh_utils import create_region_mesh, Region from bg_atlasgen.wrapup import wrapup_atlas_from_data 
from bg_atlasapi.structure_tree_util import get_structures_tree @@ -32,7 +33,7 @@ def create_atlas(working_dir, resolution): ATLAS_FILE_URL = "https://gin.g-node.org/brainglobe/kim_atlas_materials/raw/master/kim_atlas_materials.tar.gz" # Temporary folder for download: - download_dir_path = working_dir / "downloading_path" + download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) atlas_files_dir = download_dir_path / "atlas_files" @@ -57,8 +58,10 @@ def create_atlas(working_dir, resolution): # Load (and possibly downsample) annotated volume: scaling = ANNOTATIONS_RES_UM / resolution - annotated_volume = imio.load_img_stack( - annotations_file, scaling, scaling, scaling, anti_aliasing=False + + annotated_volume = tifffile.imread(annotations_file) + annotated_volume = zoom( + annotated_volume, (scaling, scaling, scaling), order=0, prefilter=False ) # Download annotated and template volume: @@ -110,7 +113,10 @@ def create_atlas(working_dir, resolution): tree = get_structures_tree(structures) - labels = np.unique(annotated_volume).astype(np.int32) + rot_vol = np.rot90(annotated_volume, axes=(0, 1)) + labels = np.unique(rot_vol).astype(np.int32) + + # labels = np.unique(annotated_volume).astype(np.int32) for key, node in tree.nodes.items(): if key in labels: @@ -221,7 +227,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - resolution = 100 # some resolution, in microns + resolution = 25 # some resolution, in microns # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" From 1c631914050f511caa9e551bc193ba2213a9c843 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 17:13:35 +0100 Subject: [PATCH 020/103] revert roations --- bg_atlasgen/atlas_scripts/kim_mouse.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/bg_atlasgen/atlas_scripts/kim_mouse.py index cccc77c8..1782a5a1 100644 --- 
a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -113,10 +113,7 @@ def create_atlas(working_dir, resolution): tree = get_structures_tree(structures) - rot_vol = np.rot90(annotated_volume, axes=(0, 1)) - labels = np.unique(rot_vol).astype(np.int32) - - # labels = np.unique(annotated_volume).astype(np.int32) + labels = np.unique(annotated_volume).astype(np.int32) for key, node in tree.nodes.items(): if key in labels: @@ -227,7 +224,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - resolution = 25 # some resolution, in microns + resolution = 100 # some resolution, in microns # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" From e821b8d90b97afef41ba929d731a1b61ed678720 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 23 Oct 2020 17:20:53 +0100 Subject: [PATCH 021/103] add kim to main script --- bg_atlasgen/main_script.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index bebbcfa1..ab4f2650 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -16,6 +16,7 @@ GENERATION_DICT = dict( mpin_zfish=[1], allen_mouse=[10, 25, 50, 100], + kim_mouse=[10, 25, 50, 100], example_mouse=[100], ) From 2d82ad7db850df823de3cf2a9ebbdbb87eaf96b3 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Mon, 26 Oct 2020 14:10:38 +0000 Subject: [PATCH 022/103] rotate annotation volume for meshes --- bg_atlasgen/atlas_scripts/kim_mouse.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/bg_atlasgen/atlas_scripts/kim_mouse.py index 1782a5a1..c25996a9 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -113,8 +113,9 @@ def create_atlas(working_dir, resolution): tree = get_structures_tree(structures) - labels = np.unique(annotated_volume).astype(np.int32) + rotated_annotations = 
np.rot90(annotated_volume, axes=(0, 2)) + labels = np.unique(rotated_annotations).astype(np.int32) for key, node in tree.nodes.items(): if key in labels: is_label = True @@ -139,7 +140,7 @@ def create_atlas(working_dir, resolution): node, tree, labels, - annotated_volume, + rotated_annotations, ROOT_ID, closing_n_iters, ) @@ -160,7 +161,7 @@ def create_atlas(working_dir, resolution): node, tree, labels, - annotated_volume, + rotated_annotations, ROOT_ID, closing_n_iters, ) @@ -224,7 +225,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - resolution = 100 # some resolution, in microns + resolution = 50 # some resolution, in microns # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" From 2c0962f426fc01d6a449f682ad8a9c7207ad7f43 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Thu, 3 Dec 2020 18:18:26 +0000 Subject: [PATCH 023/103] add osten mouse atlas --- bg_atlasgen/atlas_scripts/osten_mouse.py | 236 +++++++++++++++++++++++ bg_atlasgen/main_script.py | 1 + 2 files changed, 237 insertions(+) create mode 100644 bg_atlasgen/atlas_scripts/osten_mouse.py diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/bg_atlasgen/atlas_scripts/osten_mouse.py new file mode 100644 index 00000000..e1251ec6 --- /dev/null +++ b/bg_atlasgen/atlas_scripts/osten_mouse.py @@ -0,0 +1,236 @@ +__version__ = "0" + +import json +import time +import tarfile +import tifffile + +import pandas as pd +import numpy as np +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path +from scipy.ndimage import zoom +from allensdk.core.reference_space_cache import ReferenceSpaceCache +from bg_atlasapi import utils + +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + +PARALLEL = False # disable parallel mesh extraction for easier debugging + + +def create_atlas(working_dir, 
resolution): + ATLAS_NAME = "osten_mouse" + SPECIES = "Mus musculus" + ATLAS_LINK = "https://doi.org/10.1016/j.celrep.2014.12.014" + CITATION = "Kim et al. 2015, https://doi.org/10.1016/j.celrep.2014.12.014" + ORIENTATION = "asr" + ROOT_ID = 997 + ANNOTATIONS_RES_UM = 10 + ATLAS_FILE_URL = "https://gin.g-node.org/brainglobe/osten_atlas_materials/raw/master/osten_atlas_materials.tar.gz" + + # Temporary folder for download: + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + atlas_files_dir = download_dir_path / "atlas_files" + + ## Download atlas_file + utils.check_internet_connection() + + destination_path = download_dir_path / "atlas_download" + utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) + + tar = tarfile.open(destination_path) + tar.extractall(path=atlas_files_dir) + tar.close() + + destination_path.unlink() + + structures_file = atlas_files_dir / "osten_atlas" / "structures.csv" + annotations_file = atlas_files_dir / "osten_atlas" / "annotation.tiff" + + # ---------------------------------------------------------------------------- # + # GET TEMPLATE # + # ---------------------------------------------------------------------------- # + + # Load (and possibly downsample) annotated volume: + scaling = ANNOTATIONS_RES_UM / resolution + + annotated_volume = tifffile.imread(annotations_file) + annotated_volume = zoom( + annotated_volume, (scaling, scaling, scaling), order=0, prefilter=False + ) + + # Download annotated and template volume: + ######################################### + spacecache = ReferenceSpaceCache( + manifest=download_dir_path / "manifest.json", + # downloaded files are stored relative to here + resolution=resolution, + reference_space_key="annotation/ccf_2017" + # use the latest version of the CCF + ) + + # Download + template_volume, _ = spacecache.get_template_volume() + print("Download completed...") + + # ---------------------------------------------------------------------------- # + # 
STRUCTURES HIERARCHY # + # ---------------------------------------------------------------------------- # + + # Parse region names & hierarchy + # ############################## + df = pd.read_csv(structures_file) + df = df.drop(columns=["parent_id"]) + + # split by "/" and convert list of strings to list of ints + df["structure_id_path"] = ( + df["structure_id_path"] + .str.split(pat="/") + .map(lambda x: [int(i) for i in x]) + ) + df["structure_id_path"] = ( + df["structure_id_path"] + .map(lambda x: x[:-1]) + ) + structures = df.to_dict("records") + structures[0000]["structure_id_path"] = [997] + for structure in structures: + structure.update({"rgb_triplet": [255, 255, 255]}) + # root doesn't have a parent + if structure["id"] != 997: + structure["structure_id_path"].append(structure["id"]) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # Create meshes: + print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + rotated_annotations = np.rot90(annotated_volume, axes=(0, 2)) + + labels = np.unique(rotated_annotations).astype(np.int32) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + rotated_annotations, + ROOT_ID, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with returning results from pool.map but we don't care + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + 
rotated_annotations, + ROOT_ID, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # ---------------------------------------------------------------------------- # + # WRAP UP # + # ---------------------------------------------------------------------------- # + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(resolution,) * 3, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True, + ) + + return output_filename + + +if __name__ == "__main__": + resolution = 100 # some resolution, in microns + + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "osten_mouse" + bg_root_dir.mkdir(exist_ok=True, parents=True) + create_atlas(bg_root_dir, resolution) diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index ab4f2650..1fe45725 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -17,6 +17,7 
@@ mpin_zfish=[1], allen_mouse=[10, 25, 50, 100], kim_mouse=[10, 25, 50, 100], + osten_mouse=[10, 25, 50, 100], example_mouse=[100], ) From 426464913bac45f57c10ae593f60af496f5534c4 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 30 Jun 2021 15:37:32 +0200 Subject: [PATCH 024/103] Added function for creating intermediate hierarchy structures masks --- bg_atlasgen/mesh_utils.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 6669ea00..6c863d4b 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -21,6 +21,36 @@ import scipy from bg_atlasgen.volume_utils import create_masked_array + +def region_mask_from_annotation(structure_id, + annotation, + structures_list, + ): + """Generate mask for a structure from an annotation file + and a list of structures. + + Parameters + ---------- + structure_id : int + id of the structure + annotation : np.array + annotation stack for the atlas + structures_list : list + list of structure dictionaries + + Returns + ------- + + """ + + mask_stack = np.zeros(annotation.shape, np.uint8) + + for curr_structure in structures_list: + if structure_id in curr_structure["structure_id_path"]: + mask_stack[annotation == curr_structure["id"]] = 1 + + return mask_stack + # ---------------------------------------------------------------------------- # # MESH CREATION # # ---------------------------------------------------------------------------- # From bc7518d16c4d2754e248a16cbad5809ec821962f Mon Sep 17 00:00:00 2001 From: Mathieu Date: Wed, 21 Jul 2021 14:14:59 +0200 Subject: [PATCH 025/103] Fix extract_largest_region and compare_mesh_volume --- bg_atlasgen/mesh_utils.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 5852cf64..c2bd8ca0 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,6 +1,6 @@ try: from vedo 
import Mesh, write, load, show, Volume - from vedo.applications import Browser, Slicer + from vedo.applications import Browser, SlicerPlotter except ModuleNotFoundError: raise ModuleNotFoundError( "Mesh generation with these utils requires vedo\n" @@ -66,6 +66,7 @@ def extract_mesh_from_mask( decimate=True, tol=0.0005, use_marching_cubes=False, + extract_largest=False, ): """ Returns a vedo mesh actor with just the outer surface of a @@ -94,6 +95,9 @@ def extract_mesh_from_mask( If True the number of vertices is reduced through decimation tol: float parameter for decimation, larger values correspond to more aggressive decimation + extract_largest: bool + If True only the largest region are extracted. It can cause issues for + bilateral regions as only one will remain """ # check savepath argument @@ -144,7 +148,8 @@ def extract_mesh_from_mask( if decimate: mesh.clean(tol=tol) - mesh = mesh.extractLargestRegion() + if extract_largest: + mesh = mesh.extractLargestRegion() if obj_filepath is not None: write(mesh, str(obj_filepath)) @@ -249,7 +254,7 @@ def compare_mesh_and_volume(mesh, volume): if isinstance(volume, np.ndarray): volume = Volume(volume) - vp = Slicer(volume, bg2="white", showHisto=False) + vp = SlicerPlotter(volume, bg2="white", showHisto=False) vp.add(mesh.alpha(0.5)) vp.show() From 370dfb03de465f8c76f5c19cddfec08a2ab92c31 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 6 Aug 2021 16:59:49 +0100 Subject: [PATCH 026/103] reformat with black --- bg_atlasgen/atlas_scripts/osten_mouse.py | 7 ++----- bg_atlasgen/mesh_utils.py | 10 ++++++---- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/bg_atlasgen/atlas_scripts/osten_mouse.py index e1251ec6..f5580203 100644 --- a/bg_atlasgen/atlas_scripts/osten_mouse.py +++ b/bg_atlasgen/atlas_scripts/osten_mouse.py @@ -93,10 +93,7 @@ def create_atlas(working_dir, resolution): .str.split(pat="/") .map(lambda x: [int(i) for i in x]) ) - df["structure_id_path"] 
= ( - df["structure_id_path"] - .map(lambda x: x[:-1]) - ) + df["structure_id_path"] = df["structure_id_path"].map(lambda x: x[:-1]) structures = df.to_dict("records") structures[0000]["structure_id_path"] = [997] for structure in structures: @@ -228,7 +225,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - resolution = 100 # some resolution, in microns + resolution = 100 # some resolution, in microns # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "osten_mouse" diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index c2bd8ca0..b03c07e4 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -22,10 +22,11 @@ from bg_atlasgen.volume_utils import create_masked_array -def region_mask_from_annotation(structure_id, - annotation, - structures_list, - ): +def region_mask_from_annotation( + structure_id, + annotation, + structures_list, +): """Generate mask for a structure from an annotation file and a list of structures. 
@@ -51,6 +52,7 @@ def region_mask_from_annotation(structure_id, return mask_stack + # ---------------------------------------------------------------------------- # # MESH CREATION # # ---------------------------------------------------------------------------- # From 707905a07d6b96e666124a0846432dcd4f3cf87e Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 6 Aug 2021 17:03:50 +0100 Subject: [PATCH 027/103] save additional metadata --- bg_atlasgen/metadata_utils.py | 8 ++++++-- bg_atlasgen/wrapup.py | 11 ++++++++++- setup.py | 10 ++++------ 3 files changed, 20 insertions(+), 9 deletions(-) diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index d2f1cbe7..39183ab9 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -118,7 +118,9 @@ def create_structures_csv(uncompr_atlas_path, root): ) -def create_metadata_files(dest_dir, metadata_dict, structures, root_id): +def create_metadata_files( + dest_dir, metadata_dict, structures, root_id, additional_metadata={} +): """ Automatic creation of . 
structures.csv @@ -129,10 +131,12 @@ def create_metadata_files(dest_dir, metadata_dict, structures, root_id): :param uncompr_atlas_path: path to uncompressed atlas folder :param metadata_dict: dict with atlas metadata :param structures: list of dictionaries with structures hierarchical info + :param additional_metadata: Dict to add to atlas metadata """ # write metadata dict: with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f: - json.dump(metadata_dict, f) + # only save additional metadata to json, don't include in readme + json.dump(metadata_dict | additional_metadata, f) create_structures_csv(dest_dir, root_id) create_readme(dest_dir, metadata_dict, structures) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 0b30a89e..f8525351 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -49,6 +49,7 @@ def wrapup_atlas_from_data( compress=True, scale_meshes=False, additional_references=dict(), + additional_metadata={}, ): """ Finalise an atlas with truly consistent format from all the data. @@ -93,6 +94,8 @@ def wrapup_atlas_from_data( to ensure that they are specified in microns, regardless of the atlas resolution. additional_references: dict, optional (Default value = empty dict). Dictionary with secondary reference stacks. 
+ additional_metadata: dict, optional + Additional metadata to write to metadata.json """ # If no hemisphere file is given, assume the atlas is symmetric: @@ -191,7 +194,13 @@ def wrapup_atlas_from_data( ) # Create human readable .csv and .txt files: - create_metadata_files(dest_dir, metadata_dict, structures_list, root_id) + create_metadata_files( + dest_dir, + metadata_dict, + structures_list, + root_id, + additional_metadata=additional_metadata, + ) # Compress if required: if compress: diff --git a/setup.py b/setup.py index 6090a431..50718af9 100644 --- a/setup.py +++ b/setup.py @@ -5,25 +5,23 @@ setup( name="bg-atlasgen", - version="0.0.1", + version="0.0.2", description="Scripts generation atlases and utilities for BrainGlobe", install_requires=requirements, - python_requires=">=3.6", + python_requires=">=3.9", entry_points={"console_scripts": []}, packages=find_namespace_packages(exclude=("docs", "tests*")), include_package_data=True, url="https://github.com/brainglobe/bg-atlasgen", author="Luigi Petrucco, Federico Claudi, Adam Tyson", - author_email="adam.tyson@ucl.ac.uk", + author_email="code@adamltyson.com", classifiers=[ "Development Status :: 3 - Alpha", "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows :: Windows 10", "Operating System :: MacOS :: MacOS X", "Programming Language :: Python", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Intended Audience :: Developers", "Intended Audience :: Science/Research", ], From fb22377e2a34253426a389d342c9b76a34c24c6a Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 10 Aug 2021 14:49:22 +0100 Subject: [PATCH 028/103] consistent syntax for empty dict --- bg_atlasgen/wrapup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index f8525351..1bced03a 100644 --- a/bg_atlasgen/wrapup.py +++ 
b/bg_atlasgen/wrapup.py @@ -48,7 +48,7 @@ def wrapup_atlas_from_data( cleanup_files=False, compress=True, scale_meshes=False, - additional_references=dict(), + additional_references={}, additional_metadata={}, ): """ From 56b2e000188ca2596a18055831ea15f06de0d2f7 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 10 Aug 2021 14:50:02 +0100 Subject: [PATCH 029/103] Update docstring --- bg_atlasgen/wrapup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 1bced03a..2786ba0c 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -95,7 +95,7 @@ def wrapup_atlas_from_data( additional_references: dict, optional (Default value = empty dict). Dictionary with secondary reference stacks. additional_metadata: dict, optional - Additional metadata to write to metadata.json + (Default value = empty dict). Additional metadata to write to metadata.json """ # If no hemisphere file is given, assume the atlas is symmetric: From 7c0c4bfc23157c88f719e063bc2ac5aaec8d14ac Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 10 Aug 2021 15:24:21 +0100 Subject: [PATCH 030/103] Add atlas packager metadata --- bg_atlasgen/metadata_utils.py | 2 ++ bg_atlasgen/wrapup.py | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index 39183ab9..bfe41048 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -27,6 +27,7 @@ def generate_metadata_dict( shape, transformation_mat, additional_references, + atlas_packager, ): # Name should be author_species @@ -69,6 +70,7 @@ def generate_metadata_dict( shape=shape, trasform_to_bg=tuple([tuple(m) for m in transformation_mat]), additional_references=additional_references, + atlas_packager=atlas_packager, ) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 2786ba0c..e26c3c20 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -44,6 +44,7 @@ def 
wrapup_atlas_from_data( structures_list, meshes_dict, working_dir, + atlas_packager=None, hemispheres_stack=None, cleanup_files=False, compress=True, @@ -82,6 +83,9 @@ def wrapup_atlas_from_data( dict of meshio-compatible mesh file paths in the form {sruct_id: meshpath} working_dir : str or Path obj Path where the atlas folder and compressed file will be generated. + atlas_packager : str or None + Credit for those responsible for converting the atlas into the BrainGlobe + format. hemispheres_stack : str or Path or numpy array, optional Hemisphere stack for the atlas. If str or Path, will be read with tifffile. If none is provided, atlas is assumed to be symmetric @@ -191,6 +195,7 @@ def wrapup_atlas_from_data( shape=shape, transformation_mat=transformation_mat, additional_references=[k for k in additional_references.keys()], + atlas_packager=atlas_packager, ) # Create human readable .csv and .txt files: From 8140a82ca73327766bac64fa137a78fb1737fd2e Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 10 Aug 2021 15:34:18 +0100 Subject: [PATCH 031/103] python 3.8 compatibility --- bg_atlasgen/metadata_utils.py | 2 +- setup.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index bfe41048..b97f72b9 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -138,7 +138,7 @@ def create_metadata_files( # write metadata dict: with open(dest_dir / descriptors.METADATA_FILENAME, "w") as f: # only save additional metadata to json, don't include in readme - json.dump(metadata_dict | additional_metadata, f) + json.dump({**metadata_dict, **additional_metadata}, f) create_structures_csv(dest_dir, root_id) create_readme(dest_dir, metadata_dict, structures) diff --git a/setup.py b/setup.py index 50718af9..df43ecb6 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ version="0.0.2", description="Scripts generation atlases and utilities for BrainGlobe", 
install_requires=requirements, - python_requires=">=3.9", + python_requires=">=3.8", entry_points={"console_scripts": []}, packages=find_namespace_packages(exclude=("docs", "tests*")), include_package_data=True, @@ -21,6 +21,7 @@ "Operating System :: Microsoft :: Windows :: Windows 10", "Operating System :: MacOS :: MacOS X", "Programming Language :: Python", + "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Intended Audience :: Developers", "Intended Audience :: Science/Research", From 9612e915898d953847ecff70027173d5a2951581 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Thu, 12 Aug 2021 17:00:54 +0100 Subject: [PATCH 032/103] Add mouse spinal cord atlas --- bg_atlasgen/atlas_scripts/allen_cord.py | 253 ++++++++++++++++++++++++ 1 file changed, 253 insertions(+) create mode 100644 bg_atlasgen/atlas_scripts/allen_cord.py diff --git a/bg_atlasgen/atlas_scripts/allen_cord.py b/bg_atlasgen/atlas_scripts/allen_cord.py new file mode 100644 index 00000000..a358327f --- /dev/null +++ b/bg_atlasgen/atlas_scripts/allen_cord.py @@ -0,0 +1,253 @@ +__version__ = "0" + +import json +import time +import tifffile +import zipfile + +import pandas as pd +import numpy as np +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path + +from bg_atlasapi import utils +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + +PARALLEL = True + + +def download_atlas_files(download_dir_path, atlas_file_url): + utils.check_internet_connection() + + atlas_files_dir = download_dir_path / "atlas_files" + destination_path = download_dir_path / "atlas_download" + utils.retrieve_over_http(atlas_file_url, destination_path) + + with zipfile.ZipFile(destination_path, "r") as zip_ref: + zip_ref.extractall(atlas_files_dir) + + atlas_files_dir = atlas_files_dir / "SC_P56_Atlas_10x10x20_v5_2020" + return 
atlas_files_dir + + +def parse_structures(structures_file, root_id): + df = pd.read_csv(structures_file) + df = df.rename(columns={"parent_ID": "parent_structure_id"}) + df = df.drop( + columns=[ + "output_id", + "parent_acronym", + "children_acronym", + "children_IDs", + ] + ) + + df["rgb_triplet"] = df.apply(lambda x: [x.red, x.green, x.blue], axis=1) + df["structure_id_path"] = df.apply(lambda x: [x.id], axis=1) + + df = df.drop(columns=["red", "green", "blue"]) + + structures = df.to_dict("records") + structures = create_structure_hierarchy(structures, df, root_id) + return structures + + +def create_structure_hierarchy(structures, df, root_id): + for structure in structures: + if structure["id"] != root_id: + parent_id = structure["parent_structure_id"] + while True: + structure["structure_id_path"] = [parent_id] + structure[ + "structure_id_path" + ] + if parent_id != root_id: + parent_id = int( + df[df["id"] == parent_id]["parent_structure_id"] + ) + else: + break + else: + structure["name"] = "root" + structure["acronym"] = "root" + + del structure["parent_structure_id"] + + return structures + + +def create_meshes(download_dir_path, structures, annotated_volume, root_id): + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + labels = np.unique(annotated_volume).astype(np.int32) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + 
create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + return meshes_dir_path + + +def create_mesh_dict(structures, meshes_dir_path): + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + return meshes_dict, structures_with_mesh + + +def create_atlas(working_dir): + ATLAS_NAME = "allen_cord" + SPECIES = "Mus musculus" + ATLAS_LINK = "https://data.mendeley.com/datasets/4rrggzv5d5/1" + CITATION = ( + "Fiederling et al. 2021, https://doi.org/10.1101/2021.05.06.443008" + ) + ORIENTATION = "asr" + RESOLUTION = (20, 10, 10) + ROOT_ID = 250 + ATLAS_FILE_URL = ( + "https://md-datasets-cache-zipfiles-prod.s3.eu-west-1." + "amazonaws.com/4rrggzv5d5-1.zip" + ) + ATLAS_PACKAGER = "MetaCell LLC, Ltd." 
+ + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + + # Download atlas files from Mendeley + atlas_files_dir = download_atlas_files(download_dir_path, ATLAS_FILE_URL) + + ## Load files + structures_file = atlas_files_dir / "Atlas_Regions.csv" + reference_file = atlas_files_dir / "Template.tif" + annotations_file = atlas_files_dir / "Annotation.tif" + segments_file = atlas_files_dir / "Segments.csv" + + annotated_volume = tifffile.imread(annotations_file) + template_volume = tifffile.imread(reference_file) + + atlas_segments = pd.read_csv(segments_file) + atlas_segments = dict(atlas_segments=atlas_segments.to_dict("records")) + + ## Parse structure metadata + structures = parse_structures(structures_file, ROOT_ID) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # Create meshes: + print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = create_meshes( + download_dir_path, structures, annotated_volume, ROOT_ID + ) + meshes_dict, structures_with_mesh = create_mesh_dict( + structures, meshes_dir_path + ) + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=RESOLUTION, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + atlas_packager=ATLAS_PACKAGER, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True, + additional_metadata=atlas_segments, + ) + + return output_filename + + +if __name__ == "__main__": + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_cord" + bg_root_dir.mkdir(exist_ok=True, parents=True) + 
create_atlas(bg_root_dir) From 19ef46150498a643ff291ad5a159b5855152aa4c Mon Sep 17 00:00:00 2001 From: Ben Kantor Date: Sun, 31 Oct 2021 15:03:31 +0200 Subject: [PATCH 033/103] add WHS SD Rat atlas --- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 295 ++++++++++++++++++++++++ 1 file changed, 295 insertions(+) create mode 100644 bg_atlasgen/atlas_scripts/whs_sd_rat.py diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py new file mode 100644 index 00000000..7500e15e --- /dev/null +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -0,0 +1,295 @@ +__version__ = "0" + +import json +import multiprocessing as mp +import time +import zipfile +from pathlib import Path + +import imio +import numpy as np +import xmltodict +from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree +from rich.progress import track + +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data + +PARALLEL = True + + +def download_atlas_files(download_dir_path, atlas_file_url, ATLAS_NAME): + atlas_files_dir = download_dir_path / ATLAS_NAME + + if atlas_files_dir.exists(): + return atlas_files_dir + + utils.check_internet_connection() + + download_name = ATLAS_NAME + "_atlas.zip" + destination_path = download_dir_path / download_name + utils.retrieve_over_http(atlas_file_url, destination_path) + + with zipfile.ZipFile(destination_path, "r") as zip_ref: + zip_ref.extractall(atlas_files_dir) + + return atlas_files_dir + + +def parse_structures_xml(root, path=None, structures=None): + structures = structures or [] + path = path or [] + + rgb_triplet = tuple(int(root["@color"][i : i + 2], 16) for i in (1, 3, 5)) + id = int(root["@id"]) + struct = { + "name": root["@name"], + "acronym": root["@abbreviation"], + "id": int(root["@id"]), + "structure_id_path": path + [id], + "rgb_triplet": rgb_triplet, + } + structures.append(struct) + + if "label" in root: + if 
isinstance(root["label"], list): + for label in root["label"]: + parse_structures_xml( + label, path=path + [id], structures=structures + ) + else: + parse_structures_xml( + root["label"], path=path + [id], structures=structures + ) + + return structures + + +def parse_structures(structures_file: Path): + root = xmltodict.parse(structures_file.read_text())["milf"]["structure"] + root["@abbreviation"] = "root" + root["@color"] = "#ffffff" + root["@id"] = "10000" + root["@name"] = "Root" + + structures = parse_structures_xml(root) + return structures + + +def create_structure_hierarchy(structures, df, root_id): + for structure in structures: + if structure["id"] != root_id: + parent_id = structure["parent_structure_id"] + while True: + structure["structure_id_path"] = [parent_id] + structure[ + "structure_id_path" + ] + if parent_id != root_id: + parent_id = int( + df[df["id"] == parent_id]["parent_structure_id"] + ) + else: + break + else: + structure["name"] = "root" + structure["acronym"] = "root" + + del structure["parent_structure_id"] + + return structures + + +def create_meshes(download_dir_path, structures, annotated_volume, root_id): + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + labels = set(np.unique(annotated_volume).astype(np.int32)) + + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + 
meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + return meshes_dir_path + + +def create_mesh_dict(structures, meshes_dir_path): + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + return meshes_dict, structures_with_mesh + + +def create_atlas(working_dir): + ATLAS_NAME = "whs_sd_rat" + SPECIES = "Rattus norvegicus" + ATLAS_LINK = "https://www.nitrc.org/projects/whs-sd-atlas" + CITATION = ( + "Papp et al 2014, https://doi.org/10.1016/j.neuroimage.2014.04.001" + ) + ORIENTATION = "lpi" + RESOLUTION = (39, 39, 39) + ROOT_ID = 10000 + ATLAS_FILE_URL = "https://www.nitrc.org/frs/download.php/12263/MBAT_WHS_SD_rat_atlas_v4_pack.zip" + ATLAS_PACKAGER = ( + "Ben Kantor, Tel Aviv University, Israel, benkantor@mail.tau.ac.il" + ) + + assert len(ORIENTATION) == 3, ( + "Orientation is not 3 characters, Got" + ORIENTATION + ) + assert len(RESOLUTION) == 3, "Resolution is not correct, Got " + RESOLUTION + assert ( + ATLAS_FILE_URL + ), "No download link provided for atlas in ATLAS_FILE_URL" + + # Generated atlas path: + working_dir = working_dir / "brainglobe_workingdir" / ATLAS_NAME + working_dir.mkdir(exist_ok=True, parents=True) + + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + + # Download atlas files from link provided + print("Downloading atlas from link: ", 
ATLAS_FILE_URL) + atlas_files_dir = download_atlas_files( + download_dir_path, ATLAS_FILE_URL, ATLAS_NAME + ) + atlas_files_dir = atlas_files_dir / "MBAT_WHS_SD_rat_atlas_v4_pack/Data" + + # Parse structure metadata + structures = parse_structures( + atlas_files_dir / "WHS_SD_rat_atlas_v4_labels.ilf" + ) + + # Load files + annotation_stack = imio.load_any( + atlas_files_dir / "WHS_SD_rat_atlas_v4.nii.gz", as_numpy=True + ).astype(np.int64) + reference_stack = imio.load_any( + atlas_files_dir / "WHS_SD_rat_T2star_v1.01.nii.gz", as_numpy=True + ) + + # Clean junk from reference file + reference_stack *= annotation_stack > 0 + + # Create hemispheres stack + hemispheres_stack = np.full(reference_stack.shape, 2, dtype=np.uint8) + hemispheres_stack[:244] = 1 + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # Create meshes: + print(f"Saving atlas data at {download_dir_path}") + annotation_stack_for_mesh = annotation_stack + meshes_dir_path = create_meshes( + download_dir_path, structures, annotation_stack_for_mesh, ROOT_ID + ) + + meshes_dict, structures_with_mesh = create_mesh_dict( + structures, meshes_dir_path + ) + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=RESOLUTION, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=reference_stack, + annotation_stack=annotation_stack, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + atlas_packager=ATLAS_PACKAGER, + hemispheres_stack=hemispheres_stack, + cleanup_files=False, + compress=True, + scale_meshes=True, + ) + + return output_filename + + +if __name__ == "__main__": + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" + bg_root_dir.mkdir(exist_ok=True, 
parents=True) + create_atlas(bg_root_dir) From f9e1d85d28cd97d0d6de29dd9b6cb37a3c60f04d Mon Sep 17 00:00:00 2001 From: Ben Kantor Date: Thu, 4 Nov 2021 17:18:49 +0200 Subject: [PATCH 034/103] add xmltodict to requirements.txt --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 6976e29d..473dcf28 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,4 +22,5 @@ gitpython coverage pre-commit PyMCubes -bg_atlasapi \ No newline at end of file +bg_atlasapi +xmltodict From 79a644624f3518dd51c2c679bed67a83519d38fb Mon Sep 17 00:00:00 2001 From: Ben Kantor Date: Sun, 7 Nov 2021 19:12:45 +0200 Subject: [PATCH 035/103] remove structures that are missing from annotation volume --- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 26 ++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py index 7500e15e..3897ae9b 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -100,14 +100,10 @@ def create_structure_hierarchy(structures, df, root_id): return structures -def create_meshes(download_dir_path, structures, annotated_volume, root_id): +def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): meshes_dir_path = download_dir_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) - tree = get_structures_tree(structures) - - labels = set(np.unique(annotated_volume).astype(np.int32)) - for key, node in tree.nodes.items(): if key in labels: is_label = True @@ -240,6 +236,23 @@ def create_atlas(working_dir): atlas_files_dir / "WHS_SD_rat_T2star_v1.01.nii.gz", as_numpy=True ) + # Remove structure with missing annotations + tree = get_structures_tree(structures) + labels = set(np.unique(annotation_stack).astype(np.int32)) + existing_structures = [] + for structure in structures: + stree = tree.subtree(structure["id"]) + ids = 
set(stree.nodes.keys()) + matched_labels = ids & labels + if matched_labels: + existing_structures.append(structure) + else: + node = tree.nodes[structure["id"]] + print( + f"{node.tag} not found in annotation volume, removing from list of structures..." + ) + structures = existing_structures + # Clean junk from reference file reference_stack *= annotation_stack > 0 @@ -253,9 +266,8 @@ def create_atlas(working_dir): # Create meshes: print(f"Saving atlas data at {download_dir_path}") - annotation_stack_for_mesh = annotation_stack meshes_dir_path = create_meshes( - download_dir_path, structures, annotation_stack_for_mesh, ROOT_ID + download_dir_path, tree, annotation_stack, labels, ROOT_ID ) meshes_dict, structures_with_mesh = create_mesh_dict( From ca31ce71f1907c9c8aef994fd80f5025803d88ca Mon Sep 17 00:00:00 2001 From: Ben Kantor Date: Sun, 7 Nov 2021 19:32:46 +0200 Subject: [PATCH 036/103] remove structures that are missing from annotation volume --- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py index 3897ae9b..0e698332 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -116,7 +116,7 @@ def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) + pool = mp.Pool(min(mp.cpu_count() - 2, 16)) try: pool.map( @@ -252,6 +252,7 @@ def create_atlas(working_dir): f"{node.tag} not found in annotation volume, removing from list of structures..." 
) structures = existing_structures + tree = get_structures_tree(structures) # Clean junk from reference file reference_stack *= annotation_stack > 0 From 2e44cca2ff03ae6211f8d92640b5e73443a72add Mon Sep 17 00:00:00 2001 From: Kailyn Fields <84587654+kailynkfields@users.noreply.github.com> Date: Tue, 9 Nov 2021 14:55:01 -0500 Subject: [PATCH 037/103] Fixed vedo Slicer import issue --- bg_atlasgen/mesh_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 5852cf64..9a40da86 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,6 +1,6 @@ try: from vedo import Mesh, write, load, show, Volume - from vedo.applications import Browser, Slicer + from vedo.applications import Browser, SlicerPlotter except ModuleNotFoundError: raise ModuleNotFoundError( "Mesh generation with these utils requires vedo\n" From 327606872b006d6f1dd63f0b00a6e6bc354287a3 Mon Sep 17 00:00:00 2001 From: Kailyn Fields <84587654+kailynkfields@users.noreply.github.com> Date: Tue, 9 Nov 2021 15:11:08 -0500 Subject: [PATCH 038/103] Update mesh_utils.py --- bg_atlasgen/mesh_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 9a40da86..b4bec9cc 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -249,7 +249,7 @@ def compare_mesh_and_volume(mesh, volume): if isinstance(volume, np.ndarray): volume = Volume(volume) - vp = Slicer(volume, bg2="white", showHisto=False) + vp = SlicerPlotter(volume, bg2="white", showHisto=False) vp.add(mesh.alpha(0.5)) vp.show() From 55f52d44f95989c3033f7ee0a6842e0a47aa509e Mon Sep 17 00:00:00 2001 From: Kailyn Fields <84587654+kailynkfields@users.noreply.github.com> Date: Fri, 19 Nov 2021 15:16:54 -0500 Subject: [PATCH 039/103] Create azba_zfish.py --- bg_atlasgen/atlas_scripts/azba_zfish.py | 219 ++++++++++++++++++++++++ 1 file changed, 219 insertions(+) create mode 100644 
bg_atlasgen/atlas_scripts/azba_zfish.py diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py new file mode 100644 index 00000000..03fa3ec0 --- /dev/null +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Script to generate a Brainglobe compatible atlas object for the Adult Zebrafish Brain Atlas (AZBA) + +@author: Kailyn Fields, kailyn.fields@wayne.edu + +""" + +__version__ = "0" + +import csv +import time +import tifffile +import tarfile + +import numpy as np +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path +from bg_atlasgen.mesh_utils import Region, create_region_mesh +from bg_atlasapi.structure_tree_util import get_structures_tree +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi import utils + +PARALLEL = False #Disable for debugging mesh creation + +def create_atlas(working_dir, resolution): + + #metadata + ATLAS_NAME = "azba_zfish" + SPECIES = "Danio rerio" + ATLAS_LINK = "http://www.azba.wayne.edu" + CITATION = "Kenney et al. 
2021, https://doi.org/10.1101/2021.05.04.442625" + ATLAS_FILE_URL = "http://www.azba.wayne.edu/2021-08-22_AZBA.tar.gz" + ORIENTATION = "las" + ROOT_ID = 9999 + ATLAS_PACKAGER = "Kailyn Fields, kailyn.fields@wayne.edu" + ADDITIONAL_METADATA = {} + + + #setup folder for downloading + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + atlas_path = download_dir_path / f"{ATLAS_NAME}" + + #download atlas files + utils.check_internet_connection() + destination_path = download_dir_path / "atlas_download" + utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) + + #unpack the atlas download folder + tar = tarfile.open(destination_path) + tar.extractall(path=atlas_path) + tar.close() + destination_path.unlink() + + print("Atlas files download completed") + + #paths + structures_file = atlas_path / "2021-08-22_AZBA_labels.csv" + annotations_file = atlas_path / "2021-08-22_AZBA_segmentation.tif" + reference_file = atlas_path / "20180219_AZBA_topro_average_2020.tif" + reference_af = atlas_path / "20180628_AZBA_AF_average.tif" + meshes_dir_path = atlas_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + #adding autofluorescence image as additional reference file, main reference file is topro + autofluo = tifffile.imread(reference_af) + ADDITIONAL_REFERENCES = {"autofluorescence" : autofluo} + + #open structures.csv and prep for dictionary parsing + print("Creating structure tree") + zfishFile = open(structures_file) + zfishDictReader = csv.DictReader(zfishFile) + + #empty list to populate with dictionaries + hierarchy = [] + + #parse through csv file and populate hierarchy list + for row in zfishDictReader: + hierarchy.append(row) + + #make string to int and list of int conversions in 'id', 'structure_id_path', and 'rgb_triplet' key values + for i in range(0, len(hierarchy)): + hierarchy[i]['id'] = int(hierarchy[i]['id']) + for j in range(0, len(hierarchy)): + hierarchy[j]['structure_id_path'] = list(map(int, 
hierarchy[j]['structure_id_path'].split("/"))) + for k in range(0, len(hierarchy)): + try: + hierarchy[k]['rgb_triplet'] = list(map(int,hierarchy[k]['rgb_triplet'].split("/"))) + except ValueError: + hierarchy[k]['rgb_triplet'] = [255, 255, 255] + + #remove clear label (id 0) from hierarchy. ITK-Snap uses this to label unlabeled areas, but this convention + #interferes with the root mask generation and is unnecessary for this application + hierarchy.remove(hierarchy[1]) + + #use tifffile to read annotated file + annotated_volume = tifffile.imread(annotations_file) + + print(f"Saving atlas data at {atlas_path}") + tree = get_structures_tree(hierarchy) + print(f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}") + + #generate binary mask for mesh creation + labels = np.unique(annotated_volume).astype(np.int_) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + #mesh creation + closing_n_iters = 2 + start = time.time() + + if PARALLEL: + + print("Multiprocessing mesh creation...") + pool = mp.Pool(int(mp.cpu_count() / 2)) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass + + else: + + print("Multiprocessing disabled") + + for node in track( + tree.nodes.values(), + total=tree.size(), + description = "Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + ) + ) + + print("Finished mesh extraction in : ", round((time.time() - start) / 60, 2), " minutes") + + #create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in hierarchy: + #check if a mesh was created + mesh_path = meshes_dir_path / f"{s['id']}.obj" + if not mesh_path.exists(): + print(f"No mesh file exists 
for: {s}, ignoring it.") + continue + else: + #check that the mesh actually exists and isn't empty + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + structures_with_mesh.append(s) + meshes_dict[s['id']] = mesh_path + + print(f"In the end, {len(structures_with_mesh)} structures with mesh are kept") + + #import reference file with tifffile so it can be read in wrapup_atlas_from_data + reference = tifffile.imread(reference_file) + + # wrap up atlas file + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name = ATLAS_NAME, + atlas_minor_version=__version__, + citation = CITATION, + atlas_link = ATLAS_LINK, + species = SPECIES, + resolution=(resolution,) * 3, + orientation = ORIENTATION, + root_id = ROOT_ID, + reference_stack = reference, + annotation_stack = annotations_file, + structures_list = hierarchy, + meshes_dict = meshes_dict, + working_dir = working_dir, + atlas_packager = ATLAS_PACKAGER, + additional_metadata = ADDITIONAL_METADATA, + additional_references = ADDITIONAL_REFERENCES, + ) + + return output_filename + +if __name__ == "__main__": + resolution = 4 + + #generated atlas path + bg_root_dir = Path.home() / "brainglobe_workingdir" + bg_root_dir.mkdir(exist_ok=True, parents=True) + create_atlas(bg_root_dir, resolution) From 53e23dbb858507079f54cbe5bf48a7e53fb230ad Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Sun, 21 Nov 2021 14:20:16 +0000 Subject: [PATCH 040/103] Standardise temporary output directory --- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py index 0e698332..9f4d8c33 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -210,7 +210,7 @@ def create_atlas(working_dir): ), "No download link provided for atlas in ATLAS_FILE_URL" # Generated atlas path: - working_dir = 
working_dir / "brainglobe_workingdir" / ATLAS_NAME + working_dir = working_dir / ATLAS_NAME working_dir.mkdir(exist_ok=True, parents=True) download_dir_path = working_dir / "downloads" From 844592b33c4175f5a78648732933df3f9cbb940d Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Fri, 26 Nov 2021 17:21:55 +0000 Subject: [PATCH 041/103] add [allenmouse] extra dependency (#17) --- README.md | 29 +++++++++++++++++++++++++++++ requirements.txt | 1 - setup.py | 1 + 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index bac9c95c..e32919d3 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,32 @@ # BG-AtlasGen Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi` module. + + +### To contribute +1) Fork this repo + +2) Clone your repo +```bash +git clone https://github.com/USERNAME/bg-atlasgen) +``` + +3) Install an editable version +```bash +cd bg-atlasgen +pip install -e . +``` +4) Create a script to package your atlas, and place into +`bg_atlasgen/atlas_scripts`. Please see other scripts for examples. + +Your script should contain everything required to run. The raw data should be +hosted on a publicly accessible repository so that anyone can run the script + to recreate the atlas. 
+ +If you need to add any dependencies, please add them as an extra in the +setup.py file, e.g.: + +```python +extras_require={"allenmouse": ["allensdk"], + "newatlas": ["dep1", "dep2"]} +``` diff --git a/requirements.txt b/requirements.txt index 473dcf28..a3dc40ab 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,6 @@ click rich tqdm>=4.46.1 bg-space -allensdk sphinx imio vedo diff --git a/setup.py b/setup.py index df43ecb6..02f6cdfc 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,7 @@ version="0.0.2", description="Scripts generation atlases and utilities for BrainGlobe", install_requires=requirements, + extras_require={"allenmouse": ["allensdk"]}, python_requires=">=3.8", entry_points={"console_scripts": []}, packages=find_namespace_packages(exclude=("docs", "tests*")), From 74b08ee4cd58bd2cbb1d440e228568d20fa0fe1d Mon Sep 17 00:00:00 2001 From: Kailyn Fields <84587654+kailynkfields@users.noreply.github.com> Date: Tue, 30 Nov 2021 15:02:17 -0500 Subject: [PATCH 042/103] Allow multiple downloads at once, updated citation --- bg_atlasgen/atlas_scripts/azba_zfish.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index 03fa3ec0..141ae4c0 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -32,7 +32,7 @@ def create_atlas(working_dir, resolution): ATLAS_NAME = "azba_zfish" SPECIES = "Danio rerio" ATLAS_LINK = "http://www.azba.wayne.edu" - CITATION = "Kenney et al. 2021, https://doi.org/10.1101/2021.05.04.442625" + CITATION = "Kenney et al. 
2021, https://doi.org/10.7554/elife.69988" ATLAS_FILE_URL = "http://www.azba.wayne.edu/2021-08-22_AZBA.tar.gz" ORIENTATION = "las" ROOT_ID = 9999 @@ -41,6 +41,7 @@ def create_atlas(working_dir, resolution): #setup folder for downloading + working_dir = working_dir / ATLAS_NAME download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) atlas_path = download_dir_path / f"{ATLAS_NAME}" From 4384efb7e718cbb96bcc4e843563e86696a4060f Mon Sep 17 00:00:00 2001 From: Federico Claudi Date: Wed, 19 Jan 2022 09:55:02 +0000 Subject: [PATCH 043/103] Mesh cleaning improvements (#15) * gitignore: added vscode * gitignore: added vscode * added rich fancy traceback for easier debugging * replaced mesh cleaning with decimation * pre-commit cleanup * added TEST mode to atlas creation * added smoothing options to all atlas gen scripts * removed ratlas.py since outdated and theres a better atlas * atlas gen update * added smoothing and decimation to adult zebrafish atlas Co-authored-by: Luigi Petrucco --- .gitignore | 4 + bg_atlasgen/__init__.py | 4 + bg_atlasgen/atlas_scripts/allen_cord.py | 68 ++++- bg_atlasgen/atlas_scripts/azba_zfish.py | 19 +- bg_atlasgen/atlas_scripts/humanatlas.py | 17 +- bg_atlasgen/atlas_scripts/kim_mouse.py | 19 +- bg_atlasgen/atlas_scripts/mpin_zfish.py | 5 +- bg_atlasgen/atlas_scripts/osten_mouse.py | 8 +- bg_atlasgen/atlas_scripts/ratatlas.py | 364 ----------------------- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 6 + bg_atlasgen/mesh_utils.py | 80 ++--- 11 files changed, 167 insertions(+), 427 deletions(-) delete mode 100644 bg_atlasgen/atlas_scripts/ratatlas.py diff --git a/.gitignore b/.gitignore index 2bb9b60d..14e63f8d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ +.vscode + # Byte-compiled / optimized / DLL files +__pycache__ +*.__pycache__ __pycache__/ *.py[cod] *$py.class diff --git a/bg_atlasgen/__init__.py b/bg_atlasgen/__init__.py index 32ab782e..fb935ef5 100644 --- a/bg_atlasgen/__init__.py +++ 
b/bg_atlasgen/__init__.py @@ -1 +1,5 @@ __version__ = "1" # will set major version of all atlases + +from rich.traceback import install + +install() diff --git a/bg_atlasgen/atlas_scripts/allen_cord.py b/bg_atlasgen/atlas_scripts/allen_cord.py index a358327f..f19bedc1 100644 --- a/bg_atlasgen/atlas_scripts/allen_cord.py +++ b/bg_atlasgen/atlas_scripts/allen_cord.py @@ -1,29 +1,48 @@ -__version__ = "0" +__version__ = "1" import json import time import tifffile import zipfile + import pandas as pd import numpy as np import multiprocessing as mp - +from random import choices +from loguru import logger from rich.progress import track from pathlib import Path +# import sys + +# sys.path.append("./") + from bg_atlasapi import utils -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import ( + create_region_mesh, + Region, + inspect_meshes_folder, +) from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree PARALLEL = True +TEST = False -def download_atlas_files(download_dir_path, atlas_file_url): +def download_atlas_files(download_dir_path: Path, atlas_file_url: str) -> Path: utils.check_internet_connection() atlas_files_dir = download_dir_path / "atlas_files" + + # only download data if they weren't already downloaded + if atlas_files_dir.exists(): + print("Not downloading atlas since it was downloaded already already") + return atlas_files_dir / "SC_P56_Atlas_10x10x20_v5_2020" + else: + print("Downloading atlas data") + destination_path = download_dir_path / "atlas_download" utils.retrieve_over_http(atlas_file_url, destination_path) @@ -96,9 +115,22 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): # Mesh creation closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation start = time.time() - if PARALLEL: + # check how many regions to create the meshes for + nodes = list(tree.nodes.values()) + if TEST: + 
logger.info( + f"Creating atlas in test mode: selecting 10 random regions for mesh creation" + ) + nodes = choices(nodes, k=10) + + if PARALLEL: + print( + f"Creating {tree.size()} meshes in parallel with {mp.cpu_count() - 2} CPU cores" + ) pool = mp.Pool(mp.cpu_count() - 2) try: @@ -113,17 +145,18 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): annotated_volume, root_id, closing_n_iters, + decimate_fraction, + smooth, ) - for node in tree.nodes.values() + for node in nodes ], ) except mp.pool.MaybeEncodingError: pass else: + print(f"Creating {len(nodes)} meshes") for node in track( - tree.nodes.values(), - total=tree.size(), - description="Creating meshes", + nodes, total=len(nodes), description="Creating meshes", ): create_region_mesh( ( @@ -134,6 +167,8 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): annotated_volume, root_id, closing_n_iters, + decimate_fraction, + smooth, ) ) @@ -142,6 +177,11 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): round((time.time() - start) / 60, 2), " minutes", ) + + if TEST: + # create visualization of the various meshes + inspect_meshes_folder(meshes_dir_path) + return meshes_dir_path @@ -191,7 +231,7 @@ def create_atlas(working_dir): # Download atlas files from Mendeley atlas_files_dir = download_atlas_files(download_dir_path, ATLAS_FILE_URL) - ## Load files + # Load files structures_file = atlas_files_dir / "Atlas_Regions.csv" reference_file = atlas_files_dir / "Template.tif" annotations_file = atlas_files_dir / "Annotation.tif" @@ -203,7 +243,7 @@ def create_atlas(working_dir): atlas_segments = pd.read_csv(segments_file) atlas_segments = dict(atlas_segments=atlas_segments.to_dict("records")) - ## Parse structure metadata + # Parse structure metadata structures = parse_structures(structures_file, ROOT_ID) # save regions list json: @@ -247,7 +287,11 @@ def create_atlas(working_dir): if __name__ == "__main__": + # Generated atlas path: 
- bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_cord" + bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_cord_smooth" bg_root_dir.mkdir(exist_ok=True, parents=True) + + # generate atlas + print(f'Creating atlas and saving it at "{bg_root_dir}"') create_atlas(bg_root_dir) diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index 141ae4c0..4ddc8bb7 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -13,13 +13,14 @@ import time import tifffile import tarfile +from random import choices import numpy as np import multiprocessing as mp from rich.progress import track from pathlib import Path -from bg_atlasgen.mesh_utils import Region, create_region_mesh +from bg_atlasgen.mesh_utils import Region, create_region_mesh, inspect_meshes_folder from bg_atlasapi.structure_tree_util import get_structures_tree from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi import utils @@ -42,6 +43,7 @@ def create_atlas(working_dir, resolution): #setup folder for downloading working_dir = working_dir / ATLAS_NAME + working_dir.mkdir(exist_ok=True) download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) atlas_path = download_dir_path / f"{ATLAS_NAME}" @@ -118,6 +120,9 @@ def create_atlas(working_dir, resolution): #mesh creation closing_n_iters = 2 start = time.time() + + decimate_fraction = 0.3 + smooth = True if PARALLEL: @@ -146,9 +151,9 @@ def create_atlas(working_dir, resolution): else: print("Multiprocessing disabled") - - for node in track( - tree.nodes.values(), + # nodes = list(tree.nodes.values()) + # nodes = choices(nodes, k=10) + for node in track(tree.nodes.values(), total=tree.size(), description = "Creating meshes", ): @@ -161,6 +166,8 @@ def create_atlas(working_dir, resolution): annotated_volume, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth ) ) @@ -186,8 +193,8 @@ def create_atlas(working_dir, 
resolution): print(f"In the end, {len(structures_with_mesh)} structures with mesh are kept") #import reference file with tifffile so it can be read in wrapup_atlas_from_data - reference = tifffile.imread(reference_file) - + reference = tifffile.imread(reference_file) + inspect_meshes_folder(meshes_dir_path) # wrap up atlas file print("Finalising atlas") output_filename = wrapup_atlas_from_data( diff --git a/bg_atlasgen/atlas_scripts/humanatlas.py b/bg_atlasgen/atlas_scripts/humanatlas.py index 2aa96d26..461da014 100644 --- a/bg_atlasgen/atlas_scripts/humanatlas.py +++ b/bg_atlasgen/atlas_scripts/humanatlas.py @@ -13,7 +13,11 @@ # import sys # sys.path.append("./") -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import ( + create_region_mesh, + Region, + inspect_meshes_folder, +) from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree @@ -51,6 +55,7 @@ def prune_tree(tree): if __name__ == "__main__": PARALLEL = False # disable parallel mesh extraction for easier debugging + TEST = False # ---------------------------------------------------------------------------- # # PREP METADATA # @@ -177,6 +182,8 @@ def prune_tree(tree): # Mesh creation closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation start = time.time() if PARALLEL: print("Starting mesh creation in parallel") @@ -195,6 +202,8 @@ def prune_tree(tree): annotation, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) for node in tree.nodes.values() ], @@ -224,6 +233,8 @@ def prune_tree(tree): volume, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) ) @@ -233,6 +244,10 @@ def prune_tree(tree): " minutes", ) + if TEST: + # create visualization of the various meshes + inspect_meshes_folder(meshes_dir_path) + # Create meshes dict meshes_dict = dict() structures_with_mesh = [] diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py 
b/bg_atlasgen/atlas_scripts/kim_mouse.py index c25996a9..132d62f9 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -1,4 +1,4 @@ -__version__ = "0" +__version__ = "1" import json import time @@ -13,8 +13,12 @@ from pathlib import Path from scipy.ndimage import zoom from allensdk.core.reference_space_cache import ReferenceSpaceCache -from bg_atlasapi import utils +# import sys + +# sys.path.append("./") + +from bg_atlasapi import utils from bg_atlasgen.mesh_utils import create_region_mesh, Region from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree @@ -37,7 +41,7 @@ def create_atlas(working_dir, resolution): download_dir_path.mkdir(exist_ok=True) atlas_files_dir = download_dir_path / "atlas_files" - ## Download atlas_file + # Download atlas_file utils.check_internet_connection() destination_path = download_dir_path / "atlas_download" @@ -126,6 +130,9 @@ def create_atlas(working_dir, resolution): # Mesh creation closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation + start = time.time() if PARALLEL: @@ -143,6 +150,8 @@ def create_atlas(working_dir, resolution): rotated_annotations, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) for node in tree.nodes.values() ], @@ -164,6 +173,8 @@ def create_atlas(working_dir, resolution): rotated_annotations, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) ) @@ -225,7 +236,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - resolution = 50 # some resolution, in microns + resolution = 10 # some resolution, in microns (10, 25, 50, 100) # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index b8280191..e240c071 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ 
b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -48,10 +48,7 @@ def add_path_inplace(parent): def collect_all_inplace( - node, - traversing_list, - download_path, - mesh_dict, + node, traversing_list, download_path, mesh_dict, ): """Recursively traverse a region hierarchy, download meshes, and append regions to a list inplace. diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/bg_atlasgen/atlas_scripts/osten_mouse.py index f5580203..26b18b99 100644 --- a/bg_atlasgen/atlas_scripts/osten_mouse.py +++ b/bg_atlasgen/atlas_scripts/osten_mouse.py @@ -37,7 +37,7 @@ def create_atlas(working_dir, resolution): download_dir_path.mkdir(exist_ok=True) atlas_files_dir = download_dir_path / "atlas_files" - ## Download atlas_file + # Download atlas_file utils.check_internet_connection() destination_path = download_dir_path / "atlas_download" @@ -126,6 +126,8 @@ def create_atlas(working_dir, resolution): # Mesh creation closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation start = time.time() if PARALLEL: @@ -143,6 +145,8 @@ def create_atlas(working_dir, resolution): rotated_annotations, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) for node in tree.nodes.values() ], @@ -164,6 +168,8 @@ def create_atlas(working_dir, resolution): rotated_annotations, ROOT_ID, closing_n_iters, + decimate_fraction, + smooth, ) ) diff --git a/bg_atlasgen/atlas_scripts/ratatlas.py b/bg_atlasgen/atlas_scripts/ratatlas.py deleted file mode 100644 index b15f3903..00000000 --- a/bg_atlasgen/atlas_scripts/ratatlas.py +++ /dev/null @@ -1,364 +0,0 @@ -from bg_atlasgen.volume_utils import ( - extract_volume_surface, - load_labelled_volume, -) -from bg_atlasgen.metadata_utils import create_metadata_files -from brainio.brainio import load_any - - -from pathlib import Path -import pandas as pd -import json -import tarfile -import os - -import numpy as np -import tifffile - - -from tqdm import tqdm - -from vedo import write, Volume - - -import sys - 
-sys.path.append(os.getcwd()) - - -ATLAS_NAME = "ratatlas" - -base_url = "" - -# Generated atlas path: -bg_root_dir = Path.home() / ".brainglobe" -bg_root_dir.mkdir(exist_ok=True) - -# Temporary folder for nrrd files download: -# temp_path = Path(tempfile.mkdtemp()) -temp_path = Path( - r"D:\Dropbox (UCL - SWC)\Rotation_vte\Anatomy\Atlases\atlasesforbrainrender\goldcustomrat" -) - - -# Temporary folder for files before compressing: -uncompr_atlas_path = temp_path / ATLAS_NAME -uncompr_atlas_path.mkdir(exist_ok=True) - - -# ---------------------------------------------------------------------------- # -# Load volume data # -# ---------------------------------------------------------------------------- # -# Load annotated and reference tiff stacks (already aligned to brainglobe by Adam) -# And save to folder with all atlas data -base_data_fld = Path( - r"D:\Dropbox (UCL - SWC)\Rotation_vte\Anatomy\Atlases\atlasesforbrainrender\goldcustomrat" -) - -for name in ["reference", "annotated"]: - loaded = load_any( - str(base_data_fld / f"{name}.tif") - ) # shape (186, 160, 160) - tifffile.imsave(str(uncompr_atlas_path / f"{name}.tiff"), loaded) - - -# ---------------------------------------------------------------------------- # -# LOAD/PARSE HIERARCHICAL DATA # -# ---------------------------------------------------------------------------- # - -""" - Hierarchy is organized: - - /major/submajor/minor - - hierarchy dataframe maps region names to voxel value in annotated.tiff (minor column) - major and submajors map major/submajor values in hierarchy to the corresponding name -""" - -hierarchy = pd.read_excel( - str(base_data_fld / "SwansonAtlasCategories-Mar_2_2005.xls"), - header=1, - usecols=["Abbreviation", "Name of Area", "Major", "Sub_Major", "Minor"], - nrows=1276, -) - -majors = pd.read_excel( - str(base_data_fld / "SwansonAtlasCategories-Mar_2_2005.xls"), - header=3, - usecols=[13, 14], - nrows=20, -) - -submajors = pd.read_excel( - str(base_data_fld / 
"SwansonAtlasCategories-Mar_2_2005.xls"), - header=3, - usecols=[15, 16], - nrows=89, -) - - -clean_hierarchy = dict( - abbreviation=[], - name=[], - major=[], - majornum=[], - submajor=[], - submajornum=[], - minor=[], -) -for i, region in hierarchy.iterrows(): - clean_hierarchy["abbreviation"].append(region.Abbreviation) - clean_hierarchy["name"].append(region["Name of Area"]) - clean_hierarchy["major"].append( - majors.loc[majors.ANC == region.Major]["ANC Name"].values[0] - ) - clean_hierarchy["majornum"].append( - majors.loc[majors.ANC == region.Major]["ANC"].values[0] - ) - clean_hierarchy["minor"].append(region["Minor"]) - try: - clean_hierarchy["submajor"].append( - submajors.loc[submajors.SubANC == region.Sub_Major][ - "SubANC Name" - ].values[0] - ) - clean_hierarchy["submajornum"].append( - int( - submajors.loc[submajors.SubANC == region.Sub_Major][ - "SubANC" - ].values[0] - ) - ) - except Exception as e: - print(e) - clean_hierarchy["submajor"].append(None) - clean_hierarchy["submajornum"].append(None) - - -clean_hierarchy = pd.DataFrame(clean_hierarchy) - -# ------------------------ Organize hierarchy metadata ----------------------- # - -idn = 0 - -""" - Given that the way the matadata is organised, not every region has a unique - numerical ID value associated with it (e.g. a region might have a minor 1, but - a submajor region's numerical value is also 1), here we reassign a numerical id - to each brain structure. number increase from root > majors > submajors > minors. 
-""" - -structures = [ - { - "acronym": "root", - "id": idn, - "name": "root", - "structure_id_path": [0], - "rgb_triplet": [255, 255, 255], - } -] - - -for i, major in majors.iterrows(): - if not isinstance(major["ANC Name"], str): - continue - - idn += 1 - structures.append( - { - "acronym": major["ANC Name"].replace(" ", "-"), - "id": idn, - "name": major["ANC Name"], - "structure_id_path": [0, idn], - "rgb_triplet": [255, 255, 255], - } - ) - - -for i, submajor in submajors.iterrows(): - # Get an entry in clean hierarchy with this submajor - try: - entry = clean_hierarchy.loc[ - clean_hierarchy.submajornum == submajor["SubANC"] - ].iloc[0] - except Exception as e: - print(e) - pass - - # Get path - idn += 1 - path = [0, int(entry.majornum), idn] - - # Append - structures.append( - { - "acronym": submajor["SubANC Name"].replace(" ", "-"), - "id": idn, - "name": submajor["SubANC Name"], - "structure_id_path": path, - "rgb_triplet": [255, 255, 255], - } - ) - - -for i, region in clean_hierarchy.iterrows(): - idn += 1 - if np.isnan(region.submajornum): - path = [0, region.majornum, idn] - - else: - path = [0, int(region.majornum), int(region.submajornum), idn] - - structures.append( - { - "acronym": region.abbreviation, - "id": idn, - "name": region.name, - "structure_id_path": path, - "rgb_triplet": [255, 255, 255], - } - ) - -# save regions list json: -with open(uncompr_atlas_path / "structures.json", "w") as f: - json.dump(structures, f) - - -# ---------------------------------------------------------------------------- # -# Create MESEHS # -# ---------------------------------------------------------------------------- # -print(f"Saving atlas data at {uncompr_atlas_path}") -meshes_dir_path = uncompr_atlas_path / "meshes" -meshes_dir_path.mkdir(exist_ok=True) - -volume = load_labelled_volume(load_any(str(base_data_fld / "annotated.tif"))) - -root = extract_volume_surface(volume) - -write(root, str(meshes_dir_path / "0.obj")) - -# First create a mesh for every 
minor region -volume_data = load_any(str(base_data_fld / "annotated.tif")) -for i, region in tqdm(clean_hierarchy.iterrows()): - structure = [ - s for s in structures if s["acronym"] == region["abbreviation"] - ][0] - savepath = str( - meshes_dir_path - / f'{structure["id"]}.obj'.replace("/", "-").replace("\\", "-") - ) - if os.path.isfile(savepath): - continue - - vol = np.zeros_like(volume_data) - - if not np.isin(np.float(region.minor), volume_data): - # print(f'{region.abbreviation} doesnt seem to appear in annotated dataset') - continue - - vol[volume_data == np.float32(region.minor)] = 1 - if np.max(vol) < 1: - raise ValueError - - write(extract_volume_surface(Volume(vol)), savepath) - - -# Create a mesh for every submajor and major region -for i, submajor in tqdm(submajors.iterrows()): - structure = [ - s - for s in structures - if s["acronym"] == submajor["SubANC Name"].replace(" ", "-") - ][0] - savepath = str( - meshes_dir_path - / f'{structure["id"]}.obj'.replace(" ", "-") - .replace("/", "-") - .replace("\\", "-") - ) - if os.path.isfile(savepath): - continue - - regions = list( - clean_hierarchy.loc[ - clean_hierarchy.submajor == submajor["SubANC Name"] - ].minor.values - ) - if not regions: - continue - - vol = np.zeros_like(volume_data) - - for region in regions: - vol[volume_data == region] = 1 - - if np.max(vol) < 1: - continue - - write(extract_volume_surface(Volume(vol)), savepath) - - -for i, major in tqdm(majors.iterrows()): - if not isinstance(major["ANC Name"], str): - continue - structure = [ - s - for s in structures - if s["acronym"] == major["ANC Name"].replace(" ", "-") - ][0] - savepath = str( - meshes_dir_path - / f'{structure["id"]}.obj'.replace(" ", "-") - .replace("/", "-") - .replace("\\", "-") - ) - if os.path.isfile(savepath): - continue - - regions = list( - clean_hierarchy.loc[ - clean_hierarchy.major == major["ANC Name"] - ].minor.values - ) - if not regions: - continue - - vol = np.zeros_like(volume_data) - - for region in 
regions: - vol[volume_data == region] = 1 - - if np.max(vol) < 1: - continue - - write(extract_volume_surface(Volume(vol)), savepath) - - -# ---------------------------------------------------------------------------- # -# FINAL METADATA AND SAVE # -# ---------------------------------------------------------------------------- # - -metadata_dict = { - "name": ATLAS_NAME, - "species": "Rattus Norvegicus", - "citation": "Swanson 2018, https://pubmed.ncbi.nlm.nih.gov/29277900/", - "atlas_link": "", - "symmetric": False, - "resolution": (1.25, 1.25, 1.25), - "shape": loaded.shape, -} - -with open(uncompr_atlas_path / "atlas_metadata.json", "w") as f: - json.dump(metadata_dict, f) - - -# Create human readable files -create_metadata_files(uncompr_atlas_path, metadata_dict, structures) - - -# Compress folder: -output_filename = bg_root_dir / f"{uncompr_atlas_path.name}.tar.gz" -print(f"Saving compressed at {output_filename}") - -with tarfile.open(output_filename, "w:gz") as tar: - tar.add(uncompr_atlas_path, arcname=uncompr_atlas_path.name) diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py index 9f4d8c33..b9801cc0 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -113,6 +113,8 @@ def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): # Mesh creation closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation start = time.time() if PARALLEL: @@ -130,6 +132,8 @@ def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): annotated_volume, root_id, closing_n_iters, + decimate_fraction, + smooth, ) for node in tree.nodes.values() ], @@ -151,6 +155,8 @@ def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): annotated_volume, root_id, closing_n_iters, + decimate_fraction, + smooth, ) ) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index b03c07e4..42e0b2d1 100644 
--- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -15,17 +15,20 @@ + ' please install with "pip install PyMCubes -U"' ) - +from loguru import logger import numpy as np from pathlib import Path import scipy from bg_atlasgen.volume_utils import create_masked_array +# ---------------------------------------------------------------------------- # +# MESH CREATION # +# ---------------------------------------------------------------------------- # + + def region_mask_from_annotation( - structure_id, - annotation, - structures_list, + structure_id, annotation, structures_list, ): """Generate mask for a structure from an annotation file and a list of structures. @@ -53,20 +56,14 @@ def region_mask_from_annotation( return mask_stack -# ---------------------------------------------------------------------------- # -# MESH CREATION # -# ---------------------------------------------------------------------------- # - - def extract_mesh_from_mask( volume, obj_filepath=None, threshold=0.5, - smooth=False, + smooth: bool = False, mcubes_smooth=False, closing_n_iters=8, - decimate=True, - tol=0.0005, + decimate_fraction: float = 0.6, # keep 60% of original fertices use_marching_cubes=False, extract_largest=False, ): @@ -93,10 +90,13 @@ def extract_mesh_from_mask( closing_n_iters: int number of iterations of closing morphological operation. set to None to avoid applying morphological operations - decimate: bool - If True the number of vertices is reduced through decimation + decimate_fraction: float in range [0, 1]. + What fraction of the original number of vertices is to be kept. E.g. .5 means that + 50% of the vertices are kept, the others are removed tol: float - parameter for decimation, larger values correspond to more aggressive decimation + parameter for decimation, larger values correspond to more aggressive decimation. + E.g. 
0.02 -> points that are closer than 2% of the size of the meshe's bounding box are + identified and removed (only one is kep) extract_largest: bool If True only the largest region are extracted. It can cause issues for bilateral regions as only one will remain @@ -140,19 +140,20 @@ def extract_mesh_from_mask( vertices, triangles = mcubes.marching_cubes(smooth, 0) else: vertices, triangles = mcubes.marching_cubes(volume, 0.5) + # create mesh mesh = Mesh((vertices, triangles)) # Cleanup and save - if smooth: - mesh.smoothLaplacian() - - if decimate: - mesh.clean(tol=tol) - if extract_largest: mesh = mesh.extractLargestRegion() + # decimate + mesh.decimate(decimate_fraction, method="pro") + + if smooth: + mesh.smoothLaplacian() + if obj_filepath is not None: write(mesh, str(obj_filepath)) @@ -180,20 +181,22 @@ def create_region_mesh(args): ROOT_ID: int, id of root structure (mesh creation is a bit more refined for that) """ # Split arguments - ( - meshes_dir_path, - node, - tree, - labels, - annotated_volume, - ROOT_ID, - closing_n_iters, - ) = args + logger.debug(f"Creating mesh for region {args[1].identifier}") + meshes_dir_path = args[0] + node = args[1] + tree = args[2] + labels = args[3] + annotated_volume = args[4] + ROOT_ID = args[5] + closing_n_iters = args[6] + decimate_fraction = args[7] + smooth = args[8] # Avoid overwriting existing mesh savepath = meshes_dir_path / f"{node.identifier}.obj" - if savepath.exists(): - return + # if savepath.exists(): + # logger.debug(f"Mesh file save path exists already, skipping.") + # return # Get lables for region and it's children stree = tree.subtree(node.identifier) @@ -216,14 +219,18 @@ def create_region_mesh(args): else: if node.identifier == ROOT_ID: extract_mesh_from_mask( - mask, obj_filepath=savepath, smooth=True + mask, + obj_filepath=savepath, + smooth=smooth, + decimate_fraction=decimate_fraction, ) else: extract_mesh_from_mask( mask, obj_filepath=savepath, - smooth=True, + smooth=smooth, 
closing_n_iters=closing_n_iters, + decimate_fraction=decimate_fraction, ) @@ -278,7 +285,10 @@ def inspect_meshes_folder(folder): if not folder.exists(): raise FileNotFoundError("The folder passed doesnt exist") - Browser(load(str(folder))) + mesh_files = folder.glob("*.obj") + + Browser([load(str(mf)).c("w").lw(0.25).lc("k") for mf in mesh_files]) + logger.debug("visualization ready") show() From 4b6f765689826803e0d0541c7a676082c3762224 Mon Sep 17 00:00:00 2001 From: Kailyn Fields <84587654+kailynkfields@users.noreply.github.com> Date: Fri, 28 Jan 2022 11:01:03 -0500 Subject: [PATCH 044/103] Update azba_zfish.py Changing the reference file to be af and the additional references to be topro --- bg_atlasgen/atlas_scripts/azba_zfish.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index 4ddc8bb7..9fc54b1e 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -64,14 +64,14 @@ def create_atlas(working_dir, resolution): #paths structures_file = atlas_path / "2021-08-22_AZBA_labels.csv" annotations_file = atlas_path / "2021-08-22_AZBA_segmentation.tif" - reference_file = atlas_path / "20180219_AZBA_topro_average_2020.tif" - reference_af = atlas_path / "20180628_AZBA_AF_average.tif" + reference_topro = atlas_path / "20180219_AZBA_topro_average_2020.tif" + reference_file = atlas_path / "20180628_AZBA_AF_average.tif" meshes_dir_path = atlas_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) - #adding autofluorescence image as additional reference file, main reference file is topro - autofluo = tifffile.imread(reference_af) - ADDITIONAL_REFERENCES = {"autofluorescence" : autofluo} + #adding topro image as additional reference file, main reference file is autofl + topro = tifffile.imread(reference_topro) + ADDITIONAL_REFERENCES = {"TO-PRO" : topro} #open structures.csv and prep for dictionary parsing print("Creating 
structure tree") From d6ac4546a9e2aad1aa0dcb6d58348bfcca315709 Mon Sep 17 00:00:00 2001 From: Luigi Petrucco Date: Wed, 9 Feb 2022 23:12:23 +0100 Subject: [PATCH 045/103] some fixes for mpin atlas --- bg_atlasgen/atlas_scripts/azba_zfish.py | 4 +- bg_atlasgen/atlas_scripts/mpin_zfish.py | 70 +++++++++++++++++++++---- 2 files changed, 63 insertions(+), 11 deletions(-) diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index 9fc54b1e..6fa0b88f 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -7,7 +7,7 @@ """ -__version__ = "0" +__version__ = "1" import csv import time @@ -194,7 +194,7 @@ def create_atlas(working_dir, resolution): #import reference file with tifffile so it can be read in wrapup_atlas_from_data reference = tifffile.imread(reference_file) - inspect_meshes_folder(meshes_dir_path) + # inspect_meshes_folder(meshes_dir_path) # wrap up atlas file print("Finalising atlas") output_filename = wrapup_atlas_from_data( diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index e240c071..f5842f5f 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -1,12 +1,18 @@ -__version__ = "0" +__version__ = "1" from pathlib import Path import warnings import zipfile import requests import tarfile -import tifffile from tifffile import imread +from bg_atlasgen.mesh_utils import extract_mesh_from_mask +import tarfile +import zipfile +from pathlib import Path + +import numpy as np +from scipy.ndimage import binary_dilation, binary_erosion, binary_fill_holes from allensdk.core.structure_tree import StructureTree from bg_atlasgen.wrapup import wrapup_atlas_from_data @@ -24,7 +30,7 @@ def download_line_stack(bg_root_dir, tg_line_name): with zipfile.ZipFile(out_file_path, "r") as zip_ref: zip_ref.extractall(bg_root_dir) - return imread(str(next(bg_root_dir.glob("*.tif")))) + return 
imread(str(next(bg_root_dir.glob(f"*{tg_line_name}*.tif")))) def add_path_inplace(parent): @@ -102,6 +108,7 @@ def create_atlas(working_dir, resolution): ATLAS_LINK = "http://fishatlas.neuro.mpg.de" CITATION = "Kunst et al 2019, https://doi.org/10.1016/j.neuron.2019.04.034" ORIENTATION = "lai" + ATLAS_PACKAGER = "Luigi Petrucco, luigi.petrucco@gmail.com" # Download reference: ##################### @@ -114,7 +121,7 @@ def create_atlas(working_dir, resolution): additional_references[line] = download_line_stack(working_dir, line) # Download annotation and hemispheres from GIN repo: - gin_url = "https://gin.g-node.org/brainglobe/mpin_zfish/raw/master/mpin_zfish_annotations.tar.gz" + gin_url = "https://gin.g-node.org/brainglobe/mpin_zfish/raw/master/mpin_zfish_annotations_meshes.tar.gz" compressed_zip_path = working_dir / "annotations.tar" retrieve_over_http(gin_url, compressed_zip_path) @@ -123,11 +130,16 @@ def create_atlas(working_dir, resolution): extracted_dir = working_dir / "mpin_zfish_annotations" - annotation_stack = tifffile.imread( + annotation_stack = imread( str(extracted_dir / "mpin_zfish_annotation.tif") ) - hemispheres_stack = tifffile.imread( + # Pad 1 voxel around the whole annotation: + annotation_stack[[0, -1], :, :] = 0 + annotation_stack[:, [0, -1], :] = 0 + annotation_stack[:, :, [0, -1]] = 0 + + hemispheres_stack = imread( str(extracted_dir / "mpin_zfish_hemispheres.tif") ) @@ -141,6 +153,30 @@ def create_atlas(working_dir, resolution): k: v.swapaxes(0, 2) for k, v in additional_references.items() } + # Improve the annotation by defining a region that encompasses the whole brain but + # not the eyes. 
This will be aside from the official hierarchy: + BRAIN_ID = 2 # add this as not defined in the source + + # Ugly padding required not to have border artefacts in the binary operations: + + shape_stack = list(annotation_stack.shape) + pad = 100 + shape_stack[2] = shape_stack[2] + pad * 2 + brain_mask = np.zeros(shape_stack, dtype=np.uint8) + + # Exclude eyes from brain mask: + brain_mask[:, :, pad:-pad][(annotation_stack > 0) & (annotation_stack != 808)] = 255 + + # Perform binary operations: + brain_mask = binary_dilation(brain_mask, iterations=50) + brain_mask = binary_erosion(brain_mask, iterations=50) + brain_mask = binary_fill_holes(brain_mask) + + # Remove padding: + brain_mask = brain_mask[:, :, pad:-pad] + + annotation_stack[(annotation_stack == 0) & (brain_mask > 0)] = BRAIN_ID + # Download structures tree and meshes: ###################################### regions_url = f"{BASE_URL}/neurons/get_brain_regions" @@ -152,11 +188,12 @@ def create_atlas(working_dir, resolution): structures = requests.get(regions_url).json()["brain_regions"] # Initiate dictionary with root info: + ROOT_ID = 1 # add this as not defined in the source structures_dict = { "name": "root", - "id": 0, + "id": ROOT_ID, "sub_regions": structures.copy(), - "structure_id_path": [0], + "structure_id_path": [ROOT_ID], "acronym": "root", "files": { "file_3D": "/media/Neurons_database/Brain_and_regions/Brains/Outline/Outline_new.txt" @@ -174,6 +211,20 @@ def create_atlas(working_dir, resolution): structures_dict, structures_list, meshes_dir_path, meshes_dict ) + # Artificially add entry for brain region: + brain_struct_entry = { + "name": "brain", + "id": BRAIN_ID, + "structure_id_path": [ROOT_ID, BRAIN_ID], + "acronym": "brain", + "rgb_triplet": [255, 255, 255], + } + structures_list.append(brain_struct_entry) + + # Use recalculated meshes that are smoothed with Blender and uploaded in G-Node: + for sid in [ROOT_ID, BRAIN_ID]: + meshes_dict[sid] = extracted_dir / f"{sid}.stl" + # Wrap up, 
compress, and remove file:0 print(f"Finalising atlas") output_filename = wrapup_atlas_from_data( @@ -184,7 +235,7 @@ def create_atlas(working_dir, resolution): species=SPECIES, resolution=(RES_UM,) * 3, orientation=ORIENTATION, - root_id=0, + root_id=1, reference_stack=reference_stack, annotation_stack=annotation_stack, structures_list=structures_list, @@ -194,6 +245,7 @@ def create_atlas(working_dir, resolution): cleanup_files=False, compress=True, additional_references=additional_references, + atlas_packager=ATLAS_PACKAGER ) return output_filename From eea5259c02ef12b25a67277648ed4a6925a8c52a Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Mon, 30 May 2022 18:27:46 +0100 Subject: [PATCH 046/103] Fix tiny typo (#22) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e32919d3..da5fbe22 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi 2) Clone your repo ```bash -git clone https://github.com/USERNAME/bg-atlasgen) +git clone https://github.com/USERNAME/bg-atlasgen ``` 3) Install an editable version From 2e4320b91405bb8626bad8d561aea5c3602dc656 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 31 May 2022 13:45:29 +0100 Subject: [PATCH 047/103] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index da5fbe22..70f2847b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ # BG-AtlasGen +#### For full instructions to add a new BrainGlobe atlas, please see [here](https://docs.brainglobe.info/bg-atlasapi/adding-a-new-atlas). + Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi` module. 
From fcd5f8749bd92a9f900faeb88fe865011d97f5be Mon Sep 17 00:00:00 2001 From: Gubra-ApS <61732801+Gubra-ApS@users.noreply.github.com> Date: Tue, 31 May 2022 17:37:59 +0200 Subject: [PATCH 048/103] Add lsfm atlas script (#18) * Update lsfm mouse atlas script * 2nd update LSFM mouse atlas script Co-authored-by: Johanna Co-authored-by: Johanna Perens --- .../atlas_scripts/perens_lsfm_mouse.py | 335 ++++++++++++++++++ bg_atlasgen/mesh_utils.py | 4 +- 2 files changed, 337 insertions(+), 2 deletions(-) create mode 100644 bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py diff --git a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py new file mode 100644 index 00000000..57752028 --- /dev/null +++ b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py @@ -0,0 +1,335 @@ +__version__ = "0" + +import json +import time +import tarfile +import tifffile +import subprocess + +import pandas as pd +import numpy as np +import multiprocessing as mp +import SimpleITK as sitk + +from rich.progress import track +from pathlib import Path +from scipy.ndimage import zoom +# from allensdk.core.reference_space_cache import ReferenceSpaceCache +from bg_atlasapi import utils + +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + +PARALLEL = False # disable parallel mesh extraction for easier debugging + + +# %% +### Additional functions ##################################################### + +############################################################################## +def get_id_from_acronym(df, acronym): + ''' + Get Allen's brain atlas ID from brain region acronym(s) + + Call: + get_id_from_acronym(df, acronym) + + Args: + df (pandas dataframe) : atlas table file [see atlas.load_table()] + acronym (string or list of strings) : brain region acronym(s) + + Returns: + ID (int or list of ints) : brain region ID(s) corresponding 
to input acronym(s) + ''' + + # create as list if necessary + if not isinstance(acronym, list): + acronym = [acronym] + + if len(acronym) > 1: + ID_list = [] + for acro in acronym: + ID = df['id'][df['acronym'] == acro].item() + ID_list.append(ID) + return ID_list + else: + return df['id'][df['acronym'] == acronym[0]].item() + + # return df['id'][df['acronym'] == acronym].item() # OLD VERSION + + +def get_acronym_from_id(df, ID): + ''' + Get Allen's brain atlas acronym from brain region ID(s) + + Call: + get_acronym_from_ID(df, acronym) + + Args: + df (pandas dataframe) : atlas table dataframe [see atlas.load_table()] + ID (int or list of int) : brain region ID(s) + + Returns: + acronym (string or list of strings) : brain region acronym(s) corresponding to input ID(s) + ''' + + # create as list if necessary + if not isinstance(ID, list): + ID = [ID] + + if len(ID) > 1: + acronym_list = [] + for id in ID: + acronym = df['acronym'][df['id'] == id].item() + acronym_list.append(acronym) + return acronym_list + else: + return df['acronym'][df['id'] == ID[0]].item() + + +def tree_traverse_child2parent(df, child_id, ids): + parent = df['parent_id'][df['id'] == child_id].item() + + if not np.isnan(parent): + id = df['id'][df['id'] == parent].item() + ids.append(id) + tree_traverse_child2parent(df, parent, ids) + return ids + else: + return ids + + +def get_all_parents(df, key): + ''' + Get all parent IDs/acronyms in Allen's brain atlas hierarchical structure' + + Call: + get_all_children(df, key) + + Args: + df (pandas dataframe) : atlas table dataframe [see atlas.load_table()] + key (int/string) : atlas region ID/acronym + + Returns: + parents (list) : brain region acronym corresponding to input ID + ''' + + if isinstance(key, str): # if input is acronym convert to ID + list_parent_ids = tree_traverse_child2parent(df, get_id_from_acronym(df, key), []) + elif isinstance(key, int): + list_parent_ids = tree_traverse_child2parent(df, key, []) + + if isinstance(key, str): # if 
input is acronym convert IDs to acronyms + parents = [] + for id in list_parent_ids: + parents.append(get_acronym_from_id(df, id)) + elif isinstance(key, int): + parents = list_parent_ids.copy() + + return parents + + +############################################################################## + +############################################################################## +# %% + + +def create_atlas(working_dir, resolution): + ATLAS_NAME = "perens_lsfm_mouse" + SPECIES = "Mus musculus" + ATLAS_LINK = "https://github.com/Gubra-ApS/LSFM-mouse-brain-atlas" + CITATION = "Perens et al. 2021, https://doi.org/10.1007/s12021-020-09490-8" + ORIENTATION = "rai" + ROOT_ID = 997 + ANNOTATIONS_RES_UM = 20 + ATLAS_FILE_URL = "https://github.com/Gubra-ApS/LSFM-mouse-brain-atlas/archive/master.tar.gz" + + # Temporary folder for download: + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + atlas_files_dir = download_dir_path / "atlas_files" + + ## Download atlas_file + utils.check_internet_connection() + + destination_path = download_dir_path / "atlas_download.tar.gz" + utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) + + tar = tarfile.open(destination_path) + tar.extractall(path=atlas_files_dir) + tar.close() + + destination_path.unlink() + + # structures_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "ARA2_annotation_info.csv" + structures_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "ARA2_annotation_info_avail_regions.csv" + annotations_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "gubra_ano_olf.nii.gz" + reference_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "gubra_template_olf.nii.gz" + + annotated_volume = sitk.GetArrayFromImage(sitk.ReadImage(str(annotations_file))) + template_volume = sitk.GetArrayFromImage(sitk.ReadImage(str(reference_file))) + annotated_volume = 
np.rot90(annotated_volume, axes=(0, 2)) + template_volume = np.rot90(template_volume, axes=(0, 2)) + + print("Download completed...") + + # ---------------------------------------------------------------------------- # + # STRUCTURES HIERARCHY # + # ---------------------------------------------------------------------------- # + + # Parse region names & hierarchy + # ############################## + df = pd.read_csv(structures_file) + + # Make region hierarchy and gather colors to one list + parents = [] + rgb = [] + for index, row in df.iterrows(): + temp_id = row['id'] + temp_parents = get_all_parents(df, temp_id) + parents.append(temp_parents[::-1]) + + temp_rgb = [row['red'], row['green'], row['blue']] + rgb.append(temp_rgb) + + df = df.drop(columns=["parent_id", "red", "green", "blue"]) + df = df.assign(structure_id_path=parents) + df = df.assign(rgb_triplet=rgb) + df.loc[0, "structure_id_path"] = [997] + + structures = df.to_dict("records") + + for structure in structures: + # root doesn't have a parent + if structure["id"] != 997: + structure["structure_id_path"].append(structure["id"]) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # Create meshes: + print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + labels = np.unique(annotated_volume).astype(np.int32) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + start = time.time() + if PARALLEL: + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with 
returning results from pool.map but we don't care + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # ---------------------------------------------------------------------------- # + # WRAP UP # + # ---------------------------------------------------------------------------- # + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(resolution,) * 3, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True, + ) + + return output_filename + + +if __name__ == "__main__": + resolution = 20 # some resolution, in microns + + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "perens_lsfm_mouse" + bg_root_dir.mkdir(exist_ok=True, 
parents=True) + create_atlas(bg_root_dir, resolution) \ No newline at end of file diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 42e0b2d1..8d2fc903 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,6 +1,6 @@ try: from vedo import Mesh, write, load, show, Volume - from vedo.applications import Browser, SlicerPlotter + from vedo.applications import Browser, Slicer3DPlotter except ModuleNotFoundError: raise ModuleNotFoundError( "Mesh generation with these utils requires vedo\n" @@ -263,7 +263,7 @@ def compare_mesh_and_volume(mesh, volume): if isinstance(volume, np.ndarray): volume = Volume(volume) - vp = SlicerPlotter(volume, bg2="white", showHisto=False) + vp = Slicer3DPlotter(volume, bg2="white", showHisto=False) vp.add(mesh.alpha(0.5)) vp.show() From 7530600b8575113c03024c4b24642005574c2d6e Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 31 May 2022 16:38:18 +0100 Subject: [PATCH 049/103] Update requirements.txt --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index a3dc40ab..2f096f4c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,3 +23,4 @@ pre-commit PyMCubes bg_atlasapi xmltodict +SimpleITK From 6e2f8c74ff1ca2af9f7a0b4bbe38a468ec7afdba Mon Sep 17 00:00:00 2001 From: David Young Date: Fri, 1 Jul 2022 19:31:51 +0800 Subject: [PATCH 050/103] Add ADMBA 3D developing mouse atlases (#25) * Add ADMBA 3D developing mouse atlases Extend the `mouse_e15_5` script by @pr4deepr to build all the atlases in the 3D reconstructed version of the Allen Developing Mouse Brain Atlas series. This build script is essentially the same except for the addition of a data class to configure the atlas parameters, which is implemented for each atlas. * Fix atlas orientation to LSA Change the input orientation to LSA to fit the NumPy convention used in BrainGlobe. Add docstrings describing the orientation and resolution assumptions in the configuration data class. 
* Update package authors --- .../atlas_scripts/admba_3d_dev_mouse.py | 381 ++++++++++++++++++ 1 file changed, 381 insertions(+) create mode 100644 bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py diff --git a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py new file mode 100644 index 00000000..d82729d5 --- /dev/null +++ b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py @@ -0,0 +1,381 @@ +__version__ = "0" + +import dataclasses +import json +import time +import zipfile + +from os import listdir, path +from typing import Tuple + +import pandas as pd +import numpy as np +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path + +from bg_atlasapi import utils +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + +from skimage import io + +PARALLEL = True + + +def download_atlas_files(download_dir_path, atlas_file_url, ATLAS_NAME): + utils.check_internet_connection() + + atlas_files_dir = download_dir_path / ATLAS_NAME + try: + download_name = ATLAS_NAME + "_atlas.zip" + except TypeError: + download_name = ATLAS_NAME / "_atlas.zip" + destination_path = download_dir_path / download_name + utils.retrieve_over_http(atlas_file_url, destination_path) + + with zipfile.ZipFile(destination_path, "r") as zip_ref: + zip_ref.extractall(atlas_files_dir) + + return atlas_files_dir + + +def parse_structures(structures_file, root_id): + df = pd.read_csv(structures_file) + df = df.rename(columns={"Parent": "parent_structure_id"}) + df = df.rename(columns={"Region": "id"}) + df = df.rename(columns={"RegionName": "name"}) + df = df.rename(columns={"RegionAbbr": "acronym"}) + df = df.drop(columns=["Level"]) + # get length of labels so as to generate rgb values + no_items = df.shape[0] + # Random values for RGB + # could use this instead? 
+ rgb_list = [[ + np.random.randint(0, 255), np.random.randint(0, 255), + np.random.randint(0, 255)] for i in range(no_items)] + rgb_list = pd.DataFrame(rgb_list, columns=['red', 'green', 'blue']) + + df["rgb_triplet"] = rgb_list.apply( + lambda x: [x.red.item(), x.green.item(), x.blue.item()], axis=1) + df["structure_id_path"] = df.apply(lambda x: [x.id], axis=1) + structures = df.to_dict("records") + structures = create_structure_hierarchy(structures, df, root_id) + return structures + + +def create_structure_hierarchy(structures, df, root_id): + for structure in structures: + if structure["id"] != root_id: + parent_id = structure["parent_structure_id"] + while True: + structure["structure_id_path"] = [parent_id] + structure[ + "structure_id_path" + ] + if parent_id != root_id: + parent_id = int( + df[df["id"] == parent_id]["parent_structure_id"] + ) + else: + break + else: + structure["name"] = "root" + structure["acronym"] = "root" + + del structure["parent_structure_id"] + + return structures + + +def create_meshes(download_dir_path, structures, annotated_volume, root_id): + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + labels = np.unique(annotated_volume).astype(np.int32) + + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation + start = time.time() + if PARALLEL: + + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + decimate_fraction, + smooth, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + 
create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + root_id, + closing_n_iters, + decimate_fraction, + smooth, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + return meshes_dir_path + + +def create_mesh_dict(structures, meshes_dir_path): + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + return meshes_dict, structures_with_mesh + + +@dataclasses.dataclass +class AtlasConfig: + """Data class to configure atlas creation.""" + atlas_name: str + species: str + atlas_link: str + atlas_file_url: str + #: Input orientation in 3-letter notation using the NumPy system with origin + #: at top left corner of first plane. Axis 0 = front to back, 1 = top to + #: bottom, 2 = left to right. Output orientation will be ASR. + orientation: str + #: Resolution to match the output orientation of ASR. 
+ resolution: Tuple[float, float, float] + citation: str + root_id: int + atlas_packager: str + + +def create_atlas(working_dir: Path = Path.home(), + atlas_config: "AtlasConfig" = None): + assert len(atlas_config.orientation) == 3, \ + f"Orientation is not 3 characters, Got {atlas_config.orientation}" + assert len(atlas_config.resolution) == 3, \ + f"Resolution is not correct, Got {atlas_config.resolution}" + assert atlas_config.atlas_file_url, \ + f"No download link provided for atlas in {atlas_config.atlas_file_url}" + if type(working_dir) == str: + working_dir = Path(working_dir) + # Generated atlas path: + working_dir = (working_dir / "brainglobe_workingdir" / + atlas_config.atlas_name) + working_dir.mkdir(exist_ok=True, parents=True) + + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + if path.isdir(atlas_config.atlas_file_url): + print("Setting atlas to directory: ", atlas_config.atlas_file_url) + atlas_files_dir = atlas_config.atlas_file_url + else: + # Download atlas files from link provided + print("Downloading atlas from link: ", atlas_config.atlas_file_url) + atlas_files_dir = download_atlas_files( + download_dir_path, atlas_config.atlas_file_url, + atlas_config.atlas_name) + ## Load files + + structures_file = atlas_files_dir / ( + [f for f in listdir(atlas_files_dir) if "region_ids_ADMBA" in f][0]) + + reference_file = atlas_files_dir / ( + [f for f in listdir(atlas_files_dir) if "atlasVolume.mhd" in f][0]) + + annotations_file = atlas_files_dir / ( + [f for f in listdir(atlas_files_dir) if "annotation.mhd" in f][0]) + # segments_file = atlas_files_dir / "Segments.csv" + + annotated_volume = io.imread(annotations_file) + template_volume = io.imread(reference_file) + + ## Parse structure metadata + structures = parse_structures(structures_file, atlas_config.root_id) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # Create meshes: + 
print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = create_meshes( + download_dir_path, structures, annotated_volume, atlas_config.root_id + ) + + meshes_dict, structures_with_mesh = create_mesh_dict( + structures, meshes_dir_path + ) + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=atlas_config.atlas_name, + atlas_minor_version=__version__, + citation=atlas_config.citation, + atlas_link=atlas_config.atlas_link, + species=atlas_config.species, + resolution=atlas_config.resolution, + orientation=atlas_config.orientation, + root_id=atlas_config.root_id, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + atlas_packager=atlas_config.atlas_packager, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True + ) + print("Done. Atlas generated at: ", output_filename) + return output_filename + + +if __name__ == "__main__": + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" + bg_root_dir.mkdir(exist_ok=True, parents=True) + + # set up E11.5 atlas settings and use as template for rest of brains + e11_5_config = AtlasConfig( + atlas_name="admba_3d_e11_5_mouse", + species="Mus musculus", + atlas_link="https://search.kg.ebrains.eu/instances/8ab25629-bdac-47d0-bc86-6f3aa3885f29", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000023_3Drecon-ADMBA-E11pt5_pub", + orientation="lsa", + resolution=(16, 16, 20), + citation="Young et al. 
2021, https://doi.org/10.7554/eLife.61408", + root_id=15564, + atlas_packager="Pradeep Rajasekhar, WEHI, Australia, rajasekhardotp@wehidotedudotau; David Young, UCSF, United States, davedotyoung@ucsfdotedu", + ) + + # E13.5 atlas, with updated name and URLs + e13_5_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_e13_5_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/bdb89f61-8dc4-4255-b4d5-50d470958b58", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000024_3Drecon-ADMBA-E13pt5_pub", + ) + + # E15.5 atlas + e15_5_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_e15_5_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/Dataset/51a81ae5-d821-437a-a6d5-9b1f963cfe9b", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000025_3Drecon-ADMBA-E15pt5_pub", + ) + + # E18.5 atlas + e18_5_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_e18_5_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/633b41be-867a-4611-8570-82271aebd516", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000026_3Drecon-ADMBA-E18pt5_pub", + ) + + # P4 atlas, which has different resolutions + p4_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_p4_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/eea3589f-d74b-4988-8f4c-fd9ae8e3a4b3", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000027_3Drecon-ADMBA-P4_pub", + resolution=(16.752, 16.752, 20), + ) + + # P14 atlas, which has slightly different resolutions + p14_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_p14_mouse", + 
atlas_link="https://search.kg.ebrains.eu/instances/114e50aa-156c-4283-af73-11b7f03d287e", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000028_3Drecon-ADMBA-P14_pub", + resolution=(16.752, 16.752, 25), + ) + + # P28 atlas, which has same resolutions as P14 + p28_config = dataclasses.replace( + p14_config, + atlas_name="admba_3d_p28_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/3a1153f0-6779-43bd-9f02-f92700a585a4", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000029_3Drecon-ADMBA-P28_pub", + ) + + # P56 atlas, which has different resolutions + p56_config = dataclasses.replace( + e11_5_config, + atlas_name="admba_3d_p56_mouse", + atlas_link="https://search.kg.ebrains.eu/instances/a7e99105-1ec2-42e2-a53a-7aa0f2b78135", + atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000030_3Drecon-ADMBA-P56_pub", + resolution=(25, 25, 25), + ) + + # atlases to create + configs = ( + e11_5_config, + e13_5_config, + e15_5_config, + e18_5_config, + p4_config, + p14_config, + p28_config, + p56_config, + ) + + # create each atlas + for config in configs: + create_atlas(bg_root_dir, config) From c9d090d17bd4c0af85aa85b4fd9d745fc9b8d7b7 Mon Sep 17 00:00:00 2001 From: Sam Clothier Date: Wed, 31 Aug 2022 10:28:24 +0100 Subject: [PATCH 051/103] Add script for the Princeton mouse brain atlas. (#27) * Added atlas generation script for the Princeton mouse brain atlas. Modified mesh_utils.py for fix. * Modified atlas minor version, changed atlas orientation, and removed unnecessary html fetch of region hierarchy. * Finalised atlas minor version. 
Co-authored-by: Sam --- bg_atlasgen/atlas_scripts/princeton_mouse.py | 226 +++++++++++++++++++ bg_atlasgen/mesh_utils.py | 4 +- 2 files changed, 228 insertions(+), 2 deletions(-) create mode 100644 bg_atlasgen/atlas_scripts/princeton_mouse.py diff --git a/bg_atlasgen/atlas_scripts/princeton_mouse.py b/bg_atlasgen/atlas_scripts/princeton_mouse.py new file mode 100644 index 00000000..13a8f19a --- /dev/null +++ b/bg_atlasgen/atlas_scripts/princeton_mouse.py @@ -0,0 +1,226 @@ +__version__ = "0" +__atlas__ = "princeton_mouse" + +import tifffile +import os.path +import numpy as np +import pandas as pd +import json +import time +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path +from bg_atlasapi import utils +from scipy.ndimage import zoom + +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree + +PARALLEL = False + +def create_atlas(working_dir, resolution): + # Specify information about the atlas: + ATLAS_NAME = __atlas__ + SPECIES = "Mus musculus" + ATLAS_LINK = "https://brainmaps.princeton.edu/2020/09/princeton-mouse-brain-atlas-links/" + CITATION = "Pisano et al 2021, https://doi.org/10.1016/j.celrep.2021.109721" + ORIENTATION = "las" + ROOT_ID = 997 + ATLAS_RES = 20 + PACKAGER = "Sam Clothier. 
sam.clothier.18@ucl.ac.uk" + + # Download the atlas tissue and annotation TIFFs: + ###################################### + + reference_download_url = "https://brainmaps.princeton.edu/pma_tissue" + annotation_download_url = "https://brainmaps.princeton.edu/pma_annotations" + + # Temporary folder for nrrd files download: + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + + utils.check_internet_connection() + reference_dest_path = download_dir_path / "reference_download.tif" + annotation_dest_path = download_dir_path / "annotation_download.tif" + + if not os.path.isfile(reference_dest_path): + print('Downloading tissue volume...') + utils.retrieve_over_http(reference_download_url, reference_dest_path) + if not os.path.isfile(annotation_dest_path): + print("Downloading annotation stack...") + utils.retrieve_over_http(annotation_download_url, annotation_dest_path) + print("Download complete.") + + template_volume = tifffile.imread(reference_dest_path) + template_volume = np.array(template_volume) + annotated_volume = tifffile.imread(annotation_dest_path) + annotated_volume = np.array(annotated_volume) + + scaling = ATLAS_RES / resolution + annotated_volume = zoom( + annotated_volume, (scaling, scaling, scaling), order=0, prefilter=False + ) + + # Download structures tree and define regions: + ###################################### + + structures_download_url = "https://brainmaps.princeton.edu/pma_id_table" + structures_dest_path = download_dir_path / "structures_download.csv" + if not os.path.isfile(structures_dest_path): + utils.retrieve_over_http(structures_download_url, structures_dest_path) + + structures = pd.read_csv(structures_dest_path) + structures = structures.drop(columns=['parent_name','parent_acronym','voxels_in_structure']) + + # create structure_id_path column + def get_inheritance_list_from(id_val): + inheritance_list = [id_val] + def add_parent_id(child_id): + if child_id != 997: # don't look for the parent of 
the root area + parent_id = structures.loc[structures['id'] == child_id, 'parent_structure_id'].values[0] + inheritance_list.insert(0, int(parent_id)) + add_parent_id(parent_id) + add_parent_id(id_val) + return inheritance_list + structures['structure_id_path'] = structures['id'].map(lambda x: get_inheritance_list_from(x)) + + # create rgb_triplet column + structures['rgb_triplet'] = '[255, 255, 255]' + structures['rgb_triplet'] = structures['rgb_triplet'].map(lambda x: json.loads(x)) + + # order dataframe and convert to list of dictionaries specifying parameters for each area + structures = structures[['acronym', 'id', 'name', 'structure_id_path','rgb_triplet']] + structs_dict = structures.to_dict(orient='records') + print(structs_dict) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structs_dict, f) + + + # Create region meshes: + ###################################### + + print(f"Saving atlas data at {download_dir_path}") + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structs_dict) + rotated_annotations = np.rot90(annotated_volume, axes=(0, 2)) + + labels = np.unique(rotated_annotations).astype(np.int32) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + node.data = Region(is_label) + + # Mesh creation + closing_n_iters = 2 + decimate_fraction = 0.2 + smooth = False # smooth meshes after creation + start = time.time() + if PARALLEL: + pool = mp.Pool(mp.cpu_count() - 2) + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + rotated_annotations, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with returning results from pool.map but we don't care + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + 
description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + rotated_annotations, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + ) + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structs_with_mesh = [] + for s in structs_dict: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structs_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structs_with_mesh)} structures with mesh are kept" + ) + + # Wrap up, compress, and remove file: + print(f"Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(resolution,) * 3, + orientation=ORIENTATION, + root_id=997, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structs_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + atlas_packager=PACKAGER, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + ) + + return output_filename + + +if __name__ == "__main__": + RES_UM = 20 + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / __atlas__ + bg_root_dir.mkdir(exist_ok=True, parents=True) + + create_atlas(bg_root_dir, RES_UM) \ No newline at end of file diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 8d2fc903..42e0b2d1 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,6 +1,6 @@ try: from vedo import Mesh, write, load, show, Volume - from 
vedo.applications import Browser, Slicer3DPlotter + from vedo.applications import Browser, SlicerPlotter except ModuleNotFoundError: raise ModuleNotFoundError( "Mesh generation with these utils requires vedo\n" @@ -263,7 +263,7 @@ def compare_mesh_and_volume(mesh, volume): if isinstance(volume, np.ndarray): volume = Volume(volume) - vp = Slicer3DPlotter(volume, bg2="white", showHisto=False) + vp = SlicerPlotter(volume, bg2="white", showHisto=False) vp.add(mesh.alpha(0.5)) vp.show() From 9190e1e5b558fd32c1a9413d6fcdeea0ba7434b7 Mon Sep 17 00:00:00 2001 From: Joe Ziminski <55797454+JoeZiminski@users.noreply.github.com> Date: Fri, 6 Jan 2023 13:54:46 +0000 Subject: [PATCH 052/103] KimLabDevCCFv001 (neuroinformatics team form) (#31) * Create KimLabDevCCFv001.py * Update KimLabDevCCFv001.py * Update and rename KimLabDevCCFv001.py to KimLabDevCCF.py * Update KimLabDevCCF.py * first commit * updated up to mesh creation, working through VTK type error * kimlabdevccf working version 1 reference only * rename atlasgen file * start adding additional references * add filenames for additional references * fix unfinished handling of multi-templates * update pre-commit black version to fix error with _unicode fun import from click * tidy and install pre-commit * fix change of plotter I erronoeusly made when using older python version * testing kim atlsa with additional references * Update requirements.txt * add loguru requirement (at least windows 10) * fix VTK type error * remove path * remove test code * update to new download dir * used to run atlas * update atlas code for general use * remove standard tempalte from additional references * further clean up * edit to run in single script * fix False flag used to not run meshes * get working on UNIX * use PARALLEL and update atlas name * update template key STP to stp Co-authored-by: NovaFae <33562343+faekronman@users.noreply.github.com> Co-authored-by: Adam Tyson --- .pre-commit-config.yaml | 2 +- 
.../kim_developmental_ccf_mouse.py | 341 ++++++++++++++++++ bg_atlasgen/mesh_utils.py | 12 +- requirements.txt | 1 + 4 files changed, 350 insertions(+), 6 deletions(-) create mode 100644 bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 73b679a1..cf97b625 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/python/black - rev: 19.10b0 + rev: 22.10.0 hooks: - id: black pass_filenames: true diff --git a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py new file mode 100644 index 00000000..7526567e --- /dev/null +++ b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py @@ -0,0 +1,341 @@ +__version__ = "1" + +import json +import time +import tarfile + +import pandas as pd +import numpy as np +import multiprocessing as mp + +from rich.progress import track +from pathlib import Path +from scipy.ndimage import zoom + +from bg_atlasapi import utils +from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.wrapup import wrapup_atlas_from_data +from bg_atlasapi.structure_tree_util import get_structures_tree +import imio +import zipfile +import os + +PARALLEL = True # disable parallel mesh extraction for easier debugging + + +def clean_up_df_entries(df): + """ + Remove ' from string entries in the csv + """ + df["Acronym"] = df["Acronym"].apply(lambda x: x.replace("'", "")) + df["Name"] = df["Name"].apply(lambda x: x.replace("'", "")) + df["ID"] = df["ID"].apply( + lambda x: int(x) + ) # convert from numpy to int() for dumping as json + + ints = [int(ele) for ele in df["ID"]] + df["ID"] = ints + + +def get_structure_id_path_from_id(id, id_dict, root_id): + """ + Create the structure_id_path for a region + from a dict mapping id to parent_id + """ + structure_id_path = [id] + if id == root_id: + return structure_id_path + + while True: + + parent 
= int(id_dict[id]) + structure_id_path.insert(0, parent) + + if parent == root_id: + break + + id = parent + + return structure_id_path + + +def create_atlas(working_dir, resolution, reference_key, reference_filename, mesh_creation, existing_mesh_dir_path=None): + """""" + ATLAS_NAME = f"kim_dev_mouse_{reference_key}" + SPECIES = "Mus musculus" + ATLAS_LINK = "https://data.mendeley.com/datasets/2svx788ddf/1" + CITATION = "Kim, Yongsoo (2022), “KimLabDevCCFv001”, Mendeley Data, V1, doi: 10.17632/2svx788ddf.1" + ORIENTATION = "asl" + ROOT_ID = 99999999 + ANNOTATIONS_RES_UM = 10 + ATLAS_FILE_URL = "https://prod-dcd-datasets-cache-zipfiles.s3.eu-west-1.amazonaws.com/2svx788ddf-1.zip" + + # Temporary folder for download: + download_dir_path = working_dir / "downloads" + download_dir_path.mkdir(exist_ok=True) + atlas_files_dir = download_dir_path / "atlas_files" + + utils.check_internet_connection() + + destination_path = download_dir_path / "atlas_download" + + utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) + + with zipfile.ZipFile( + download_dir_path / "atlas_download", "r" + ) as zip_ref: + zip_ref.extractall(atlas_files_dir) + + destination_path.unlink() + + # Set paths to volumes + structures_file = ( + atlas_files_dir + / "KimLabDevCCFv001" + / "KimLabDevCCFv001_MouseOntologyStructure.csv" + ) + annotations_file = ( + atlas_files_dir + / "KimLabDevCCFv001" + / "10um" + / "KimLabDevCCFv001_Annotations_ASL_Oriented_10um.nii.gz" + ) + template_file = ( + atlas_files_dir + / "KimLabDevCCFv001" + / "10um" + / reference_filename + ) + + # ---------------------------------------------------------------------------- # + # GET TEMPLATE # + # ---------------------------------------------------------------------------- # + + # Load (and possibly downsample) annotated volume: + scaling = ANNOTATIONS_RES_UM / resolution + + annotated_volume = imio.load_nii(annotations_file, as_array=True) + template_volume = imio.load_nii(template_file, as_array=True) + + 
annotated_volume = zoom( + annotated_volume, (scaling, scaling, scaling), order=0, prefilter=False + ) + + # ---------------------------------------------------------------------------- # + # STRUCTURES HIERARCHY # + # ---------------------------------------------------------------------------- # + + # Parse region names & hierarchy + df = pd.read_csv(structures_file) + clean_up_df_entries(df) + + df.loc[len(df)] = ["root", ROOT_ID, "root", ROOT_ID] + df.append(["root", ROOT_ID, "root", ROOT_ID]) + + id_dict = dict(zip(df["ID"], df["Parent ID"])) + + assert id_dict[15564] == "[]" + id_dict[15564] = ROOT_ID + + structures = [] + for row in range(df.shape[0]): + + entry = { + "acronym": df["Acronym"][row], + "id": int(df["ID"][row]), # from np.int for JSON serialization + "name": df["Name"][row], + "structure_id_path": get_structure_id_path_from_id( + int(df["ID"][row]), id_dict, ROOT_ID + ), + "rgb_triplet": [255, 255, 255], + } + + structures.append(entry) + + # save regions list json: + with open(download_dir_path / "structures.json", "w") as f: + json.dump(structures, f) + + # ---------------------------------------------------------------------------- # + # Create Meshes # + # ---------------------------------------------------------------------------- # + + print(f"Saving atlas data at {download_dir_path}") + + if mesh_creation == "copy": + meshes_dir_path = Path(existing_mesh_dir_path) + else: + meshes_dir_path = download_dir_path / "meshes" + meshes_dir_path.mkdir(exist_ok=True) + + tree = get_structures_tree(structures) + + rotated_annotations = np.rot90( + annotated_volume, axes=(0, 2) + ) + + labels = np.unique(rotated_annotations).astype(np.int32) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + if mesh_creation == "generate": + + closing_n_iters = 2 + decimate_fraction = 0.04 + smooth = False # smooth meshes after creation + + start = time.time() + + if PARALLEL: 
+ + pool = mp.Pool(mp.cpu_count() - 2) + + try: + pool.map( + create_region_mesh, + [ + ( + meshes_dir_path, + node, + tree, + labels, + rotated_annotations, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + for node in tree.nodes.values() + ], + ) + except mp.pool.MaybeEncodingError: + pass # error with returning results from pool.map but we don't care + else: + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + rotated_annotations, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + ) + + print( + "Finished mesh extraction in: ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # Create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in structures: + # Check if a mesh was created + mesh_path = meshes_dir_path / f'{s["id"]}.obj' + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it") + continue + else: + # Check that the mesh actually exists (i.e. 
not empty) + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # ---------------------------------------------------------------------------- # + # WRAP UP # + # ---------------------------------------------------------------------------- # + + # Wrap up, compress, and remove file: + print("Finalising atlas") + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=(resolution,) * 3, + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=template_volume, + annotation_stack=annotated_volume, + structures_list=structures_with_mesh, + meshes_dict=meshes_dict, + working_dir=working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + scale_meshes=True, + ) + + return output_filename + + +if __name__ == "__main__": + """ + This atlas is too large to package into a single atlas. Hence it is split + with one atlas per reference. To avoid re-generating the meshes for each creation, + the script should be run once with mesh_creation = 'generate'. This will generate + the standard template atlas with the meshes. For the rest of the references, + use mesh_creation = 'copy' and set the existing_mesh_dir_path + to the previously-generated meshes. + + Note the decimate fraction is set to 0.04 to further reduce size of this large atlas. 
+ """ + resolution = 10 # some resolution, in microns (10, 25, 50, 100) + + # Generated atlas path: + bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" + bg_root_dir.mkdir(exist_ok=True, parents=True) + + # First create the standard template, including all meshes + + create_atlas(bg_root_dir, + resolution, + reference_key="stp", + reference_filename="CCFv3_average_template_ASL_Oriented_u16_10um.nii.gz", + mesh_creation="generate") + + # Now get the mesh path from the previously generated atlas and use this + # for all other atlases + + additional_references = { + "idisco": "KimLabDevCCFv001_iDiscoLSFM2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_a0": "KimLabDevCCFv001_P56_MRI-a02CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_adc": "KimLabDevCCFv001_P56_MRI-adc2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_dwi": "KimLabDevCCFv001_P56_MRI-dwi2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_fa": "KimLabDevCCFv001_P56_MRI-fa2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_mtr": "KimLabDevCCFv001_P56_MRI-MTR2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_t2": "KimLabDevCCFv001_P56_MRI-T22CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + } + + existing_mesh_dir_path = bg_root_dir / "downloads" / "meshes" + + for reference_key, reference_filename in additional_references.items(): + create_atlas(bg_root_dir, + resolution, + reference_key, + reference_filename, + mesh_creation="copy", + existing_mesh_dir_path=existing_mesh_dir_path) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 42e0b2d1..d9259119 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,6 +1,6 @@ try: from vedo import Mesh, write, load, show, Volume - from vedo.applications import Browser, SlicerPlotter + from vedo.applications import Browser, Slicer3DPlotter except ModuleNotFoundError: raise ModuleNotFoundError( "Mesh generation with these utils requires vedo\n" @@ -28,7 +28,9 @@ def region_mask_from_annotation( - structure_id, 
annotation, structures_list, + structure_id, + annotation, + structures_list, ): """Generate mask for a structure from an annotation file and a list of structures. @@ -121,10 +123,10 @@ def extract_mesh_from_mask( # Apply morphological transformations if closing_n_iters is not None: - volume = scipy.ndimage.morphology.binary_fill_holes(volume) + volume = scipy.ndimage.morphology.binary_fill_holes(volume).astype(int) volume = scipy.ndimage.morphology.binary_closing( volume, iterations=closing_n_iters - ) + ).astype(int) if not use_marching_cubes: # Use faster algorithm @@ -263,7 +265,7 @@ def compare_mesh_and_volume(mesh, volume): if isinstance(volume, np.ndarray): volume = Volume(volume) - vp = SlicerPlotter(volume, bg2="white", showHisto=False) + vp = Slicer3DPlotter(volume, bg2="white", showHisto=False) vp.add(mesh.alpha(0.5)) vp.show() diff --git a/requirements.txt b/requirements.txt index 2f096f4c..2bda406b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,3 +24,4 @@ PyMCubes bg_atlasapi xmltodict SimpleITK +loguru From d5dbe275656e2cfa684aec41ae75764ad1402c2d Mon Sep 17 00:00:00 2001 From: David Stansby Date: Wed, 1 Feb 2023 10:06:42 +0000 Subject: [PATCH 053/103] Add testing config (#35) * Add testing config * Specify branchs for workflow trigger * Bump pre-commit versions * Fix cov flag * Run black on code base * Clean up requirements * Move requirements to setup.py * Fix setup.py black --- .github/workflows/test_and_deploy.yml | 35 ++++ .pre-commit-config.yaml | 9 +- .../atlas_scripts/admba_3d_dev_mouse.py | 121 ++++++----- bg_atlasgen/atlas_scripts/allen_cord.py | 4 +- bg_atlasgen/atlas_scripts/azba_zfish.py | 188 ++++++++++-------- .../kim_developmental_ccf_mouse.py | 58 +++--- bg_atlasgen/atlas_scripts/mpin_zfish.py | 15 +- .../atlas_scripts/perens_lsfm_mouse.py | 70 ++++--- bg_atlasgen/atlas_scripts/princeton_mouse.py | 41 ++-- requirements.txt | 27 --- setup.py | 20 +- tox.ini | 15 ++ 12 files changed, 357 insertions(+), 246 deletions(-) 
create mode 100644 .github/workflows/test_and_deploy.yml delete mode 100644 requirements.txt create mode 100644 tox.ini diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml new file mode 100644 index 00000000..fbfc212b --- /dev/null +++ b/.github/workflows/test_and_deploy.yml @@ -0,0 +1,35 @@ +name: tests + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: brainglobe/actions/lint@v1 + + test: + needs: lint + name: ${{ matrix.os }} py${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + # Run across a mixture of Python versions and operating systems + include: + - os: ubuntu-latest + python-version: "3.11" + - os: macos-latest + python-version: "3.10" + - os: windows-latest + python-version: "3.9" + - os: ubuntu-latest + python-version: "3.8" + + steps: + - uses: brainglobe/actions/test@v1 + with: + python-version: ${{ matrix.python-version }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cf97b625..3256a40f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,6 @@ repos: - repo: https://github.com/python/black - rev: 22.10.0 + rev: 22.12.0 hooks: - id: black pass_filenames: true -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.7.9 - hooks: - - id: flake8 - pass_filenames: true - # this seems to need to be here in addition to setup.cfg - exclude: __init__.py \ No newline at end of file diff --git a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py index d82729d5..29142dbc 100644 --- a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py +++ b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py @@ -27,7 +27,7 @@ def download_atlas_files(download_dir_path, atlas_file_url, ATLAS_NAME): utils.check_internet_connection() - + atlas_files_dir = download_dir_path / ATLAS_NAME try: download_name = ATLAS_NAME + "_atlas.zip" @@ -35,10 
+35,10 @@ def download_atlas_files(download_dir_path, atlas_file_url, ATLAS_NAME): download_name = ATLAS_NAME / "_atlas.zip" destination_path = download_dir_path / download_name utils.retrieve_over_http(atlas_file_url, destination_path) - + with zipfile.ZipFile(destination_path, "r") as zip_ref: zip_ref.extractall(atlas_files_dir) - + return atlas_files_dir @@ -53,13 +53,19 @@ def parse_structures(structures_file, root_id): no_items = df.shape[0] # Random values for RGB # could use this instead? - rgb_list = [[ - np.random.randint(0, 255), np.random.randint(0, 255), - np.random.randint(0, 255)] for i in range(no_items)] - rgb_list = pd.DataFrame(rgb_list, columns=['red', 'green', 'blue']) - + rgb_list = [ + [ + np.random.randint(0, 255), + np.random.randint(0, 255), + np.random.randint(0, 255), + ] + for i in range(no_items) + ] + rgb_list = pd.DataFrame(rgb_list, columns=["red", "green", "blue"]) + df["rgb_triplet"] = rgb_list.apply( - lambda x: [x.red.item(), x.green.item(), x.blue.item()], axis=1) + lambda x: [x.red.item(), x.green.item(), x.blue.item()], axis=1 + ) df["structure_id_path"] = df.apply(lambda x: [x.id], axis=1) structures = df.to_dict("records") structures = create_structure_hierarchy(structures, df, root_id) @@ -83,36 +89,36 @@ def create_structure_hierarchy(structures, df, root_id): else: structure["name"] = "root" structure["acronym"] = "root" - + del structure["parent_structure_id"] - + return structures def create_meshes(download_dir_path, structures, annotated_volume, root_id): meshes_dir_path = download_dir_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) - + tree = get_structures_tree(structures) - + labels = np.unique(annotated_volume).astype(np.int32) - + for key, node in tree.nodes.items(): if key in labels: is_label = True else: is_label = False node.data = Region(is_label) - + # Mesh creation closing_n_iters = 2 decimate_fraction = 0.2 smooth = False # smooth meshes after creation start = time.time() if PARALLEL: - + pool = 
mp.Pool(mp.cpu_count() - 2) - + try: pool.map( create_region_mesh, @@ -135,9 +141,9 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): pass else: for node in track( - tree.nodes.values(), - total=tree.size(), - description="Creating meshes", + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", ): create_region_mesh( ( @@ -152,7 +158,7 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): smooth, ) ) - + print( "Finished mesh extraction in: ", round((time.time() - start) / 60, 2), @@ -175,10 +181,10 @@ def create_mesh_dict(structures, meshes_dir_path): if mesh_path.stat().st_size < 512: print(f"obj file for {s} is too small, ignoring it.") continue - + structures_with_mesh.append(s) meshes_dict[s["id"]] = mesh_path - + print( f"In the end, {len(structures_with_mesh)} structures with mesh are kept" ) @@ -188,6 +194,7 @@ def create_mesh_dict(structures, meshes_dir_path): @dataclasses.dataclass class AtlasConfig: """Data class to configure atlas creation.""" + atlas_name: str species: str atlas_link: str @@ -203,21 +210,26 @@ class AtlasConfig: atlas_packager: str -def create_atlas(working_dir: Path = Path.home(), - atlas_config: "AtlasConfig" = None): - assert len(atlas_config.orientation) == 3, \ - f"Orientation is not 3 characters, Got {atlas_config.orientation}" - assert len(atlas_config.resolution) == 3, \ - f"Resolution is not correct, Got {atlas_config.resolution}" - assert atlas_config.atlas_file_url, \ - f"No download link provided for atlas in {atlas_config.atlas_file_url}" +def create_atlas( + working_dir: Path = Path.home(), atlas_config: "AtlasConfig" = None +): + assert ( + len(atlas_config.orientation) == 3 + ), f"Orientation is not 3 characters, Got {atlas_config.orientation}" + assert ( + len(atlas_config.resolution) == 3 + ), f"Resolution is not correct, Got {atlas_config.resolution}" + assert ( + atlas_config.atlas_file_url + ), f"No download link provided for atlas in 
{atlas_config.atlas_file_url}" if type(working_dir) == str: working_dir = Path(working_dir) # Generated atlas path: - working_dir = (working_dir / "brainglobe_workingdir" / - atlas_config.atlas_name) + working_dir = ( + working_dir / "brainglobe_workingdir" / atlas_config.atlas_name + ) working_dir.mkdir(exist_ok=True, parents=True) - + download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) if path.isdir(atlas_config.atlas_file_url): @@ -227,40 +239,45 @@ def create_atlas(working_dir: Path = Path.home(), # Download atlas files from link provided print("Downloading atlas from link: ", atlas_config.atlas_file_url) atlas_files_dir = download_atlas_files( - download_dir_path, atlas_config.atlas_file_url, - atlas_config.atlas_name) + download_dir_path, + atlas_config.atlas_file_url, + atlas_config.atlas_name, + ) ## Load files - + structures_file = atlas_files_dir / ( - [f for f in listdir(atlas_files_dir) if "region_ids_ADMBA" in f][0]) - + [f for f in listdir(atlas_files_dir) if "region_ids_ADMBA" in f][0] + ) + reference_file = atlas_files_dir / ( - [f for f in listdir(atlas_files_dir) if "atlasVolume.mhd" in f][0]) - + [f for f in listdir(atlas_files_dir) if "atlasVolume.mhd" in f][0] + ) + annotations_file = atlas_files_dir / ( - [f for f in listdir(atlas_files_dir) if "annotation.mhd" in f][0]) + [f for f in listdir(atlas_files_dir) if "annotation.mhd" in f][0] + ) # segments_file = atlas_files_dir / "Segments.csv" - + annotated_volume = io.imread(annotations_file) template_volume = io.imread(reference_file) - + ## Parse structure metadata structures = parse_structures(structures_file, atlas_config.root_id) - + # save regions list json: with open(download_dir_path / "structures.json", "w") as f: json.dump(structures, f) - + # Create meshes: print(f"Saving atlas data at {download_dir_path}") meshes_dir_path = create_meshes( download_dir_path, structures, annotated_volume, atlas_config.root_id ) - + meshes_dict, structures_with_mesh = 
create_mesh_dict( structures, meshes_dir_path ) - + # Wrap up, compress, and remove file: print("Finalising atlas") output_filename = wrapup_atlas_from_data( @@ -281,7 +298,7 @@ def create_atlas(working_dir: Path = Path.home(), hemispheres_stack=None, cleanup_files=False, compress=True, - scale_meshes=True + scale_meshes=True, ) print("Done. Atlas generated at: ", output_filename) return output_filename @@ -291,7 +308,7 @@ def create_atlas(working_dir: Path = Path.home(), # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" bg_root_dir.mkdir(exist_ok=True, parents=True) - + # set up E11.5 atlas settings and use as template for rest of brains e11_5_config = AtlasConfig( atlas_name="admba_3d_e11_5_mouse", @@ -363,7 +380,7 @@ def create_atlas(working_dir: Path = Path.home(), atlas_file_url="https://data.kg.ebrains.eu/zip?container=https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000030_3Drecon-ADMBA-P56_pub", resolution=(25, 25, 25), ) - + # atlases to create configs = ( e11_5_config, @@ -375,7 +392,7 @@ def create_atlas(working_dir: Path = Path.home(), p28_config, p56_config, ) - + # create each atlas for config in configs: create_atlas(bg_root_dir, config) diff --git a/bg_atlasgen/atlas_scripts/allen_cord.py b/bg_atlasgen/atlas_scripts/allen_cord.py index f19bedc1..4a18c324 100644 --- a/bg_atlasgen/atlas_scripts/allen_cord.py +++ b/bg_atlasgen/atlas_scripts/allen_cord.py @@ -156,7 +156,9 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): else: print(f"Creating {len(nodes)} meshes") for node in track( - nodes, total=len(nodes), description="Creating meshes", + nodes, + total=len(nodes), + description="Creating meshes", ): create_region_mesh( ( diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index 6fa0b88f..cd43c345 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -20,115 +20,125 @@ from 
rich.progress import track from pathlib import Path -from bg_atlasgen.mesh_utils import Region, create_region_mesh, inspect_meshes_folder +from bg_atlasgen.mesh_utils import ( + Region, + create_region_mesh, + inspect_meshes_folder, +) from bg_atlasapi.structure_tree_util import get_structures_tree from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi import utils -PARALLEL = False #Disable for debugging mesh creation - +PARALLEL = False # Disable for debugging mesh creation + + def create_atlas(working_dir, resolution): - - #metadata + + # metadata ATLAS_NAME = "azba_zfish" SPECIES = "Danio rerio" ATLAS_LINK = "http://www.azba.wayne.edu" CITATION = "Kenney et al. 2021, https://doi.org/10.7554/elife.69988" ATLAS_FILE_URL = "http://www.azba.wayne.edu/2021-08-22_AZBA.tar.gz" ORIENTATION = "las" - ROOT_ID = 9999 + ROOT_ID = 9999 ATLAS_PACKAGER = "Kailyn Fields, kailyn.fields@wayne.edu" ADDITIONAL_METADATA = {} - - - #setup folder for downloading - working_dir = working_dir / ATLAS_NAME + + # setup folder for downloading + working_dir = working_dir / ATLAS_NAME working_dir.mkdir(exist_ok=True) download_dir_path = working_dir / "downloads" download_dir_path.mkdir(exist_ok=True) atlas_path = download_dir_path / f"{ATLAS_NAME}" - - #download atlas files + + # download atlas files utils.check_internet_connection() destination_path = download_dir_path / "atlas_download" utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) - - #unpack the atlas download folder + + # unpack the atlas download folder tar = tarfile.open(destination_path) tar.extractall(path=atlas_path) tar.close() destination_path.unlink() - + print("Atlas files download completed") - - #paths + + # paths structures_file = atlas_path / "2021-08-22_AZBA_labels.csv" annotations_file = atlas_path / "2021-08-22_AZBA_segmentation.tif" reference_topro = atlas_path / "20180219_AZBA_topro_average_2020.tif" reference_file = atlas_path / "20180628_AZBA_AF_average.tif" meshes_dir_path = atlas_path / 
"meshes" meshes_dir_path.mkdir(exist_ok=True) - - #adding topro image as additional reference file, main reference file is autofl + + # adding topro image as additional reference file, main reference file is autofl topro = tifffile.imread(reference_topro) - ADDITIONAL_REFERENCES = {"TO-PRO" : topro} - - #open structures.csv and prep for dictionary parsing + ADDITIONAL_REFERENCES = {"TO-PRO": topro} + + # open structures.csv and prep for dictionary parsing print("Creating structure tree") zfishFile = open(structures_file) zfishDictReader = csv.DictReader(zfishFile) - - #empty list to populate with dictionaries + + # empty list to populate with dictionaries hierarchy = [] - - #parse through csv file and populate hierarchy list + + # parse through csv file and populate hierarchy list for row in zfishDictReader: hierarchy.append(row) - - #make string to int and list of int conversions in 'id', 'structure_id_path', and 'rgb_triplet' key values + + # make string to int and list of int conversions in 'id', 'structure_id_path', and 'rgb_triplet' key values for i in range(0, len(hierarchy)): - hierarchy[i]['id'] = int(hierarchy[i]['id']) + hierarchy[i]["id"] = int(hierarchy[i]["id"]) for j in range(0, len(hierarchy)): - hierarchy[j]['structure_id_path'] = list(map(int, hierarchy[j]['structure_id_path'].split("/"))) - for k in range(0, len(hierarchy)): + hierarchy[j]["structure_id_path"] = list( + map(int, hierarchy[j]["structure_id_path"].split("/")) + ) + for k in range(0, len(hierarchy)): try: - hierarchy[k]['rgb_triplet'] = list(map(int,hierarchy[k]['rgb_triplet'].split("/"))) + hierarchy[k]["rgb_triplet"] = list( + map(int, hierarchy[k]["rgb_triplet"].split("/")) + ) except ValueError: - hierarchy[k]['rgb_triplet'] = [255, 255, 255] - - #remove clear label (id 0) from hierarchy. 
ITK-Snap uses this to label unlabeled areas, but this convention - #interferes with the root mask generation and is unnecessary for this application - hierarchy.remove(hierarchy[1]) - - #use tifffile to read annotated file + hierarchy[k]["rgb_triplet"] = [255, 255, 255] + + # remove clear label (id 0) from hierarchy. ITK-Snap uses this to label unlabeled areas, but this convention + # interferes with the root mask generation and is unnecessary for this application + hierarchy.remove(hierarchy[1]) + + # use tifffile to read annotated file annotated_volume = tifffile.imread(annotations_file) - + print(f"Saving atlas data at {atlas_path}") tree = get_structures_tree(hierarchy) - print(f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}") - - #generate binary mask for mesh creation + print( + f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + ) + + # generate binary mask for mesh creation labels = np.unique(annotated_volume).astype(np.int_) for key, node in tree.nodes.items(): if key in labels: is_label = True else: is_label = False - + node.data = Region(is_label) - - #mesh creation + + # mesh creation closing_n_iters = 2 start = time.time() decimate_fraction = 0.3 smooth = True - + if PARALLEL: - + print("Multiprocessing mesh creation...") pool = mp.Pool(int(mp.cpu_count() / 2)) - + try: pool.map( create_region_mesh, @@ -140,22 +150,23 @@ def create_atlas(working_dir, resolution): labels, annotated_volume, ROOT_ID, - closing_n_iters + closing_n_iters, ) for node in tree.nodes.values() ], ) except mp.pool.MaybeEncodingError: pass - + else: - + print("Multiprocessing disabled") # nodes = list(tree.nodes.values()) # nodes = choices(nodes, k=10) - for node in track(tree.nodes.values(), + for node in track( + tree.nodes.values(), total=tree.size(), - description = "Creating meshes", + description="Creating meshes", ): create_region_mesh( ( @@ -167,61 +178,68 @@ def create_atlas(working_dir, resolution): ROOT_ID, 
closing_n_iters, decimate_fraction, - smooth + smooth, ) ) - - print("Finished mesh extraction in : ", round((time.time() - start) / 60, 2), " minutes") - - #create meshes dict + + print( + "Finished mesh extraction in : ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # create meshes dict meshes_dict = dict() structures_with_mesh = [] for s in hierarchy: - #check if a mesh was created + # check if a mesh was created mesh_path = meshes_dir_path / f"{s['id']}.obj" if not mesh_path.exists(): print(f"No mesh file exists for: {s}, ignoring it.") continue else: - #check that the mesh actually exists and isn't empty + # check that the mesh actually exists and isn't empty if mesh_path.stat().st_size < 512: print(f"obj file for {s} is too small, ignoring it.") continue structures_with_mesh.append(s) - meshes_dict[s['id']] = mesh_path - - print(f"In the end, {len(structures_with_mesh)} structures with mesh are kept") - - #import reference file with tifffile so it can be read in wrapup_atlas_from_data + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + ) + + # import reference file with tifffile so it can be read in wrapup_atlas_from_data reference = tifffile.imread(reference_file) # inspect_meshes_folder(meshes_dir_path) # wrap up atlas file print("Finalising atlas") output_filename = wrapup_atlas_from_data( - atlas_name = ATLAS_NAME, + atlas_name=ATLAS_NAME, atlas_minor_version=__version__, - citation = CITATION, - atlas_link = ATLAS_LINK, - species = SPECIES, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, resolution=(resolution,) * 3, - orientation = ORIENTATION, - root_id = ROOT_ID, - reference_stack = reference, - annotation_stack = annotations_file, - structures_list = hierarchy, - meshes_dict = meshes_dict, - working_dir = working_dir, - atlas_packager = ATLAS_PACKAGER, - additional_metadata = ADDITIONAL_METADATA, - additional_references = ADDITIONAL_REFERENCES, - 
) - + orientation=ORIENTATION, + root_id=ROOT_ID, + reference_stack=reference, + annotation_stack=annotations_file, + structures_list=hierarchy, + meshes_dict=meshes_dict, + working_dir=working_dir, + atlas_packager=ATLAS_PACKAGER, + additional_metadata=ADDITIONAL_METADATA, + additional_references=ADDITIONAL_REFERENCES, + ) + return output_filename - + + if __name__ == "__main__": resolution = 4 - - #generated atlas path + + # generated atlas path bg_root_dir = Path.home() / "brainglobe_workingdir" bg_root_dir.mkdir(exist_ok=True, parents=True) create_atlas(bg_root_dir, resolution) diff --git a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py index 7526567e..d64eca70 100644 --- a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py @@ -59,7 +59,14 @@ def get_structure_id_path_from_id(id, id_dict, root_id): return structure_id_path -def create_atlas(working_dir, resolution, reference_key, reference_filename, mesh_creation, existing_mesh_dir_path=None): +def create_atlas( + working_dir, + resolution, + reference_key, + reference_filename, + mesh_creation, + existing_mesh_dir_path=None, +): """""" ATLAS_NAME = f"kim_dev_mouse_{reference_key}" SPECIES = "Mus musculus" @@ -81,9 +88,7 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes utils.retrieve_over_http(ATLAS_FILE_URL, destination_path) - with zipfile.ZipFile( - download_dir_path / "atlas_download", "r" - ) as zip_ref: + with zipfile.ZipFile(download_dir_path / "atlas_download", "r") as zip_ref: zip_ref.extractall(atlas_files_dir) destination_path.unlink() @@ -101,10 +106,7 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes / "KimLabDevCCFv001_Annotations_ASL_Oriented_10um.nii.gz" ) template_file = ( - atlas_files_dir - / "KimLabDevCCFv001" - / "10um" - / reference_filename + atlas_files_dir / "KimLabDevCCFv001" / 
"10um" / reference_filename ) # ---------------------------------------------------------------------------- # @@ -170,9 +172,7 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes tree = get_structures_tree(structures) - rotated_annotations = np.rot90( - annotated_volume, axes=(0, 2) - ) + rotated_annotations = np.rot90(annotated_volume, axes=(0, 2)) labels = np.unique(rotated_annotations).astype(np.int32) for key, node in tree.nodes.items(): @@ -262,7 +262,7 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes print( f"In the end, {len(structures_with_mesh)} structures with mesh are kept" ) - + # ---------------------------------------------------------------------------- # # WRAP UP # # ---------------------------------------------------------------------------- # @@ -298,12 +298,12 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes with one atlas per reference. To avoid re-generating the meshes for each creation, the script should be run once with mesh_creation = 'generate'. This will generate the standard template atlas with the meshes. For the rest of the references, - use mesh_creation = 'copy' and set the existing_mesh_dir_path + use mesh_creation = 'copy' and set the existing_mesh_dir_path to the previously-generated meshes. - - Note the decimate fraction is set to 0.04 to further reduce size of this large atlas. + + Note the decimate fraction is set to 0.04 to further reduce size of this large atlas. 
""" - resolution = 10 # some resolution, in microns (10, 25, 50, 100) + resolution = 10 # some resolution, in microns (10, 25, 50, 100) # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "kim_mouse" @@ -311,11 +311,13 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes # First create the standard template, including all meshes - create_atlas(bg_root_dir, - resolution, - reference_key="stp", - reference_filename="CCFv3_average_template_ASL_Oriented_u16_10um.nii.gz", - mesh_creation="generate") + create_atlas( + bg_root_dir, + resolution, + reference_key="stp", + reference_filename="CCFv3_average_template_ASL_Oriented_u16_10um.nii.gz", + mesh_creation="generate", + ) # Now get the mesh path from the previously generated atlas and use this # for all other atlases @@ -333,9 +335,11 @@ def create_atlas(working_dir, resolution, reference_key, reference_filename, mes existing_mesh_dir_path = bg_root_dir / "downloads" / "meshes" for reference_key, reference_filename in additional_references.items(): - create_atlas(bg_root_dir, - resolution, - reference_key, - reference_filename, - mesh_creation="copy", - existing_mesh_dir_path=existing_mesh_dir_path) + create_atlas( + bg_root_dir, + resolution, + reference_key, + reference_filename, + mesh_creation="copy", + existing_mesh_dir_path=existing_mesh_dir_path, + ) diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index f5842f5f..118d7ed7 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -54,7 +54,10 @@ def add_path_inplace(parent): def collect_all_inplace( - node, traversing_list, download_path, mesh_dict, + node, + traversing_list, + download_path, + mesh_dict, ): """Recursively traverse a region hierarchy, download meshes, and append regions to a list inplace. 
@@ -130,9 +133,7 @@ def create_atlas(working_dir, resolution): extracted_dir = working_dir / "mpin_zfish_annotations" - annotation_stack = imread( - str(extracted_dir / "mpin_zfish_annotation.tif") - ) + annotation_stack = imread(str(extracted_dir / "mpin_zfish_annotation.tif")) # Pad 1 voxel around the whole annotation: annotation_stack[[0, -1], :, :] = 0 @@ -165,7 +166,9 @@ def create_atlas(working_dir, resolution): brain_mask = np.zeros(shape_stack, dtype=np.uint8) # Exclude eyes from brain mask: - brain_mask[:, :, pad:-pad][(annotation_stack > 0) & (annotation_stack != 808)] = 255 + brain_mask[:, :, pad:-pad][ + (annotation_stack > 0) & (annotation_stack != 808) + ] = 255 # Perform binary operations: brain_mask = binary_dilation(brain_mask, iterations=50) @@ -245,7 +248,7 @@ def create_atlas(working_dir, resolution): cleanup_files=False, compress=True, additional_references=additional_references, - atlas_packager=ATLAS_PACKAGER + atlas_packager=ATLAS_PACKAGER, ) return output_filename diff --git a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py index 57752028..4f0cdc45 100644 --- a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py +++ b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py @@ -14,6 +14,7 @@ from rich.progress import track from pathlib import Path from scipy.ndimage import zoom + # from allensdk.core.reference_space_cache import ReferenceSpaceCache from bg_atlasapi import utils @@ -29,7 +30,7 @@ ############################################################################## def get_id_from_acronym(df, acronym): - ''' + """ Get Allen's brain atlas ID from brain region acronym(s) Call: @@ -41,7 +42,7 @@ def get_id_from_acronym(df, acronym): Returns: ID (int or list of ints) : brain region ID(s) corresponding to input acronym(s) - ''' + """ # create as list if necessary if not isinstance(acronym, list): @@ -50,17 +51,17 @@ def get_id_from_acronym(df, acronym): if len(acronym) > 1: ID_list = [] for acro in 
acronym: - ID = df['id'][df['acronym'] == acro].item() + ID = df["id"][df["acronym"] == acro].item() ID_list.append(ID) return ID_list else: - return df['id'][df['acronym'] == acronym[0]].item() + return df["id"][df["acronym"] == acronym[0]].item() # return df['id'][df['acronym'] == acronym].item() # OLD VERSION def get_acronym_from_id(df, ID): - ''' + """ Get Allen's brain atlas acronym from brain region ID(s) Call: @@ -72,7 +73,7 @@ def get_acronym_from_id(df, ID): Returns: acronym (string or list of strings) : brain region acronym(s) corresponding to input ID(s) - ''' + """ # create as list if necessary if not isinstance(ID, list): @@ -81,18 +82,18 @@ def get_acronym_from_id(df, ID): if len(ID) > 1: acronym_list = [] for id in ID: - acronym = df['acronym'][df['id'] == id].item() + acronym = df["acronym"][df["id"] == id].item() acronym_list.append(acronym) return acronym_list else: - return df['acronym'][df['id'] == ID[0]].item() + return df["acronym"][df["id"] == ID[0]].item() def tree_traverse_child2parent(df, child_id, ids): - parent = df['parent_id'][df['id'] == child_id].item() + parent = df["parent_id"][df["id"] == child_id].item() if not np.isnan(parent): - id = df['id'][df['id'] == parent].item() + id = df["id"][df["id"] == parent].item() ids.append(id) tree_traverse_child2parent(df, parent, ids) return ids @@ -101,7 +102,7 @@ def tree_traverse_child2parent(df, child_id, ids): def get_all_parents(df, key): - ''' + """ Get all parent IDs/acronyms in Allen's brain atlas hierarchical structure' Call: @@ -113,10 +114,12 @@ def get_all_parents(df, key): Returns: parents (list) : brain region acronym corresponding to input ID - ''' + """ if isinstance(key, str): # if input is acronym convert to ID - list_parent_ids = tree_traverse_child2parent(df, get_id_from_acronym(df, key), []) + list_parent_ids = tree_traverse_child2parent( + df, get_id_from_acronym(df, key), [] + ) elif isinstance(key, int): list_parent_ids = tree_traverse_child2parent(df, key, []) @@ 
-164,12 +167,31 @@ def create_atlas(working_dir, resolution): destination_path.unlink() # structures_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "ARA2_annotation_info.csv" - structures_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "ARA2_annotation_info_avail_regions.csv" - annotations_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "gubra_ano_olf.nii.gz" - reference_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "gubra_template_olf.nii.gz" + structures_file = ( + atlas_files_dir + / "LSFM-mouse-brain-atlas-master" + / "LSFM_atlas_files" + / "ARA2_annotation_info_avail_regions.csv" + ) + annotations_file = ( + atlas_files_dir + / "LSFM-mouse-brain-atlas-master" + / "LSFM_atlas_files" + / "gubra_ano_olf.nii.gz" + ) + reference_file = ( + atlas_files_dir + / "LSFM-mouse-brain-atlas-master" + / "LSFM_atlas_files" + / "gubra_template_olf.nii.gz" + ) - annotated_volume = sitk.GetArrayFromImage(sitk.ReadImage(str(annotations_file))) - template_volume = sitk.GetArrayFromImage(sitk.ReadImage(str(reference_file))) + annotated_volume = sitk.GetArrayFromImage( + sitk.ReadImage(str(annotations_file)) + ) + template_volume = sitk.GetArrayFromImage( + sitk.ReadImage(str(reference_file)) + ) annotated_volume = np.rot90(annotated_volume, axes=(0, 2)) template_volume = np.rot90(template_volume, axes=(0, 2)) @@ -187,11 +209,11 @@ def create_atlas(working_dir, resolution): parents = [] rgb = [] for index, row in df.iterrows(): - temp_id = row['id'] + temp_id = row["id"] temp_parents = get_all_parents(df, temp_id) parents.append(temp_parents[::-1]) - temp_rgb = [row['red'], row['green'], row['blue']] + temp_rgb = [row["red"], row["green"], row["blue"]] rgb.append(temp_rgb) df = df.drop(columns=["parent_id", "red", "green", "blue"]) @@ -253,9 +275,9 @@ def create_atlas(working_dir, resolution): pass # error with returning results from pool.map but we 
don't care else: for node in track( - tree.nodes.values(), - total=tree.size(), - description="Creating meshes", + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", ): create_region_mesh( ( @@ -332,4 +354,4 @@ def create_atlas(working_dir, resolution): # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "perens_lsfm_mouse" bg_root_dir.mkdir(exist_ok=True, parents=True) - create_atlas(bg_root_dir, resolution) \ No newline at end of file + create_atlas(bg_root_dir, resolution) diff --git a/bg_atlasgen/atlas_scripts/princeton_mouse.py b/bg_atlasgen/atlas_scripts/princeton_mouse.py index 13a8f19a..2ed9259c 100644 --- a/bg_atlasgen/atlas_scripts/princeton_mouse.py +++ b/bg_atlasgen/atlas_scripts/princeton_mouse.py @@ -20,12 +20,15 @@ PARALLEL = False + def create_atlas(working_dir, resolution): # Specify information about the atlas: ATLAS_NAME = __atlas__ SPECIES = "Mus musculus" ATLAS_LINK = "https://brainmaps.princeton.edu/2020/09/princeton-mouse-brain-atlas-links/" - CITATION = "Pisano et al 2021, https://doi.org/10.1016/j.celrep.2021.109721" + CITATION = ( + "Pisano et al 2021, https://doi.org/10.1016/j.celrep.2021.109721" + ) ORIENTATION = "las" ROOT_ID = 997 ATLAS_RES = 20 @@ -46,7 +49,7 @@ def create_atlas(working_dir, resolution): annotation_dest_path = download_dir_path / "annotation_download.tif" if not os.path.isfile(reference_dest_path): - print('Downloading tissue volume...') + print("Downloading tissue volume...") utils.retrieve_over_http(reference_download_url, reference_dest_path) if not os.path.isfile(annotation_dest_path): print("Downloading annotation stack...") @@ -72,34 +75,46 @@ def create_atlas(working_dir, resolution): utils.retrieve_over_http(structures_download_url, structures_dest_path) structures = pd.read_csv(structures_dest_path) - structures = structures.drop(columns=['parent_name','parent_acronym','voxels_in_structure']) - + structures = structures.drop( + columns=["parent_name", 
"parent_acronym", "voxels_in_structure"] + ) + # create structure_id_path column def get_inheritance_list_from(id_val): inheritance_list = [id_val] + def add_parent_id(child_id): - if child_id != 997: # don't look for the parent of the root area - parent_id = structures.loc[structures['id'] == child_id, 'parent_structure_id'].values[0] + if child_id != 997: # don't look for the parent of the root area + parent_id = structures.loc[ + structures["id"] == child_id, "parent_structure_id" + ].values[0] inheritance_list.insert(0, int(parent_id)) add_parent_id(parent_id) + add_parent_id(id_val) return inheritance_list - structures['structure_id_path'] = structures['id'].map(lambda x: get_inheritance_list_from(x)) + + structures["structure_id_path"] = structures["id"].map( + lambda x: get_inheritance_list_from(x) + ) # create rgb_triplet column - structures['rgb_triplet'] = '[255, 255, 255]' - structures['rgb_triplet'] = structures['rgb_triplet'].map(lambda x: json.loads(x)) + structures["rgb_triplet"] = "[255, 255, 255]" + structures["rgb_triplet"] = structures["rgb_triplet"].map( + lambda x: json.loads(x) + ) # order dataframe and convert to list of dictionaries specifying parameters for each area - structures = structures[['acronym', 'id', 'name', 'structure_id_path','rgb_triplet']] - structs_dict = structures.to_dict(orient='records') + structures = structures[ + ["acronym", "id", "name", "structure_id_path", "rgb_triplet"] + ] + structs_dict = structures.to_dict(orient="records") print(structs_dict) # save regions list json: with open(download_dir_path / "structures.json", "w") as f: json.dump(structs_dict, f) - # Create region meshes: ###################################### @@ -223,4 +238,4 @@ def add_parent_id(child_id): bg_root_dir = Path.home() / "brainglobe_workingdir" / __atlas__ bg_root_dir.mkdir(exist_ok=True, parents=True) - create_atlas(bg_root_dir, RES_UM) \ No newline at end of file + create_atlas(bg_root_dir, RES_UM) diff --git a/requirements.txt 
b/requirements.txt deleted file mode 100644 index 2bda406b..00000000 --- a/requirements.txt +++ /dev/null @@ -1,27 +0,0 @@ -numpy -tifffile -treelib -pandas -requests -meshio -click -rich -tqdm>=4.46.1 -bg-space -sphinx -imio -vedo -recommonmark -sphinx_rtd_theme -pydoc-markdown -black -pytest-cov -pytest -gitpython -coverage -pre-commit -PyMCubes -bg_atlasapi -xmltodict -SimpleITK -loguru diff --git a/setup.py b/setup.py index 02f6cdfc..d84bf84f 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,22 @@ from setuptools import setup, find_namespace_packages -with open("requirements.txt") as f: - requirements = f.read().splitlines() - +requirements = [ + "numpy", + "tifffile", + "treelib", + "pandas", + "requests", + "meshio", + "rich", + "tqdm>=4.46.1", + "imio", + "vedo", + "PyMCubes", + "bg_atlasapi", + "xmltodict", + "SimpleITK", + "loguru", +] setup( name="bg-atlasgen", version="0.0.2", diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..e8310ab6 --- /dev/null +++ b/tox.ini @@ -0,0 +1,15 @@ +[tox] +envlist = py{38,39,310,311} + +[gh-actions] +python = + 3.8: py38 + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[testenv] +extras = + dev +commands = + python -c "import bg_atlasgen" From 577cd0620eed11f80e49e87c813f1e6f6c95d0aa Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 3 Feb 2023 13:52:23 +0000 Subject: [PATCH 054/103] Replace brainglobe with NI actions (#37) --- .github/workflows/test_and_deploy.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml index fbfc212b..a03b394a 100644 --- a/.github/workflows/test_and_deploy.yml +++ b/.github/workflows/test_and_deploy.yml @@ -10,7 +10,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: brainglobe/actions/lint@v1 + - uses: neuroinformatics-unit/actions/lint@v1 test: needs: lint @@ -30,6 +30,6 @@ jobs: python-version: "3.8" steps: - - uses: brainglobe/actions/test@v1 + - uses: 
neuroinformatics-unit/actions/test@v1 with: python-version: ${{ matrix.python-version }} From a226d5dc88659c59c536524c36b41bf497e9d772 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 21 Feb 2023 16:59:12 +0000 Subject: [PATCH 055/103] Use pyproject.toml --- pyproject.toml | 106 +++++++++++++++++++++++++++++++++++++++++-------- setup.cfg | 18 --------- setup.py | 45 --------------------- 3 files changed, 89 insertions(+), 80 deletions(-) delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/pyproject.toml b/pyproject.toml index 159ded91..deae0bea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,93 @@ +[project] +name = "bg-atlasgen" +description = "Scripts generation atlases and utilities for BrainGlobe" +readme = "README.md" +license = {file = "LICENSE"} +authors = [ + {name = "Luigi Petrucco, Federico Claudi, Adam Tyson", email = "code@adamltyson.com"}, +] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows :: Windows 10", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", +] +requires-python = ">=3.8" +dependencies = [ + "PyMCubes", + "SimpleITK", + "bg_atlasapi", + "imio", + "loguru", + "meshio", + "numpy", + "pandas", + "requests", + "rich", + "tifffile", + "tqdm>=4.46.1", + "treelib", + "vedo", + "xmltodict", +] +dynamic = ["version"] + +[project.urls] +Homepage = "https://github.com/brainglobe/bg-atlasgen" + +[project.optional-dependencies] +allenmouse = [ + "allensdk", +] + +[build-system] +requires = [ + "setuptools>=45", + "wheel", + "setuptools_scm[toml]>=6.2", +] +build-backend = "setuptools.build_meta" + +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +include = ["bg_atlasgen*"] + +[tool.pytest.ini_options] 
+addopts = "--cov=bg_atlasgen" + [tool.black] target-version = ['py36', 'py37', 'py38'] skip-string-normalization = false line-length = 79 -exclude = ''' -( - /( - \.eggs - | \.git - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist - | examples - )/ -) -''' \ No newline at end of file + +[tool.setuptools_scm] + +[tool.check-manifest] +ignore = [ + "*.yaml", + ".bumpversion.cfg", + "tox.ini", + "tests/*", + "tests/test_unit/*", + "tests/test_integration/*", + ".flake8" +] + +[tool.ruff] +line-length = 79 +exclude = ["__init__.py","build",".eggs"] +select = ["I", "E", "F"] +fix = true + +[tool.cibuildwheel] +build = "cp38-* cp39-* cp310-* cp311-*" + +[tool.cibuildwheel.macos] +archs = ["x86_64", "arm64"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 58690141..00000000 --- a/setup.cfg +++ /dev/null @@ -1,18 +0,0 @@ -[bumpversion] -current_version = 0.0.1 -commit = True -tag = True - -[bumpversion:file:setup.py] -search = version="{current_version}" -replace = version="{new_version}" - -[bumpversion:file:bg_atlasgen/__init__.py] -search = __version__ = "{current_version}" -replace = __version__ = "{new_version}" - -[flake8] -ignore = E203,W503,E501,E731,C901,W291,W293,E741 -max-line-length = 79 -max-complexity = 18 -exclude = __init__.py diff --git a/setup.py b/setup.py deleted file mode 100644 index d84bf84f..00000000 --- a/setup.py +++ /dev/null @@ -1,45 +0,0 @@ -from setuptools import setup, find_namespace_packages - -requirements = [ - "numpy", - "tifffile", - "treelib", - "pandas", - "requests", - "meshio", - "rich", - "tqdm>=4.46.1", - "imio", - "vedo", - "PyMCubes", - "bg_atlasapi", - "xmltodict", - "SimpleITK", - "loguru", -] -setup( - name="bg-atlasgen", - version="0.0.2", - description="Scripts generation atlases and utilities for BrainGlobe", - install_requires=requirements, - extras_require={"allenmouse": ["allensdk"]}, - python_requires=">=3.8", - entry_points={"console_scripts": []}, - 
packages=find_namespace_packages(exclude=("docs", "tests*")), - include_package_data=True, - url="https://github.com/brainglobe/bg-atlasgen", - author="Luigi Petrucco, Federico Claudi, Adam Tyson", - author_email="code@adamltyson.com", - classifiers=[ - "Development Status :: 3 - Alpha", - "Operating System :: POSIX :: Linux", - "Operating System :: Microsoft :: Windows :: Windows 10", - "Operating System :: MacOS :: MacOS X", - "Programming Language :: Python", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - ], - zip_safe=False, -) From c9b04b90e881c310115a7b1021eb88cd4cc584c8 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 24 Feb 2023 09:23:29 +0000 Subject: [PATCH 056/103] Update supported Python metadata --- pyproject.toml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index deae0bea..9bc59fc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,12 +10,12 @@ classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows :: Windows 10", - "Operating System :: POSIX :: Linux", + "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ] requires-python = ">=3.8" dependencies = [ @@ -63,7 +63,7 @@ include = ["bg_atlasgen*"] addopts = "--cov=bg_atlasgen" [tool.black] -target-version = ['py36', 'py37', 'py38'] +target-version = ['py38', 'py39', 'py310', 'py311'] skip-string-normalization = false line-length = 79 From d00db2f81c3b7a92f1e7928e4bd74e6415abe515 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 24 Feb 2023 09:24:01 +0000 Subject: 
[PATCH 057/103] Clean manifest check --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9bc59fc3..97bd7f58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,12 +72,10 @@ line-length = 79 [tool.check-manifest] ignore = [ "*.yaml", - ".bumpversion.cfg", "tox.ini", "tests/*", "tests/test_unit/*", "tests/test_integration/*", - ".flake8" ] [tool.ruff] From ebdb976d8190227fff89a47783d5d720ce7d297c Mon Sep 17 00:00:00 2001 From: David Stansby Date: Fri, 24 Feb 2023 11:39:50 +0000 Subject: [PATCH 058/103] Remove cibuildwheel config --- pyproject.toml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 97bd7f58..3901607d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,9 +83,3 @@ line-length = 79 exclude = ["__init__.py","build",".eggs"] select = ["I", "E", "F"] fix = true - -[tool.cibuildwheel] -build = "cp38-* cp39-* cp310-* cp311-*" - -[tool.cibuildwheel.macos] -archs = ["x86_64", "arm64"] From ea9acb6a348ac77633547f845b33f492fa9f74cf Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 27 Feb 2023 10:07:34 +0000 Subject: [PATCH 059/103] Catch warnings during tests (#39) --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 3901607d..24fc3a5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,6 +61,10 @@ include = ["bg_atlasgen*"] [tool.pytest.ini_options] addopts = "--cov=bg_atlasgen" +filterwarnings = [ + "error", +] + [tool.black] target-version = ['py38', 'py39', 'py310', 'py311'] From 3b92d544d1495a6a12ddca97912f73489dfb72af Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 27 Feb 2023 10:56:07 +0000 Subject: [PATCH 060/103] Add standard pre-commit config (#40) * Add standard pre-commit config * Add automated pre-commit fixes * Ignore line too long --- .pre-commit-config.yaml | 26 +++++++++++++++---- README.md | 10 +++---- .../atlas_scripts/admba_3d_dev_mouse.py | 18 +++++-------- 
bg_atlasgen/atlas_scripts/allen_cord.py | 25 ++++++++---------- bg_atlasgen/atlas_scripts/allen_mouse.py | 8 +++--- bg_atlasgen/atlas_scripts/azba_zfish.py | 19 +++++--------- bg_atlasgen/atlas_scripts/example_mouse.py | 5 ++-- bg_atlasgen/atlas_scripts/humanatlas.py | 16 ++++++------ .../kim_developmental_ccf_mouse.py | 23 ++++++---------- bg_atlasgen/atlas_scripts/kim_mouse.py | 23 +++++++--------- bg_atlasgen/atlas_scripts/mpin_zfish.py | 17 +++++------- bg_atlasgen/atlas_scripts/osten_mouse.py | 20 +++++++------- .../atlas_scripts/perens_lsfm_mouse.py | 21 ++++++--------- bg_atlasgen/atlas_scripts/princeton_mouse.py | 20 +++++++------- bg_atlasgen/atlas_scripts/whs_sd_rat.py | 3 +-- bg_atlasgen/main_script.py | 18 ++++++------- bg_atlasgen/mesh_utils.py | 9 ++++--- bg_atlasgen/metadata_utils.py | 11 ++++---- bg_atlasgen/structure_json_to_csv.py | 1 + bg_atlasgen/test_git.py | 3 ++- bg_atlasgen/volume_utils.py | 4 +-- bg_atlasgen/wrapup.py | 16 +++++------- pyproject.toml | 2 ++ 23 files changed, 149 insertions(+), 169 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3256a40f..40764052 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,22 @@ repos: -- repo: https://github.com/python/black - rev: 22.12.0 - hooks: - - id: black - pass_filenames: true + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-docstring-first + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: end-of-file-fixer + - id: mixed-line-ending + args: [--fix=lf] + - id: requirements-txt-fixer + - id: trailing-whitespace + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.0.240 + hooks: + - id: ruff + args: [ --config=pyproject.toml ] + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + args: [--config=pyproject.toml] diff --git a/README.md b/README.md index 70f2847b..8a356328 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ 
Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi ### To contribute 1) Fork this repo -2) Clone your repo +2) Clone your repo ```bash git clone https://github.com/USERNAME/bg-atlasgen ``` @@ -16,16 +16,16 @@ git clone https://github.com/USERNAME/bg-atlasgen 3) Install an editable version ```bash cd bg-atlasgen -pip install -e . +pip install -e . ``` -4) Create a script to package your atlas, and place into +4) Create a script to package your atlas, and place into `bg_atlasgen/atlas_scripts`. Please see other scripts for examples. -Your script should contain everything required to run. The raw data should be +Your script should contain everything required to run. The raw data should be hosted on a publicly accessible repository so that anyone can run the script to recreate the atlas. -If you need to add any dependencies, please add them as an extra in the +If you need to add any dependencies, please add them as an extra in the setup.py file, e.g.: ```python diff --git a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py index 29142dbc..6280c604 100644 --- a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py +++ b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py @@ -2,26 +2,23 @@ import dataclasses import json +import multiprocessing as mp import time import zipfile - from os import listdir, path +from pathlib import Path from typing import Tuple -import pandas as pd import numpy as np -import multiprocessing as mp - -from rich.progress import track -from pathlib import Path - +import pandas as pd from bg_atlasapi import utils -from bg_atlasgen.mesh_utils import create_region_mesh, Region -from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree - +from rich.progress import track from skimage import io +from bg_atlasgen.mesh_utils import Region, create_region_mesh +from bg_atlasgen.wrapup import wrapup_atlas_from_data + PARALLEL = 
True @@ -116,7 +113,6 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): smooth = False # smooth meshes after creation start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) try: diff --git a/bg_atlasgen/atlas_scripts/allen_cord.py b/bg_atlasgen/atlas_scripts/allen_cord.py index 4a18c324..97563573 100644 --- a/bg_atlasgen/atlas_scripts/allen_cord.py +++ b/bg_atlasgen/atlas_scripts/allen_cord.py @@ -1,31 +1,29 @@ __version__ = "1" import json +import multiprocessing as mp import time -import tifffile import zipfile +from pathlib import Path +from random import choices - -import pandas as pd import numpy as np -import multiprocessing as mp -from random import choices -from loguru import logger -from rich.progress import track -from pathlib import Path +import pandas as pd +import tifffile # import sys - # sys.path.append("./") - from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree +from loguru import logger +from rich.progress import track + from bg_atlasgen.mesh_utils import ( - create_region_mesh, Region, + create_region_mesh, inspect_meshes_folder, ) from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree PARALLEL = True TEST = False @@ -123,7 +121,7 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): nodes = list(tree.nodes.values()) if TEST: logger.info( - f"Creating atlas in test mode: selecting 10 random regions for mesh creation" + "Creating atlas in test mode: selecting 10 random regions for mesh creation" ) nodes = choices(nodes, k=10) @@ -289,7 +287,6 @@ def create_atlas(working_dir): if __name__ == "__main__": - # Generated atlas path: bg_root_dir = Path.home() / "brainglobe_workingdir" / "allen_cord_smooth" bg_root_dir.mkdir(exist_ok=True, parents=True) diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py b/bg_atlasgen/atlas_scripts/allen_mouse.py index 3d0eac53..1de0f46c 100644 
--- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -1,15 +1,15 @@ __version__ = "2" +from pathlib import Path + from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi from allensdk.core.reference_space_cache import ReferenceSpaceCache - +from bg_atlasapi import descriptors from requests import exceptions -from pathlib import Path from tqdm import tqdm from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi import descriptors def create_atlas(working_dir, resolution): @@ -83,7 +83,7 @@ def create_atlas(working_dir, resolution): ] # Wrap up, compress, and remove file:0 - print(f"Finalising atlas") + print("Finalising atlas") output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/bg_atlasgen/atlas_scripts/azba_zfish.py index cd43c345..6aa44252 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/bg_atlasgen/atlas_scripts/azba_zfish.py @@ -10,30 +10,27 @@ __version__ = "1" import csv -import time -import tifffile +import multiprocessing as mp import tarfile -from random import choices +import time +from pathlib import Path import numpy as np -import multiprocessing as mp - +import tifffile +from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from pathlib import Path + from bg_atlasgen.mesh_utils import ( Region, create_region_mesh, - inspect_meshes_folder, ) -from bg_atlasapi.structure_tree_util import get_structures_tree from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi import utils PARALLEL = False # Disable for debugging mesh creation def create_atlas(working_dir, resolution): - # metadata ATLAS_NAME = "azba_zfish" SPECIES = "Danio rerio" @@ -135,7 +132,6 @@ def create_atlas(working_dir, resolution): smooth = True if PARALLEL: - 
print("Multiprocessing mesh creation...") pool = mp.Pool(int(mp.cpu_count() / 2)) @@ -159,7 +155,6 @@ def create_atlas(working_dir, resolution): pass else: - print("Multiprocessing disabled") # nodes = list(tree.nodes.values()) # nodes = choices(nodes, k=10) diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index 75a71d20..5f757a0d 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -1,18 +1,17 @@ __version__ = "2" +from pathlib import Path + from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi from allensdk.core.reference_space_cache import ReferenceSpaceCache - from requests import exceptions -from pathlib import Path from tqdm import tqdm from bg_atlasgen.wrapup import wrapup_atlas_from_data def create_atlas(working_dir, resolution): - # Specify information about the atlas: RES_UM = resolution # 100 ATLAS_NAME = "example_mouse" diff --git a/bg_atlasgen/atlas_scripts/humanatlas.py b/bg_atlasgen/atlas_scripts/humanatlas.py index 461da014..93aa6eb1 100644 --- a/bg_atlasgen/atlas_scripts/humanatlas.py +++ b/bg_atlasgen/atlas_scripts/humanatlas.py @@ -1,25 +1,25 @@ import json -from rich.progress import track -import pandas as pd -import numpy as np -import time import multiprocessing as mp +import time from pathlib import Path + +import numpy as np +import pandas as pd import treelib -from brainio import brainio import urllib3 from allensdk.core.structure_tree import StructureTree +from bg_atlasapi.structure_tree_util import get_structures_tree +from brainio import brainio +from rich.progress import track # import sys - # sys.path.append("./") from bg_atlasgen.mesh_utils import ( - create_region_mesh, Region, + create_region_mesh, inspect_meshes_folder, ) from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree def 
prune_tree(tree): diff --git a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py index d64eca70..df309df0 100644 --- a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py @@ -1,24 +1,21 @@ __version__ = "1" import json +import multiprocessing as mp import time -import tarfile +import zipfile +from pathlib import Path -import pandas as pd +import imio import numpy as np -import multiprocessing as mp - +import pandas as pd +from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from pathlib import Path from scipy.ndimage import zoom -from bg_atlasapi import utils -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import Region, create_region_mesh from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree -import imio -import zipfile -import os PARALLEL = True # disable parallel mesh extraction for easier debugging @@ -47,7 +44,6 @@ def get_structure_id_path_from_id(id, id_dict, root_id): return structure_id_path while True: - parent = int(id_dict[id]) structure_id_path.insert(0, parent) @@ -141,7 +137,6 @@ def create_atlas( structures = [] for row in range(df.shape[0]): - entry = { "acronym": df["Acronym"][row], "id": int(df["ID"][row]), # from np.int for JSON serialization @@ -184,7 +179,6 @@ def create_atlas( node.data = Region(is_label) if mesh_creation == "generate": - closing_n_iters = 2 decimate_fraction = 0.04 smooth = False # smooth meshes after creation @@ -192,7 +186,6 @@ def create_atlas( start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) try: diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/bg_atlasgen/atlas_scripts/kim_mouse.py index 132d62f9..63e78bbf 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ 
b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -1,27 +1,25 @@ __version__ = "1" import json -import time +import multiprocessing as mp import tarfile -import tifffile +import time +from pathlib import Path -import pandas as pd import numpy as np -import multiprocessing as mp - -from rich.progress import track -from pathlib import Path -from scipy.ndimage import zoom +import pandas as pd +import tifffile from allensdk.core.reference_space_cache import ReferenceSpaceCache # import sys - # sys.path.append("./") - from bg_atlasapi import utils -from bg_atlasgen.mesh_utils import create_region_mesh, Region -from bg_atlasgen.wrapup import wrapup_atlas_from_data from bg_atlasapi.structure_tree_util import get_structures_tree +from rich.progress import track +from scipy.ndimage import zoom + +from bg_atlasgen.mesh_utils import Region, create_region_mesh +from bg_atlasgen.wrapup import wrapup_atlas_from_data PARALLEL = False # disable parallel mesh extraction for easier debugging @@ -135,7 +133,6 @@ def create_atlas(working_dir, resolution): start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) try: diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/bg_atlasgen/atlas_scripts/mpin_zfish.py index 118d7ed7..a09bf4a5 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/bg_atlasgen/atlas_scripts/mpin_zfish.py @@ -1,24 +1,19 @@ __version__ = "1" -from pathlib import Path -import warnings -import zipfile -import requests -import tarfile -from tifffile import imread -from bg_atlasgen.mesh_utils import extract_mesh_from_mask import tarfile +import warnings import zipfile from pathlib import Path import numpy as np +import requests +from allensdk.core.structure_tree import StructureTree +from bg_atlasapi.utils import retrieve_over_http from scipy.ndimage import binary_dilation, binary_erosion, binary_fill_holes +from tifffile import imread -from allensdk.core.structure_tree import StructureTree from bg_atlasgen.wrapup import wrapup_atlas_from_data -from 
bg_atlasapi.utils import retrieve_over_http - BASE_URL = r"https://fishatlas.neuro.mpg.de" @@ -229,7 +224,7 @@ def create_atlas(working_dir, resolution): meshes_dict[sid] = extracted_dir / f"{sid}.stl" # Wrap up, compress, and remove file:0 - print(f"Finalising atlas") + print("Finalising atlas") output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/bg_atlasgen/atlas_scripts/osten_mouse.py index 26b18b99..bd9e97aa 100644 --- a/bg_atlasgen/atlas_scripts/osten_mouse.py +++ b/bg_atlasgen/atlas_scripts/osten_mouse.py @@ -1,23 +1,22 @@ __version__ = "0" import json -import time +import multiprocessing as mp import tarfile -import tifffile +import time +from pathlib import Path -import pandas as pd import numpy as np -import multiprocessing as mp - -from rich.progress import track -from pathlib import Path -from scipy.ndimage import zoom +import pandas as pd +import tifffile from allensdk.core.reference_space_cache import ReferenceSpaceCache from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree +from rich.progress import track +from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import Region, create_region_mesh from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # disable parallel mesh extraction for easier debugging @@ -130,7 +129,6 @@ def create_atlas(working_dir, resolution): smooth = False # smooth meshes after creation start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) try: diff --git a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py index 4f0cdc45..8f4b4202 100644 --- a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py +++ b/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py @@ -1,26 +1,22 @@ __version__ = "0" 
import json -import time +import multiprocessing as mp import tarfile -import tifffile -import subprocess +import time +from pathlib import Path -import pandas as pd import numpy as np -import multiprocessing as mp +import pandas as pd import SimpleITK as sitk -from rich.progress import track -from pathlib import Path -from scipy.ndimage import zoom - # from allensdk.core.reference_space_cache import ReferenceSpaceCache from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree +from rich.progress import track -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import Region, create_region_mesh from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # disable parallel mesh extraction for easier debugging @@ -28,6 +24,7 @@ # %% ### Additional functions ##################################################### + ############################################################################## def get_id_from_acronym(df, acronym): """ @@ -146,7 +143,6 @@ def create_atlas(working_dir, resolution): CITATION = "Perens et al. 
2021, https://doi.org/10.1007/s12021-020-09490-8" ORIENTATION = "rai" ROOT_ID = 997 - ANNOTATIONS_RES_UM = 20 ATLAS_FILE_URL = "https://github.com/Gubra-ApS/LSFM-mouse-brain-atlas/archive/master.tar.gz" # Temporary folder for download: @@ -252,7 +248,6 @@ def create_atlas(working_dir, resolution): closing_n_iters = 2 start = time.time() if PARALLEL: - pool = mp.Pool(mp.cpu_count() - 2) try: diff --git a/bg_atlasgen/atlas_scripts/princeton_mouse.py b/bg_atlasgen/atlas_scripts/princeton_mouse.py index 2ed9259c..fa5f8ccd 100644 --- a/bg_atlasgen/atlas_scripts/princeton_mouse.py +++ b/bg_atlasgen/atlas_scripts/princeton_mouse.py @@ -1,22 +1,22 @@ __version__ = "0" __atlas__ = "princeton_mouse" -import tifffile -import os.path -import numpy as np -import pandas as pd import json -import time import multiprocessing as mp - -from rich.progress import track +import os.path +import time from pathlib import Path + +import numpy as np +import pandas as pd +import tifffile from bg_atlasapi import utils +from bg_atlasapi.structure_tree_util import get_structures_tree +from rich.progress import track from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import Region, create_region_mesh from bg_atlasgen.wrapup import wrapup_atlas_from_data -from bg_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False @@ -208,7 +208,7 @@ def add_parent_id(child_id): ) # Wrap up, compress, and remove file: - print(f"Finalising atlas") + print("Finalising atlas") output_filename = wrapup_atlas_from_data( atlas_name=ATLAS_NAME, atlas_minor_version=__version__, diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/bg_atlasgen/atlas_scripts/whs_sd_rat.py index b9801cc0..9fe976d2 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/bg_atlasgen/atlas_scripts/whs_sd_rat.py @@ -13,7 +13,7 @@ from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from 
bg_atlasgen.mesh_utils import create_region_mesh, Region +from bg_atlasgen.mesh_utils import Region, create_region_mesh from bg_atlasgen.wrapup import wrapup_atlas_from_data PARALLEL = True @@ -117,7 +117,6 @@ def create_meshes(download_dir_path, tree, annotated_volume, labels, root_id): smooth = False # smooth meshes after creation start = time.time() if PARALLEL: - pool = mp.Pool(min(mp.cpu_count() - 2, 16)) try: diff --git a/bg_atlasgen/main_script.py b/bg_atlasgen/main_script.py index 1fe45725..a9e1ffad 100644 --- a/bg_atlasgen/main_script.py +++ b/bg_atlasgen/main_script.py @@ -1,15 +1,16 @@ -from git import Repo -from git.exc import GitCommandError -from pathlib import Path import configparser -import bg_atlasgen -from importlib import import_module -from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name - import errno import os -import stat import shutil +import stat +from importlib import import_module +from pathlib import Path + +from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name +from git import Repo +from git.exc import GitCommandError + +import bg_atlasgen # Main dictionary specifying which atlases to generate # and with which resolutions: @@ -87,7 +88,6 @@ def delete_folder(path): bg_atlasgen_version == status["major_vers"] and script_version > status["minor_vers"] ): - # Loop over all resolutions: for resolution in resolutions: print(f"Generating {name}, {resolution} um...") diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index d9259119..03d171dc 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -1,5 +1,5 @@ try: - from vedo import Mesh, write, load, show, Volume + from vedo import Mesh, Volume, load, show, write from vedo.applications import Browser, Slicer3DPlotter except ModuleNotFoundError: raise ModuleNotFoundError( @@ -15,12 +15,13 @@ + ' please install with "pip install PyMCubes -U"' ) -from loguru import logger -import numpy as np from pathlib import Path + 
+import numpy as np import scipy -from bg_atlasgen.volume_utils import create_masked_array +from loguru import logger +from bg_atlasgen.volume_utils import create_masked_array # ---------------------------------------------------------------------------- # # MESH CREATION # diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index b97f72b9..18a4d8e0 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -1,18 +1,18 @@ """ - Automatic creation of + Automatic creation of . structures.csv . README.txt """ -import re import json +import re from datetime import datetime -from bg_atlasapi import descriptors import requests -from requests.exceptions import MissingSchema, InvalidURL, ConnectionError +from bg_atlasapi import descriptors +from bg_atlasapi.structure_tree_util import get_structures_tree +from requests.exceptions import ConnectionError, InvalidURL, MissingSchema from bg_atlasgen.structure_json_to_csv import convert_structure_json_to_csv -from bg_atlasapi.structure_tree_util import get_structures_tree def generate_metadata_dict( @@ -29,7 +29,6 @@ def generate_metadata_dict( additional_references, atlas_packager, ): - # Name should be author_species assert len(name.split("_")) >= 2 diff --git a/bg_atlasgen/structure_json_to_csv.py b/bg_atlasgen/structure_json_to_csv.py index 04c9468c..3021f3fd 100644 --- a/bg_atlasgen/structure_json_to_csv.py +++ b/bg_atlasgen/structure_json_to_csv.py @@ -1,4 +1,5 @@ from pathlib import Path + import pandas as pd diff --git a/bg_atlasgen/test_git.py b/bg_atlasgen/test_git.py index 05b2ff40..1488fed8 100644 --- a/bg_atlasgen/test_git.py +++ b/bg_atlasgen/test_git.py @@ -1,6 +1,7 @@ -from git import Repo from pathlib import Path +from git import Repo + GENERATION_DICT = dict(example_mouse=[100]) diff --git a/bg_atlasgen/volume_utils.py b/bg_atlasgen/volume_utils.py index 7de0ccf9..d2e2234c 100644 --- a/bg_atlasgen/volume_utils.py +++ b/bg_atlasgen/volume_utils.py @@ -10,9 +10,9 @@ + 
' please install with "pip install vedo -U"' ) -import imio - import os + +import imio import numpy as np diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index e26c3c20..03723d78 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -1,30 +1,27 @@ import json -import tarfile import shutil +import tarfile from pathlib import Path -import tifffile import bg_space as bgs import meshio as mio +import tifffile +from bg_atlasapi import descriptors +from bg_atlasapi.utils import atlas_name_from_repr +import bg_atlasgen from bg_atlasgen.metadata_utils import ( create_metadata_files, generate_metadata_dict, ) from bg_atlasgen.stacks import ( - save_reference, save_annotation, save_hemispheres, + save_reference, save_secondary_reference, ) - -import bg_atlasgen from bg_atlasgen.structures import check_struct_consistency -from bg_atlasapi import descriptors -from bg_atlasapi.utils import atlas_name_from_repr - - # This should be changed every time we make changes in the atlas # structure: ATLAS_VERSION = bg_atlasgen.__version__ @@ -137,7 +134,6 @@ def wrapup_atlas_from_data( # write tiff stacks: for stack, saving_function in zip(stack_list, saving_fun_list): - if isinstance(stack, str) or isinstance(stack, Path): stack = tifffile.imread(stack) diff --git a/pyproject.toml b/pyproject.toml index 24fc3a5a..798c8f29 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,4 +86,6 @@ ignore = [ line-length = 79 exclude = ["__init__.py","build",".eggs"] select = ["I", "E", "F"] +# E501 Line too long +ignore = ["E501"] fix = true From 423598b37fc2848b9d2f796ba804f5e212d45945 Mon Sep 17 00:00:00 2001 From: David Stansby Date: Mon, 20 Mar 2023 13:48:50 +0000 Subject: [PATCH 061/103] Fix branches that CI is run on (#42) --- .github/workflows/test_and_deploy.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml index a03b394a..ca332f7e 100644 --- 
a/.github/workflows/test_and_deploy.yml +++ b/.github/workflows/test_and_deploy.yml @@ -2,9 +2,8 @@ name: tests on: push: - branches: [ "master" ] + branches: [ main ] pull_request: - branches: [ "master" ] jobs: lint: From b1ee389a8227ab47bc0bbe48dd6d90c74b2bd504 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Mon, 19 Jun 2023 14:15:17 +0100 Subject: [PATCH 062/103] Update link (#48) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8a356328..dc7353fd 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # BG-AtlasGen -#### For full instructions to add a new BrainGlobe atlas, please see [here](https://docs.brainglobe.info/bg-atlasapi/adding-a-new-atlas). +#### For full instructions to add a new BrainGlobe atlas, please see [here](https://brainglobe.info/documentation/bg-atlasapi/adding-a-new-atlas.html). Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi` module. From a7e10eec322822e160cf543b50d87e4a1283cc5f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 20 Jun 2023 08:05:57 +0100 Subject: [PATCH 063/103] [pre-commit.ci] pre-commit autoupdate (#49) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/charliermarsh/ruff-pre-commit: v0.0.240 → v0.0.272](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.240...v0.0.272) - [github.com/psf/black: 23.1.0 → 23.3.0](https://github.com/psf/black/compare/23.1.0...23.3.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 40764052..9939e22b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,12 +11,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: 
https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.240 + rev: v0.0.272 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.1.0 + rev: 23.3.0 hooks: - id: black args: [--config=pyproject.toml] From fdf31908d97053a400ac092256cb68a94b22ffbb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 08:41:42 +0100 Subject: [PATCH 064/103] [pre-commit.ci] pre-commit autoupdate (#50) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/charliermarsh/ruff-pre-commit: v0.0.272 → v0.0.275](https://github.com/charliermarsh/ruff-pre-commit/compare/v0.0.272...v0.0.275) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9939e22b..fe59844a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.272 + rev: v0.0.275 hooks: - id: ruff args: [ --config=pyproject.toml ] From ad79edb761f3a09efd794d2c6f4d5067971a718d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 08:11:19 +0100 Subject: [PATCH 065/103] [pre-commit.ci] pre-commit autoupdate (#52) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - https://github.com/charliermarsh/ruff-pre-commit → https://github.com/astral-sh/ruff-pre-commit - [github.com/astral-sh/ruff-pre-commit: v0.0.275 → v0.0.276](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.275...v0.0.276) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- 
.pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe59844a..9fc0e4ab 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,8 +10,8 @@ repos: args: [--fix=lf] - id: requirements-txt-fixer - id: trailing-whitespace - - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.0.275 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.0.276 hooks: - id: ruff args: [ --config=pyproject.toml ] From a3200c34b47d2cf8ea0c1554e785b66ba29dd5db Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 11 Jul 2023 07:19:15 +0100 Subject: [PATCH 066/103] [pre-commit.ci] pre-commit autoupdate (#53) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.276 → v0.0.277](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.276...v0.0.277) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9fc0e4ab..64312322 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.276 + rev: v0.0.277 hooks: - id: ruff args: [ --config=pyproject.toml ] From ec229da4a36a8264aace220bb25632cccb5f0724 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 13:15:19 +0100 Subject: [PATCH 067/103] [pre-commit.ci] pre-commit autoupdate (#54) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.277 → 
v0.0.278](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.277...v0.0.278) - [github.com/psf/black: 23.3.0 → 23.7.0](https://github.com/psf/black/compare/23.3.0...23.7.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 64312322..7bd83508 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,12 +11,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.277 + rev: v0.0.278 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.3.0 + rev: 23.7.0 hooks: - id: black args: [--config=pyproject.toml] From 21a31620d6b88557ded14fb46094e627a8f9acff Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 25 Jul 2023 07:41:28 -0700 Subject: [PATCH 068/103] [pre-commit.ci] pre-commit autoupdate (#55) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.278 → v0.0.280](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.278...v0.0.280) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7bd83508..294b6741 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.278 + rev: v0.0.280 hooks: - id: ruff args: [ --config=pyproject.toml ] From f9e4cc158ab855e546713177f85b7a7db8ef2633 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 10:26:45 +0100 Subject: [PATCH 069/103] [pre-commit.ci] pre-commit autoupdate (#57) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.280 → v0.0.281](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.280...v0.0.281) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 294b6741..f02d58d7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.280 + rev: v0.0.281 hooks: - id: ruff args: [ --config=pyproject.toml ] From 2140031b20aed5cc3ebe984ff76fc57827652ea1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 8 Aug 2023 09:43:14 +0100 Subject: [PATCH 070/103] [pre-commit.ci] pre-commit autoupdate (#59) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.281 → v0.0.282](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.281...v0.0.282) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f02d58d7..3ddbd82e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.281 + rev: v0.0.282 hooks: - id: ruff args: [ --config=pyproject.toml ] From 
c73227543ece47fb72805fac99c8b72245c02940 Mon Sep 17 00:00:00 2001 From: Alessandro Felder Date: Thu, 10 Aug 2023 15:40:12 +0100 Subject: [PATCH 071/103] Update to newer vedo API (#58) --- bg_atlasgen/mesh_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/mesh_utils.py b/bg_atlasgen/mesh_utils.py index 03d171dc..2320edf7 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/bg_atlasgen/mesh_utils.py @@ -132,7 +132,7 @@ def extract_mesh_from_mask( if not use_marching_cubes: # Use faster algorithm volume = Volume(volume) - mesh = volume.clone().isosurface(threshold=threshold).cap() + mesh = volume.clone().isosurface(value=threshold).cap() else: print( "The marching cubes algorithm might be rotated compared to your volume data" From 5dd38905fec3d3d190112dea4fe9b6977ff0a298 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 17 Aug 2023 14:28:08 +0100 Subject: [PATCH 072/103] [pre-commit.ci] pre-commit autoupdate (#64) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.282 → v0.0.284](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.282...v0.0.284) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3ddbd82e..d3184ad4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.282 + rev: v0.0.284 hooks: - id: ruff args: [ --config=pyproject.toml ] From fc4e4cbabeda8794eb4e214c75fbb2fe14302a85 Mon Sep 17 00:00:00 2001 From: Alessandro Felder Date: Mon, 21 Aug 2023 16:40:21 +0200 Subject: [PATCH 073/103] scale mesh points before mapping to new 
anatomical space (#62) Co-authored-by: Will Graham <32364977+willGraham01@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- bg_atlasgen/wrapup.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bg_atlasgen/wrapup.py b/bg_atlasgen/wrapup.py index 03723d78..c82d1b3e 100644 --- a/bg_atlasgen/wrapup.py +++ b/bg_atlasgen/wrapup.py @@ -158,15 +158,15 @@ def wrapup_atlas_from_data( for mesh_id, meshfile in meshes_dict.items(): mesh = mio.read(meshfile) + if scale_meshes: + # Scale the mesh to the desired resolution, BEFORE transforming: + mesh.points *= resolution + # Reorient points: mesh.points = space_convention.map_points_to( descriptors.ATLAS_ORIENTATION, mesh.points ) - # Scale the mesh to be in microns, if necessary: - if scale_meshes: - mesh.points *= resolution - # Save in meshes dir: mio.write(mesh_dest_dir / f"{mesh_id}.obj", mesh) From 399b6b25dec0f6babcf95e400c0b1bffc8898423 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 22 Aug 2023 12:42:11 +0100 Subject: [PATCH 074/103] [pre-commit.ci] pre-commit autoupdate (#66) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.284 → v0.0.285](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.284...v0.0.285) * update type assertion --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Adam Tyson --- .pre-commit-config.yaml | 2 +- bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py | 2 +- bg_atlasgen/metadata_utils.py | 4 ++-- bg_atlasgen/structures.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d3184ad4..5b58b31e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml 
@@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.284 + rev: v0.0.285 hooks: - id: ruff args: [ --config=pyproject.toml ] diff --git a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py index 6280c604..4cacdb39 100644 --- a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py +++ b/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py @@ -218,7 +218,7 @@ def create_atlas( assert ( atlas_config.atlas_file_url ), f"No download link provided for atlas in {atlas_config.atlas_file_url}" - if type(working_dir) == str: + if isinstance(working_dir, str): working_dir = Path(working_dir) # Generated atlas path: working_dir = ( diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index 18a4d8e0..f6c985fd 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -48,14 +48,14 @@ def generate_metadata_dict( ) # Enforce correct format for symmetric, resolution and shape: - assert type(symmetric) == bool + assert isinstance(symmetric, bool) assert len(resolution) == 3 assert len(shape) == 3 resolution = tuple([float(v) for v in resolution]) shape = tuple(int(v) for v in shape) - assert type(additional_references) == list + assert isinstance(additional_references, list) return dict( name=name, diff --git a/bg_atlasgen/structures.py b/bg_atlasgen/structures.py index c8c90f81..4266988a 100644 --- a/bg_atlasgen/structures.py +++ b/bg_atlasgen/structures.py @@ -12,8 +12,8 @@ def check_struct_consistency(structures): ------- """ - assert type(structures) == list - assert type(structures[0]) == dict + assert isinstance(structures, list) + assert isinstance(structures[0], dict) # Check that all structures have the correct keys and value types: for struct in structures: From d946d4f777bb5e9e9157ad5c788e75fdfcda47d9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 29 Aug 2023 10:13:46 +0100 Subject: [PATCH 075/103] [pre-commit.ci] pre-commit autoupdate (#67) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.285 → v0.0.286](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.285...v0.0.286) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5b58b31e..f8e50f12 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.285 + rev: v0.0.286 hooks: - id: ruff args: [ --config=pyproject.toml ] From 70fd9012238e3194239fe1787048767057ae75c2 Mon Sep 17 00:00:00 2001 From: Adam Tyson Date: Tue, 29 Aug 2023 10:46:46 +0100 Subject: [PATCH 076/103] Set pre-commit autoupdate frequency to monthly (#68) --- .pre-commit-config.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f8e50f12..df8a0a83 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,7 @@ +# Configuring https://pre-commit.ci/ +ci: + autoupdate_schedule: monthly + repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 From 495cdc3ef798e5dbe2f1534429bee6a22b5d66e6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 11:22:07 +0100 Subject: [PATCH 077/103] [pre-commit.ci] pre-commit autoupdate (#70) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.286 → 
v0.0.287](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.286...v0.0.287) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index df8a0a83..0aee931d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,7 +15,7 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.286 + rev: v0.0.287 hooks: - id: ruff args: [ --config=pyproject.toml ] From 510f7d3dc54c9dfdc1c03d959fa3860d1188cae8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 10:10:57 +0100 Subject: [PATCH 078/103] [pre-commit.ci] pre-commit autoupdate (#83) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.0.287 → v0.0.292](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.287...v0.0.292) - [github.com/psf/black: 23.7.0 → 23.9.1](https://github.com/psf/black/compare/23.7.0...23.9.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0aee931d..6ac9461a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,12 +15,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.287 + rev: v0.0.292 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.9.1 hooks: - id: black args: [--config=pyproject.toml] From 8e71c2288a61efc063fdd19ddb407215f7c6b95e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 7 Nov 2023 10:47:51 +0000 Subject: [PATCH 079/103] [pre-commit.ci] pre-commit autoupdate (#95) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.4.0 → v4.5.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.4.0...v4.5.0) - [github.com/astral-sh/ruff-pre-commit: v0.0.292 → v0.1.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.0.292...v0.1.4) - [github.com/psf/black: 23.9.1 → 23.10.1](https://github.com/psf/black/compare/23.9.1...23.10.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ac9461a..4438f0ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,7 +4,7 @@ ci: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-docstring-first - id: check-executables-have-shebangs @@ -15,12 +15,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.292 + rev: v0.1.4 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.9.1 + rev: 23.10.1 hooks: - id: black args: [--config=pyproject.toml] From d304152996e96c05d9c110c4e7ce5e0647bd8ba3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 5 Dec 2023 08:32:24 +0000 Subject: [PATCH 080/103] [pre-commit.ci] pre-commit autoupdate (#107) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.4 → v0.1.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.4...v0.1.6) - [github.com/psf/black: 23.10.1 → 
23.11.0](https://github.com/psf/black/compare/23.10.1...23.11.0) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4438f0ca..9dfd5e16 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,12 +15,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.4 + rev: v0.1.6 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.10.1 + rev: 23.11.0 hooks: - id: black args: [--config=pyproject.toml] From 24bae4796e375bf6a759517a4b7a231f542fc88a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 07:35:57 +0000 Subject: [PATCH 081/103] [pre-commit.ci] pre-commit autoupdate (#109) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.6 → v0.1.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.6...v0.1.9) - [github.com/psf/black: 23.11.0 → 23.12.1](https://github.com/psf/black/compare/23.11.0...23.12.1) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9dfd5e16..2dac3f50 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,12 +15,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.6 + rev: v0.1.9 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.11.0 + rev: 23.12.1 hooks: - id: black args: [--config=pyproject.toml] From 
93e95ab2a97ad53bd1094cac3fed0e869c904e07 Mon Sep 17 00:00:00 2001 From: viktorpm <50667179+viktorpm@users.noreply.github.com> Date: Mon, 22 Jan 2024 17:24:31 +0000 Subject: [PATCH 082/103] draft validation functions (#90) * draft validation functions * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * run on all atlases, don't crash on assertion error * fixing atlas path * Clearer output printing * tidy up validation script, remove weird test_git * add dev install, make test structure, initial tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * add tests and return for _assert_close() * add test for validate mesh matches annotation * fix linting * update version for actions * drop py3.8 in tox, run pytest in tox * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix copy-paste error in pytest command * drop py3.8 from gh action workflow file too * Adding docstrings to validation script * Making path tests stricter, breaking up long strings, adding diff_tolerance argument to _assert_close function * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * restructuring validate_mesh_matches_image_extents function, adding comments * testing expected files and meshes directory separately * looping through validation functions and parameters to catch individual errors * removing hard coded path, generalising to all atlases * adding successful_validations list * tidying up duplications * fix recursive bug * addressing Niko's final comments, cleaning code --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Alessandro Felder Co-authored-by: alessandrofelder --- .github/workflows/test_and_deploy.yml | 6 +- bg_atlasgen/test_git.py | 21 ---- bg_atlasgen/validate_atlases.py | 168 
++++++++++++++++++++++++++ pyproject.toml | 19 ++- tests/__init__.py | 0 tests/test_unit/test_validation.py | 54 +++++++++ tox.ini | 5 +- 7 files changed, 239 insertions(+), 34 deletions(-) delete mode 100644 bg_atlasgen/test_git.py create mode 100644 bg_atlasgen/validate_atlases.py create mode 100644 tests/__init__.py create mode 100644 tests/test_unit/test_validation.py diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml index ca332f7e..7831c081 100644 --- a/.github/workflows/test_and_deploy.yml +++ b/.github/workflows/test_and_deploy.yml @@ -9,7 +9,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: neuroinformatics-unit/actions/lint@v1 + - uses: neuroinformatics-unit/actions/lint@v2 test: needs: lint @@ -25,10 +25,8 @@ jobs: python-version: "3.10" - os: windows-latest python-version: "3.9" - - os: ubuntu-latest - python-version: "3.8" steps: - - uses: neuroinformatics-unit/actions/test@v1 + - uses: neuroinformatics-unit/actions/test@v2 with: python-version: ${{ matrix.python-version }} diff --git a/bg_atlasgen/test_git.py b/bg_atlasgen/test_git.py deleted file mode 100644 index 1488fed8..00000000 --- a/bg_atlasgen/test_git.py +++ /dev/null @@ -1,21 +0,0 @@ -from pathlib import Path - -from git import Repo - -GENERATION_DICT = dict(example_mouse=[100]) - - -cwd = Path.home() / "bg_auto" -cwd.mkdir(exist_ok=True) - - -if __name__ == "__main__": - repo_path = cwd / "atlas_repo" - atlas_gen_path = Path(__file__).parent - - repo = Repo(repo_path) - - # repo.git.add(".") - # repo.git.commit('-m', 'test commit', author='luigi.petrucco@gmail.com') - repo.git.pull() - repo.git.push() diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py new file mode 100644 index 00000000..f886af4c --- /dev/null +++ b/bg_atlasgen/validate_atlases.py @@ -0,0 +1,168 @@ +"""Script to validate atlases""" + + +from pathlib import Path + +import numpy as np +from bg_atlasapi import BrainGlobeAtlas +from bg_atlasapi.config 
import get_brainglobe_dir +from bg_atlasapi.list_atlases import ( + get_all_atlases_lastversions, + get_atlases_lastversions, +) +from bg_atlasapi.update_atlases import update_atlas + + +def validate_atlas_files(atlas_path: Path): + """Checks if basic files exist in the atlas folder""" + + assert atlas_path.is_dir(), f"Atlas path {atlas_path} not found" + expected_files = [ + "annotation.tiff", + "reference.tiff", + "metadata.json", + "structures.json", + ] + for expected_file_name in expected_files: + expected_path = Path(atlas_path / expected_file_name) + assert ( + expected_path.is_file() + ), f"Expected file not found at {expected_path}" + + meshes_path = atlas_path / "meshes" + assert meshes_path.is_dir(), f"Meshes path {meshes_path} not found" + return True + + +def _assert_close(mesh_coord, annotation_coord, pixel_size, diff_tolerance=10): + """ + Helper function to check if the mesh and the annotation coordinate + are closer to each other than an arbitrary tolerance value times the pixel size. + The default tolerance value is 10. 
+ """ + assert abs(mesh_coord - annotation_coord) <= diff_tolerance * pixel_size, ( + f"Mesh coordinate {mesh_coord} and annotation coordinate {annotation_coord}", + f"differ by more than {diff_tolerance} times pixel size {pixel_size}", + ) + return True + + +def validate_mesh_matches_image_extents(atlas: BrainGlobeAtlas): + """Checks if the mesh and the image extents are similar""" + + root_mesh = atlas.mesh_from_structure("root") + annotation_image = atlas.annotation + resolution = atlas.resolution + + # minimum and maximum values of the annotation image (z, y, x) + z_range, y_range, x_range = np.nonzero(annotation_image) + z_min, z_max = np.min(z_range), np.max(z_range) + y_min, y_max = np.min(y_range), np.max(y_range) + x_min, x_max = np.min(x_range), np.max(x_range) + + # minimum and maximum values of the annotation image scaled by the atlas resolution + z_min_scaled, z_max_scaled = z_min * resolution[0], z_max * resolution[0] + y_min_scaled, y_max_scaled = y_min * resolution[1], y_max * resolution[1] + x_min_scaled, x_max_scaled = x_min * resolution[2], x_max * resolution[2] + + # z, y and x coordinates of the root mesh (extent of the whole object) + mesh_points = root_mesh.points + z_coords, y_coords, x_coords = ( + mesh_points[:, 0], + mesh_points[:, 1], + mesh_points[:, 2], + ) + + # minimum and maximum coordinates of the root mesh + z_min_mesh, z_max_mesh = np.min(z_coords), np.max(z_coords) + y_min_mesh, y_max_mesh = np.min(y_coords), np.max(y_coords) + x_min_mesh, x_max_mesh = np.min(x_coords), np.max(x_coords) + + # checking if root mesh and image are on the same scale + _assert_close(z_min_mesh, z_min_scaled, resolution[0]) + _assert_close(z_max_mesh, z_max_scaled, resolution[0]) + _assert_close(y_min_mesh, y_min_scaled, resolution[1]) + _assert_close(y_max_mesh, y_max_scaled, resolution[1]) + _assert_close(x_min_mesh, x_min_scaled, resolution[2]) + _assert_close(x_max_mesh, x_max_scaled, resolution[2]) + + return True + + +def 
open_for_visual_check(): + # implement visual checks later + pass + + +def validate_checksum(): + # implement later + pass + + +def check_additional_references(): + # check additional references are different, but have same dimensions + pass + + +def validate_atlas(atlas_name, version, all_validation_functions): + """Validates the latest version of a given atlas""" + + print(atlas_name, version) + BrainGlobeAtlas(atlas_name) + updated = get_atlases_lastversions()[atlas_name]["updated"] + if not updated: + update_atlas(atlas_name) + + validation_function_parameters = [ + # validate_atlas_files(atlas_path: Path) + (Path(get_brainglobe_dir() / f"{atlas_name}_v{version}"),), + # validate_mesh_matches_image_extents(atlas: BrainGlobeAtlas) + (BrainGlobeAtlas(atlas_name),), + # open_for_visual_check() + (), + # validate_checksum() + (), + # check_additional_references() + (), + ] + + # list to store the errors of the failed validations + failed_validations = [] + successful_validations = [] + + for i, validation_function in enumerate(all_validation_functions): + try: + validation_function(*validation_function_parameters[i]) + successful_validations.append((atlas_name, validation_function)) + except AssertionError as error: + failed_validations.append((atlas_name, validation_function, error)) + + return successful_validations, failed_validations + + +if __name__ == "__main__": + # list to store the validation functions + all_validation_functions = [ + validate_atlas_files, + validate_mesh_matches_image_extents, + open_for_visual_check, + validate_checksum, + check_additional_references, + ] + + valid_atlases = [] + invalid_atlases = [] + for atlas_name, version in get_all_atlases_lastversions().items(): + successful_validations, failed_validations = validate_atlas( + atlas_name, version, all_validation_functions + ) + for item in successful_validations: + valid_atlases.append(item) + for item in failed_validations: + invalid_atlases.append(item) + + print("Summary") + 
print("### Valid atlases ###") + print(valid_atlases) + print("### Invalid atlases ###") + print(invalid_atlases) diff --git a/pyproject.toml b/pyproject.toml index 798c8f29..20d92590 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,19 @@ allenmouse = [ "allensdk", ] +dev = [ + "pytest", + "pytest-cov", + "pytest-mock", + "coverage", + "tox", + "black", + "mypy", + "pre-commit", + "ruff", + "setuptools_scm", +] + [build-system] requires = [ "setuptools>=45", @@ -59,12 +72,6 @@ include-package-data = true [tool.setuptools.packages.find] include = ["bg_atlasgen*"] -[tool.pytest.ini_options] -addopts = "--cov=bg_atlasgen" -filterwarnings = [ - "error", -] - [tool.black] target-version = ['py38', 'py39', 'py310', 'py311'] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_unit/test_validation.py b/tests/test_unit/test_validation.py new file mode 100644 index 00000000..ce4cb8b4 --- /dev/null +++ b/tests/test_unit/test_validation.py @@ -0,0 +1,54 @@ +from pathlib import Path + +import numpy as np +import pytest +from bg_atlasapi import BrainGlobeAtlas +from bg_atlasapi.config import get_brainglobe_dir + +from bg_atlasgen.validate_atlases import ( + _assert_close, + validate_atlas_files, + validate_mesh_matches_image_extents, +) + + +def test_validate_mesh_matches_image_extents(): + atlas = BrainGlobeAtlas("allen_mouse_100um") + assert validate_mesh_matches_image_extents(atlas) + + +def test_validate_mesh_matches_image_extents_negative(mocker): + atlas = BrainGlobeAtlas("allen_mouse_100um") + flipped_annotation_image = np.transpose(atlas.annotation) + mocker.patch( + "bg_atlasapi.BrainGlobeAtlas.annotation", + new_callable=mocker.PropertyMock, + return_value=flipped_annotation_image, + ) + with pytest.raises( + AssertionError, match="differ by more than 10 times pixel size" + ): + validate_mesh_matches_image_extents(atlas) + + +def test_valid_atlas_files(): + _ = 
BrainGlobeAtlas("allen_mouse_100um") + atlas_path = Path(get_brainglobe_dir()) / "allen_mouse_100um_v1.2" + assert validate_atlas_files(atlas_path) + + +def test_invalid_atlas_path(): + atlas_path = Path.home() + with pytest.raises(AssertionError, match="Expected file not found"): + validate_atlas_files(atlas_path) + + +def test_assert_close(): + assert _assert_close(99.5, 8, 10) + + +def test_assert_close_negative(): + with pytest.raises( + AssertionError, match="differ by more than 10 times pixel size" + ): + _assert_close(99.5, 30, 2) diff --git a/tox.ini b/tox.ini index e8310ab6..a53937f4 100644 --- a/tox.ini +++ b/tox.ini @@ -1,9 +1,8 @@ [tox] -envlist = py{38,39,310,311} +envlist = py{39,310,311} [gh-actions] python = - 3.8: py38 3.9: py39 3.10: py310 3.11: py311 @@ -12,4 +11,4 @@ python = extras = dev commands = - python -c "import bg_atlasgen" + pytest -v --color=yes --cov=bg_atlasgen --cov-report=xml From a4acdb4df79c0f74efa03107b74b7065ce908bd1 Mon Sep 17 00:00:00 2001 From: viktorpm <50667179+viktorpm@users.noreply.github.com> Date: Tue, 23 Jan 2024 15:35:31 +0000 Subject: [PATCH 083/103] Structure validation (#110) * draft validation functions * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * run on all atlases, don't crash on assertion error * fixing atlas path * Clearer output printing * tidy up validation script, remove weird test_git * add dev install, make test structure, initial tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * add tests and return for _assert_close() * add test for validate mesh matches annotation * fix linting * update version for actions * drop py3.8 in tox, run pytest in tox * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * fix copy-paste error in pytest command * drop py3.8 from gh action workflow file too * Adding docstrings to validation script * wip: 
draft structure validation function * Making path tests stricter, breaking up long strings, adding diff_tolerance argument to _assert_close function * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * restructuring validate_mesh_matches_image_extents function, adding comments * testing expected files and meshes directory separately * looping through validation functions and parameters to catch individual errors * removing hard coded path, generalising to all atlases * adding successful_validations list * tidying up duplications * fix recursive bug * checkout finished validate_atlases.py from validation branch * adding validate_mesh_structure_pairs function * Update bg_atlasgen/validate_atlases.py Co-authored-by: Alessandro Felder * adding assertion to validate_mesh_structure_pairs function * checking IDs via bg_atlasapi, checking if IDs have mesh files and accesible through the atlas * Update bg_atlasgen/validate_atlases.py Co-authored-by: Alessandro Felder * passing atlas_name to validate_mesh_structure_pairs function * addressing Niko's final comments, cleaning code * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Alessandro Felder Co-authored-by: alessandrofelder --- bg_atlasgen/validate_atlases.py | 37 +++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index f886af4c..ca92403c 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -1,6 +1,7 @@ """Script to validate atlases""" +import os from pathlib import Path import numpy as np @@ -104,6 +105,36 @@ def check_additional_references(): pass +def validate_mesh_structure_pairs(atlas_name: str, atlas_path: Path): + # json_path = Path(atlas_path / "structures.json") + atlas 
= BrainGlobeAtlas(atlas_name) + + obj_path = Path(atlas_path / "meshes") + + ids_from_bg_atlas_api = list(atlas.structures.keys()) + ids_from_mesh_files = [ + int(Path(file).stem) + for file in os.listdir(obj_path) + if file.endswith(".obj") + ] + + in_mesh_not_bg = [] + for id in ids_from_mesh_files: + if id not in ids_from_bg_atlas_api: + in_mesh_not_bg.append(id) + + in_bg_not_mesh = [] + for id in ids_from_bg_atlas_api: + if id not in ids_from_mesh_files: + in_bg_not_mesh.append(id) + + if len(in_mesh_not_bg) or len(in_bg_not_mesh): + raise AssertionError( + f"Structures with ID {in_bg_not_mesh} are in the atlas, but don't have a corresponding mesh file; " + f"Structures with IDs {in_mesh_not_bg} have a mesh file, but are not accessible through the atlas." + ) + + def validate_atlas(atlas_name, version, all_validation_functions): """Validates the latest version of a given atlas""" @@ -124,6 +155,11 @@ def validate_atlas(atlas_name, version, all_validation_functions): (), # check_additional_references() (), + # validate_mesh_structure_pairs(atlas_name: str, atlas_path: Path): + ( + atlas_name, + Path(get_brainglobe_dir() / f"{atlas_name}_v{version}"), + ), ] # list to store the errors of the failed validations @@ -148,6 +184,7 @@ def validate_atlas(atlas_name, version, all_validation_functions): open_for_visual_check, validate_checksum, check_additional_references, + validate_mesh_structure_pairs, ] valid_atlases = [] From 92597961c2927ae3052aae743dcb6e3428d9b6c3 Mon Sep 17 00:00:00 2001 From: Alessandro Felder Date: Mon, 29 Jan 2024 13:55:06 +0000 Subject: [PATCH 084/103] simplify validation function (#113) * simplify validation function - all validation functions just take an atlas object - means we don't need a parameters list - also simplify output list to be dicts of lists mapping the atlas name to values - another advantage of this is that it simplifies mocking of atlas functions in the tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more 
information, see https://pre-commit.ci * Fix bug found in code review --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- bg_atlasgen/validate_atlases.py | 56 ++++++++++++------------------ tests/test_unit/test_validation.py | 41 +++++++++++++++------- 2 files changed, 52 insertions(+), 45 deletions(-) diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index ca92403c..503883f6 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -10,13 +10,18 @@ from bg_atlasapi.list_atlases import ( get_all_atlases_lastversions, get_atlases_lastversions, + get_local_atlas_version, ) from bg_atlasapi.update_atlases import update_atlas -def validate_atlas_files(atlas_path: Path): +def validate_atlas_files(atlas: BrainGlobeAtlas): """Checks if basic files exist in the atlas folder""" + atlas_path = ( + Path(get_brainglobe_dir()) + / f"{atlas.atlas_name}_v{get_local_atlas_version(atlas.atlas_name)}" + ) assert atlas_path.is_dir(), f"Atlas path {atlas_path} not found" expected_files = [ "annotation.tiff", @@ -90,28 +95,31 @@ def validate_mesh_matches_image_extents(atlas: BrainGlobeAtlas): return True -def open_for_visual_check(): +def open_for_visual_check(atlas: BrainGlobeAtlas): # implement visual checks later pass -def validate_checksum(): +def validate_checksum(atlas: BrainGlobeAtlas): # implement later pass -def check_additional_references(): +def check_additional_references(atlas: BrainGlobeAtlas): # check additional references are different, but have same dimensions pass -def validate_mesh_structure_pairs(atlas_name: str, atlas_path: Path): - # json_path = Path(atlas_path / "structures.json") - atlas = BrainGlobeAtlas(atlas_name) +def validate_mesh_structure_pairs(atlas: BrainGlobeAtlas): + """Ensure mesh files (.obj) exist for each expected structure in the atlas.""" + ids_from_bg_atlas_api = list(atlas.structures.keys()) + atlas_path = ( + Path(get_brainglobe_dir()) 
+ / f"{atlas.atlas_name}_v{get_local_atlas_version(atlas.atlas_name)}" + ) obj_path = Path(atlas_path / "meshes") - ids_from_bg_atlas_api = list(atlas.structures.keys()) ids_from_mesh_files = [ int(Path(file).stem) for file in os.listdir(obj_path) @@ -135,7 +143,7 @@ def validate_mesh_structure_pairs(atlas_name: str, atlas_path: Path): ) -def validate_atlas(atlas_name, version, all_validation_functions): +def validate_atlas(atlas_name, version, validation_functions): """Validates the latest version of a given atlas""" print(atlas_name, version) @@ -144,34 +152,16 @@ def validate_atlas(atlas_name, version, all_validation_functions): if not updated: update_atlas(atlas_name) - validation_function_parameters = [ - # validate_atlas_files(atlas_path: Path) - (Path(get_brainglobe_dir() / f"{atlas_name}_v{version}"),), - # validate_mesh_matches_image_extents(atlas: BrainGlobeAtlas) - (BrainGlobeAtlas(atlas_name),), - # open_for_visual_check() - (), - # validate_checksum() - (), - # check_additional_references() - (), - # validate_mesh_structure_pairs(atlas_name: str, atlas_path: Path): - ( - atlas_name, - Path(get_brainglobe_dir() / f"{atlas_name}_v{version}"), - ), - ] - # list to store the errors of the failed validations - failed_validations = [] - successful_validations = [] + failed_validations = {atlas_name: []} + successful_validations = {atlas_name: []} - for i, validation_function in enumerate(all_validation_functions): + for i, validation_function in enumerate(validation_functions): try: - validation_function(*validation_function_parameters[i]) - successful_validations.append((atlas_name, validation_function)) + validation_function(BrainGlobeAtlas(atlas_name)) + successful_validations[atlas_name].append(validation_function) except AssertionError as error: - failed_validations.append((atlas_name, validation_function, error)) + failed_validations[atlas_name].append((validation_function, error)) return successful_validations, failed_validations diff --git 
a/tests/test_unit/test_validation.py b/tests/test_unit/test_validation.py index ce4cb8b4..5fcf9f09 100644 --- a/tests/test_unit/test_validation.py +++ b/tests/test_unit/test_validation.py @@ -1,4 +1,4 @@ -from pathlib import Path +import os import numpy as np import pytest @@ -12,13 +12,33 @@ ) -def test_validate_mesh_matches_image_extents(): - atlas = BrainGlobeAtlas("allen_mouse_100um") +@pytest.fixture +def atlas(): + """A fixture providing a low-res Allen Mouse atlas for testing. + Tests assume this atlas is valid""" + return BrainGlobeAtlas("allen_mouse_100um") + + +@pytest.fixture +def atlas_with_bad_reference_file(): + """A fixture providing an invalid version of Allen Mouse atlas for testing. + The atlas will have a misnamed template file that won't be found by the API + This fixture also does the clean-up after the test has run + """ + good_name = get_brainglobe_dir() / "allen_mouse_100um_v1.2/reference.tiff" + bad_name = ( + get_brainglobe_dir() / "allen_mouse_100um_v1.2/reference_bad.tiff" + ) + os.rename(good_name, bad_name) + yield BrainGlobeAtlas("allen_mouse_100um") + os.rename(bad_name, good_name) + + +def test_validate_mesh_matches_image_extents(atlas): assert validate_mesh_matches_image_extents(atlas) -def test_validate_mesh_matches_image_extents_negative(mocker): - atlas = BrainGlobeAtlas("allen_mouse_100um") +def test_validate_mesh_matches_image_extents_negative(mocker, atlas): flipped_annotation_image = np.transpose(atlas.annotation) mocker.patch( "bg_atlasapi.BrainGlobeAtlas.annotation", @@ -31,16 +51,13 @@ def test_validate_mesh_matches_image_extents_negative(mocker): validate_mesh_matches_image_extents(atlas) -def test_valid_atlas_files(): - _ = BrainGlobeAtlas("allen_mouse_100um") - atlas_path = Path(get_brainglobe_dir()) / "allen_mouse_100um_v1.2" - assert validate_atlas_files(atlas_path) +def test_valid_atlas_files(atlas): + assert validate_atlas_files(atlas) -def test_invalid_atlas_path(): - atlas_path = Path.home() +def 
test_invalid_atlas_path(atlas_with_bad_reference_file): with pytest.raises(AssertionError, match="Expected file not found"): - validate_atlas_files(atlas_path) + validate_atlas_files(atlas_with_bad_reference_file) def test_assert_close(): From 12252af821b3be4acea5a907fd187f867bb0b828 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 08:28:06 +0000 Subject: [PATCH 085/103] [pre-commit.ci] pre-commit autoupdate (#117) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [pre-commit.ci] pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.9 → v0.2.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.9...v0.2.0) - [github.com/psf/black: 23.12.1 → 24.1.1](https://github.com/psf/black/compare/23.12.1...24.1.1) * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- bg_atlasgen/atlas_scripts/allen_mouse.py | 2 +- bg_atlasgen/atlas_scripts/example_mouse.py | 2 +- bg_atlasgen/atlas_scripts/kim_mouse.py | 2 +- bg_atlasgen/atlas_scripts/osten_mouse.py | 2 +- bg_atlasgen/metadata_utils.py | 1 + bg_atlasgen/validate_atlases.py | 1 - bg_atlasgen/volume_utils.py | 1 + 8 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2dac3f50..d7733484 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,12 +15,12 @@ repos: - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.9 + rev: v0.2.0 hooks: - id: ruff args: [ --config=pyproject.toml ] - repo: https://github.com/psf/black - rev: 23.12.1 + rev: 24.1.1 hooks: - id: black args: [--config=pyproject.toml] diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py 
b/bg_atlasgen/atlas_scripts/allen_mouse.py index 1de0f46c..8d903bbe 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/bg_atlasgen/atlas_scripts/allen_mouse.py @@ -30,7 +30,7 @@ def create_atlas(working_dir, resolution): manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=resolution, - reference_space_key="annotation/ccf_2017" + reference_space_key="annotation/ccf_2017", # use the latest version of the CCF ) diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/bg_atlasgen/atlas_scripts/example_mouse.py index 5f757a0d..57fb25dc 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/bg_atlasgen/atlas_scripts/example_mouse.py @@ -30,7 +30,7 @@ def create_atlas(working_dir, resolution): manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=RES_UM, - reference_space_key="annotation/ccf_2017" + reference_space_key="annotation/ccf_2017", # use the latest version of the CCF ) diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/bg_atlasgen/atlas_scripts/kim_mouse.py index 63e78bbf..569a1039 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/bg_atlasgen/atlas_scripts/kim_mouse.py @@ -72,7 +72,7 @@ def create_atlas(working_dir, resolution): manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=resolution, - reference_space_key="annotation/ccf_2017" + reference_space_key="annotation/ccf_2017", # use the latest version of the CCF ) diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/bg_atlasgen/atlas_scripts/osten_mouse.py index bd9e97aa..1079fd72 100644 --- a/bg_atlasgen/atlas_scripts/osten_mouse.py +++ b/bg_atlasgen/atlas_scripts/osten_mouse.py @@ -69,7 +69,7 @@ def create_atlas(working_dir, resolution): manifest=download_dir_path / "manifest.json", # downloaded files are stored relative to here resolution=resolution, - reference_space_key="annotation/ccf_2017" + 
reference_space_key="annotation/ccf_2017", # use the latest version of the CCF ) diff --git a/bg_atlasgen/metadata_utils.py b/bg_atlasgen/metadata_utils.py index f6c985fd..57b125ca 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/bg_atlasgen/metadata_utils.py @@ -3,6 +3,7 @@ . structures.csv . README.txt """ + import json import re from datetime import datetime diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index 503883f6..d993c5ca 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -1,6 +1,5 @@ """Script to validate atlases""" - import os from pathlib import Path diff --git a/bg_atlasgen/volume_utils.py b/bg_atlasgen/volume_utils.py index d2e2234c..a00c04e6 100644 --- a/bg_atlasgen/volume_utils.py +++ b/bg_atlasgen/volume_utils.py @@ -2,6 +2,7 @@ Code useful for dealing with volumetric data (e.g. allen annotation volume for the mouse atlas) extracting surfaces from volumetric data .... """ + try: from vedo import Volume except ModuleNotFoundError: From ae19fada3c66c7aa60bf05d7f3802b2c6e82f3fa Mon Sep 17 00:00:00 2001 From: Will Graham <32364977+willGraham01@users.noreply.github.com> Date: Tue, 6 Feb 2024 14:15:11 +0000 Subject: [PATCH 086/103] Add manifest to ci (#118) * Add manifest to CI * Standardise tox.ini into pyproject --- .github/workflows/test_and_deploy.yml | 15 +++++-- MANIFEST.in | 8 ++++ pyproject.toml | 65 ++++++++++++++++----------- tox.ini | 14 ------ 4 files changed, 59 insertions(+), 43 deletions(-) create mode 100644 MANIFEST.in delete mode 100644 tox.ini diff --git a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml index 7831c081..774413fc 100644 --- a/.github/workflows/test_and_deploy.yml +++ b/.github/workflows/test_and_deploy.yml @@ -2,17 +2,26 @@ name: tests on: push: - branches: [ main ] + branches: + - "main" pull_request: + workflow_dispatch: jobs: - lint: + linting: + name: Check Linting runs-on: ubuntu-latest steps: - uses: 
neuroinformatics-unit/actions/lint@v2 + manifest: + name: Check Manifest + runs-on: ubuntu-latest + steps: + - uses: neuroinformatics-unit/actions/check_manifest@v2 + test: - needs: lint + needs: [linting, manifest] name: ${{ matrix.os }} py${{ matrix.python-version }} runs-on: ${{ matrix.os }} strategy: diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..c65627f1 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,8 @@ +exclude .pre-commit-config.yaml + +include LICENSE +include README.md + +graft bg_atlasgen *.py + +prune tests diff --git a/pyproject.toml b/pyproject.toml index 20d92590..68221d21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,9 +2,9 @@ name = "bg-atlasgen" description = "Scripts generation atlases and utilities for BrainGlobe" readme = "README.md" -license = {file = "LICENSE"} +license = { file = "LICENSE" } authors = [ - {name = "Luigi Petrucco, Federico Claudi, Adam Tyson", email = "code@adamltyson.com"}, + { name = "Luigi Petrucco, Federico Claudi, Adam Tyson", email = "code@adamltyson.com" }, ] classifiers = [ "Development Status :: 3 - Alpha", @@ -41,29 +41,23 @@ dynamic = ["version"] Homepage = "https://github.com/brainglobe/bg-atlasgen" [project.optional-dependencies] -allenmouse = [ - "allensdk", -] +allenmouse = ["allensdk"] dev = [ - "pytest", - "pytest-cov", - "pytest-mock", - "coverage", - "tox", - "black", - "mypy", - "pre-commit", - "ruff", - "setuptools_scm", + "pytest", + "pytest-cov", + "pytest-mock", + "coverage", + "tox", + "black", + "mypy", + "pre-commit", + "ruff", + "setuptools_scm", ] [build-system] -requires = [ - "setuptools>=45", - "wheel", - "setuptools_scm[toml]>=6.2", -] +requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -82,17 +76,36 @@ line-length = 79 [tool.check-manifest] ignore = [ - "*.yaml", - "tox.ini", - "tests/*", - "tests/test_unit/*", - "tests/test_integration/*", + "*.yaml", + "tox.ini", + "tests/*", + 
"tests/test_unit/*", + "tests/test_integration/*", ] [tool.ruff] line-length = 79 -exclude = ["__init__.py","build",".eggs"] +exclude = ["__init__.py", "build", ".eggs"] select = ["I", "E", "F"] # E501 Line too long ignore = ["E501"] fix = true + +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = py{39,310,311} + +[gh-actions] +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[testenv] +extras = + dev +commands = + pytest -v --color=yes --cov=bg_atlasgen --cov-report=xml + +""" diff --git a/tox.ini b/tox.ini deleted file mode 100644 index a53937f4..00000000 --- a/tox.ini +++ /dev/null @@ -1,14 +0,0 @@ -[tox] -envlist = py{39,310,311} - -[gh-actions] -python = - 3.9: py39 - 3.10: py310 - 3.11: py311 - -[testenv] -extras = - dev -commands = - pytest -v --color=yes --cov=bg_atlasgen --cov-report=xml From 03392f34408f11c14f72c9d5c4fedf93aa9f1ddf Mon Sep 17 00:00:00 2001 From: viktorpm <50667179+viktorpm@users.noreply.github.com> Date: Mon, 12 Feb 2024 10:13:36 +0000 Subject: [PATCH 087/103] printing data frames with validation function information (#115) * first test functions for validate_mesh_structure_pairs * storing atlases and successful/failed validation functions in a data frame * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * restoring test_validation.py to the original merged version. 
Chages are implemented on another branch * validate_atlases.py: going back to the version on main, appending only the name of the successful and failed functions (not the function object) to lists in validate_atlases function * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * populating dictionaries in for loop, writing JSON files * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * saving JSON files to ~/.brainglobe/atlases/validation * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * printing where to find the result files * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update bg_atlasgen/validate_atlases.py Co-authored-by: Alessandro Felder * Update bg_atlasgen/validate_atlases.py removing unused variables Co-authored-by: Alessandro Felder * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * saving only one JSON file with all the information * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * uncommenting test functions --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Alessandro Felder --- bg_atlasgen/validate_atlases.py | 51 +++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 18 deletions(-) diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index d993c5ca..56b76180 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -1,5 +1,6 @@ """Script to validate atlases""" +import json import os from pathlib import Path @@ -151,18 +152,20 @@ def validate_atlas(atlas_name, version, validation_functions): if not 
updated: update_atlas(atlas_name) - # list to store the errors of the failed validations - failed_validations = {atlas_name: []} - successful_validations = {atlas_name: []} + validation_results = {atlas_name: []} - for i, validation_function in enumerate(validation_functions): + for i, validation_function in enumerate(all_validation_functions): try: validation_function(BrainGlobeAtlas(atlas_name)) - successful_validations[atlas_name].append(validation_function) + validation_results[atlas_name].append( + (validation_function.__name__, None, str("Pass")) + ) except AssertionError as error: - failed_validations[atlas_name].append((validation_function, error)) + validation_results[atlas_name].append( + (validation_function.__name__, str(error), str("Fail")) + ) - return successful_validations, failed_validations + return validation_results if __name__ == "__main__": @@ -178,17 +181,29 @@ def validate_atlas(atlas_name, version, validation_functions): valid_atlases = [] invalid_atlases = [] + validation_results = {} + for atlas_name, version in get_all_atlases_lastversions().items(): - successful_validations, failed_validations = validate_atlas( + temp_validation_results = validate_atlas( atlas_name, version, all_validation_functions ) - for item in successful_validations: - valid_atlases.append(item) - for item in failed_validations: - invalid_atlases.append(item) - - print("Summary") - print("### Valid atlases ###") - print(valid_atlases) - print("### Invalid atlases ###") - print(invalid_atlases) + validation_results.update(temp_validation_results) + + print("Validation has been completed") + print("Find validation_results.json in ~/.brainglobe/atlases/validation/") + + # Get the directory path + output_dir_path = str(get_brainglobe_dir() / "atlases/validation") + + # Create the directory if it doesn't exist + if not os.path.exists(output_dir_path): + os.makedirs(output_dir_path) + + # Open a file for writing (will overwrite any files from previous runs!) 
+ with open( + str( + get_brainglobe_dir() / "atlases/validation/validation_results.json" + ), + "w", + ) as file: + json.dump(validation_results, file) From 795bdddcfeb643e9f850a39d83536a670e85a9d1 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:28:04 +0000 Subject: [PATCH 088/103] bg-atlasapi -> brainglobe-atlasapi --- CITATION.cff | 4 ++-- README.md | 38 +++++++++++++++++++------------------- bg_atlasapi/__init__.py | 4 ++-- bg_atlasapi/descriptors.py | 2 +- pyproject.toml | 10 +++++----- 5 files changed, 29 insertions(+), 29 deletions(-) diff --git a/CITATION.cff b/CITATION.cff index 52ec8511..2b47b3c0 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -19,9 +19,9 @@ authors: affiliation: 'Sainsbury Wellcome Centre, University College London' orcid: 'https://orcid.org/0000-0003-3225-1130' email: code@adamltyson.com -repository-code: 'https://github.com/brainglobe/bg-atlasapi' +repository-code: 'https://github.com/brainglobe/brainglobe-atlasapi' url: >- - https://brainglobe.info/documentation/bg-atlasapi/index.html + https://brainglobe.info/documentation/brainglobe-atlasapi/index.html abstract: >- A common interface for programmers to download and process brain atlas data from multiple sources. 
diff --git a/README.md b/README.md index 58fe0931..a7f506a9 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,24 @@ -# BG-atlasAPI +# brainglobe-atlasapi -[![Python Version](https://img.shields.io/pypi/pyversions/bg-atlasapi.svg)](https://pypi.org/project/bg-atlasapi) -[![PyPI](https://img.shields.io/pypi/v/bg-atlasapi.svg)](https://pypi.org/project/bg-atlasapi/) -[![Wheel](https://img.shields.io/pypi/wheel/bg-atlasapi.svg)](https://pypi.org/project/bg-atlasapi) +[![Python Version](https://img.shields.io/pypi/pyversions/brainglobe-atlasapi.svg)](https://pypi.org/project/brainglobe-atlasapi) +[![PyPI](https://img.shields.io/pypi/v/brainglobe-atlasapi.svg)](https://pypi.org/project/brainglobe-atlasapi/) +[![Wheel](https://img.shields.io/pypi/wheel/brainglobe-atlasapi.svg)](https://pypi.org/project/brainglobe-atlasapi) [![Development Status](https://img.shields.io/pypi/status/brainatlas-api.svg)](https://github.com/SainsburyWellcomeCentre/brainatlas-api) -[![Downloads](https://pepy.tech/badge/bg-atlasapi)](https://pepy.tech/project/bg-atlasapi) -[![Tests](https://img.shields.io/github/actions/workflow/status/brainglobe/bg-atlasapi/test_and_deploy.yml?branch=main)]( - https://github.com/brainglobe/bg-atlasapi/actions) -[![codecov](https://codecov.io/gh/brainglobe/bg-atlasapi/branch/master/graph/badge.svg?token=WTFPFW0TE4)](https://codecov.io/gh/brainglobe/bg-atlasapi) +[![Downloads](https://pepy.tech/badge/brainglobe-atlasapi)](https://pepy.tech/project/brainglobe-atlasapi) +[![Tests](https://img.shields.io/github/actions/workflow/status/brainglobe/brainglobe-atlasapi/test_and_deploy.yml?branch=main)]( + https://github.com/brainglobe/brainglobe-atlasapi/actions) +[![codecov](https://codecov.io/gh/brainglobe/brainglobe-atlasapi/branch/master/graph/badge.svg?token=WTFPFW0TE4)](https://codecov.io/gh/brainglobe/brainglobe-atlasapi) [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black) [![Imports: 
isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) [![DOI](https://joss.theoj.org/papers/10.21105/joss.02668/status.svg)](https://doi.org/10.21105/joss.02668) [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) [![Contributions](https://img.shields.io/badge/Contributions-Welcome-brightgreen.svg)](https://docs.brainglobe.info/cellfinder/contributing) -[![Website](https://img.shields.io/website?up_message=online&url=https%3A%2F%2Fbrainglobe.info)](https://brainglobe.info/documentation/bg-atlasapi/index.html) +[![Website](https://img.shields.io/website?up_message=online&url=https%3A%2F%2Fbrainglobe.info)](https://brainglobe.info/documentation/brainglobe-atlasapi/index.html) [![Twitter](https://img.shields.io/twitter/follow/brain_globe?style=social)](https://twitter.com/brain_globe) -The brainglobe atlas API (BG-AtlasAPI) provides a common interface for programmers to download and process brain atlas data from multiple sources. +The brainglobe atlas API (brainglobe-atlasapi) provides a common interface for programmers to download and process brain atlas data from multiple sources. 
## Atlases available @@ -37,13 +37,13 @@ A number of atlases are in development, but those available currently are: * [Kim Lab Developmental CCF (P56)](https://data.mendeley.com/datasets/2svx788ddf/1) at 10 micron resolution with 8 reference images - STP, LSFM (iDISCO) and MRI (a0, adc, dwo, fa, MTR, T2) ## Installation -BG-AtlasAPI works with Python >3.6, and can be installed from PyPI with: +brainglobe-atlasapi works with Python >3.6, and can be installed from PyPI with: ```bash -pip install bg-atlasapi +pip install brainglobe-atlasapi ``` ## Usage -Full information can be found in the [documentation](https://brainglobe.info/documentation/bg-atlasapi/index.html) +Full information can be found in the [documentation](https://brainglobe.info/documentation/brainglobe-atlasapi/index.html) ### Python API **List of atlases** @@ -126,16 +126,16 @@ pprint(VISp) # 'structure_id_path': [997, 8, 567, 688, 695, 315, 669, 385]} ``` -### Note on coordinates in `bg-atlasapi` -Working with both image coordinates and cartesian coordinates in the same space can be confusing! In `bg-atlasapi`, the origin is always assumed to be in the upper left corner of the image (sectioning along the first dimension), the "ij" convention. This means that when plotting meshes and points using cartesian systems, you might encounter confusing behaviors coming from the fact that in cartesian plots one axis is inverted with respect to ij coordinates (vertical axis increases going up, image row indexes increase going down). To make things as consistent as possible, in `bg-atlasapi` the 0 of the meshes coordinates is assumed to coincide with the 0 index of the images stack, and meshes coordinates increase following the direction stack indexes increase. -To deal with transformations between your data space and `bg-atlasapi`, you might find the [brainglobe-space](https://github.com/brainglobe/brainglobe-space) package helpful. 
+### Note on coordinates in `brainglobe-atlasapi` +Working with both image coordinates and cartesian coordinates in the same space can be confusing! In `brainglobe-atlasapi`, the origin is always assumed to be in the upper left corner of the image (sectioning along the first dimension), the "ij" convention. This means that when plotting meshes and points using cartesian systems, you might encounter confusing behaviors coming from the fact that in cartesian plots one axis is inverted with respect to ij coordinates (vertical axis increases going up, image row indexes increase going down). To make things as consistent as possible, in `brainglobe-atlasapi` the 0 of the meshes coordinates is assumed to coincide with the 0 index of the images stack, and meshes coordinates increase following the direction stack indexes increase. +To deal with transformations between your data space and `brainglobe-atlasapi`, you might find the [brainglobe-space](https://github.com/brainglobe/brainglobe-space) package helpful. -# Contributing to bg-atlasapi +# Contributing to brainglobe-atlasapi **Contributors to bg-atlaspi are absolutely encouraged**, whether you want to fix bugs, add/request new features or simply ask questions. -If you would like to contribute to `bg-atlasapi` (or any of the downstream tools like [brainrender](https://github.com/brainglobe/brainrender) etc.) please get in touch by opening a new issue or pull request on [GitHub](https://github.com/brainglobe/bg-atlasapi). Please also see the [developers guide](https://brainglobe.info/developers/index.html). +If you would like to contribute to `brainglobe-atlasapi` (or any of the downstream tools like [brainrender](https://github.com/brainglobe/brainrender) etc.) please get in touch by opening a new issue or pull request on [GitHub](https://github.com/brainglobe/brainglobe-atlasapi). Please also see the [developers guide](https://brainglobe.info/developers/index.html). 
-Someone might have already asked a question you might have, so if you're not sure where to start, check out the [issues](https://github.com/brainglobe/bg-atlasapi/issues) (and the issues of the other repositories). +Someone might have already asked a question you might have, so if you're not sure where to start, check out the [issues](https://github.com/brainglobe/brainglobe-atlasapi/issues) (and the issues of the other repositories). ## Citation If you find the BrainGlobe Atlas API useful, please cite the paper in your work: diff --git a/bg_atlasapi/__init__.py b/bg_atlasapi/__init__.py index 30bf062a..0facf5d3 100644 --- a/bg_atlasapi/__init__.py +++ b/bg_atlasapi/__init__.py @@ -1,8 +1,8 @@ from importlib.metadata import PackageNotFoundError, metadata try: - __version__ = metadata("bg-atlasapi")["Version"] - __author__ = metadata("bg-atlasapi")["Author"] + __version__ = metadata("brainglobe-atlasapi")["Version"] + __author__ = metadata("brainglobe-atlasapi")["Author"] del metadata except PackageNotFoundError: # package is not installed diff --git a/bg_atlasapi/descriptors.py b/bg_atlasapi/descriptors.py index c3fdd64c..06def75d 100644 --- a/bg_atlasapi/descriptors.py +++ b/bg_atlasapi/descriptors.py @@ -3,7 +3,7 @@ # Base url of the gin repository: remote_url_base = "https://gin.g-node.org/brainglobe/atlases/raw/master/{}" -# Major version of atlases used by current bg-atlasapi release: +# Major version of atlases used by current brainglobe-atlasapi release: ATLAS_MAJOR_V = 0 # Entries and types from this template will be used to check atlas info diff --git a/pyproject.toml b/pyproject.toml index 3d91ac3a..3f3e97e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "bg-atlasapi" +name = "brainglobe-atlasapi" description = "A lightweight python module to interact with atlases for systems neuroscience" readme = "README.md" license = { file = "LICENSE" } @@ -32,10 +32,10 @@ dependencies = [ dynamic = ["version"] [project.urls] 
-"Homepage" = "https://brainglobe.info/documentation/bg-atlasapi/index.html" -"Source Code" = "https://github.com/brainglobe/bg-atlasapi" -"Bug Tracker" = "https://github.com/brainglobe/bg-atlasapi/issues" -"Documentation" = "https://brainglobe.info/documentation/bg-atlasapi/index.html" +"Homepage" = "https://brainglobe.info/documentation/brainglobe-atlasapi/index.html" +"Source Code" = "https://github.com/brainglobe/brainglobe-atlasapi" +"Bug Tracker" = "https://github.com/brainglobe/brainglobe-atlasapi/issues" +"Documentation" = "https://brainglobe.info/documentation/brainglobe-atlasapi/index.html" [project.optional-dependencies] dev = ["check-manifest", "pre-commit", "pytest", "pytest-cov"] From 120c3b2c183060d698105a2820d33447a5eced04 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:29:11 +0000 Subject: [PATCH 089/103] bg_atlasapi -> brainglobe_atlasapi, package restructure --- .gitignore | 2 +- README.md | 6 ++--- .../__init__.py | 4 +-- .../bg_atlas.py | 8 +++--- {bg_atlasapi => brainglobe_atlasapi}/cli.py | 6 ++--- .../config.py | 0 {bg_atlasapi => brainglobe_atlasapi}/core.py | 6 ++--- .../descriptors.py | 0 .../list_atlases.py | 2 +- .../structure_class.py | 2 +- .../structure_tree_util.py | 0 .../update_atlases.py | 27 +++++++++++-------- {bg_atlasapi => brainglobe_atlasapi}/utils.py | 0 pyproject.toml | 8 +++--- tests/conftest.py | 2 +- tests/test_bg_atlas.py | 2 +- tests/test_cli.py | 2 +- tests/test_config_file.py | 4 +-- tests/test_core_atlas.py | 2 +- tests/test_list_atlases.py | 2 +- tests/test_structure_dict.py | 6 ++--- tests/test_update_atlas.py | 2 +- tests/test_utils.py | 2 +- 23 files changed, 50 insertions(+), 45 deletions(-) rename {bg_atlasapi => brainglobe_atlasapi}/__init__.py (70%) rename {bg_atlasapi => brainglobe_atlasapi}/bg_atlas.py (96%) rename {bg_atlasapi => brainglobe_atlasapi}/cli.py (91%) rename {bg_atlasapi => brainglobe_atlasapi}/config.py (100%) rename {bg_atlasapi => 
brainglobe_atlasapi}/core.py (98%) rename {bg_atlasapi => brainglobe_atlasapi}/descriptors.py (100%) rename {bg_atlasapi => brainglobe_atlasapi}/list_atlases.py (98%) rename {bg_atlasapi => brainglobe_atlasapi}/structure_class.py (96%) rename {bg_atlasapi => brainglobe_atlasapi}/structure_tree_util.py (100%) rename {bg_atlasapi => brainglobe_atlasapi}/update_atlases.py (72%) rename {bg_atlasapi => brainglobe_atlasapi}/utils.py (100%) diff --git a/.gitignore b/.gitignore index 0ca94130..cddbb655 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -bg_atlasapi/bg_config.conf +brainglobe_atlasapi/bg_config.conf workspace.py .vscode/ diff --git a/README.md b/README.md index a7f506a9..21f69151 100644 --- a/README.md +++ b/README.md @@ -47,9 +47,9 @@ Full information can be found in the [documentation](https://brainglobe.info/doc ### Python API **List of atlases** -To see a list of atlases use `bg_atlasapi.show_atlases` +To see a list of atlases use `brainglobe_atlasapi.show_atlases` ```python -from bg_atlasapi import show_atlases +from brainglobe_atlasapi import show_atlases show_atlases() # Brainglobe Atlases # ╭──────────────────────────────────┬────────────┬───────────────┬──────────────╮ @@ -74,7 +74,7 @@ All the features of each atlas can be accessed via the `BrainGlobeAtlas` class. e.g. 
for the 25um Allen Mouse Brain Atlas: ```python -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas atlas = BrainGlobeAtlas("allen_mouse_25um") ``` diff --git a/bg_atlasapi/__init__.py b/brainglobe_atlasapi/__init__.py similarity index 70% rename from bg_atlasapi/__init__.py rename to brainglobe_atlasapi/__init__.py index 0facf5d3..d36b0416 100644 --- a/bg_atlasapi/__init__.py +++ b/brainglobe_atlasapi/__init__.py @@ -9,5 +9,5 @@ pass -from bg_atlasapi.bg_atlas import BrainGlobeAtlas -from bg_atlasapi.list_atlases import show_atlases +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.list_atlases import show_atlases diff --git a/bg_atlasapi/bg_atlas.py b/brainglobe_atlasapi/bg_atlas.py similarity index 96% rename from bg_atlasapi/bg_atlas.py rename to brainglobe_atlasapi/bg_atlas.py index cab2d152..34b80032 100644 --- a/bg_atlasapi/bg_atlas.py +++ b/brainglobe_atlasapi/bg_atlas.py @@ -6,8 +6,8 @@ from rich import print as rprint from rich.console import Console -from bg_atlasapi import config, core, descriptors, utils -from bg_atlasapi.utils import _rich_atlas_metadata +from brainglobe_atlasapi import config, core, descriptors, utils +from brainglobe_atlasapi.utils import _rich_atlas_metadata COMPRESSED_FILENAME = "atlas.tar.gz" @@ -82,7 +82,7 @@ def __init__( raise ValueError(f"{atlas_name} is not a valid atlas name!") rprint( - f"[magenta2]bg_atlasapi: {self.atlas_name} " + f"[magenta2]brainglobe_atlasapi: {self.atlas_name} " "not found locally. Downloading...[magenta2]" ) self.download_extract_file() @@ -188,7 +188,7 @@ def check_latest_version(self): if local != online: rprint( - f"[b][magenta2]bg_atlasapi[/b]: " + f"[b][magenta2]brainglobe_atlasapi[/b]: " f"[b]{self.atlas_name}[/b] version [b]{local}[/b]" f"is not the latest available ([b]{online}[/b]). 
" "To update the atlas run in the terminal:[/magenta2]\n" diff --git a/bg_atlasapi/cli.py b/brainglobe_atlasapi/cli.py similarity index 91% rename from bg_atlasapi/cli.py rename to brainglobe_atlasapi/cli.py index c699789a..e8687809 100644 --- a/bg_atlasapi/cli.py +++ b/brainglobe_atlasapi/cli.py @@ -1,8 +1,8 @@ import click -from bg_atlasapi.config import cli_modify_config -from bg_atlasapi.list_atlases import show_atlases -from bg_atlasapi.update_atlases import install_atlas, update_atlas +from brainglobe_atlasapi.config import cli_modify_config +from brainglobe_atlasapi.list_atlases import show_atlases +from brainglobe_atlasapi.update_atlases import install_atlas, update_atlas @click.command(context_settings={"help_option_names": ["-h", "--help"]}) diff --git a/bg_atlasapi/config.py b/brainglobe_atlasapi/config.py similarity index 100% rename from bg_atlasapi/config.py rename to brainglobe_atlasapi/config.py diff --git a/bg_atlasapi/core.py b/brainglobe_atlasapi/core.py similarity index 98% rename from bg_atlasapi/core.py rename to brainglobe_atlasapi/core.py index c1475b58..a24dd2a8 100644 --- a/bg_atlasapi/core.py +++ b/brainglobe_atlasapi/core.py @@ -6,7 +6,7 @@ import pandas as pd from brainglobe_space import AnatomicalSpace -from bg_atlasapi.descriptors import ( +from brainglobe_atlasapi.descriptors import ( ANNOTATION_FILENAME, HEMISPHERES_FILENAME, MESHES_DIRNAME, @@ -14,8 +14,8 @@ REFERENCE_FILENAME, STRUCTURES_FILENAME, ) -from bg_atlasapi.structure_class import StructuresDict -from bg_atlasapi.utils import read_json, read_tiff +from brainglobe_atlasapi.structure_class import StructuresDict +from brainglobe_atlasapi.utils import read_json, read_tiff class Atlas: diff --git a/bg_atlasapi/descriptors.py b/brainglobe_atlasapi/descriptors.py similarity index 100% rename from bg_atlasapi/descriptors.py rename to brainglobe_atlasapi/descriptors.py diff --git a/bg_atlasapi/list_atlases.py b/brainglobe_atlasapi/list_atlases.py similarity index 98% rename from 
bg_atlasapi/list_atlases.py rename to brainglobe_atlasapi/list_atlases.py index e4ff7ff2..92760d89 100644 --- a/bg_atlasapi/list_atlases.py +++ b/brainglobe_atlasapi/list_atlases.py @@ -6,7 +6,7 @@ from rich.panel import Panel from rich.table import Table -from bg_atlasapi import config, descriptors, utils +from brainglobe_atlasapi import config, descriptors, utils def get_downloaded_atlases(): diff --git a/bg_atlasapi/structure_class.py b/brainglobe_atlasapi/structure_class.py similarity index 96% rename from bg_atlasapi/structure_class.py rename to brainglobe_atlasapi/structure_class.py index 53c4b839..4d09c620 100644 --- a/bg_atlasapi/structure_class.py +++ b/brainglobe_atlasapi/structure_class.py @@ -3,7 +3,7 @@ import meshio as mio -from bg_atlasapi.structure_tree_util import get_structures_tree +from brainglobe_atlasapi.structure_tree_util import get_structures_tree class Structure(UserDict): diff --git a/bg_atlasapi/structure_tree_util.py b/brainglobe_atlasapi/structure_tree_util.py similarity index 100% rename from bg_atlasapi/structure_tree_util.py rename to brainglobe_atlasapi/structure_tree_util.py diff --git a/bg_atlasapi/update_atlases.py b/brainglobe_atlasapi/update_atlases.py similarity index 72% rename from bg_atlasapi/update_atlases.py rename to brainglobe_atlasapi/update_atlases.py index 57ba4f74..afc5d786 100644 --- a/bg_atlasapi/update_atlases.py +++ b/brainglobe_atlasapi/update_atlases.py @@ -2,12 +2,17 @@ from rich import print as rprint -from bg_atlasapi.bg_atlas import BrainGlobeAtlas, _version_str_from_tuple -from bg_atlasapi.list_atlases import get_downloaded_atlases +from brainglobe_atlasapi.bg_atlas import ( +    BrainGlobeAtlas, +    _version_str_from_tuple, +) +from brainglobe_atlasapi.list_atlases import get_downloaded_atlases + + def update_atlas(atlas_name, force=False): -    """Updates a bg_atlasapi atlas from the latest +    """Updates a brainglobe_atlasapi atlas from the latest available version online. 
Arguments: @@ -20,21 +25,21 @@ def update_atlas(atlas_name, force=False): """ atlas = BrainGlobeAtlas(atlas_name=atlas_name) - +brainglobe_atlasapi # Check if we need to update if not force: if atlas.check_latest_version(): rprint( - f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} " + f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} " "is already updated " f"(version: {_version_str_from_tuple(atlas.local_version)})" "[/b]" - ) + )brainglobe_atlasapi return # Delete atlas folder rprint( - f"[b][magenta2]bg_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" + f"[b][magenta2]brainglobe_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" ) fld = atlas.brainglobe_dir / atlas.local_full_name shutil.rmtree(fld) @@ -44,12 +49,12 @@ def update_atlas(atlas_name, force=False): "of the atlas, aborting." ) - # Download again + # Download againbrainglobe_atlasapi atlas.download_extract_file() # Check that everything went well rprint( - f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} updated to version: " + f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} updated to version: " + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]" ) @@ -67,12 +72,12 @@ def install_atlas(atlas_name): # Check input: if not isinstance(atlas_name, str): raise ValueError(f"atlas name should be a string, not {atlas_name}") - +brainglobe_atlasapi # Check if already downloaded: available_atlases = get_downloaded_atlases() if atlas_name in available_atlases: rprint( - f"[b][magenta2]bg_atlasapi: installing {atlas_name}: " + f"[b][magenta2]brainglobe_atlasapi: installing {atlas_name}: " "atlas already installed![/magenta2][/b]" ) return diff --git a/bg_atlasapi/utils.py b/brainglobe_atlasapi/utils.py similarity index 100% rename from bg_atlasapi/utils.py rename to brainglobe_atlasapi/utils.py diff --git a/pyproject.toml b/pyproject.toml index 3f3e97e6..1cbeb3ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,17 +45,17 @@ requires = ["setuptools>=45", "wheel", 
"setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [project.scripts] -brainglobe = "bg_atlasapi.cli:bg_cli" +brainglobe = "brainglobe_atlasapi.cli:bg_cli" [tool.setuptools] include-package-data = true [tool.setuptools.packages.find] -include = ["bg_atlasapi*"] +include = ["brainglobe_atlasapi*"] exclude = ["tests*"] [tool.pytest.ini_options] -addopts = "--cov=bg_atlasapi" +addopts = "--cov=brainglobe_atlasapi" filterwarnings = ["error"] markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"] @@ -96,5 +96,5 @@ python = [testenv] extras = dev -commands = pytest -v --color=yes --cov=bg_atlasapi --cov-report=xml +commands = pytest -v --color=yes --cov=brainglobe_atlasapi --cov-report=xml """ diff --git a/tests/conftest.py b/tests/conftest.py index d7d58161..41f197da 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,7 +4,7 @@ import pytest -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas @pytest.fixture() diff --git a/tests/test_bg_atlas.py b/tests/test_bg_atlas.py index 449ecf96..7d200159 100644 --- a/tests/test_bg_atlas.py +++ b/tests/test_bg_atlas.py @@ -3,7 +3,7 @@ import pytest -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas def test_versions(atlas): diff --git a/tests/test_cli.py b/tests/test_cli.py index 31b037da..462582c6 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,6 +1,6 @@ from click.testing import CliRunner -from bg_atlasapi import cli, config +from brainglobe_atlasapi import cli, config # This testing of the command line application does not really diff --git a/tests/test_config_file.py b/tests/test_config_file.py index 33723006..2e7e202d 100644 --- a/tests/test_config_file.py +++ b/tests/test_config_file.py @@ -5,7 +5,7 @@ import pytest from click.testing import CliRunner -from bg_atlasapi import bg_atlas, cli, config +from brainglobe_atlasapi import bg_atlas, cli, config 
@pytest.fixture() @@ -27,7 +27,7 @@ def test_config_creation(conf_path): # Ugly test zone: here we use the terminal commands, which edit the config -# file in the bg_atlasapi repo from which the tests are being run. +# file in the brainglobe_atlasapi repo from which the tests are being run. # This is not the cleanest way, the alternative would be to run this test in # a new env. @pytest.mark.slow diff --git a/tests/test_core_atlas.py b/tests/test_core_atlas.py index 6c60df83..a8b6cb94 100644 --- a/tests/test_core_atlas.py +++ b/tests/test_core_atlas.py @@ -6,7 +6,7 @@ import pytest import tifffile -from bg_atlasapi.core import AdditionalRefDict +from brainglobe_atlasapi.core import AdditionalRefDict def test_initialization(atlas): diff --git a/tests/test_list_atlases.py b/tests/test_list_atlases.py index 7030b76d..afa7b581 100644 --- a/tests/test_list_atlases.py +++ b/tests/test_list_atlases.py @@ -1,4 +1,4 @@ -from bg_atlasapi.list_atlases import ( +from brainglobe_atlasapi.list_atlases import ( get_atlases_lastversions, get_downloaded_atlases, get_local_atlas_version, diff --git a/tests/test_structure_dict.py b/tests/test_structure_dict.py index cd6c627b..211e87d3 100644 --- a/tests/test_structure_dict.py +++ b/tests/test_structure_dict.py @@ -1,9 +1,9 @@ import meshio as mio import pytest -from bg_atlasapi import descriptors -from bg_atlasapi.structure_class import StructuresDict -from bg_atlasapi.utils import read_json +from brainglobe_atlasapi import descriptors +from brainglobe_atlasapi.structure_class import StructuresDict +from brainglobe_atlasapi.utils import read_json structures_list = [ { diff --git a/tests/test_update_atlas.py b/tests/test_update_atlas.py index 433f6e86..abfc1ee2 100644 --- a/tests/test_update_atlas.py +++ b/tests/test_update_atlas.py @@ -1,6 +1,6 @@ import pytest -from bg_atlasapi import update_atlases +from brainglobe_atlasapi import update_atlases def test_update(): diff --git a/tests/test_utils.py b/tests/test_utils.py index 
1d194872..1d6462d3 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,7 +4,7 @@ import requests from requests import HTTPError -from bg_atlasapi import utils +from brainglobe_atlasapiatlasapiatlasapiatlasapi import utils test_url = "https://gin.g-node.org/BrainGlobe/atlases/raw/master/example_mouse_100um_v1.2.tar.gz" From 639aafc44f69d234678990f67276f2655c7a511a Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:30:12 +0000 Subject: [PATCH 090/103] Move tests for API into separate subfolder --- tests/atlasapi/__init__.py | 0 tests/{ => atlasapi}/conftest.py | 0 tests/{ => atlasapi}/test_bg_atlas.py | 0 tests/{ => atlasapi}/test_cli.py | 0 tests/{ => atlasapi}/test_config_file.py | 0 tests/{ => atlasapi}/test_core_atlas.py | 0 tests/{ => atlasapi}/test_list_atlases.py | 0 tests/{ => atlasapi}/test_structure_dict.py | 0 tests/{ => atlasapi}/test_update_atlas.py | 0 tests/{ => atlasapi}/test_utils.py | 0 10 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/atlasapi/__init__.py rename tests/{ => atlasapi}/conftest.py (100%) rename tests/{ => atlasapi}/test_bg_atlas.py (100%) rename tests/{ => atlasapi}/test_cli.py (100%) rename tests/{ => atlasapi}/test_config_file.py (100%) rename tests/{ => atlasapi}/test_core_atlas.py (100%) rename tests/{ => atlasapi}/test_list_atlases.py (100%) rename tests/{ => atlasapi}/test_structure_dict.py (100%) rename tests/{ => atlasapi}/test_update_atlas.py (100%) rename tests/{ => atlasapi}/test_utils.py (100%) diff --git a/tests/atlasapi/__init__.py b/tests/atlasapi/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/conftest.py b/tests/atlasapi/conftest.py similarity index 100% rename from tests/conftest.py rename to tests/atlasapi/conftest.py diff --git a/tests/test_bg_atlas.py b/tests/atlasapi/test_bg_atlas.py similarity index 100% rename from tests/test_bg_atlas.py rename to tests/atlasapi/test_bg_atlas.py diff --git 
a/tests/test_cli.py b/tests/atlasapi/test_cli.py similarity index 100% rename from tests/test_cli.py rename to tests/atlasapi/test_cli.py diff --git a/tests/test_config_file.py b/tests/atlasapi/test_config_file.py similarity index 100% rename from tests/test_config_file.py rename to tests/atlasapi/test_config_file.py diff --git a/tests/test_core_atlas.py b/tests/atlasapi/test_core_atlas.py similarity index 100% rename from tests/test_core_atlas.py rename to tests/atlasapi/test_core_atlas.py diff --git a/tests/test_list_atlases.py b/tests/atlasapi/test_list_atlases.py similarity index 100% rename from tests/test_list_atlases.py rename to tests/atlasapi/test_list_atlases.py diff --git a/tests/test_structure_dict.py b/tests/atlasapi/test_structure_dict.py similarity index 100% rename from tests/test_structure_dict.py rename to tests/atlasapi/test_structure_dict.py diff --git a/tests/test_update_atlas.py b/tests/atlasapi/test_update_atlas.py similarity index 100% rename from tests/test_update_atlas.py rename to tests/atlasapi/test_update_atlas.py diff --git a/tests/test_utils.py b/tests/atlasapi/test_utils.py similarity index 100% rename from tests/test_utils.py rename to tests/atlasapi/test_utils.py From 80152d11ec59d2dcecfeeb440a6fe4e28e78c43e Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:33:38 +0000 Subject: [PATCH 091/103] Revert "bg_atlasapi -> brainglobe_atlasapi, package restructure" This reverts commit 120c3b2c183060d698105a2820d33447a5eced04. 
--- .gitignore | 2 +- README.md | 6 ++--- .../__init__.py | 4 +-- .../bg_atlas.py | 8 +++--- {brainglobe_atlasapi => bg_atlasapi}/cli.py | 6 ++--- .../config.py | 0 {brainglobe_atlasapi => bg_atlasapi}/core.py | 6 ++--- .../descriptors.py | 0 .../list_atlases.py | 2 +- .../structure_class.py | 2 +- .../structure_tree_util.py | 0 .../update_atlases.py | 27 ++++++++----------- {brainglobe_atlasapi => bg_atlasapi}/utils.py | 0 pyproject.toml | 8 +++--- tests/atlasapi/conftest.py | 2 +- tests/atlasapi/test_bg_atlas.py | 2 +- tests/atlasapi/test_cli.py | 2 +- tests/atlasapi/test_config_file.py | 4 +-- tests/atlasapi/test_core_atlas.py | 2 +- tests/atlasapi/test_list_atlases.py | 2 +- tests/atlasapi/test_structure_dict.py | 6 ++--- tests/atlasapi/test_update_atlas.py | 2 +- tests/atlasapi/test_utils.py | 2 +- 23 files changed, 45 insertions(+), 50 deletions(-) rename {brainglobe_atlasapi => bg_atlasapi}/__init__.py (70%) rename {brainglobe_atlasapi => bg_atlasapi}/bg_atlas.py (96%) rename {brainglobe_atlasapi => bg_atlasapi}/cli.py (91%) rename {brainglobe_atlasapi => bg_atlasapi}/config.py (100%) rename {brainglobe_atlasapi => bg_atlasapi}/core.py (98%) rename {brainglobe_atlasapi => bg_atlasapi}/descriptors.py (100%) rename {brainglobe_atlasapi => bg_atlasapi}/list_atlases.py (98%) rename {brainglobe_atlasapi => bg_atlasapi}/structure_class.py (96%) rename {brainglobe_atlasapi => bg_atlasapi}/structure_tree_util.py (100%) rename {brainglobe_atlasapi => bg_atlasapi}/update_atlases.py (72%) rename {brainglobe_atlasapi => bg_atlasapi}/utils.py (100%) diff --git a/.gitignore b/.gitignore index cddbb655..0ca94130 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -brainglobe_atlasapi/bg_config.conf +bg_atlasapi/bg_config.conf workspace.py .vscode/ diff --git a/README.md b/README.md index 21f69151..a7f506a9 100644 --- a/README.md +++ b/README.md @@ -47,9 +47,9 @@ Full information can be found in the [documentation](https://brainglobe.info/doc ### Python API **List of 
atlases** -To see a list of atlases use `brainglobe_atlasapi.show_atlases` +To see a list of atlases use `bg_atlasapi.show_atlases` ```python -from brainglobe_atlasapi import show_atlases +from bg_atlasapi import show_atlases show_atlases() # Brainglobe Atlases # ╭──────────────────────────────────┬────────────┬───────────────┬──────────────╮ @@ -74,7 +74,7 @@ All the features of each atlas can be accessed via the `BrainGlobeAtlas` class. e.g. for the 25um Allen Mouse Brain Atlas: ```python -from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas +from bg_atlasapi.bg_atlas import BrainGlobeAtlas atlas = BrainGlobeAtlas("allen_mouse_25um") ``` diff --git a/brainglobe_atlasapi/__init__.py b/bg_atlasapi/__init__.py similarity index 70% rename from brainglobe_atlasapi/__init__.py rename to bg_atlasapi/__init__.py index d36b0416..0facf5d3 100644 --- a/brainglobe_atlasapi/__init__.py +++ b/bg_atlasapi/__init__.py @@ -9,5 +9,5 @@ pass -from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas -from brainglobe_atlasapi.list_atlases import show_atlases +from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from bg_atlasapi.list_atlases import show_atlases diff --git a/brainglobe_atlasapi/bg_atlas.py b/bg_atlasapi/bg_atlas.py similarity index 96% rename from brainglobe_atlasapi/bg_atlas.py rename to bg_atlasapi/bg_atlas.py index 34b80032..cab2d152 100644 --- a/brainglobe_atlasapi/bg_atlas.py +++ b/bg_atlasapi/bg_atlas.py @@ -6,8 +6,8 @@ from rich import print as rprint from rich.console import Console -from brainglobe_atlasapi import config, core, descriptors, utils -from brainglobe_atlasapi.utils import _rich_atlas_metadata +from bg_atlasapi import config, core, descriptors, utils +from bg_atlasapi.utils import _rich_atlas_metadata COMPRESSED_FILENAME = "atlas.tar.gz" @@ -82,7 +82,7 @@ def __init__( raise ValueError(f"{atlas_name} is not a valid atlas name!") rprint( - f"[magenta2]brainglobe_atlasapi: {self.atlas_name} " + f"[magenta2]bg_atlasapi: {self.atlas_name} " "not found 
locally. Downloading...[magenta2]" ) self.download_extract_file() @@ -188,7 +188,7 @@ def check_latest_version(self): if local != online: rprint( - f"[b][magenta2]brainglobe_atlasapi[/b]: " + f"[b][magenta2]bg_atlasapi[/b]: " f"[b]{self.atlas_name}[/b] version [b]{local}[/b]" f"is not the latest available ([b]{online}[/b]). " "To update the atlas run in the terminal:[/magenta2]\n" diff --git a/brainglobe_atlasapi/cli.py b/bg_atlasapi/cli.py similarity index 91% rename from brainglobe_atlasapi/cli.py rename to bg_atlasapi/cli.py index e8687809..c699789a 100644 --- a/brainglobe_atlasapi/cli.py +++ b/bg_atlasapi/cli.py @@ -1,8 +1,8 @@ import click -from brainglobe_atlasapi.config import cli_modify_config -from brainglobe_atlasapi.list_atlases import show_atlases -from brainglobe_atlasapi.update_atlases import install_atlas, update_atlas +from bg_atlasapi.config import cli_modify_config +from bg_atlasapi.list_atlases import show_atlases +from bg_atlasapi.update_atlases import install_atlas, update_atlas @click.command(context_settings={"help_option_names": ["-h", "--help"]}) diff --git a/brainglobe_atlasapi/config.py b/bg_atlasapi/config.py similarity index 100% rename from brainglobe_atlasapi/config.py rename to bg_atlasapi/config.py diff --git a/brainglobe_atlasapi/core.py b/bg_atlasapi/core.py similarity index 98% rename from brainglobe_atlasapi/core.py rename to bg_atlasapi/core.py index a24dd2a8..c1475b58 100644 --- a/brainglobe_atlasapi/core.py +++ b/bg_atlasapi/core.py @@ -6,7 +6,7 @@ import pandas as pd from brainglobe_space import AnatomicalSpace -from brainglobe_atlasapi.descriptors import ( +from bg_atlasapi.descriptors import ( ANNOTATION_FILENAME, HEMISPHERES_FILENAME, MESHES_DIRNAME, @@ -14,8 +14,8 @@ REFERENCE_FILENAME, STRUCTURES_FILENAME, ) -from brainglobe_atlasapi.structure_class import StructuresDict -from brainglobe_atlasapi.utils import read_json, read_tiff +from bg_atlasapi.structure_class import StructuresDict +from bg_atlasapi.utils import 
read_json, read_tiff class Atlas: diff --git a/brainglobe_atlasapi/descriptors.py b/bg_atlasapi/descriptors.py similarity index 100% rename from brainglobe_atlasapi/descriptors.py rename to bg_atlasapi/descriptors.py diff --git a/brainglobe_atlasapi/list_atlases.py b/bg_atlasapi/list_atlases.py similarity index 98% rename from brainglobe_atlasapi/list_atlases.py rename to bg_atlasapi/list_atlases.py index 92760d89..e4ff7ff2 100644 --- a/brainglobe_atlasapi/list_atlases.py +++ b/bg_atlasapi/list_atlases.py @@ -6,7 +6,7 @@ from rich.panel import Panel from rich.table import Table -from brainglobe_atlasapi import config, descriptors, utils +from bg_atlasapi import config, descriptors, utils def get_downloaded_atlases(): diff --git a/brainglobe_atlasapi/structure_class.py b/bg_atlasapi/structure_class.py similarity index 96% rename from brainglobe_atlasapi/structure_class.py rename to bg_atlasapi/structure_class.py index 4d09c620..53c4b839 100644 --- a/brainglobe_atlasapi/structure_class.py +++ b/bg_atlasapi/structure_class.py @@ -3,7 +3,7 @@ import meshio as mio -from brainglobe_atlasapi.structure_tree_util import get_structures_tree +from bg_atlasapi.structure_tree_util import get_structures_tree class Structure(UserDict): diff --git a/brainglobe_atlasapi/structure_tree_util.py b/bg_atlasapi/structure_tree_util.py similarity index 100% rename from brainglobe_atlasapi/structure_tree_util.py rename to bg_atlasapi/structure_tree_util.py diff --git a/brainglobe_atlasapi/update_atlases.py b/bg_atlasapi/update_atlases.py similarity index 72% rename from brainglobe_atlasapi/update_atlases.py rename to bg_atlasapi/update_atlases.py index afc5d786..57ba4f74 100644 --- a/brainglobe_atlasapi/update_atlases.py +++ b/bg_atlasapi/update_atlases.py @@ -2,17 +2,12 @@ from rich import print as rprint -from brainglobe_atlasapi.bg_atlas import ( - Bbrainglobe_atlasapilas, - _version_str_from_tuple, -) -from brainglobe_atlasapi.list_atlases import get_downloaded_atlases - 
-brainglobe_atlasapi +from bg_atlasapi.bg_atlas import BrainGlobeAtlas, _version_str_from_tuple +from bg_atlasapi.list_atlases import get_downloaded_atlases def update_atlas(atlas_name, force=False): - """Updates a brainglobe_atlasapi atlas from the latest + """Updates a bg_atlasapi atlas from the latest available version online. Arguments: @@ -25,21 +20,21 @@ def update_atlas(atlas_name, force=False): """ atlas = BrainGlobeAtlas(atlas_name=atlas_name) -brainglobe_atlasapi + # Check if we need to update if not force: if atlas.check_latest_version(): rprint( - f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} " + f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} " "is already updated " f"(version: {_version_str_from_tuple(atlas.local_version)})" "[/b]" - )brainglobe_atlasapi + ) return # Delete atlas folder rprint( - f"[b][magenta2]brainglobe_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" + f"[b][magenta2]bg_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" ) fld = atlas.brainglobe_dir / atlas.local_full_name shutil.rmtree(fld) @@ -49,12 +44,12 @@ def update_atlas(atlas_name, force=False): "of the atlas, aborting." 
) - # Download againbrainglobe_atlasapi + # Download again atlas.download_extract_file() # Check that everything went well rprint( - f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} updated to version: " + f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} updated to version: " + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]" ) @@ -72,12 +67,12 @@ def install_atlas(atlas_name): # Check input: if not isinstance(atlas_name, str): raise ValueError(f"atlas name should be a string, not {atlas_name}") -brainglobe_atlasapi + # Check if already downloaded: available_atlases = get_downloaded_atlases() if atlas_name in available_atlases: rprint( - f"[b][magenta2]brainglobe_atlasapi: installing {atlas_name}: " + f"[b][magenta2]bg_atlasapi: installing {atlas_name}: " "atlas already installed![/magenta2][/b]" ) return diff --git a/brainglobe_atlasapi/utils.py b/bg_atlasapi/utils.py similarity index 100% rename from brainglobe_atlasapi/utils.py rename to bg_atlasapi/utils.py diff --git a/pyproject.toml b/pyproject.toml index 1cbeb3ff..3f3e97e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,17 +45,17 @@ requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [project.scripts] -brainglobe = "brainglobe_atlasapi.cli:bg_cli" +brainglobe = "bg_atlasapi.cli:bg_cli" [tool.setuptools] include-package-data = true [tool.setuptools.packages.find] -include = ["brainglobe_atlasapi*"] +include = ["bg_atlasapi*"] exclude = ["tests*"] [tool.pytest.ini_options] -addopts = "--cov=brainglobe_atlasapi" +addopts = "--cov=bg_atlasapi" filterwarnings = ["error"] markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"] @@ -96,5 +96,5 @@ python = [testenv] extras = dev -commands = pytest -v --color=yes --cov=brainglobe_atlasapi --cov-report=xml +commands = pytest -v --color=yes --cov=bg_atlasapi --cov-report=xml """ diff --git a/tests/atlasapi/conftest.py b/tests/atlasapi/conftest.py index 
41f197da..d7d58161 100644 --- a/tests/atlasapi/conftest.py +++ b/tests/atlasapi/conftest.py @@ -4,7 +4,7 @@ import pytest -from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas +from bg_atlasapi.bg_atlas import BrainGlobeAtlas @pytest.fixture() diff --git a/tests/atlasapi/test_bg_atlas.py b/tests/atlasapi/test_bg_atlas.py index 7d200159..449ecf96 100644 --- a/tests/atlasapi/test_bg_atlas.py +++ b/tests/atlasapi/test_bg_atlas.py @@ -3,7 +3,7 @@ import pytest -from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas +from bg_atlasapi.bg_atlas import BrainGlobeAtlas def test_versions(atlas): diff --git a/tests/atlasapi/test_cli.py b/tests/atlasapi/test_cli.py index 462582c6..31b037da 100644 --- a/tests/atlasapi/test_cli.py +++ b/tests/atlasapi/test_cli.py @@ -1,6 +1,6 @@ from click.testing import CliRunner -from brainglobe_atlasapi import cli, config +from bg_atlasapi import cli, config # This testing of the command line application does not really diff --git a/tests/atlasapi/test_config_file.py b/tests/atlasapi/test_config_file.py index 2e7e202d..33723006 100644 --- a/tests/atlasapi/test_config_file.py +++ b/tests/atlasapi/test_config_file.py @@ -5,7 +5,7 @@ import pytest from click.testing import CliRunner -from brainglobe_atlasapi import bg_atlas, cli, config +from bg_atlasapi import bg_atlas, cli, config @pytest.fixture() @@ -27,7 +27,7 @@ def test_config_creation(conf_path): # Ugly test zone: here we use the terminal commands, which edit the config -# file in the brainglobe_atlasapi repo from which the tests are being run. +# file in the bg_atlasapi repo from which the tests are being run. # This is not the cleanest way, the alternative would be to run this test in # a new env. 
@pytest.mark.slow diff --git a/tests/atlasapi/test_core_atlas.py b/tests/atlasapi/test_core_atlas.py index a8b6cb94..6c60df83 100644 --- a/tests/atlasapi/test_core_atlas.py +++ b/tests/atlasapi/test_core_atlas.py @@ -6,7 +6,7 @@ import pytest import tifffile -from brainglobe_atlasapi.core import AdditionalRefDict +from bg_atlasapi.core import AdditionalRefDict def test_initialization(atlas): diff --git a/tests/atlasapi/test_list_atlases.py b/tests/atlasapi/test_list_atlases.py index afa7b581..7030b76d 100644 --- a/tests/atlasapi/test_list_atlases.py +++ b/tests/atlasapi/test_list_atlases.py @@ -1,4 +1,4 @@ -from brainglobe_atlasapi.list_atlases import ( +from bg_atlasapi.list_atlases import ( get_atlases_lastversions, get_downloaded_atlases, get_local_atlas_version, diff --git a/tests/atlasapi/test_structure_dict.py b/tests/atlasapi/test_structure_dict.py index 211e87d3..cd6c627b 100644 --- a/tests/atlasapi/test_structure_dict.py +++ b/tests/atlasapi/test_structure_dict.py @@ -1,9 +1,9 @@ import meshio as mio import pytest -from brainglobe_atlasapi import descriptors -from brainglobe_atlasapi.structure_class import StructuresDict -from brainglobe_atlasapi.utils import read_json +from bg_atlasapi import descriptors +from bg_atlasapi.structure_class import StructuresDict +from bg_atlasapi.utils import read_json structures_list = [ { diff --git a/tests/atlasapi/test_update_atlas.py b/tests/atlasapi/test_update_atlas.py index abfc1ee2..433f6e86 100644 --- a/tests/atlasapi/test_update_atlas.py +++ b/tests/atlasapi/test_update_atlas.py @@ -1,6 +1,6 @@ import pytest -from brainglobe_atlasapi import update_atlases +from bg_atlasapi import update_atlases def test_update(): diff --git a/tests/atlasapi/test_utils.py b/tests/atlasapi/test_utils.py index 1d6462d3..1d194872 100644 --- a/tests/atlasapi/test_utils.py +++ b/tests/atlasapi/test_utils.py @@ -4,7 +4,7 @@ import requests from requests import HTTPError -from brainglobe_atlasapiatlasapiatlasapiatlasapi import utils +from 
bg_atlasapi import utils test_url = "https://gin.g-node.org/BrainGlobe/atlases/raw/master/example_mouse_100um_v1.2.tar.gz" From 7d585e984529b828fc93cea5dd9baabc6b55f97e Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:34:43 +0000 Subject: [PATCH 092/103] bg_atlasapi -> brainglobe_atlasapi, package restructure --- .gitignore | 2 +- README.md | 6 +- .../__init__.py | 4 +- .../bg_atlas.py | 8 +- {bg_atlasapi => brainglobe_atlasapi}/cli.py | 6 +- .../config.py | 0 {bg_atlasapi => brainglobe_atlasapi}/core.py | 6 +- .../descriptors.py | 0 .../list_atlases.py | 2 +- .../structure_class.py | 2 +- .../structure_tree_util.py | 0 .../update_atlases.py | 17 +- {bg_atlasapi => brainglobe_atlasapi}/utils.py | 0 pyproject.toml | 8 +- tests/atlasapi/conftest.py | 2 +- tests/atlasapi/test_bg_atlas.py | 2 +- tests/atlasapi/test_cli.py | 2 +- tests/atlasapi/test_config_file.py | 4 +- tests/atlasapi/test_core_atlas.py | 2 +- tests/atlasapi/test_list_atlases.py | 2 +- tests/atlasapi/test_structure_dict.py | 6 +- tests/atlasapi/test_update_atlas.py | 2 +- tests/atlasapi/test_utils.py | 2 +- tutorials/Atlas API usage.ipynb | 3428 ++++++++--------- 24 files changed, 1758 insertions(+), 1755 deletions(-) rename {bg_atlasapi => brainglobe_atlasapi}/__init__.py (70%) rename {bg_atlasapi => brainglobe_atlasapi}/bg_atlas.py (96%) rename {bg_atlasapi => brainglobe_atlasapi}/cli.py (91%) rename {bg_atlasapi => brainglobe_atlasapi}/config.py (100%) rename {bg_atlasapi => brainglobe_atlasapi}/core.py (98%) rename {bg_atlasapi => brainglobe_atlasapi}/descriptors.py (100%) rename {bg_atlasapi => brainglobe_atlasapi}/list_atlases.py (98%) rename {bg_atlasapi => brainglobe_atlasapi}/structure_class.py (96%) rename {bg_atlasapi => brainglobe_atlasapi}/structure_tree_util.py (100%) rename {bg_atlasapi => brainglobe_atlasapi}/update_atlases.py (76%) rename {bg_atlasapi => brainglobe_atlasapi}/utils.py (100%) diff --git a/.gitignore b/.gitignore index 
0ca94130..cddbb655 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -bg_atlasapi/bg_config.conf +brainglobe_atlasapi/bg_config.conf workspace.py .vscode/ diff --git a/README.md b/README.md index a7f506a9..21f69151 100644 --- a/README.md +++ b/README.md @@ -47,9 +47,9 @@ Full information can be found in the [documentation](https://brainglobe.info/doc ### Python API **List of atlases** -To see a list of atlases use `bg_atlasapi.show_atlases` +To see a list of atlases use `brainglobe_atlasapi.show_atlases` ```python -from bg_atlasapi import show_atlases +from brainglobe_atlasapi import show_atlases show_atlases() # Brainglobe Atlases # ╭──────────────────────────────────┬────────────┬───────────────┬──────────────╮ @@ -74,7 +74,7 @@ All the features of each atlas can be accessed via the `BrainGlobeAtlas` class. e.g. for the 25um Allen Mouse Brain Atlas: ```python -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas atlas = BrainGlobeAtlas("allen_mouse_25um") ``` diff --git a/bg_atlasapi/__init__.py b/brainglobe_atlasapi/__init__.py similarity index 70% rename from bg_atlasapi/__init__.py rename to brainglobe_atlasapi/__init__.py index 0facf5d3..d36b0416 100644 --- a/bg_atlasapi/__init__.py +++ b/brainglobe_atlasapi/__init__.py @@ -9,5 +9,5 @@ pass -from bg_atlasapi.bg_atlas import BrainGlobeAtlas -from bg_atlasapi.list_atlases import show_atlases +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.list_atlases import show_atlases diff --git a/bg_atlasapi/bg_atlas.py b/brainglobe_atlasapi/bg_atlas.py similarity index 96% rename from bg_atlasapi/bg_atlas.py rename to brainglobe_atlasapi/bg_atlas.py index cab2d152..34b80032 100644 --- a/bg_atlasapi/bg_atlas.py +++ b/brainglobe_atlasapi/bg_atlas.py @@ -6,8 +6,8 @@ from rich import print as rprint from rich.console import Console -from bg_atlasapi import config, core, descriptors, utils -from bg_atlasapi.utils import 
_rich_atlas_metadata +from brainglobe_atlasapi import config, core, descriptors, utils +from brainglobe_atlasapi.utils import _rich_atlas_metadata COMPRESSED_FILENAME = "atlas.tar.gz" @@ -82,7 +82,7 @@ def __init__( raise ValueError(f"{atlas_name} is not a valid atlas name!") rprint( - f"[magenta2]bg_atlasapi: {self.atlas_name} " + f"[magenta2]brainglobe_atlasapi: {self.atlas_name} " "not found locally. Downloading...[magenta2]" ) self.download_extract_file() @@ -188,7 +188,7 @@ def check_latest_version(self): if local != online: rprint( - f"[b][magenta2]bg_atlasapi[/b]: " + f"[b][magenta2]brainglobe_atlasapi[/b]: " f"[b]{self.atlas_name}[/b] version [b]{local}[/b]" f"is not the latest available ([b]{online}[/b]). " "To update the atlas run in the terminal:[/magenta2]\n" diff --git a/bg_atlasapi/cli.py b/brainglobe_atlasapi/cli.py similarity index 91% rename from bg_atlasapi/cli.py rename to brainglobe_atlasapi/cli.py index c699789a..e8687809 100644 --- a/bg_atlasapi/cli.py +++ b/brainglobe_atlasapi/cli.py @@ -1,8 +1,8 @@ import click -from bg_atlasapi.config import cli_modify_config -from bg_atlasapi.list_atlases import show_atlases -from bg_atlasapi.update_atlases import install_atlas, update_atlas +from brainglobe_atlasapi.config import cli_modify_config +from brainglobe_atlasapi.list_atlases import show_atlases +from brainglobe_atlasapi.update_atlases import install_atlas, update_atlas @click.command(context_settings={"help_option_names": ["-h", "--help"]}) diff --git a/bg_atlasapi/config.py b/brainglobe_atlasapi/config.py similarity index 100% rename from bg_atlasapi/config.py rename to brainglobe_atlasapi/config.py diff --git a/bg_atlasapi/core.py b/brainglobe_atlasapi/core.py similarity index 98% rename from bg_atlasapi/core.py rename to brainglobe_atlasapi/core.py index c1475b58..a24dd2a8 100644 --- a/bg_atlasapi/core.py +++ b/brainglobe_atlasapi/core.py @@ -6,7 +6,7 @@ import pandas as pd from brainglobe_space import AnatomicalSpace -from 
bg_atlasapi.descriptors import ( +from brainglobe_atlasapi.descriptors import ( ANNOTATION_FILENAME, HEMISPHERES_FILENAME, MESHES_DIRNAME, @@ -14,8 +14,8 @@ REFERENCE_FILENAME, STRUCTURES_FILENAME, ) -from bg_atlasapi.structure_class import StructuresDict -from bg_atlasapi.utils import read_json, read_tiff +from brainglobe_atlasapi.structure_class import StructuresDict +from brainglobe_atlasapi.utils import read_json, read_tiff class Atlas: diff --git a/bg_atlasapi/descriptors.py b/brainglobe_atlasapi/descriptors.py similarity index 100% rename from bg_atlasapi/descriptors.py rename to brainglobe_atlasapi/descriptors.py diff --git a/bg_atlasapi/list_atlases.py b/brainglobe_atlasapi/list_atlases.py similarity index 98% rename from bg_atlasapi/list_atlases.py rename to brainglobe_atlasapi/list_atlases.py index e4ff7ff2..92760d89 100644 --- a/bg_atlasapi/list_atlases.py +++ b/brainglobe_atlasapi/list_atlases.py @@ -6,7 +6,7 @@ from rich.panel import Panel from rich.table import Table -from bg_atlasapi import config, descriptors, utils +from brainglobe_atlasapi import config, descriptors, utils def get_downloaded_atlases(): diff --git a/bg_atlasapi/structure_class.py b/brainglobe_atlasapi/structure_class.py similarity index 96% rename from bg_atlasapi/structure_class.py rename to brainglobe_atlasapi/structure_class.py index 53c4b839..4d09c620 100644 --- a/bg_atlasapi/structure_class.py +++ b/brainglobe_atlasapi/structure_class.py @@ -3,7 +3,7 @@ import meshio as mio -from bg_atlasapi.structure_tree_util import get_structures_tree +from brainglobe_atlasapi.structure_tree_util import get_structures_tree class Structure(UserDict): diff --git a/bg_atlasapi/structure_tree_util.py b/brainglobe_atlasapi/structure_tree_util.py similarity index 100% rename from bg_atlasapi/structure_tree_util.py rename to brainglobe_atlasapi/structure_tree_util.py diff --git a/bg_atlasapi/update_atlases.py b/brainglobe_atlasapi/update_atlases.py similarity index 76% rename from 
bg_atlasapi/update_atlases.py rename to brainglobe_atlasapi/update_atlases.py index 57ba4f74..d7ba910f 100644 --- a/bg_atlasapi/update_atlases.py +++ b/brainglobe_atlasapi/update_atlases.py @@ -2,12 +2,15 @@ from rich import print as rprint -from bg_atlasapi.bg_atlas import BrainGlobeAtlas, _version_str_from_tuple -from bg_atlasapi.list_atlases import get_downloaded_atlases +from brainglobe_atlasapi.bg_atlas import ( + BrainGlobeAtlas, + _version_str_from_tuple, +) +from brainglobe_atlasapi.list_atlases import get_downloaded_atlases def update_atlas(atlas_name, force=False): - """Updates a bg_atlasapi atlas from the latest + """Updates a brainglobe_atlasapi atlas from the latest available version online. Arguments: @@ -25,7 +28,7 @@ def update_atlas(atlas_name, force=False): if not force: if atlas.check_latest_version(): rprint( - f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} " + f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} " "is already updated " f"(version: {_version_str_from_tuple(atlas.local_version)})" "[/b]" @@ -34,7 +37,7 @@ def update_atlas(atlas_name, force=False): # Delete atlas folder rprint( - f"[b][magenta2]bg_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" + f"[b][magenta2]brainglobe_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" ) fld = atlas.brainglobe_dir / atlas.local_full_name shutil.rmtree(fld) @@ -49,7 +52,7 @@ def update_atlas(atlas_name, force=False): # Check that everything went well rprint( - f"[b][magenta2]bg_atlasapi: {atlas.atlas_name} updated to version: " + f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} updated to version: " + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]" ) @@ -72,7 +75,7 @@ def install_atlas(atlas_name): available_atlases = get_downloaded_atlases() if atlas_name in available_atlases: rprint( - f"[b][magenta2]bg_atlasapi: installing {atlas_name}: " + f"[b][magenta2]brainglobe_atlasapi: installing {atlas_name}: " "atlas already installed![/magenta2][/b]" ) return diff 
--git a/bg_atlasapi/utils.py b/brainglobe_atlasapi/utils.py similarity index 100% rename from bg_atlasapi/utils.py rename to brainglobe_atlasapi/utils.py diff --git a/pyproject.toml b/pyproject.toml index 3f3e97e6..1cbeb3ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,17 +45,17 @@ requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [project.scripts] -brainglobe = "bg_atlasapi.cli:bg_cli" +brainglobe = "brainglobe_atlasapi.cli:bg_cli" [tool.setuptools] include-package-data = true [tool.setuptools.packages.find] -include = ["bg_atlasapi*"] +include = ["brainglobe_atlasapi*"] exclude = ["tests*"] [tool.pytest.ini_options] -addopts = "--cov=bg_atlasapi" +addopts = "--cov=brainglobe_atlasapi" filterwarnings = ["error"] markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"] @@ -96,5 +96,5 @@ python = [testenv] extras = dev -commands = pytest -v --color=yes --cov=bg_atlasapi --cov-report=xml +commands = pytest -v --color=yes --cov=brainglobe_atlasapi --cov-report=xml """ diff --git a/tests/atlasapi/conftest.py b/tests/atlasapi/conftest.py index d7d58161..41f197da 100644 --- a/tests/atlasapi/conftest.py +++ b/tests/atlasapi/conftest.py @@ -4,7 +4,7 @@ import pytest -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas @pytest.fixture() diff --git a/tests/atlasapi/test_bg_atlas.py b/tests/atlasapi/test_bg_atlas.py index 449ecf96..7d200159 100644 --- a/tests/atlasapi/test_bg_atlas.py +++ b/tests/atlasapi/test_bg_atlas.py @@ -3,7 +3,7 @@ import pytest -from bg_atlasapi.bg_atlas import BrainGlobeAtlas +from brainglobe_atlasapi.bg_atlas import BrainGlobeAtlas def test_versions(atlas): diff --git a/tests/atlasapi/test_cli.py b/tests/atlasapi/test_cli.py index 31b037da..462582c6 100644 --- a/tests/atlasapi/test_cli.py +++ b/tests/atlasapi/test_cli.py @@ -1,6 +1,6 @@ from click.testing import CliRunner -from bg_atlasapi import cli, config 
+from brainglobe_atlasapi import cli, config # This testing of the command line application does not really diff --git a/tests/atlasapi/test_config_file.py b/tests/atlasapi/test_config_file.py index 33723006..2e7e202d 100644 --- a/tests/atlasapi/test_config_file.py +++ b/tests/atlasapi/test_config_file.py @@ -5,7 +5,7 @@ import pytest from click.testing import CliRunner -from bg_atlasapi import bg_atlas, cli, config +from brainglobe_atlasapi import bg_atlas, cli, config @pytest.fixture() @@ -27,7 +27,7 @@ def test_config_creation(conf_path): # Ugly test zone: here we use the terminal commands, which edit the config -# file in the bg_atlasapi repo from which the tests are being run. +# file in the brainglobe_atlasapi repo from which the tests are being run. # This is not the cleanest way, the alternative would be to run this test in # a new env. @pytest.mark.slow diff --git a/tests/atlasapi/test_core_atlas.py b/tests/atlasapi/test_core_atlas.py index 6c60df83..a8b6cb94 100644 --- a/tests/atlasapi/test_core_atlas.py +++ b/tests/atlasapi/test_core_atlas.py @@ -6,7 +6,7 @@ import pytest import tifffile -from bg_atlasapi.core import AdditionalRefDict +from brainglobe_atlasapi.core import AdditionalRefDict def test_initialization(atlas): diff --git a/tests/atlasapi/test_list_atlases.py b/tests/atlasapi/test_list_atlases.py index 7030b76d..afa7b581 100644 --- a/tests/atlasapi/test_list_atlases.py +++ b/tests/atlasapi/test_list_atlases.py @@ -1,4 +1,4 @@ -from bg_atlasapi.list_atlases import ( +from brainglobe_atlasapi.list_atlases import ( get_atlases_lastversions, get_downloaded_atlases, get_local_atlas_version, diff --git a/tests/atlasapi/test_structure_dict.py b/tests/atlasapi/test_structure_dict.py index cd6c627b..211e87d3 100644 --- a/tests/atlasapi/test_structure_dict.py +++ b/tests/atlasapi/test_structure_dict.py @@ -1,9 +1,9 @@ import meshio as mio import pytest -from bg_atlasapi import descriptors -from bg_atlasapi.structure_class import StructuresDict -from 
bg_atlasapi.utils import read_json +from brainglobe_atlasapi import descriptors +from brainglobe_atlasapi.structure_class import StructuresDict +from brainglobe_atlasapi.utils import read_json structures_list = [ { diff --git a/tests/atlasapi/test_update_atlas.py b/tests/atlasapi/test_update_atlas.py index 433f6e86..abfc1ee2 100644 --- a/tests/atlasapi/test_update_atlas.py +++ b/tests/atlasapi/test_update_atlas.py @@ -1,6 +1,6 @@ import pytest -from bg_atlasapi import update_atlases +from brainglobe_atlasapi import update_atlases def test_update(): diff --git a/tests/atlasapi/test_utils.py b/tests/atlasapi/test_utils.py index 1d194872..e09279d9 100644 --- a/tests/atlasapi/test_utils.py +++ b/tests/atlasapi/test_utils.py @@ -4,7 +4,7 @@ import requests from requests import HTTPError -from bg_atlasapi import utils +from brainglobe_atlasapi import utils test_url = "https://gin.g-node.org/BrainGlobe/atlases/raw/master/example_mouse_100um_v1.2.tar.gz" diff --git a/tutorials/Atlas API usage.ipynb b/tutorials/Atlas API usage.ipynb index 38d9d376..2881460a 100644 --- a/tutorials/Atlas API usage.ipynb +++ b/tutorials/Atlas API usage.ipynb @@ -1,1715 +1,1715 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Introduction to the `BrainGlobeAtlas` class" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 0. Creating a `BrainGlobeAtlas` object and list availabe options" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To instantiate a `BrainGlobeAtlas` object, we need to instantiate it with the atlas name. 
The first time we use it, a version of this atlas files will be downloaded from the [remote GIN repository](http://gin.g-node.org/brainglobe/atlases) and stored on your local machine (by default, in .../Users/username/.brainglobe):" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "allen mouse atlas (res. 100um)\n", - "From: http://www.brain-map.org (Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007 )\n" - ] - } - ], - "source": [ - "import numpy as np\n", - "from bg_atlasapi import BrainGlobeAtlas\n", - "from pprint import pprint\n", - "\n", - "bg_atlas = BrainGlobeAtlas(\"allen_mouse_100um\", check_latest=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To know what atlases are available through BrainGlobe, we can use the `show_atlases` function (we need to be online):" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                                                                                  \n",
-       "                                                                                  \n",
-       "                                Brainglobe Atlases                                \n",
-       "╭──────────────────────────────────┬────────────┬───────────────┬────────────────╮\n",
-       "│ Name                              Downloaded  Local version  Latest version │\n",
-       "├──────────────────────────────────┼────────────┼───────────────┼────────────────┤\n",
-       "│ whs_sd_rat_39um      │      1.0      │      1.0       │\n",
-       "│ allen_mouse_25um      │      1.2      │      1.2       │\n",
-       "│ allen_mouse_100um      │      1.2      │      1.2       │\n",
-       "│ allen_mouse_50um      │      1.2      │      1.2       │\n",
-       "│ example_mouse_100um------      │      1.2       │\n",
-       "│ allen_mouse_10um------      │      1.2       │\n",
-       "│ mpin_zfish_1um------      │      1.0       │\n",
-       "│ allen_human_500um------      │      0.1       │\n",
-       "│ kim_mouse_10um------      │      1.0       │\n",
-       "│ kim_mouse_25um------      │      1.0       │\n",
-       "│ kim_mouse_50um------      │      1.0       │\n",
-       "│ kim_mouse_100um------      │      1.0       │\n",
-       "│ osten_mouse_10um------      │      1.1       │\n",
-       "│ osten_mouse_25um------      │      1.1       │\n",
-       "│ osten_mouse_50um------      │      1.1       │\n",
-       "│ osten_mouse_100um------      │      1.1       │\n",
-       "│ allen_cord_20um------      │      1.0       │\n",
-       "│ azba_zfish_4um------      │      1.1       │\n",
-       "│ perens_lsfm_mouse_20um------      │      1.0       │\n",
-       "│ admba_3d_e11_5_mouse_16um------      │      1.0       │\n",
-       "│ admba_3d_e13_5_mouse_16um------      │      1.0       │\n",
-       "│ admba_3d_e15_5_mouse_16um------      │      1.0       │\n",
-       "│ admba_3d_e18_5_mouse_16um------      │      1.0       │\n",
-       "│ admba_3d_p4_mouse_16.752um------      │      1.0       │\n",
-       "│ admba_3d_p14_mouse_16.752um------      │      1.0       │\n",
-       "│ admba_3d_p28_mouse_16.752um------      │      1.0       │\n",
-       "│ admba_3d_p56_mouse_25um------      │      1.0       │\n",
-       "╰──────────────────────────────────┴────────────┴───────────────┴────────────────╯\n",
-       "
\n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\n", - "\u001b[3m \u001b[0m\n", - "\u001b[3m Brainglobe Atlases \u001b[0m\n", - "╭──────────────────────────────────┬────────────┬───────────────┬────────────────╮\n", - "│\u001b[1;32m \u001b[0m\u001b[1;32mName \u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mDownloaded\u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mLocal version\u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mLatest version\u001b[0m\u001b[1;32m \u001b[0m│\n", - "├──────────────────────────────────┼────────────┼───────────────┼────────────────┤\n", - "│ \u001b[1mwhs_sd_rat_39um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.0 │ 1.0 │\n", - "│ \u001b[1mallen_mouse_25um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", - "│ \u001b[1mallen_mouse_100um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", - "│ \u001b[1mallen_mouse_50um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", - "│ \u001b[1mexample_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.2 │\n", - "│ \u001b[1mallen_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.2 │\n", - "│ \u001b[1mmpin_zfish_1um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mallen_human_500um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 0.1 │\n", - "│ \u001b[1mkim_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mkim_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mkim_mouse_50um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mkim_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mosten_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", - "│ \u001b[1mosten_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", - "│ 
\u001b[1mosten_mouse_50um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", - "│ \u001b[1mosten_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", - "│ \u001b[1mallen_cord_20um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1mazba_zfish_4um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", - "│ \u001b[1mperens_lsfm_mouse_20um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_e11_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_e13_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_e15_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_e18_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_p4_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_p14_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_p28_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "│ \u001b[1madmba_3d_p56_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", - "╰──────────────────────────────────┴────────────┴───────────────┴────────────────╯\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from bg_atlasapi import show_atlases\n", - "show_atlases()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 1. Using a `BrainGlobe` atlas" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "A BrainGlobe atlas is a convenient API for interacting with an anatomical atlas. 
BrainGlobe atlases contain:\n", - " * Metadata\n", - " * The reference anatomical stack used for the registration itself\n", - " * Region annotation stack (the segmented atlas image that occupies the same space as the reference stack)\n", - " * Hemisphere annotation stack which denotes left and right\n", - " * Description of the region hierarchy\n", - " * Meshes for the regions" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.0 Metadata" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "All atlases have a standard set of medatata describing their source, species, resolution, etc:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'name': 'allen_mouse',\n", - " 'citation': 'Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007',\n", - " 'atlas_link': 'http://www.brain-map.org',\n", - " 'species': 'Mus musculus',\n", - " 'symmetric': True,\n", - " 'resolution': [100.0, 100.0, 100.0],\n", - " 'orientation': 'asr',\n", - " 'version': '1.2',\n", - " 'shape': [132, 80, 114],\n", - " 'trasform_to_bg': [[1.0, 0.0, 0.0, 0.0],\n", - " [0.0, 1.0, 0.0, 0.0],\n", - " [0.0, 0.0, 1.0, 0.0],\n", - " [0.0, 0.0, 0.0, 1.0]],\n", - " 'additional_references': []}" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.metadata" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.1 Anatomical, annotation and hemispheres stack" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "from matplotlib import pyplot as plt" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Anatomical reference:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "space = bg_atlas.space\n", - "stack = bg_atlas.reference\n", - "\n", - 
"f, axs = plt.subplots(1,3, figsize=(12, 3))\n", - "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", - " mid_index = stack.shape[i]//2\n", - " axs[i].imshow(np.moveaxis(stack,i,0)[mid_index,:,:], cmap=\"gray\",clim=(0,250))\n", - " axs[i].set_title(f\"{plane.capitalize()} view\")\n", - " axs[i].set_ylabel(labels[0])\n", - " axs[i].set_xlabel(labels[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Annotations stack:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "space = bg_atlas.space\n", - "stack = bg_atlas.annotation\n", - "\n", - "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", - "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", - " mid_index = stack.shape[i]//2\n", - " axs[i].imshow(np.moveaxis(stack,i,0)[mid_index,:,:], cmap=\"gray\",clim=(0,1250))\n", - " axs[i].set_title(f\"{plane.capitalize()} view\")\n", - " axs[i].set_ylabel(labels[0])\n", - " axs[i].set_xlabel(labels[1])\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "space = bg_atlas.space\n", - "stack = bg_atlas.hemispheres\n", - "\n", - "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", - "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", - " axs[i].imshow(stack.max(i), cmap=\"gray\")\n", - " axs[i].set_title(f\"{plane.capitalize()} view\")\n", - " axs[i].set_ylabel(labels[0])\n", - " axs[i].set_xlabel(labels[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.2 Regions hierarchy" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The atlas comes with the description of a hierarchy of brain structures. 
To have an overview:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "root (997)\n", - "├── VS (73)\n", - "│ ├── AQ (140)\n", - "│ ├── V3 (129)\n", - "│ ├── V4 (145)\n", - "│ │ └── V4r (153)\n", - "│ ├── VL (81)\n", - "│ │ ├── SEZ (98)\n", - "│ │ └── chpl (108)\n", - "│ └── c (164)\n", - "├── fiber tracts (1009)\n", - "│ ├── cbf (960)\n", - "│ │ ├── arb (728)\n", - "│ │ ├── cbc (744)\n", - "│ │ └── cbp (752)\n", - "│ │ ├── icp (1123)\n", - "│ │ │ └── sctd (553)\n", - "│ │ ├── mcp (78)\n", - "│ │ └── scp (326)\n", - "│ │ ├── dscp (812)\n", - "│ │ ├── sctv (866)\n", - "│ │ └── uf (850)\n", - "│ ├── cm (967)\n", - "│ │ ├── IIIn (832)\n", - "│ │ │ ├── mlf (62)\n", - "│ │ │ └── pc (158)\n", - "│ │ ├── IIn (848)\n", - "│ │ │ ├── bsc (916)\n", - "│ │ │ ├── csc (336)\n", - "│ │ │ ├── och (117)\n", - "│ │ │ └── opt (125)\n", - "│ │ ├── IVn (911)\n", - "│ │ ├── In (840)\n", - "│ │ │ ├── aco (900)\n", - "│ │ │ ├── lotg (21)\n", - "│ │ │ │ ├── lot (665)\n", - "│ │ │ │ └── lotd (538)\n", - "│ │ │ └── onl (1016)\n", - "│ │ ├── VIIIn (933)\n", - "│ │ │ ├── cVIIIn (948)\n", - "│ │ │ │ ├── bic (482)\n", - "│ │ │ │ ├── cic (633)\n", - "│ │ │ │ ├── das (506)\n", - "│ │ │ │ ├── ll (658)\n", - "│ │ │ │ └── tb (841)\n", - "│ │ │ └── vVIIIn (413)\n", - "│ │ ├── VIIn (798)\n", - "│ │ │ └── gVIIn (1116)\n", - "│ │ ├── Vn (901)\n", - "│ │ │ ├── moV (93)\n", - "│ │ │ └── sV (229)\n", - "│ │ │ └── sptV (794)\n", - "│ │ ├── Xn (917)\n", - "│ │ │ └── ts (237)\n", - "│ │ ├── drt (792)\n", - "│ │ │ └── cett (932)\n", - "│ │ │ ├── dc (514)\n", - "│ │ │ │ └── cuf (380)\n", - "│ │ │ └── ml (697)\n", - "│ │ └── von (949)\n", - "│ ├── eps (1000)\n", - "│ │ ├── epsc (760)\n", - "│ │ │ └── nst (102)\n", - "│ │ ├── rust (863)\n", - "│ │ │ └── vtd (397)\n", - "│ │ └── tsp (877)\n", - "│ │ ├── dtd (1060)\n", - "│ │ ├── tspc (1043)\n", - "│ │ └── tspd (1051)\n", - "│ ├── lfbs (983)\n", - "│ │ ├── cc (776)\n", - "│ │ │ ├── ccb 
(484682516)\n", - "│ │ │ ├── ccg (1108)\n", - "│ │ │ ├── ccs (986)\n", - "│ │ │ ├── ee (964)\n", - "│ │ │ ├── fa (956)\n", - "│ │ │ │ └── ec (579)\n", - "│ │ │ └── fp (971)\n", - "│ │ ├── cst (784)\n", - "│ │ │ ├── cpd (924)\n", - "│ │ │ ├── int (6)\n", - "│ │ │ ├── py (190)\n", - "│ │ │ └── pyd (198)\n", - "│ │ └── lfbst (896)\n", - "│ │ ├── ar (484682524)\n", - "│ │ ├── em (1092)\n", - "│ │ └── or (484682520)\n", - "│ ├── mfbs (991)\n", - "│ │ ├── mfbc (768)\n", - "│ │ │ ├── act (908)\n", - "│ │ │ ├── amc (884)\n", - "│ │ │ ├── cing (940)\n", - "│ │ │ ├── fxs (1099)\n", - "│ │ │ │ ├── alv (466)\n", - "│ │ │ │ ├── df (530)\n", - "│ │ │ │ ├── fi (603)\n", - "│ │ │ │ ├── fxpo (737)\n", - "│ │ │ │ │ ├── fx (436)\n", - "│ │ │ │ │ └── mct (428)\n", - "│ │ │ │ └── hc (618)\n", - "│ │ │ │ ├── dhc (443)\n", - "│ │ │ │ └── vhc (449)\n", - "│ │ │ └── st (301)\n", - "│ │ │ └── stc (484682528)\n", - "│ │ └── mfsbshy (824)\n", - "│ │ ├── mfb (54)\n", - "│ │ ├── mfbse (1083)\n", - "│ │ │ ├── fr (595)\n", - "│ │ │ ├── hbc (611)\n", - "│ │ │ └── sm (802)\n", - "│ │ ├── mfbsma (46)\n", - "│ │ │ ├── mp (673)\n", - "│ │ │ ├── mtg (681)\n", - "│ │ │ ├── mtt (690)\n", - "│ │ │ └── pm (753)\n", - "│ │ └── sup (349)\n", - "│ └── scwm (484682512)\n", - "└── grey (8)\n", - " ├── BS (343)\n", - " │ ├── HB (1065)\n", - " │ │ ├── MY (354)\n", - " │ │ │ ├── MY-mot (370)\n", - " │ │ │ │ ├── ACVII (576)\n", - " │ │ │ │ ├── AMB (135)\n", - " │ │ │ │ │ ├── AMBd (939)\n", - " │ │ │ │ │ └── AMBv (143)\n", - " │ │ │ │ ├── DMX (839)\n", - " │ │ │ │ ├── GRN (1048)\n", - " │ │ │ │ ├── ICB (372)\n", - " │ │ │ │ ├── IO (83)\n", - " │ │ │ │ ├── IRN (136)\n", - " │ │ │ │ ├── ISN (106)\n", - " │ │ │ │ ├── LIN (203)\n", - " │ │ │ │ ├── LRN (235)\n", - " │ │ │ │ │ ├── LRNm (955)\n", - " │ │ │ │ │ └── LRNp (963)\n", - " │ │ │ │ ├── MARN (307)\n", - " │ │ │ │ ├── MDRN (395)\n", - " │ │ │ │ │ ├── MDRNd (1098)\n", - " │ │ │ │ │ └── MDRNv (1107)\n", - " │ │ │ │ ├── PARN (852)\n", - " │ │ │ │ ├── PAS (859)\n", - " 
│ │ │ │ ├── PGRN (938)\n", - " │ │ │ │ │ ├── PGRNd (970)\n", - " │ │ │ │ │ └── PGRNl (978)\n", - " │ │ │ │ ├── PHY (154)\n", - " │ │ │ │ │ ├── NR (177)\n", - " │ │ │ │ │ └── PRP (169)\n", - " │ │ │ │ ├── PPY (1069)\n", - " │ │ │ │ ├── VI (653)\n", - " │ │ │ │ ├── VII (661)\n", - " │ │ │ │ ├── VNC (701)\n", - " │ │ │ │ │ ├── LAV (209)\n", - " │ │ │ │ │ ├── MV (202)\n", - " │ │ │ │ │ ├── SPIV (225)\n", - " │ │ │ │ │ └── SUV (217)\n", - " │ │ │ │ ├── XII (773)\n", - " │ │ │ │ ├── x (765)\n", - " │ │ │ │ └── y (781)\n", - " │ │ │ ├── MY-sat (379)\n", - " │ │ │ │ ├── RM (206)\n", - " │ │ │ │ ├── RO (222)\n", - " │ │ │ │ └── RPA (230)\n", - " │ │ │ └── MY-sen (386)\n", - " │ │ │ ├── AP (207)\n", - " │ │ │ ├── CN (607)\n", - " │ │ │ │ ├── DCO (96)\n", - " │ │ │ │ └── VCO (101)\n", - " │ │ │ ├── DCN (720)\n", - " │ │ │ │ ├── CU (711)\n", - " │ │ │ │ └── GR (1039)\n", - " │ │ │ ├── ECU (903)\n", - " │ │ │ ├── NTB (642)\n", - " │ │ │ ├── NTS (651)\n", - " │ │ │ ├── Pa5 (589508451)\n", - " │ │ │ ├── SPVC (429)\n", - " │ │ │ ├── SPVI (437)\n", - " │ │ │ └── SPVO (445)\n", - " │ │ └── P (771)\n", - " │ │ ├── P-mot (987)\n", - " │ │ │ ├── Acs5 (549009219)\n", - " │ │ │ ├── B (280)\n", - " │ │ │ ├── DTN (880)\n", - " │ │ │ ├── I5 (549009227)\n", - " │ │ │ ├── P5 (549009215)\n", - " │ │ │ ├── PC5 (549009223)\n", - " │ │ │ ├── PCG (898)\n", - " │ │ │ ├── PDTg (599626927)\n", - " │ │ │ ├── PG (931)\n", - " │ │ │ ├── PRNc (1093)\n", - " │ │ │ ├── SG (318)\n", - " │ │ │ ├── SUT (534)\n", - " │ │ │ ├── TRN (574)\n", - " │ │ │ └── V (621)\n", - " │ │ ├── P-sat (1117)\n", - " │ │ │ ├── CS (679)\n", - " │ │ │ ├── LC (147)\n", - " │ │ │ ├── LDT (162)\n", - " │ │ │ ├── NI (604)\n", - " │ │ │ ├── PRNr (146)\n", - " │ │ │ ├── RPO (238)\n", - " │ │ │ ├── SLC (350)\n", - " │ │ │ └── SLD (358)\n", - " │ │ └── P-sen (1132)\n", - " │ │ ├── NLL (612)\n", - " │ │ ├── PB (867)\n", - " │ │ │ └── KF (123)\n", - " │ │ ├── PSV (7)\n", - " │ │ └── SOC (398)\n", - " │ │ ├── POR (122)\n", - " │ │ ├── SOCl 
(114)\n", - " │ │ └── SOCm (105)\n", - " │ ├── IB (1129)\n", - " │ │ ├── HY (1097)\n", - " │ │ │ ├── LZ (290)\n", - " │ │ │ │ ├── LHA (194)\n", - " │ │ │ │ ├── LPO (226)\n", - " │ │ │ │ ├── PST (356)\n", - " │ │ │ │ ├── PSTN (364)\n", - " │ │ │ │ ├── PeF (576073704)\n", - " │ │ │ │ ├── RCH (173)\n", - " │ │ │ │ ├── STN (470)\n", - " │ │ │ │ ├── TU (614)\n", - " │ │ │ │ └── ZI (797)\n", - " │ │ │ │ └── FF (804)\n", - " │ │ │ ├── ME (10671)\n", - " │ │ │ ├── MEZ (467)\n", - " │ │ │ │ ├── AHN (88)\n", - " │ │ │ │ ├── MBO (331)\n", - " │ │ │ │ │ ├── LM (210)\n", - " │ │ │ │ │ ├── MM (491)\n", - " │ │ │ │ │ │ ├── MMd (606826659)\n", - " │ │ │ │ │ │ ├── MMl (606826647)\n", - " │ │ │ │ │ │ ├── MMm (606826651)\n", - " │ │ │ │ │ │ ├── MMme (732)\n", - " │ │ │ │ │ │ └── MMp (606826655)\n", - " │ │ │ │ │ ├── SUM (525)\n", - " │ │ │ │ │ └── TM (557)\n", - " │ │ │ │ │ ├── TMd (1126)\n", - " │ │ │ │ │ └── TMv (1)\n", - " │ │ │ │ ├── MPN (515)\n", - " │ │ │ │ ├── PH (946)\n", - " │ │ │ │ ├── PMd (980)\n", - " │ │ │ │ ├── PMv (1004)\n", - " │ │ │ │ ├── PVHd (63)\n", - " │ │ │ │ └── VMH (693)\n", - " │ │ │ ├── PVR (141)\n", - " │ │ │ │ ├── ADP (72)\n", - " │ │ │ │ ├── AVP (263)\n", - " │ │ │ │ ├── AVPV (272)\n", - " │ │ │ │ ├── DMH (830)\n", - " │ │ │ │ ├── MEPO (452)\n", - " │ │ │ │ ├── MPO (523)\n", - " │ │ │ │ ├── OV (763)\n", - " │ │ │ │ ├── PD (914)\n", - " │ │ │ │ ├── PS (1109)\n", - " │ │ │ │ ├── PVp (126)\n", - " │ │ │ │ ├── PVpo (133)\n", - " │ │ │ │ ├── SBPV (347)\n", - " │ │ │ │ ├── SCH (286)\n", - " │ │ │ │ ├── SFO (338)\n", - " │ │ │ │ ├── VLPO (689)\n", - " │ │ │ │ └── VMPO (576073699)\n", - " │ │ │ └── PVZ (157)\n", - " │ │ │ ├── ARH (223)\n", - " │ │ │ ├── ASO (332)\n", - " │ │ │ ├── PVH (38)\n", - " │ │ │ ├── PVa (30)\n", - " │ │ │ ├── PVi (118)\n", - " │ │ │ └── SO (390)\n", - " │ │ └── TH (549)\n", - " │ │ ├── DORpm (856)\n", - " │ │ │ ├── ATN (239)\n", - " │ │ │ │ ├── AD (64)\n", - " │ │ │ │ ├── AM (127)\n", - " │ │ │ │ │ ├── AMd (1096)\n", - " │ │ │ │ │ └── AMv 
(1104)\n", - " │ │ │ │ ├── AV (255)\n", - " │ │ │ │ ├── IAD (1113)\n", - " │ │ │ │ ├── IAM (1120)\n", - " │ │ │ │ └── LD (155)\n", - " │ │ │ ├── EPI (958)\n", - " │ │ │ │ ├── LH (186)\n", - " │ │ │ │ └── MH (483)\n", - " │ │ │ ├── GENv (1014)\n", - " │ │ │ │ ├── IGL (27)\n", - " │ │ │ │ ├── IntG (563807439)\n", - " │ │ │ │ ├── LGv (178)\n", - " │ │ │ │ └── SubG (321)\n", - " │ │ │ ├── ILM (51)\n", - " │ │ │ │ ├── CL (575)\n", - " │ │ │ │ ├── CM (599)\n", - " │ │ │ │ ├── PCN (907)\n", - " │ │ │ │ ├── PF (930)\n", - " │ │ │ │ ├── PIL (560581563)\n", - " │ │ │ │ └── RH (189)\n", - " │ │ │ ├── LAT (138)\n", - " │ │ │ │ ├── Eth (560581551)\n", - " │ │ │ │ ├── LP (218)\n", - " │ │ │ │ ├── PO (1020)\n", - " │ │ │ │ ├── POL (1029)\n", - " │ │ │ │ └── SGN (325)\n", - " │ │ │ ├── MED (444)\n", - " │ │ │ │ ├── IMD (59)\n", - " │ │ │ │ ├── MD (362)\n", - " │ │ │ │ ├── PR (1077)\n", - " │ │ │ │ └── SMT (366)\n", - " │ │ │ ├── MTN (571)\n", - " │ │ │ │ ├── PT (15)\n", - " │ │ │ │ ├── PVT (149)\n", - " │ │ │ │ ├── RE (181)\n", - " │ │ │ │ └── Xi (560581559)\n", - " │ │ │ └── RT (262)\n", - " │ │ └── DORsm (864)\n", - " │ │ ├── GENd (1008)\n", - " │ │ │ ├── LGd (170)\n", - " │ │ │ │ ├── LGd-co (496345668)\n", - " │ │ │ │ ├── LGd-ip (496345672)\n", - " │ │ │ │ └── LGd-sh (496345664)\n", - " │ │ │ └── MG (475)\n", - " │ │ │ ├── MGd (1072)\n", - " │ │ │ ├── MGm (1088)\n", - " │ │ │ └── MGv (1079)\n", - " │ │ ├── PP (1044)\n", - " │ │ ├── SPA (609)\n", - " │ │ ├── SPF (406)\n", - " │ │ │ ├── SPFm (414)\n", - " │ │ │ └── SPFp (422)\n", - " │ │ └── VENT (637)\n", - " │ │ ├── PoT (563807435)\n", - " │ │ ├── VAL (629)\n", - " │ │ ├── VM (685)\n", - " │ │ └── VP (709)\n", - " │ │ ├── VPL (718)\n", - " │ │ ├── VPLpc (725)\n", - " │ │ ├── VPM (733)\n", - " │ │ └── VPMpc (741)\n", - " │ └── MB (313)\n", - " │ ├── MBmot (323)\n", - " │ │ ├── AT (231)\n", - " │ │ ├── CUN (616)\n", - " │ │ ├── DT (75)\n", - " │ │ ├── EW (975)\n", - " │ │ ├── III (35)\n", - " │ │ ├── IV (115)\n", - " │ │ ├── LT 
(66)\n", - " │ │ ├── MA3 (549009211)\n", - " │ │ ├── MRN (128)\n", - " │ │ ├── MT (58)\n", - " │ │ ├── PAG (795)\n", - " │ │ │ ├── INC (67)\n", - " │ │ │ ├── ND (587)\n", - " │ │ │ ├── PRC (50)\n", - " │ │ │ └── Su3 (614454277)\n", - " │ │ ├── PN (607344830)\n", - " │ │ ├── PRT (1100)\n", - " │ │ │ ├── APN (215)\n", - " │ │ │ ├── MPT (531)\n", - " │ │ │ ├── NOT (628)\n", - " │ │ │ ├── NPC (634)\n", - " │ │ │ ├── OP (706)\n", - " │ │ │ ├── PPT (1061)\n", - " │ │ │ └── RPF (549009203)\n", - " │ │ ├── Pa4 (606826663)\n", - " │ │ ├── RN (214)\n", - " │ │ ├── RR (246)\n", - " │ │ ├── SCm (294)\n", - " │ │ │ ├── SCdg (26)\n", - " │ │ │ ├── SCdw (42)\n", - " │ │ │ ├── SCig (10)\n", - " │ │ │ └── SCiw (17)\n", - " │ │ ├── SNr (381)\n", - " │ │ ├── VTA (749)\n", - " │ │ └── VTN (757)\n", - " │ ├── MBsen (339)\n", - " │ │ ├── IC (4)\n", - " │ │ │ ├── ICc (811)\n", - " │ │ │ ├── ICd (820)\n", - " │ │ │ └── ICe (828)\n", - " │ │ ├── MEV (460)\n", - " │ │ ├── NB (580)\n", - " │ │ ├── PBG (874)\n", - " │ │ ├── SAG (271)\n", - " │ │ ├── SCO (599626923)\n", - " │ │ └── SCs (302)\n", - " │ │ ├── SCop (851)\n", - " │ │ ├── SCsg (842)\n", - " │ │ └── SCzo (834)\n", - " │ └── MBsta (348)\n", - " │ ├── PPN (1052)\n", - " │ ├── RAmb (165)\n", - " │ │ ├── CLI (591)\n", - " │ │ ├── DR (872)\n", - " │ │ ├── IF (12)\n", - " │ │ ├── IPN (100)\n", - " │ │ │ ├── IPA (607344842)\n", - " │ │ │ ├── IPC (607344838)\n", - " │ │ │ ├── IPDL (607344858)\n", - " │ │ │ ├── IPDM (607344854)\n", - " │ │ │ ├── IPI (607344850)\n", - " │ │ │ ├── IPL (607344846)\n", - " │ │ │ ├── IPR (607344834)\n", - " │ │ │ └── IPRL (607344862)\n", - " │ │ └── RL (197)\n", - " │ └── SNc (374)\n", - " ├── CB (512)\n", - " │ ├── CBN (519)\n", - " │ │ ├── DN (846)\n", - " │ │ ├── FN (989)\n", - " │ │ ├── IP (91)\n", - " │ │ └── VeCB (589508455)\n", - " │ └── CBX (528)\n", - " │ ├── HEM (1073)\n", - " │ │ ├── AN (1017)\n", - " │ │ │ ├── ANcr1 (1056)\n", - " │ │ │ └── ANcr2 (1064)\n", - " │ │ ├── COPY (1033)\n", - " │ │ ├── FL 
(1049)\n", - " │ │ ├── PFL (1041)\n", - " │ │ ├── PRM (1025)\n", - " │ │ └── SIM (1007)\n", - " │ └── VERM (645)\n", - " │ ├── CENT (920)\n", - " │ │ ├── CENT2 (976)\n", - " │ │ └── CENT3 (984)\n", - " │ ├── CUL (928)\n", - " │ │ └── CUL4, 5 (1091)\n", - " │ ├── DEC (936)\n", - " │ ├── FOTU (944)\n", - " │ ├── LING (912)\n", - " │ ├── NOD (968)\n", - " │ ├── PYR (951)\n", - " │ └── UVU (957)\n", - " └── CH (567)\n", - " ├── CNU (623)\n", - " │ ├── PAL (803)\n", - " │ │ ├── PALc (809)\n", - " │ │ │ ├── BAC (287)\n", - " │ │ │ └── BST (351)\n", - " │ │ ├── PALd (818)\n", - " │ │ │ ├── GPe (1022)\n", - " │ │ │ └── GPi (1031)\n", - " │ │ ├── PALm (826)\n", - " │ │ │ ├── MSC (904)\n", - " │ │ │ │ ├── MS (564)\n", - " │ │ │ │ └── NDB (596)\n", - " │ │ │ └── TRS (581)\n", - " │ │ └── PALv (835)\n", - " │ │ ├── MA (298)\n", - " │ │ └── SI (342)\n", - " │ └── STR (477)\n", - " │ ├── LSX (275)\n", - " │ │ ├── LS (242)\n", - " │ │ │ ├── LSc (250)\n", - " │ │ │ ├── LSr (258)\n", - " │ │ │ └── LSv (266)\n", - " │ │ ├── SF (310)\n", - " │ │ └── SH (333)\n", - " │ ├── STRd (485)\n", - " │ │ └── CP (672)\n", - " │ ├── STRv (493)\n", - " │ │ ├── ACB (56)\n", - " │ │ ├── FS (998)\n", - " │ │ └── OT (754)\n", - " │ └── sAMY (278)\n", - " │ ├── AAA (23)\n", - " │ ├── BA (292)\n", - " │ ├── CEA (536)\n", - " │ │ ├── CEAc (544)\n", - " │ │ ├── CEAl (551)\n", - " │ │ └── CEAm (559)\n", - " │ ├── IA (1105)\n", - " │ └── MEA (403)\n", - " └── CTX (688)\n", - " ├── CTXpl (695)\n", - " │ ├── HPF (1089)\n", - " │ │ ├── HIP (1080)\n", - " │ │ │ ├── CA (375)\n", - " │ │ │ │ ├── CA1 (382)\n", - " │ │ │ │ ├── CA2 (423)\n", - " │ │ │ │ └── CA3 (463)\n", - " │ │ │ ├── DG (726)\n", - " │ │ │ │ ├── DG-mo (10703)\n", - " │ │ │ │ ├── DG-po (10704)\n", - " │ │ │ │ └── DG-sg (632)\n", - " │ │ │ ├── FC (982)\n", - " │ │ │ └── IG (19)\n", - " │ │ └── RHP (822)\n", - " │ │ ├── APr (484682508)\n", - " │ │ ├── ENT (909)\n", - " │ │ │ ├── ENTl (918)\n", - " │ │ │ │ ├── ENTl1 (1121)\n", - " │ │ │ │ ├── ENTl2 
(20)\n", - " │ │ │ │ ├── ENTl3 (52)\n", - " │ │ │ │ ├── ENTl5 (139)\n", - " │ │ │ │ └── ENTl6a (28)\n", - " │ │ │ └── ENTm (926)\n", - " │ │ │ ├── ENTm1 (526)\n", - " │ │ │ ├── ENTm2 (543)\n", - " │ │ │ ├── ENTm3 (664)\n", - " │ │ │ ├── ENTm5 (727)\n", - " │ │ │ └── ENTm6 (743)\n", - " │ │ ├── HATA (589508447)\n", - " │ │ ├── PAR (843)\n", - " │ │ ├── POST (1037)\n", - " │ │ ├── PRE (1084)\n", - " │ │ ├── ProS (484682470)\n", - " │ │ └── SUB (502)\n", - " │ ├── Isocortex (315)\n", - " │ │ ├── ACA (31)\n", - " │ │ │ ├── ACAd (39)\n", - " │ │ │ │ ├── ACAd1 (935)\n", - " │ │ │ │ ├── ACAd2/3 (211)\n", - " │ │ │ │ ├── ACAd5 (1015)\n", - " │ │ │ │ ├── ACAd6a (919)\n", - " │ │ │ │ └── ACAd6b (927)\n", - " │ │ │ └── ACAv (48)\n", - " │ │ │ ├── ACAv1 (588)\n", - " │ │ │ ├── ACAv2/3 (296)\n", - " │ │ │ ├── ACAv5 (772)\n", - " │ │ │ ├── ACAv6a (810)\n", - " │ │ │ └── ACAv6b (819)\n", - " │ │ ├── AI (95)\n", - " │ │ │ ├── AId (104)\n", - " │ │ │ │ ├── AId1 (996)\n", - " │ │ │ │ ├── AId2/3 (328)\n", - " │ │ │ │ ├── AId5 (1101)\n", - " │ │ │ │ ├── AId6a (783)\n", - " │ │ │ │ └── AId6b (831)\n", - " │ │ │ ├── AIp (111)\n", - " │ │ │ │ ├── AIp1 (120)\n", - " │ │ │ │ ├── AIp2/3 (163)\n", - " │ │ │ │ ├── AIp5 (344)\n", - " │ │ │ │ ├── AIp6a (314)\n", - " │ │ │ │ └── AIp6b (355)\n", - " │ │ │ └── AIv (119)\n", - " │ │ │ ├── AIv1 (704)\n", - " │ │ │ ├── AIv2/3 (694)\n", - " │ │ │ ├── AIv5 (800)\n", - " │ │ │ ├── AIv6a (675)\n", - " │ │ │ └── AIv6b (699)\n", - " │ │ ├── AUD (247)\n", - " │ │ │ ├── AUDd (1011)\n", - " │ │ │ │ ├── AUDd1 (527)\n", - " │ │ │ │ ├── AUDd2/3 (600)\n", - " │ │ │ │ ├── AUDd4 (678)\n", - " │ │ │ │ ├── AUDd5 (252)\n", - " │ │ │ │ ├── AUDd6a (156)\n", - " │ │ │ │ └── AUDd6b (243)\n", - " │ │ │ ├── AUDp (1002)\n", - " │ │ │ │ ├── AUDp1 (735)\n", - " │ │ │ │ ├── AUDp2/3 (251)\n", - " │ │ │ │ ├── AUDp4 (816)\n", - " │ │ │ │ ├── AUDp5 (847)\n", - " │ │ │ │ ├── AUDp6a (954)\n", - " │ │ │ │ └── AUDp6b (1005)\n", - " │ │ │ ├── AUDpo (1027)\n", - " │ │ │ │ ├── AUDpo1 
(696)\n", - " │ │ │ │ ├── AUDpo2/3 (643)\n", - " │ │ │ │ ├── AUDpo4 (759)\n", - " │ │ │ │ ├── AUDpo5 (791)\n", - " │ │ │ │ ├── AUDpo6a (249)\n", - " │ │ │ │ └── AUDpo6b (456)\n", - " │ │ │ └── AUDv (1018)\n", - " │ │ │ ├── AUDv1 (959)\n", - " │ │ │ ├── AUDv2/3 (755)\n", - " │ │ │ ├── AUDv4 (990)\n", - " │ │ │ ├── AUDv5 (1023)\n", - " │ │ │ ├── AUDv6a (520)\n", - " │ │ │ └── AUDv6b (598)\n", - " │ │ ├── ECT (895)\n", - " │ │ │ ├── ECT1 (836)\n", - " │ │ │ ├── ECT2/3 (427)\n", - " │ │ │ ├── ECT5 (988)\n", - " │ │ │ ├── ECT6a (977)\n", - " │ │ │ └── ECT6b (1045)\n", - " │ │ ├── FRP (184)\n", - " │ │ │ ├── FRP1 (68)\n", - " │ │ │ ├── FRP2/3 (667)\n", - " │ │ │ ├── FRP5 (526157192)\n", - " │ │ │ ├── FRP6a (526157196)\n", - " │ │ │ └── FRP6b (526322264)\n", - " │ │ ├── GU (1057)\n", - " │ │ │ ├── GU1 (36)\n", - " │ │ │ ├── GU2/3 (180)\n", - " │ │ │ ├── GU4 (148)\n", - " │ │ │ ├── GU5 (187)\n", - " │ │ │ ├── GU6a (638)\n", - " │ │ │ └── GU6b (662)\n", - " │ │ ├── ILA (44)\n", - " │ │ │ ├── ILA1 (707)\n", - " │ │ │ ├── ILA2/3 (556)\n", - " │ │ │ ├── ILA5 (827)\n", - " │ │ │ ├── ILA6a (1054)\n", - " │ │ │ └── ILA6b (1081)\n", - " │ │ ├── MO (500)\n", - " │ │ │ ├── MOp (985)\n", - " │ │ │ │ ├── MOp1 (320)\n", - " │ │ │ │ ├── MOp2/3 (943)\n", - " │ │ │ │ ├── MOp5 (648)\n", - " │ │ │ │ ├── MOp6a (844)\n", - " │ │ │ │ └── MOp6b (882)\n", - " │ │ │ └── MOs (993)\n", - " │ │ │ ├── MOs1 (656)\n", - " │ │ │ ├── MOs2/3 (962)\n", - " │ │ │ ├── MOs5 (767)\n", - " │ │ │ ├── MOs6a (1021)\n", - " │ │ │ └── MOs6b (1085)\n", - " │ │ ├── ORB (714)\n", - " │ │ │ ├── ORBl (723)\n", - " │ │ │ │ ├── ORBl1 (448)\n", - " │ │ │ │ ├── ORBl2/3 (412)\n", - " │ │ │ │ ├── ORBl5 (630)\n", - " │ │ │ │ ├── ORBl6a (440)\n", - " │ │ │ │ └── ORBl6b (488)\n", - " │ │ │ ├── ORBm (731)\n", - " │ │ │ │ ├── ORBm1 (484)\n", - " │ │ │ │ ├── ORBm2/3 (582)\n", - " │ │ │ │ ├── ORBm5 (620)\n", - " │ │ │ │ ├── ORBm6a (910)\n", - " │ │ │ │ └── ORBm6b (527696977)\n", - " │ │ │ └── ORBvl (746)\n", - " │ │ │ ├── ORBvl1 
(969)\n", - " │ │ │ ├── ORBvl2/3 (288)\n", - " │ │ │ ├── ORBvl5 (1125)\n", - " │ │ │ ├── ORBvl6a (608)\n", - " │ │ │ └── ORBvl6b (680)\n", - " │ │ ├── PERI (922)\n", - " │ │ │ ├── PERI1 (540)\n", - " │ │ │ ├── PERI2/3 (888)\n", - " │ │ │ ├── PERI5 (692)\n", - " │ │ │ ├── PERI6a (335)\n", - " │ │ │ └── PERI6b (368)\n", - " │ │ ├── PL (972)\n", - " │ │ │ ├── PL1 (171)\n", - " │ │ │ ├── PL2/3 (304)\n", - " │ │ │ ├── PL5 (363)\n", - " │ │ │ ├── PL6a (84)\n", - " │ │ │ └── PL6b (132)\n", - " │ │ ├── PTLp (22)\n", - " │ │ │ ├── VISa (312782546)\n", - " │ │ │ │ ├── VISa1 (312782550)\n", - " │ │ │ │ ├── VISa2/3 (312782554)\n", - " │ │ │ │ ├── VISa4 (312782558)\n", - " │ │ │ │ ├── VISa5 (312782562)\n", - " │ │ │ │ ├── VISa6a (312782566)\n", - " │ │ │ │ └── VISa6b (312782570)\n", - " │ │ │ └── VISrl (417)\n", - " │ │ │ ├── VISrl1 (312782604)\n", - " │ │ │ ├── VISrl2/3 (312782608)\n", - " │ │ │ ├── VISrl4 (312782612)\n", - " │ │ │ ├── VISrl5 (312782616)\n", - " │ │ │ ├── VISrl6a (312782620)\n", - " │ │ │ └── VISrl6b (312782624)\n", - " │ │ ├── RSP (254)\n", - " │ │ │ ├── RSPagl (894)\n", - " │ │ │ │ ├── RSPagl1 (671)\n", - " │ │ │ │ ├── RSPagl2/3 (965)\n", - " │ │ │ │ ├── RSPagl5 (774)\n", - " │ │ │ │ ├── RSPagl6a (906)\n", - " │ │ │ │ └── RSPagl6b (279)\n", - " │ │ │ ├── RSPd (879)\n", - " │ │ │ │ ├── RSPd1 (442)\n", - " │ │ │ │ ├── RSPd2/3 (434)\n", - " │ │ │ │ ├── RSPd4 (545)\n", - " │ │ │ │ ├── RSPd5 (610)\n", - " │ │ │ │ ├── RSPd6a (274)\n", - " │ │ │ │ └── RSPd6b (330)\n", - " │ │ │ └── RSPv (886)\n", - " │ │ │ ├── RSPv1 (542)\n", - " │ │ │ ├── RSPv2/3 (430)\n", - " │ │ │ ├── RSPv5 (687)\n", - " │ │ │ ├── RSPv6a (590)\n", - " │ │ │ └── RSPv6b (622)\n", - " │ │ ├── SS (453)\n", - " │ │ │ ├── SSp (322)\n", - " │ │ │ │ ├── SSp-bfd (329)\n", - " │ │ │ │ │ ├── SSp-bfd1 (981)\n", - " │ │ │ │ │ ├── SSp-bfd2/3 (201)\n", - " │ │ │ │ │ ├── SSp-bfd4 (1047)\n", - " │ │ │ │ │ ├── SSp-bfd5 (1070)\n", - " │ │ │ │ │ ├── SSp-bfd6a (1038)\n", - " │ │ │ │ │ └── SSp-bfd6b (1062)\n", - " │ 
│ │ │ ├── SSp-ll (337)\n", - " │ │ │ │ │ ├── SSp-ll1 (1030)\n", - " │ │ │ │ │ ├── SSp-ll2/3 (113)\n", - " │ │ │ │ │ ├── SSp-ll4 (1094)\n", - " │ │ │ │ │ ├── SSp-ll5 (1128)\n", - " │ │ │ │ │ ├── SSp-ll6a (478)\n", - " │ │ │ │ │ └── SSp-ll6b (510)\n", - " │ │ │ │ ├── SSp-m (345)\n", - " │ │ │ │ │ ├── SSp-m1 (878)\n", - " │ │ │ │ │ ├── SSp-m2/3 (657)\n", - " │ │ │ │ │ ├── SSp-m4 (950)\n", - " │ │ │ │ │ ├── SSp-m5 (974)\n", - " │ │ │ │ │ ├── SSp-m6a (1102)\n", - " │ │ │ │ │ └── SSp-m6b (2)\n", - " │ │ │ │ ├── SSp-n (353)\n", - " │ │ │ │ │ ├── SSp-n1 (558)\n", - " │ │ │ │ │ ├── SSp-n2/3 (838)\n", - " │ │ │ │ │ ├── SSp-n4 (654)\n", - " │ │ │ │ │ ├── SSp-n5 (702)\n", - " │ │ │ │ │ ├── SSp-n6a (889)\n", - " │ │ │ │ │ └── SSp-n6b (929)\n", - " │ │ │ │ ├── SSp-tr (361)\n", - " │ │ │ │ │ ├── SSp-tr1 (1006)\n", - " │ │ │ │ │ ├── SSp-tr2/3 (670)\n", - " │ │ │ │ │ ├── SSp-tr4 (1086)\n", - " │ │ │ │ │ ├── SSp-tr5 (1111)\n", - " │ │ │ │ │ ├── SSp-tr6a (9)\n", - " │ │ │ │ │ └── SSp-tr6b (461)\n", - " │ │ │ │ ├── SSp-ul (369)\n", - " │ │ │ │ │ ├── SSp-ul1 (450)\n", - " │ │ │ │ │ ├── SSp-ul2/3 (854)\n", - " │ │ │ │ │ ├── SSp-ul4 (577)\n", - " │ │ │ │ │ ├── SSp-ul5 (625)\n", - " │ │ │ │ │ ├── SSp-ul6a (945)\n", - " │ │ │ │ │ └── SSp-ul6b (1026)\n", - " │ │ │ │ └── SSp-un (182305689)\n", - " │ │ │ │ ├── SSp-un1 (182305693)\n", - " │ │ │ │ ├── SSp-un2/3 (182305697)\n", - " │ │ │ │ ├── SSp-un4 (182305701)\n", - " │ │ │ │ ├── SSp-un5 (182305705)\n", - " │ │ │ │ ├── SSp-un6a (182305709)\n", - " │ │ │ │ └── SSp-un6b (182305713)\n", - " │ │ │ └── SSs (378)\n", - " │ │ │ ├── SSs1 (873)\n", - " │ │ │ ├── SSs2/3 (806)\n", - " │ │ │ ├── SSs4 (1035)\n", - " │ │ │ ├── SSs5 (1090)\n", - " │ │ │ ├── SSs6a (862)\n", - " │ │ │ └── SSs6b (893)\n", - " │ │ ├── TEa (541)\n", - " │ │ │ ├── TEa1 (97)\n", - " │ │ │ ├── TEa2/3 (1127)\n", - " │ │ │ ├── TEa4 (234)\n", - " │ │ │ ├── TEa5 (289)\n", - " │ │ │ ├── TEa6a (729)\n", - " │ │ │ └── TEa6b (786)\n", - " │ │ ├── VIS (669)\n", - " │ │ │ ├── VISal (402)\n", 
- " │ │ │ │ ├── VISal1 (1074)\n", - " │ │ │ │ ├── VISal2/3 (905)\n", - " │ │ │ │ ├── VISal4 (1114)\n", - " │ │ │ │ ├── VISal5 (233)\n", - " │ │ │ │ ├── VISal6a (601)\n", - " │ │ │ │ └── VISal6b (649)\n", - " │ │ │ ├── VISam (394)\n", - " │ │ │ │ ├── VISam1 (281)\n", - " │ │ │ │ ├── VISam2/3 (1066)\n", - " │ │ │ │ ├── VISam4 (401)\n", - " │ │ │ │ ├── VISam5 (433)\n", - " │ │ │ │ ├── VISam6a (1046)\n", - " │ │ │ │ └── VISam6b (441)\n", - " │ │ │ ├── VISl (409)\n", - " │ │ │ │ ├── VISl1 (421)\n", - " │ │ │ │ ├── VISl2/3 (973)\n", - " │ │ │ │ ├── VISl4 (573)\n", - " │ │ │ │ ├── VISl5 (613)\n", - " │ │ │ │ ├── VISl6a (74)\n", - " │ │ │ │ └── VISl6b (121)\n", - " │ │ │ ├── VISli (312782574)\n", - " │ │ │ │ ├── VISli1 (312782578)\n", - " │ │ │ │ ├── VISli2/3 (312782582)\n", - " │ │ │ │ ├── VISli4 (312782586)\n", - " │ │ │ │ ├── VISli5 (312782590)\n", - " │ │ │ │ ├── VISli6a (312782594)\n", - " │ │ │ │ └── VISli6b (312782598)\n", - " │ │ │ ├── VISp (385)\n", - " │ │ │ │ ├── VISp1 (593)\n", - " │ │ │ │ ├── VISp2/3 (821)\n", - " │ │ │ │ ├── VISp4 (721)\n", - " │ │ │ │ ├── VISp5 (778)\n", - " │ │ │ │ ├── VISp6a (33)\n", - " │ │ │ │ └── VISp6b (305)\n", - " │ │ │ ├── VISpl (425)\n", - " │ │ │ │ ├── VISpl1 (750)\n", - " │ │ │ │ ├── VISpl2/3 (269)\n", - " │ │ │ │ ├── VISpl4 (869)\n", - " │ │ │ │ ├── VISpl5 (902)\n", - " │ │ │ │ ├── VISpl6a (377)\n", - " │ │ │ │ └── VISpl6b (393)\n", - " │ │ │ ├── VISpm (533)\n", - " │ │ │ │ ├── VISpm1 (805)\n", - " │ │ │ │ ├── VISpm2/3 (41)\n", - " │ │ │ │ ├── VISpm4 (501)\n", - " │ │ │ │ ├── VISpm5 (565)\n", - " │ │ │ │ ├── VISpm6a (257)\n", - " │ │ │ │ └── VISpm6b (469)\n", - " │ │ │ └── VISpor (312782628)\n", - " │ │ │ ├── VISpor1 (312782632)\n", - " │ │ │ ├── VISpor2/3 (312782636)\n", - " │ │ │ ├── VISpor4 (312782640)\n", - " │ │ │ ├── VISpor5 (312782644)\n", - " │ │ │ ├── VISpor6a (312782648)\n", - " │ │ │ └── VISpor6b (312782652)\n", - " │ │ └── VISC (677)\n", - " │ │ ├── VISC1 (897)\n", - " │ │ ├── VISC2/3 (1106)\n", - " │ │ ├── VISC4 
(1010)\n", - " │ │ ├── VISC5 (1058)\n", - " │ │ ├── VISC6a (857)\n", - " │ │ └── VISC6b (849)\n", - " │ └── OLF (698)\n", - " │ ├── AOB (151)\n", - " │ │ ├── AOBgl (188)\n", - " │ │ ├── AOBgr (196)\n", - " │ │ └── AOBmi (204)\n", - " │ ├── AON (159)\n", - " │ ├── COA (631)\n", - " │ │ ├── COAa (639)\n", - " │ │ └── COAp (647)\n", - " │ │ ├── COApl (655)\n", - " │ │ └── COApm (663)\n", - " │ ├── DP (814)\n", - " │ ├── MOB (507)\n", - " │ ├── NLOT (619)\n", - " │ │ ├── NLOT1 (260)\n", - " │ │ ├── NLOT2 (268)\n", - " │ │ └── NLOT3 (1139)\n", - " │ ├── PAA (788)\n", - " │ ├── PIR (961)\n", - " │ ├── TR (566)\n", - " │ └── TT (589)\n", - " │ ├── TTd (597)\n", - " │ └── TTv (605)\n", - " └── CTXsp (703)\n", - " ├── BLA (295)\n", - " │ ├── BLAa (303)\n", - " │ ├── BLAp (311)\n", - " │ └── BLAv (451)\n", - " ├── BMA (319)\n", - " │ ├── BMAa (327)\n", - " │ └── BMAp (334)\n", - " ├── CLA (583)\n", - " ├── EP (942)\n", - " │ ├── EPd (952)\n", - " │ └── EPv (966)\n", - " ├── LA (131)\n", - " └── PA (780)" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.structures" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The structures attribute is a custom dictionary that can be queried by region number or acronym, and contains all the information for a given structure:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'acronym': 'root',\n", - " 'id': 997,\n", - " 'mesh': None,\n", - " 'mesh_filename': PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/997.obj'),\n", - " 'name': 'root',\n", - " 'rgb_triplet': [255, 255, 255],\n", - " 'structure_id_path': [997]}\n" - ] - } - ], - "source": [ - "pprint(bg_atlas.structures[\"root\"])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In particular, the `structure_id_path` key contains a list 
description of the path in the hierarchy up to a particular region, and can be used for queries on the hierarchy." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[997, 8, 567]" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.structures[\"CH\"][\"structure_id_path\"]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can use the `bg_atlas.get_structure_descendants` and `bg_atlas.get_structure_ancestors` methods to explore the hierarchy:" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['VISC1', 'VISC2/3', 'VISC4', 'VISC5', 'VISC6a', 'VISC6b']" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.get_structure_descendants(\"VISC\")" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['root', 'grey', 'CH', 'CTX', 'CTXpl', 'Isocortex', 'VISC']" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.get_structure_ancestors(\"VISC6a\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "---\n", - "**NOTE**: \n", - "the levels of the hierarchy depends on the underlying atlas, so we cannot ensure the goodness and consistency of their hierarchy three.\n", - "---" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "There is an higher level description of the structures hierarchy that is built using the [treelib](https://treelib.readthedocs.io/en/latest/) package, and is available as: " - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 10, - 
"metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.structures.tree" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For most applications though the methods described above and the list path of each region should be enough to query the hierarchy without additional layers of complication." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.3 Region masks" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `get_structure_mask` method returns a mask volume where all voxels belonging to an area or to the descendants of that area are non zero. All other voxels are zero. We will generate the structure mask for primary visual cortex to see how this works. \n", - "\n", - "Primary visual cortex (`VISp`) has an ID value of `385` but no voxels in the annotation image actually have that value:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "{'acronym': 'VISp',\n", - " 'id': 385,\n", - " 'mesh': None,\n", - " 'mesh_filename': PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/385.obj'),\n", - " 'name': 'Primary visual area',\n", - " 'rgb_triplet': [8, 133, 140],\n", - " 'structure_id_path': [997, 8, 567, 688, 695, 315, 669, 385]}\n" - ] - } - ], - "source": [ - "pprint(bg_atlas.structures[\"VISp\"])" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "0" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# No voxels in the annotation volume are labelled as being VISp\n", - "(bg_atlas.annotation==385).sum()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The reason no VISp voxels exist is because the annotation volume is segmented more finely. 
In this case `VISp` is divided into cortical layers and it is IDs associated with these layers that are present in the annotation volume." - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "['VISp1', 'VISp2/3', 'VISp4', 'VISp5', 'VISp6a', 'VISp6b']" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# The descendants (children) of VISp are cortical layers\n", - "bg_atlas.get_structure_descendants(\"VISp\")" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "VISp1\t593\n", - "VISp2/3\t821\n", - "VISp4\t721\n", - "VISp5\t778\n", - "VISp6a\t33\n", - "VISp6b\t305\n" - ] - } - ], - "source": [ - "# The IDs associated with each layer in primary visual cortex\n", - "layers = bg_atlas.get_structure_descendants(\"VISp\")\n", - "layer_ids = [bg_atlas.structures[this_layer]['id'] for this_layer in layers]\n", - "\n", - "for (this_layer, this_id) in zip(layers, layer_ids):\n", - " print(\"%s\\t%s\" % (this_layer, this_id))\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "These IDs are indeed present in the annotation volume:" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "1565" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# For example, we find over a thousand voxels associated with primary visual cortex layer 6\n", - "# in the annotation volume\n", - "(bg_atlas.annotation==778).sum()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "So lets use `get_structure_mask` to return a mask volume that retains only `VISp`." 
- ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "mask_VISp = bg_atlas.get_structure_mask('VISp')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", - "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", - " axs[i].imshow(mask_VISp.max(i), cmap=\"gray\")\n", - " axs[i].set_title(f\"{plane.capitalize()} view\")\n", - " axs[i].set_ylabel(labels[0])\n", - " axs[i].set_xlabel(labels[1])\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The `root` node encompases the whole brain and we can use this to provide a background image for the above area." - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "mask_root = bg_atlas.get_structure_mask('root')\n", - "\n", - "# The mask images have pixel values equal to the ID of the parent area, so we change these for\n", - "# plotting purposes. 
\n", - "mask_root[mask_root>0]=5\n", - "mask_VISp[mask_VISp>0]=2\n", - "mask_VISp_root = mask_VISp + mask_root" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "space = bg_atlas.space\n", - "\n", - "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", - "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", - " axs[i].imshow(mask_VISp_root.max(i), cmap=\"gray\")\n", - " axs[i].set_title(f\"{plane.capitalize()} view\")\n", - " axs[i].set_ylabel(labels[0])\n", - " axs[i].set_xlabel(labels[1])\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 1.3 Regions meshes" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If we need to access the structure meshes, we can either query for the file (e.g., if we need to load the file through some library like `vedo`):" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/567.obj')" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.meshfile_from_structure(\"CH\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Or directly obtain the mesh, as a mesh object of the `meshio` library:" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "\n", - " Number of points: 56703\n", - " Number of cells:\n", - " triangle: 112948\n", - " Point data: obj:vn\n", - " Cell data: obj:group_ids" - ] - }, - "execution_count": 19, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "bg_atlas.mesh_from_structure(\"CH\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 2 Query the `BrainGlobeAtlas`" - ] - }, - { - "cell_type": "markdown", - 
"metadata": {}, - "source": [ - "### 2.0 Query for structures:" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "A very convenient feature of the `BrainGlobeAtlas` API is the simplicity of querying for the identity of the structure or the hemisphere at a given location, either from stack indexes or space coordinates, and even cutting the hierarchy at some higher level:" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "By index: CP\n", - "By coordinates: CP\n", - "Higher hierarchy level: CH\n" - ] - } - ], - "source": [ - "# Ask for identity of some indexes in the stack:\n", - "print(\"By index:\", bg_atlas.structure_from_coords((50, 40, 30), \n", - " as_acronym=True))\n", - "\n", - "# Now give coordinates in microns\n", - "print(\"By coordinates:\", bg_atlas.structure_from_coords((5000, 4000, 3000), \n", - " as_acronym=True, \n", - " microns=True))\n", - "\n", - "# Now cut hierarchy at some level\n", - "print(\"Higher hierarchy level:\", bg_atlas.structure_from_coords((5000, 4000, 3000), \n", - " as_acronym=True, \n", - " microns=True, \n", - " hierarchy_lev=2))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### 2.1 Query for hemispheres" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "A very similar method can be used for hemispheres. 
0 correspond to outside the brain, a,d 1 and 2 to left and right hemispheres - but we can just ask for the side name instead of the number:" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "By index: 2\n", - "By coordinates: 2\n", - "By : 2\n" - ] - } - ], - "source": [ - "# Ask for identity of some indexes in the stack:\n", - "print(\"By index:\", bg_atlas.hemisphere_from_coords((50, 40, 30)))\n", - "\n", - "# Now give coordinates in microns\n", - "print(\"By coordinates:\", bg_atlas.hemisphere_from_coords((5000, 4000, 3000), microns=True))\n", - "\n", - "# Now print side string\n", - "print(\"By :\", bg_atlas.hemisphere_from_coords((5000, 4000, 3000), microns=True))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.10" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Introduction to the `BrainGlobeAtlas` class" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 0. Creating a `BrainGlobeAtlas` object and list availabe options" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To instantiate a `BrainGlobeAtlas` object, we need to instantiate it with the atlas name. 
The first time we use it, a version of this atlas files will be downloaded from the [remote GIN repository](http://gin.g-node.org/brainglobe/atlases) and stored on your local machine (by default, in .../Users/username/.brainglobe):" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "allen mouse atlas (res. 100um)\n", + "From: http://www.brain-map.org (Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007 )\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "from brainglobe_atlasapi import BrainGlobeAtlas\n", + "from pprint import pprint\n", + "\n", + "bg_atlas = BrainGlobeAtlas(\"allen_mouse_100um\", check_latest=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To know what atlases are available through BrainGlobe, we can use the `show_atlases` function (we need to be online):" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
                                                                                  \n",
+                            "                                                                                  \n",
+                            "                                Brainglobe Atlases                                \n",
+                            "╭──────────────────────────────────┬────────────┬───────────────┬────────────────╮\n",
+                            "│ Name                              Downloaded  Local version  Latest version │\n",
+                            "├──────────────────────────────────┼────────────┼───────────────┼────────────────┤\n",
+                            "│ whs_sd_rat_39um      │      1.0      │      1.0       │\n",
+                            "│ allen_mouse_25um      │      1.2      │      1.2       │\n",
+                            "│ allen_mouse_100um      │      1.2      │      1.2       │\n",
+                            "│ allen_mouse_50um      │      1.2      │      1.2       │\n",
+                            "│ example_mouse_100um------      │      1.2       │\n",
+                            "│ allen_mouse_10um------      │      1.2       │\n",
+                            "│ mpin_zfish_1um------      │      1.0       │\n",
+                            "│ allen_human_500um------      │      0.1       │\n",
+                            "│ kim_mouse_10um------      │      1.0       │\n",
+                            "│ kim_mouse_25um------      │      1.0       │\n",
+                            "│ kim_mouse_50um------      │      1.0       │\n",
+                            "│ kim_mouse_100um------      │      1.0       │\n",
+                            "│ osten_mouse_10um------      │      1.1       │\n",
+                            "│ osten_mouse_25um------      │      1.1       │\n",
+                            "│ osten_mouse_50um------      │      1.1       │\n",
+                            "│ osten_mouse_100um------      │      1.1       │\n",
+                            "│ allen_cord_20um------      │      1.0       │\n",
+                            "│ azba_zfish_4um------      │      1.1       │\n",
+                            "│ perens_lsfm_mouse_20um------      │      1.0       │\n",
+                            "│ admba_3d_e11_5_mouse_16um------      │      1.0       │\n",
+                            "│ admba_3d_e13_5_mouse_16um------      │      1.0       │\n",
+                            "│ admba_3d_e15_5_mouse_16um------      │      1.0       │\n",
+                            "│ admba_3d_e18_5_mouse_16um------      │      1.0       │\n",
+                            "│ admba_3d_p4_mouse_16.752um------      │      1.0       │\n",
+                            "│ admba_3d_p14_mouse_16.752um------      │      1.0       │\n",
+                            "│ admba_3d_p28_mouse_16.752um------      │      1.0       │\n",
+                            "│ admba_3d_p56_mouse_25um------      │      1.0       │\n",
+                            "╰──────────────────────────────────┴────────────┴───────────────┴────────────────╯\n",
+                            "
\n" + ], + "text/plain": [ + "\u001b[3m \u001b[0m\n", + "\u001b[3m \u001b[0m\n", + "\u001b[3m Brainglobe Atlases \u001b[0m\n", + "╭──────────────────────────────────┬────────────┬───────────────┬────────────────╮\n", + "│\u001b[1;32m \u001b[0m\u001b[1;32mName \u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mDownloaded\u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mLocal version\u001b[0m\u001b[1;32m \u001b[0m│\u001b[1;32m \u001b[0m\u001b[1;32mLatest version\u001b[0m\u001b[1;32m \u001b[0m│\n", + "├──────────────────────────────────┼────────────┼───────────────┼────────────────┤\n", + "│ \u001b[1mwhs_sd_rat_39um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.0 │ 1.0 │\n", + "│ \u001b[1mallen_mouse_25um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", + "│ \u001b[1mallen_mouse_100um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", + "│ \u001b[1mallen_mouse_50um\u001b[0m │ \u001b[32m✔\u001b[0m │ 1.2 │ 1.2 │\n", + "│ \u001b[1mexample_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.2 │\n", + "│ \u001b[1mallen_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.2 │\n", + "│ \u001b[1mmpin_zfish_1um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mallen_human_500um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 0.1 │\n", + "│ \u001b[1mkim_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mkim_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mkim_mouse_50um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mkim_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mosten_mouse_10um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", + "│ \u001b[1mosten_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", + "│ 
\u001b[1mosten_mouse_50um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", + "│ \u001b[1mosten_mouse_100um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", + "│ \u001b[1mallen_cord_20um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1mazba_zfish_4um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.1 │\n", + "│ \u001b[1mperens_lsfm_mouse_20um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_e11_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_e13_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_e15_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_e18_5_mouse_16um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_p4_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_p14_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_p28_mouse_16.752um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "│ \u001b[1madmba_3d_p56_mouse_25um\u001b[0m │ \u001b[31m---\u001b[0m │ \u001b[31m---\u001b[0m │ 1.0 │\n", + "╰──────────────────────────────────┴────────────┴───────────────┴────────────────╯\n" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "from brainglobe_atlasapi import show_atlases\n", + "show_atlases()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Using a `BrainGlobe` atlas" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A BrainGlobe atlas is a convenient API for interacting with an anatomical atlas. 
BrainGlobe atlases contain:\n", + " * Metadata\n", + " * The reference anatomical stack used for the registration itself\n", + " * Region annotation stack (the segmented atlas image that occupies the same space as the reference stack)\n", + " * Hemisphere annotation stack which denotes left and right\n", + " * Description of the region hierarchy\n", + " * Meshes for the regions" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.0 Metadata" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All atlases have a standard set of medatata describing their source, species, resolution, etc:" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'name': 'allen_mouse',\n", + " 'citation': 'Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007',\n", + " 'atlas_link': 'http://www.brain-map.org',\n", + " 'species': 'Mus musculus',\n", + " 'symmetric': True,\n", + " 'resolution': [100.0, 100.0, 100.0],\n", + " 'orientation': 'asr',\n", + " 'version': '1.2',\n", + " 'shape': [132, 80, 114],\n", + " 'trasform_to_bg': [[1.0, 0.0, 0.0, 0.0],\n", + " [0.0, 1.0, 0.0, 0.0],\n", + " [0.0, 0.0, 1.0, 0.0],\n", + " [0.0, 0.0, 0.0, 1.0]],\n", + " 'additional_references': []}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.metadata" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.1 Anatomical, annotation and hemispheres stack" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "from matplotlib import pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Anatomical reference:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "space = bg_atlas.space\n", + "stack = bg_atlas.reference\n", + "\n", + 
"f, axs = plt.subplots(1,3, figsize=(12, 3))\n", + "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", + " mid_index = stack.shape[i]//2\n", + " axs[i].imshow(np.moveaxis(stack,i,0)[mid_index,:,:], cmap=\"gray\",clim=(0,250))\n", + " axs[i].set_title(f\"{plane.capitalize()} view\")\n", + " axs[i].set_ylabel(labels[0])\n", + " axs[i].set_xlabel(labels[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Annotations stack:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "space = bg_atlas.space\n", + "stack = bg_atlas.annotation\n", + "\n", + "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", + "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", + " mid_index = stack.shape[i]//2\n", + " axs[i].imshow(np.moveaxis(stack,i,0)[mid_index,:,:], cmap=\"gray\",clim=(0,1250))\n", + " axs[i].set_title(f\"{plane.capitalize()} view\")\n", + " axs[i].set_ylabel(labels[0])\n", + " axs[i].set_xlabel(labels[1])\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "space = bg_atlas.space\n", + "stack = bg_atlas.hemispheres\n", + "\n", + "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", + "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", + " axs[i].imshow(stack.max(i), cmap=\"gray\")\n", + " axs[i].set_title(f\"{plane.capitalize()} view\")\n", + " axs[i].set_ylabel(labels[0])\n", + " axs[i].set_xlabel(labels[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.2 Regions hierarchy" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The atlas comes with the description of a hierarchy of brain structures. 
To have an overview:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "root (997)\n", + "├── VS (73)\n", + "│ ├── AQ (140)\n", + "│ ├── V3 (129)\n", + "│ ├── V4 (145)\n", + "│ │ └── V4r (153)\n", + "│ ├── VL (81)\n", + "│ │ ├── SEZ (98)\n", + "│ │ └── chpl (108)\n", + "│ └── c (164)\n", + "├── fiber tracts (1009)\n", + "│ ├── cbf (960)\n", + "│ │ ├── arb (728)\n", + "│ │ ├── cbc (744)\n", + "│ │ └── cbp (752)\n", + "│ │ ├── icp (1123)\n", + "│ │ │ └── sctd (553)\n", + "│ │ ├── mcp (78)\n", + "│ │ └── scp (326)\n", + "│ │ ├── dscp (812)\n", + "│ │ ├── sctv (866)\n", + "│ │ └── uf (850)\n", + "│ ├── cm (967)\n", + "│ │ ├── IIIn (832)\n", + "│ │ │ ├── mlf (62)\n", + "│ │ │ └── pc (158)\n", + "│ │ ├── IIn (848)\n", + "│ │ │ ├── bsc (916)\n", + "│ │ │ ├── csc (336)\n", + "│ │ │ ├── och (117)\n", + "│ │ │ └── opt (125)\n", + "│ │ ├── IVn (911)\n", + "│ │ ├── In (840)\n", + "│ │ │ ├── aco (900)\n", + "│ │ │ ├── lotg (21)\n", + "│ │ │ │ ├── lot (665)\n", + "│ │ │ │ └── lotd (538)\n", + "│ │ │ └── onl (1016)\n", + "│ │ ├── VIIIn (933)\n", + "│ │ │ ├── cVIIIn (948)\n", + "│ │ │ │ ├── bic (482)\n", + "│ │ │ │ ├── cic (633)\n", + "│ │ │ │ ├── das (506)\n", + "│ │ │ │ ├── ll (658)\n", + "│ │ │ │ └── tb (841)\n", + "│ │ │ └── vVIIIn (413)\n", + "│ │ ├── VIIn (798)\n", + "│ │ │ └── gVIIn (1116)\n", + "│ │ ├── Vn (901)\n", + "│ │ │ ├── moV (93)\n", + "│ │ │ └── sV (229)\n", + "│ │ │ └── sptV (794)\n", + "│ │ ├── Xn (917)\n", + "│ │ │ └── ts (237)\n", + "│ │ ├── drt (792)\n", + "│ │ │ └── cett (932)\n", + "│ │ │ ├── dc (514)\n", + "│ │ │ │ └── cuf (380)\n", + "│ │ │ └── ml (697)\n", + "│ │ └── von (949)\n", + "│ ├── eps (1000)\n", + "│ │ ├── epsc (760)\n", + "│ │ │ └── nst (102)\n", + "│ │ ├── rust (863)\n", + "│ │ │ └── vtd (397)\n", + "│ │ └── tsp (877)\n", + "│ │ ├── dtd (1060)\n", + "│ │ ├── tspc (1043)\n", + "│ │ └── tspd (1051)\n", + "│ ├── lfbs (983)\n", + "│ │ ├── cc (776)\n", + "│ │ │ ├── ccb 
(484682516)\n", + "│ │ │ ├── ccg (1108)\n", + "│ │ │ ├── ccs (986)\n", + "│ │ │ ├── ee (964)\n", + "│ │ │ ├── fa (956)\n", + "│ │ │ │ └── ec (579)\n", + "│ │ │ └── fp (971)\n", + "│ │ ├── cst (784)\n", + "│ │ │ ├── cpd (924)\n", + "│ │ │ ├── int (6)\n", + "│ │ │ ├── py (190)\n", + "│ │ │ └── pyd (198)\n", + "│ │ └── lfbst (896)\n", + "│ │ ├── ar (484682524)\n", + "│ │ ├── em (1092)\n", + "│ │ └── or (484682520)\n", + "│ ├── mfbs (991)\n", + "│ │ ├── mfbc (768)\n", + "│ │ │ ├── act (908)\n", + "│ │ │ ├── amc (884)\n", + "│ │ │ ├── cing (940)\n", + "│ │ │ ├── fxs (1099)\n", + "│ │ │ │ ├── alv (466)\n", + "│ │ │ │ ├── df (530)\n", + "│ │ │ │ ├── fi (603)\n", + "│ │ │ │ ├── fxpo (737)\n", + "│ │ │ │ │ ├── fx (436)\n", + "│ │ │ │ │ └── mct (428)\n", + "│ │ │ │ └── hc (618)\n", + "│ │ │ │ ├── dhc (443)\n", + "│ │ │ │ └── vhc (449)\n", + "│ │ │ └── st (301)\n", + "│ │ │ └── stc (484682528)\n", + "│ │ └── mfsbshy (824)\n", + "│ │ ├── mfb (54)\n", + "│ │ ├── mfbse (1083)\n", + "│ │ │ ├── fr (595)\n", + "│ │ │ ├── hbc (611)\n", + "│ │ │ └── sm (802)\n", + "│ │ ├── mfbsma (46)\n", + "│ │ │ ├── mp (673)\n", + "│ │ │ ├── mtg (681)\n", + "│ │ │ ├── mtt (690)\n", + "│ │ │ └── pm (753)\n", + "│ │ └── sup (349)\n", + "│ └── scwm (484682512)\n", + "└── grey (8)\n", + " ├── BS (343)\n", + " │ ├── HB (1065)\n", + " │ │ ├── MY (354)\n", + " │ │ │ ├── MY-mot (370)\n", + " │ │ │ │ ├── ACVII (576)\n", + " │ │ │ │ ├── AMB (135)\n", + " │ │ │ │ │ ├── AMBd (939)\n", + " │ │ │ │ │ └── AMBv (143)\n", + " │ │ │ │ ├── DMX (839)\n", + " │ │ │ │ ├── GRN (1048)\n", + " │ │ │ │ ├── ICB (372)\n", + " │ │ │ │ ├── IO (83)\n", + " │ │ │ │ ├── IRN (136)\n", + " │ │ │ │ ├── ISN (106)\n", + " │ │ │ │ ├── LIN (203)\n", + " │ │ │ │ ├── LRN (235)\n", + " │ │ │ │ │ ├── LRNm (955)\n", + " │ │ │ │ │ └── LRNp (963)\n", + " │ │ │ │ ├── MARN (307)\n", + " │ │ │ │ ├── MDRN (395)\n", + " │ │ │ │ │ ├── MDRNd (1098)\n", + " │ │ │ │ │ └── MDRNv (1107)\n", + " │ │ │ │ ├── PARN (852)\n", + " │ │ │ │ ├── PAS (859)\n", + " 
│ │ │ │ ├── PGRN (938)\n", + " │ │ │ │ │ ├── PGRNd (970)\n", + " │ │ │ │ │ └── PGRNl (978)\n", + " │ │ │ │ ├── PHY (154)\n", + " │ │ │ │ │ ├── NR (177)\n", + " │ │ │ │ │ └── PRP (169)\n", + " │ │ │ │ ├── PPY (1069)\n", + " │ │ │ │ ├── VI (653)\n", + " │ │ │ │ ├── VII (661)\n", + " │ │ │ │ ├── VNC (701)\n", + " │ │ │ │ │ ├── LAV (209)\n", + " │ │ │ │ │ ├── MV (202)\n", + " │ │ │ │ │ ├── SPIV (225)\n", + " │ │ │ │ │ └── SUV (217)\n", + " │ │ │ │ ├── XII (773)\n", + " │ │ │ │ ├── x (765)\n", + " │ │ │ │ └── y (781)\n", + " │ │ │ ├── MY-sat (379)\n", + " │ │ │ │ ├── RM (206)\n", + " │ │ │ │ ├── RO (222)\n", + " │ │ │ │ └── RPA (230)\n", + " │ │ │ └── MY-sen (386)\n", + " │ │ │ ├── AP (207)\n", + " │ │ │ ├── CN (607)\n", + " │ │ │ │ ├── DCO (96)\n", + " │ │ │ │ └── VCO (101)\n", + " │ │ │ ├── DCN (720)\n", + " │ │ │ │ ├── CU (711)\n", + " │ │ │ │ └── GR (1039)\n", + " │ │ │ ├── ECU (903)\n", + " │ │ │ ├── NTB (642)\n", + " │ │ │ ├── NTS (651)\n", + " │ │ │ ├── Pa5 (589508451)\n", + " │ │ │ ├── SPVC (429)\n", + " │ │ │ ├── SPVI (437)\n", + " │ │ │ └── SPVO (445)\n", + " │ │ └── P (771)\n", + " │ │ ├── P-mot (987)\n", + " │ │ │ ├── Acs5 (549009219)\n", + " │ │ │ ├── B (280)\n", + " │ │ │ ├── DTN (880)\n", + " │ │ │ ├── I5 (549009227)\n", + " │ │ │ ├── P5 (549009215)\n", + " │ │ │ ├── PC5 (549009223)\n", + " │ │ │ ├── PCG (898)\n", + " │ │ │ ├── PDTg (599626927)\n", + " │ │ │ ├── PG (931)\n", + " │ │ │ ├── PRNc (1093)\n", + " │ │ │ ├── SG (318)\n", + " │ │ │ ├── SUT (534)\n", + " │ │ │ ├── TRN (574)\n", + " │ │ │ └── V (621)\n", + " │ │ ├── P-sat (1117)\n", + " │ │ │ ├── CS (679)\n", + " │ │ │ ├── LC (147)\n", + " │ │ │ ├── LDT (162)\n", + " │ │ │ ├── NI (604)\n", + " │ │ │ ├── PRNr (146)\n", + " │ │ │ ├── RPO (238)\n", + " │ │ │ ├── SLC (350)\n", + " │ │ │ └── SLD (358)\n", + " │ │ └── P-sen (1132)\n", + " │ │ ├── NLL (612)\n", + " │ │ ├── PB (867)\n", + " │ │ │ └── KF (123)\n", + " │ │ ├── PSV (7)\n", + " │ │ └── SOC (398)\n", + " │ │ ├── POR (122)\n", + " │ │ ├── SOCl 
(114)\n", + " │ │ └── SOCm (105)\n", + " │ ├── IB (1129)\n", + " │ │ ├── HY (1097)\n", + " │ │ │ ├── LZ (290)\n", + " │ │ │ │ ├── LHA (194)\n", + " │ │ │ │ ├── LPO (226)\n", + " │ │ │ │ ├── PST (356)\n", + " │ │ │ │ ├── PSTN (364)\n", + " │ │ │ │ ├── PeF (576073704)\n", + " │ │ │ │ ├── RCH (173)\n", + " │ │ │ │ ├── STN (470)\n", + " │ │ │ │ ├── TU (614)\n", + " │ │ │ │ └── ZI (797)\n", + " │ │ │ │ └── FF (804)\n", + " │ │ │ ├── ME (10671)\n", + " │ │ │ ├── MEZ (467)\n", + " │ │ │ │ ├── AHN (88)\n", + " │ │ │ │ ├── MBO (331)\n", + " │ │ │ │ │ ├── LM (210)\n", + " │ │ │ │ │ ├── MM (491)\n", + " │ │ │ │ │ │ ├── MMd (606826659)\n", + " │ │ │ │ │ │ ├── MMl (606826647)\n", + " │ │ │ │ │ │ ├── MMm (606826651)\n", + " │ │ │ │ │ │ ├── MMme (732)\n", + " │ │ │ │ │ │ └── MMp (606826655)\n", + " │ │ │ │ │ ├── SUM (525)\n", + " │ │ │ │ │ └── TM (557)\n", + " │ │ │ │ │ ├── TMd (1126)\n", + " │ │ │ │ │ └── TMv (1)\n", + " │ │ │ │ ├── MPN (515)\n", + " │ │ │ │ ├── PH (946)\n", + " │ │ │ │ ├── PMd (980)\n", + " │ │ │ │ ├── PMv (1004)\n", + " │ │ │ │ ├── PVHd (63)\n", + " │ │ │ │ └── VMH (693)\n", + " │ │ │ ├── PVR (141)\n", + " │ │ │ │ ├── ADP (72)\n", + " │ │ │ │ ├── AVP (263)\n", + " │ │ │ │ ├── AVPV (272)\n", + " │ │ │ │ ├── DMH (830)\n", + " │ │ │ │ ├── MEPO (452)\n", + " │ │ │ │ ├── MPO (523)\n", + " │ │ │ │ ├── OV (763)\n", + " │ │ │ │ ├── PD (914)\n", + " │ │ │ │ ├── PS (1109)\n", + " │ │ │ │ ├── PVp (126)\n", + " │ │ │ │ ├── PVpo (133)\n", + " │ │ │ │ ├── SBPV (347)\n", + " │ │ │ │ ├── SCH (286)\n", + " │ │ │ │ ├── SFO (338)\n", + " │ │ │ │ ├── VLPO (689)\n", + " │ │ │ │ └── VMPO (576073699)\n", + " │ │ │ └── PVZ (157)\n", + " │ │ │ ├── ARH (223)\n", + " │ │ │ ├── ASO (332)\n", + " │ │ │ ├── PVH (38)\n", + " │ │ │ ├── PVa (30)\n", + " │ │ │ ├── PVi (118)\n", + " │ │ │ └── SO (390)\n", + " │ │ └── TH (549)\n", + " │ │ ├── DORpm (856)\n", + " │ │ │ ├── ATN (239)\n", + " │ │ │ │ ├── AD (64)\n", + " │ │ │ │ ├── AM (127)\n", + " │ │ │ │ │ ├── AMd (1096)\n", + " │ │ │ │ │ └── AMv 
(1104)\n", + " │ │ │ │ ├── AV (255)\n", + " │ │ │ │ ├── IAD (1113)\n", + " │ │ │ │ ├── IAM (1120)\n", + " │ │ │ │ └── LD (155)\n", + " │ │ │ ├── EPI (958)\n", + " │ │ │ │ ├── LH (186)\n", + " │ │ │ │ └── MH (483)\n", + " │ │ │ ├── GENv (1014)\n", + " │ │ │ │ ├── IGL (27)\n", + " │ │ │ │ ├── IntG (563807439)\n", + " │ │ │ │ ├── LGv (178)\n", + " │ │ │ │ └── SubG (321)\n", + " │ │ │ ├── ILM (51)\n", + " │ │ │ │ ├── CL (575)\n", + " │ │ │ │ ├── CM (599)\n", + " │ │ │ │ ├── PCN (907)\n", + " │ │ │ │ ├── PF (930)\n", + " │ │ │ │ ├── PIL (560581563)\n", + " │ │ │ │ └── RH (189)\n", + " │ │ │ ├── LAT (138)\n", + " │ │ │ │ ├── Eth (560581551)\n", + " │ │ │ │ ├── LP (218)\n", + " │ │ │ │ ├── PO (1020)\n", + " │ │ │ │ ├── POL (1029)\n", + " │ │ │ │ └── SGN (325)\n", + " │ │ │ ├── MED (444)\n", + " │ │ │ │ ├── IMD (59)\n", + " │ │ │ │ ├── MD (362)\n", + " │ │ │ │ ├── PR (1077)\n", + " │ │ │ │ └── SMT (366)\n", + " │ │ │ ├── MTN (571)\n", + " │ │ │ │ ├── PT (15)\n", + " │ │ │ │ ├── PVT (149)\n", + " │ │ │ │ ├── RE (181)\n", + " │ │ │ │ └── Xi (560581559)\n", + " │ │ │ └── RT (262)\n", + " │ │ └── DORsm (864)\n", + " │ │ ├── GENd (1008)\n", + " │ │ │ ├── LGd (170)\n", + " │ │ │ │ ├── LGd-co (496345668)\n", + " │ │ │ │ ├── LGd-ip (496345672)\n", + " │ │ │ │ └── LGd-sh (496345664)\n", + " │ │ │ └── MG (475)\n", + " │ │ │ ├── MGd (1072)\n", + " │ │ │ ├── MGm (1088)\n", + " │ │ │ └── MGv (1079)\n", + " │ │ ├── PP (1044)\n", + " │ │ ├── SPA (609)\n", + " │ │ ├── SPF (406)\n", + " │ │ │ ├── SPFm (414)\n", + " │ │ │ └── SPFp (422)\n", + " │ │ └── VENT (637)\n", + " │ │ ├── PoT (563807435)\n", + " │ │ ├── VAL (629)\n", + " │ │ ├── VM (685)\n", + " │ │ └── VP (709)\n", + " │ │ ├── VPL (718)\n", + " │ │ ├── VPLpc (725)\n", + " │ │ ├── VPM (733)\n", + " │ │ └── VPMpc (741)\n", + " │ └── MB (313)\n", + " │ ├── MBmot (323)\n", + " │ │ ├── AT (231)\n", + " │ │ ├── CUN (616)\n", + " │ │ ├── DT (75)\n", + " │ │ ├── EW (975)\n", + " │ │ ├── III (35)\n", + " │ │ ├── IV (115)\n", + " │ │ ├── LT 
(66)\n", + " │ │ ├── MA3 (549009211)\n", + " │ │ ├── MRN (128)\n", + " │ │ ├── MT (58)\n", + " │ │ ├── PAG (795)\n", + " │ │ │ ├── INC (67)\n", + " │ │ │ ├── ND (587)\n", + " │ │ │ ├── PRC (50)\n", + " │ │ │ └── Su3 (614454277)\n", + " │ │ ├── PN (607344830)\n", + " │ │ ├── PRT (1100)\n", + " │ │ │ ├── APN (215)\n", + " │ │ │ ├── MPT (531)\n", + " │ │ │ ├── NOT (628)\n", + " │ │ │ ├── NPC (634)\n", + " │ │ │ ├── OP (706)\n", + " │ │ │ ├── PPT (1061)\n", + " │ │ │ └── RPF (549009203)\n", + " │ │ ├── Pa4 (606826663)\n", + " │ │ ├── RN (214)\n", + " │ │ ├── RR (246)\n", + " │ │ ├── SCm (294)\n", + " │ │ │ ├── SCdg (26)\n", + " │ │ │ ├── SCdw (42)\n", + " │ │ │ ├── SCig (10)\n", + " │ │ │ └── SCiw (17)\n", + " │ │ ├── SNr (381)\n", + " │ │ ├── VTA (749)\n", + " │ │ └── VTN (757)\n", + " │ ├── MBsen (339)\n", + " │ │ ├── IC (4)\n", + " │ │ │ ├── ICc (811)\n", + " │ │ │ ├── ICd (820)\n", + " │ │ │ └── ICe (828)\n", + " │ │ ├── MEV (460)\n", + " │ │ ├── NB (580)\n", + " │ │ ├── PBG (874)\n", + " │ │ ├── SAG (271)\n", + " │ │ ├── SCO (599626923)\n", + " │ │ └── SCs (302)\n", + " │ │ ├── SCop (851)\n", + " │ │ ├── SCsg (842)\n", + " │ │ └── SCzo (834)\n", + " │ └── MBsta (348)\n", + " │ ├── PPN (1052)\n", + " │ ├── RAmb (165)\n", + " │ │ ├── CLI (591)\n", + " │ │ ├── DR (872)\n", + " │ │ ├── IF (12)\n", + " │ │ ├── IPN (100)\n", + " │ │ │ ├── IPA (607344842)\n", + " │ │ │ ├── IPC (607344838)\n", + " │ │ │ ├── IPDL (607344858)\n", + " │ │ │ ├── IPDM (607344854)\n", + " │ │ │ ├── IPI (607344850)\n", + " │ │ │ ├── IPL (607344846)\n", + " │ │ │ ├── IPR (607344834)\n", + " │ │ │ └── IPRL (607344862)\n", + " │ │ └── RL (197)\n", + " │ └── SNc (374)\n", + " ├── CB (512)\n", + " │ ├── CBN (519)\n", + " │ │ ├── DN (846)\n", + " │ │ ├── FN (989)\n", + " │ │ ├── IP (91)\n", + " │ │ └── VeCB (589508455)\n", + " │ └── CBX (528)\n", + " │ ├── HEM (1073)\n", + " │ │ ├── AN (1017)\n", + " │ │ │ ├── ANcr1 (1056)\n", + " │ │ │ └── ANcr2 (1064)\n", + " │ │ ├── COPY (1033)\n", + " │ │ ├── FL 
(1049)\n", + " │ │ ├── PFL (1041)\n", + " │ │ ├── PRM (1025)\n", + " │ │ └── SIM (1007)\n", + " │ └── VERM (645)\n", + " │ ├── CENT (920)\n", + " │ │ ├── CENT2 (976)\n", + " │ │ └── CENT3 (984)\n", + " │ ├── CUL (928)\n", + " │ │ └── CUL4, 5 (1091)\n", + " │ ├── DEC (936)\n", + " │ ├── FOTU (944)\n", + " │ ├── LING (912)\n", + " │ ├── NOD (968)\n", + " │ ├── PYR (951)\n", + " │ └── UVU (957)\n", + " └── CH (567)\n", + " ├── CNU (623)\n", + " │ ├── PAL (803)\n", + " │ │ ├── PALc (809)\n", + " │ │ │ ├── BAC (287)\n", + " │ │ │ └── BST (351)\n", + " │ │ ├── PALd (818)\n", + " │ │ │ ├── GPe (1022)\n", + " │ │ │ └── GPi (1031)\n", + " │ │ ├── PALm (826)\n", + " │ │ │ ├── MSC (904)\n", + " │ │ │ │ ├── MS (564)\n", + " │ │ │ │ └── NDB (596)\n", + " │ │ │ └── TRS (581)\n", + " │ │ └── PALv (835)\n", + " │ │ ├── MA (298)\n", + " │ │ └── SI (342)\n", + " │ └── STR (477)\n", + " │ ├── LSX (275)\n", + " │ │ ├── LS (242)\n", + " │ │ │ ├── LSc (250)\n", + " │ │ │ ├── LSr (258)\n", + " │ │ │ └── LSv (266)\n", + " │ │ ├── SF (310)\n", + " │ │ └── SH (333)\n", + " │ ├── STRd (485)\n", + " │ │ └── CP (672)\n", + " │ ├── STRv (493)\n", + " │ │ ├── ACB (56)\n", + " │ │ ├── FS (998)\n", + " │ │ └── OT (754)\n", + " │ └── sAMY (278)\n", + " │ ├── AAA (23)\n", + " │ ├── BA (292)\n", + " │ ├── CEA (536)\n", + " │ │ ├── CEAc (544)\n", + " │ │ ├── CEAl (551)\n", + " │ │ └── CEAm (559)\n", + " │ ├── IA (1105)\n", + " │ └── MEA (403)\n", + " └── CTX (688)\n", + " ├── CTXpl (695)\n", + " │ ├── HPF (1089)\n", + " │ │ ├── HIP (1080)\n", + " │ │ │ ├── CA (375)\n", + " │ │ │ │ ├── CA1 (382)\n", + " │ │ │ │ ├── CA2 (423)\n", + " │ │ │ │ └── CA3 (463)\n", + " │ │ │ ├── DG (726)\n", + " │ │ │ │ ├── DG-mo (10703)\n", + " │ │ │ │ ├── DG-po (10704)\n", + " │ │ │ │ └── DG-sg (632)\n", + " │ │ │ ├── FC (982)\n", + " │ │ │ └── IG (19)\n", + " │ │ └── RHP (822)\n", + " │ │ ├── APr (484682508)\n", + " │ │ ├── ENT (909)\n", + " │ │ │ ├── ENTl (918)\n", + " │ │ │ │ ├── ENTl1 (1121)\n", + " │ │ │ │ ├── ENTl2 
(20)\n", + " │ │ │ │ ├── ENTl3 (52)\n", + " │ │ │ │ ├── ENTl5 (139)\n", + " │ │ │ │ └── ENTl6a (28)\n", + " │ │ │ └── ENTm (926)\n", + " │ │ │ ├── ENTm1 (526)\n", + " │ │ │ ├── ENTm2 (543)\n", + " │ │ │ ├── ENTm3 (664)\n", + " │ │ │ ├── ENTm5 (727)\n", + " │ │ │ └── ENTm6 (743)\n", + " │ │ ├── HATA (589508447)\n", + " │ │ ├── PAR (843)\n", + " │ │ ├── POST (1037)\n", + " │ │ ├── PRE (1084)\n", + " │ │ ├── ProS (484682470)\n", + " │ │ └── SUB (502)\n", + " │ ├── Isocortex (315)\n", + " │ │ ├── ACA (31)\n", + " │ │ │ ├── ACAd (39)\n", + " │ │ │ │ ├── ACAd1 (935)\n", + " │ │ │ │ ├── ACAd2/3 (211)\n", + " │ │ │ │ ├── ACAd5 (1015)\n", + " │ │ │ │ ├── ACAd6a (919)\n", + " │ │ │ │ └── ACAd6b (927)\n", + " │ │ │ └── ACAv (48)\n", + " │ │ │ ├── ACAv1 (588)\n", + " │ │ │ ├── ACAv2/3 (296)\n", + " │ │ │ ├── ACAv5 (772)\n", + " │ │ │ ├── ACAv6a (810)\n", + " │ │ │ └── ACAv6b (819)\n", + " │ │ ├── AI (95)\n", + " │ │ │ ├── AId (104)\n", + " │ │ │ │ ├── AId1 (996)\n", + " │ │ │ │ ├── AId2/3 (328)\n", + " │ │ │ │ ├── AId5 (1101)\n", + " │ │ │ │ ├── AId6a (783)\n", + " │ │ │ │ └── AId6b (831)\n", + " │ │ │ ├── AIp (111)\n", + " │ │ │ │ ├── AIp1 (120)\n", + " │ │ │ │ ├── AIp2/3 (163)\n", + " │ │ │ │ ├── AIp5 (344)\n", + " │ │ │ │ ├── AIp6a (314)\n", + " │ │ │ │ └── AIp6b (355)\n", + " │ │ │ └── AIv (119)\n", + " │ │ │ ├── AIv1 (704)\n", + " │ │ │ ├── AIv2/3 (694)\n", + " │ │ │ ├── AIv5 (800)\n", + " │ │ │ ├── AIv6a (675)\n", + " │ │ │ └── AIv6b (699)\n", + " │ │ ├── AUD (247)\n", + " │ │ │ ├── AUDd (1011)\n", + " │ │ │ │ ├── AUDd1 (527)\n", + " │ │ │ │ ├── AUDd2/3 (600)\n", + " │ │ │ │ ├── AUDd4 (678)\n", + " │ │ │ │ ├── AUDd5 (252)\n", + " │ │ │ │ ├── AUDd6a (156)\n", + " │ │ │ │ └── AUDd6b (243)\n", + " │ │ │ ├── AUDp (1002)\n", + " │ │ │ │ ├── AUDp1 (735)\n", + " │ │ │ │ ├── AUDp2/3 (251)\n", + " │ │ │ │ ├── AUDp4 (816)\n", + " │ │ │ │ ├── AUDp5 (847)\n", + " │ │ │ │ ├── AUDp6a (954)\n", + " │ │ │ │ └── AUDp6b (1005)\n", + " │ │ │ ├── AUDpo (1027)\n", + " │ │ │ │ ├── AUDpo1 
(696)\n", + " │ │ │ │ ├── AUDpo2/3 (643)\n", + " │ │ │ │ ├── AUDpo4 (759)\n", + " │ │ │ │ ├── AUDpo5 (791)\n", + " │ │ │ │ ├── AUDpo6a (249)\n", + " │ │ │ │ └── AUDpo6b (456)\n", + " │ │ │ └── AUDv (1018)\n", + " │ │ │ ├── AUDv1 (959)\n", + " │ │ │ ├── AUDv2/3 (755)\n", + " │ │ │ ├── AUDv4 (990)\n", + " │ │ │ ├── AUDv5 (1023)\n", + " │ │ │ ├── AUDv6a (520)\n", + " │ │ │ └── AUDv6b (598)\n", + " │ │ ├── ECT (895)\n", + " │ │ │ ├── ECT1 (836)\n", + " │ │ │ ├── ECT2/3 (427)\n", + " │ │ │ ├── ECT5 (988)\n", + " │ │ │ ├── ECT6a (977)\n", + " │ │ │ └── ECT6b (1045)\n", + " │ │ ├── FRP (184)\n", + " │ │ │ ├── FRP1 (68)\n", + " │ │ │ ├── FRP2/3 (667)\n", + " │ │ │ ├── FRP5 (526157192)\n", + " │ │ │ ├── FRP6a (526157196)\n", + " │ │ │ └── FRP6b (526322264)\n", + " │ │ ├── GU (1057)\n", + " │ │ │ ├── GU1 (36)\n", + " │ │ │ ├── GU2/3 (180)\n", + " │ │ │ ├── GU4 (148)\n", + " │ │ │ ├── GU5 (187)\n", + " │ │ │ ├── GU6a (638)\n", + " │ │ │ └── GU6b (662)\n", + " │ │ ├── ILA (44)\n", + " │ │ │ ├── ILA1 (707)\n", + " │ │ │ ├── ILA2/3 (556)\n", + " │ │ │ ├── ILA5 (827)\n", + " │ │ │ ├── ILA6a (1054)\n", + " │ │ │ └── ILA6b (1081)\n", + " │ │ ├── MO (500)\n", + " │ │ │ ├── MOp (985)\n", + " │ │ │ │ ├── MOp1 (320)\n", + " │ │ │ │ ├── MOp2/3 (943)\n", + " │ │ │ │ ├── MOp5 (648)\n", + " │ │ │ │ ├── MOp6a (844)\n", + " │ │ │ │ └── MOp6b (882)\n", + " │ │ │ └── MOs (993)\n", + " │ │ │ ├── MOs1 (656)\n", + " │ │ │ ├── MOs2/3 (962)\n", + " │ │ │ ├── MOs5 (767)\n", + " │ │ │ ├── MOs6a (1021)\n", + " │ │ │ └── MOs6b (1085)\n", + " │ │ ├── ORB (714)\n", + " │ │ │ ├── ORBl (723)\n", + " │ │ │ │ ├── ORBl1 (448)\n", + " │ │ │ │ ├── ORBl2/3 (412)\n", + " │ │ │ │ ├── ORBl5 (630)\n", + " │ │ │ │ ├── ORBl6a (440)\n", + " │ │ │ │ └── ORBl6b (488)\n", + " │ │ │ ├── ORBm (731)\n", + " │ │ │ │ ├── ORBm1 (484)\n", + " │ │ │ │ ├── ORBm2/3 (582)\n", + " │ │ │ │ ├── ORBm5 (620)\n", + " │ │ │ │ ├── ORBm6a (910)\n", + " │ │ │ │ └── ORBm6b (527696977)\n", + " │ │ │ └── ORBvl (746)\n", + " │ │ │ ├── ORBvl1 
(969)\n", + " │ │ │ ├── ORBvl2/3 (288)\n", + " │ │ │ ├── ORBvl5 (1125)\n", + " │ │ │ ├── ORBvl6a (608)\n", + " │ │ │ └── ORBvl6b (680)\n", + " │ │ ├── PERI (922)\n", + " │ │ │ ├── PERI1 (540)\n", + " │ │ │ ├── PERI2/3 (888)\n", + " │ │ │ ├── PERI5 (692)\n", + " │ │ │ ├── PERI6a (335)\n", + " │ │ │ └── PERI6b (368)\n", + " │ │ ├── PL (972)\n", + " │ │ │ ├── PL1 (171)\n", + " │ │ │ ├── PL2/3 (304)\n", + " │ │ │ ├── PL5 (363)\n", + " │ │ │ ├── PL6a (84)\n", + " │ │ │ └── PL6b (132)\n", + " │ │ ├── PTLp (22)\n", + " │ │ │ ├── VISa (312782546)\n", + " │ │ │ │ ├── VISa1 (312782550)\n", + " │ │ │ │ ├── VISa2/3 (312782554)\n", + " │ │ │ │ ├── VISa4 (312782558)\n", + " │ │ │ │ ├── VISa5 (312782562)\n", + " │ │ │ │ ├── VISa6a (312782566)\n", + " │ │ │ │ └── VISa6b (312782570)\n", + " │ │ │ └── VISrl (417)\n", + " │ │ │ ├── VISrl1 (312782604)\n", + " │ │ │ ├── VISrl2/3 (312782608)\n", + " │ │ │ ├── VISrl4 (312782612)\n", + " │ │ │ ├── VISrl5 (312782616)\n", + " │ │ │ ├── VISrl6a (312782620)\n", + " │ │ │ └── VISrl6b (312782624)\n", + " │ │ ├── RSP (254)\n", + " │ │ │ ├── RSPagl (894)\n", + " │ │ │ │ ├── RSPagl1 (671)\n", + " │ │ │ │ ├── RSPagl2/3 (965)\n", + " │ │ │ │ ├── RSPagl5 (774)\n", + " │ │ │ │ ├── RSPagl6a (906)\n", + " │ │ │ │ └── RSPagl6b (279)\n", + " │ │ │ ├── RSPd (879)\n", + " │ │ │ │ ├── RSPd1 (442)\n", + " │ │ │ │ ├── RSPd2/3 (434)\n", + " │ │ │ │ ├── RSPd4 (545)\n", + " │ │ │ │ ├── RSPd5 (610)\n", + " │ │ │ │ ├── RSPd6a (274)\n", + " │ │ │ │ └── RSPd6b (330)\n", + " │ │ │ └── RSPv (886)\n", + " │ │ │ ├── RSPv1 (542)\n", + " │ │ │ ├── RSPv2/3 (430)\n", + " │ │ │ ├── RSPv5 (687)\n", + " │ │ │ ├── RSPv6a (590)\n", + " │ │ │ └── RSPv6b (622)\n", + " │ │ ├── SS (453)\n", + " │ │ │ ├── SSp (322)\n", + " │ │ │ │ ├── SSp-bfd (329)\n", + " │ │ │ │ │ ├── SSp-bfd1 (981)\n", + " │ │ │ │ │ ├── SSp-bfd2/3 (201)\n", + " │ │ │ │ │ ├── SSp-bfd4 (1047)\n", + " │ │ │ │ │ ├── SSp-bfd5 (1070)\n", + " │ │ │ │ │ ├── SSp-bfd6a (1038)\n", + " │ │ │ │ │ └── SSp-bfd6b (1062)\n", + " │ 
│ │ │ ├── SSp-ll (337)\n", + " │ │ │ │ │ ├── SSp-ll1 (1030)\n", + " │ │ │ │ │ ├── SSp-ll2/3 (113)\n", + " │ │ │ │ │ ├── SSp-ll4 (1094)\n", + " │ │ │ │ │ ├── SSp-ll5 (1128)\n", + " │ │ │ │ │ ├── SSp-ll6a (478)\n", + " │ │ │ │ │ └── SSp-ll6b (510)\n", + " │ │ │ │ ├── SSp-m (345)\n", + " │ │ │ │ │ ├── SSp-m1 (878)\n", + " │ │ │ │ │ ├── SSp-m2/3 (657)\n", + " │ │ │ │ │ ├── SSp-m4 (950)\n", + " │ │ │ │ │ ├── SSp-m5 (974)\n", + " │ │ │ │ │ ├── SSp-m6a (1102)\n", + " │ │ │ │ │ └── SSp-m6b (2)\n", + " │ │ │ │ ├── SSp-n (353)\n", + " │ │ │ │ │ ├── SSp-n1 (558)\n", + " │ │ │ │ │ ├── SSp-n2/3 (838)\n", + " │ │ │ │ │ ├── SSp-n4 (654)\n", + " │ │ │ │ │ ├── SSp-n5 (702)\n", + " │ │ │ │ │ ├── SSp-n6a (889)\n", + " │ │ │ │ │ └── SSp-n6b (929)\n", + " │ │ │ │ ├── SSp-tr (361)\n", + " │ │ │ │ │ ├── SSp-tr1 (1006)\n", + " │ │ │ │ │ ├── SSp-tr2/3 (670)\n", + " │ │ │ │ │ ├── SSp-tr4 (1086)\n", + " │ │ │ │ │ ├── SSp-tr5 (1111)\n", + " │ │ │ │ │ ├── SSp-tr6a (9)\n", + " │ │ │ │ │ └── SSp-tr6b (461)\n", + " │ │ │ │ ├── SSp-ul (369)\n", + " │ │ │ │ │ ├── SSp-ul1 (450)\n", + " │ │ │ │ │ ├── SSp-ul2/3 (854)\n", + " │ │ │ │ │ ├── SSp-ul4 (577)\n", + " │ │ │ │ │ ├── SSp-ul5 (625)\n", + " │ │ │ │ │ ├── SSp-ul6a (945)\n", + " │ │ │ │ │ └── SSp-ul6b (1026)\n", + " │ │ │ │ └── SSp-un (182305689)\n", + " │ │ │ │ ├── SSp-un1 (182305693)\n", + " │ │ │ │ ├── SSp-un2/3 (182305697)\n", + " │ │ │ │ ├── SSp-un4 (182305701)\n", + " │ │ │ │ ├── SSp-un5 (182305705)\n", + " │ │ │ │ ├── SSp-un6a (182305709)\n", + " │ │ │ │ └── SSp-un6b (182305713)\n", + " │ │ │ └── SSs (378)\n", + " │ │ │ ├── SSs1 (873)\n", + " │ │ │ ├── SSs2/3 (806)\n", + " │ │ │ ├── SSs4 (1035)\n", + " │ │ │ ├── SSs5 (1090)\n", + " │ │ │ ├── SSs6a (862)\n", + " │ │ │ └── SSs6b (893)\n", + " │ │ ├── TEa (541)\n", + " │ │ │ ├── TEa1 (97)\n", + " │ │ │ ├── TEa2/3 (1127)\n", + " │ │ │ ├── TEa4 (234)\n", + " │ │ │ ├── TEa5 (289)\n", + " │ │ │ ├── TEa6a (729)\n", + " │ │ │ └── TEa6b (786)\n", + " │ │ ├── VIS (669)\n", + " │ │ │ ├── VISal (402)\n", 
+ " │ │ │ │ ├── VISal1 (1074)\n", + " │ │ │ │ ├── VISal2/3 (905)\n", + " │ │ │ │ ├── VISal4 (1114)\n", + " │ │ │ │ ├── VISal5 (233)\n", + " │ │ │ │ ├── VISal6a (601)\n", + " │ │ │ │ └── VISal6b (649)\n", + " │ │ │ ├── VISam (394)\n", + " │ │ │ │ ├── VISam1 (281)\n", + " │ │ │ │ ├── VISam2/3 (1066)\n", + " │ │ │ │ ├── VISam4 (401)\n", + " │ │ │ │ ├── VISam5 (433)\n", + " │ │ │ │ ├── VISam6a (1046)\n", + " │ │ │ │ └── VISam6b (441)\n", + " │ │ │ ├── VISl (409)\n", + " │ │ │ │ ├── VISl1 (421)\n", + " │ │ │ │ ├── VISl2/3 (973)\n", + " │ │ │ │ ├── VISl4 (573)\n", + " │ │ │ │ ├── VISl5 (613)\n", + " │ │ │ │ ├── VISl6a (74)\n", + " │ │ │ │ └── VISl6b (121)\n", + " │ │ │ ├── VISli (312782574)\n", + " │ │ │ │ ├── VISli1 (312782578)\n", + " │ │ │ │ ├── VISli2/3 (312782582)\n", + " │ │ │ │ ├── VISli4 (312782586)\n", + " │ │ │ │ ├── VISli5 (312782590)\n", + " │ │ │ │ ├── VISli6a (312782594)\n", + " │ │ │ │ └── VISli6b (312782598)\n", + " │ │ │ ├── VISp (385)\n", + " │ │ │ │ ├── VISp1 (593)\n", + " │ │ │ │ ├── VISp2/3 (821)\n", + " │ │ │ │ ├── VISp4 (721)\n", + " │ │ │ │ ├── VISp5 (778)\n", + " │ │ │ │ ├── VISp6a (33)\n", + " │ │ │ │ └── VISp6b (305)\n", + " │ │ │ ├── VISpl (425)\n", + " │ │ │ │ ├── VISpl1 (750)\n", + " │ │ │ │ ├── VISpl2/3 (269)\n", + " │ │ │ │ ├── VISpl4 (869)\n", + " │ │ │ │ ├── VISpl5 (902)\n", + " │ │ │ │ ├── VISpl6a (377)\n", + " │ │ │ │ └── VISpl6b (393)\n", + " │ │ │ ├── VISpm (533)\n", + " │ │ │ │ ├── VISpm1 (805)\n", + " │ │ │ │ ├── VISpm2/3 (41)\n", + " │ │ │ │ ├── VISpm4 (501)\n", + " │ │ │ │ ├── VISpm5 (565)\n", + " │ │ │ │ ├── VISpm6a (257)\n", + " │ │ │ │ └── VISpm6b (469)\n", + " │ │ │ └── VISpor (312782628)\n", + " │ │ │ ├── VISpor1 (312782632)\n", + " │ │ │ ├── VISpor2/3 (312782636)\n", + " │ │ │ ├── VISpor4 (312782640)\n", + " │ │ │ ├── VISpor5 (312782644)\n", + " │ │ │ ├── VISpor6a (312782648)\n", + " │ │ │ └── VISpor6b (312782652)\n", + " │ │ └── VISC (677)\n", + " │ │ ├── VISC1 (897)\n", + " │ │ ├── VISC2/3 (1106)\n", + " │ │ ├── VISC4 
(1010)\n", + " │ │ ├── VISC5 (1058)\n", + " │ │ ├── VISC6a (857)\n", + " │ │ └── VISC6b (849)\n", + " │ └── OLF (698)\n", + " │ ├── AOB (151)\n", + " │ │ ├── AOBgl (188)\n", + " │ │ ├── AOBgr (196)\n", + " │ │ └── AOBmi (204)\n", + " │ ├── AON (159)\n", + " │ ├── COA (631)\n", + " │ │ ├── COAa (639)\n", + " │ │ └── COAp (647)\n", + " │ │ ├── COApl (655)\n", + " │ │ └── COApm (663)\n", + " │ ├── DP (814)\n", + " │ ├── MOB (507)\n", + " │ ├── NLOT (619)\n", + " │ │ ├── NLOT1 (260)\n", + " │ │ ├── NLOT2 (268)\n", + " │ │ └── NLOT3 (1139)\n", + " │ ├── PAA (788)\n", + " │ ├── PIR (961)\n", + " │ ├── TR (566)\n", + " │ └── TT (589)\n", + " │ ├── TTd (597)\n", + " │ └── TTv (605)\n", + " └── CTXsp (703)\n", + " ├── BLA (295)\n", + " │ ├── BLAa (303)\n", + " │ ├── BLAp (311)\n", + " │ └── BLAv (451)\n", + " ├── BMA (319)\n", + " │ ├── BMAa (327)\n", + " │ └── BMAp (334)\n", + " ├── CLA (583)\n", + " ├── EP (942)\n", + " │ ├── EPd (952)\n", + " │ └── EPv (966)\n", + " ├── LA (131)\n", + " └── PA (780)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.structures" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The structures attribute is a custom dictionary that can be queried by region number or acronym, and contains all the information for a given structure:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'acronym': 'root',\n", + " 'id': 997,\n", + " 'mesh': None,\n", + " 'mesh_filename': PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/997.obj'),\n", + " 'name': 'root',\n", + " 'rgb_triplet': [255, 255, 255],\n", + " 'structure_id_path': [997]}\n" + ] + } + ], + "source": [ + "pprint(bg_atlas.structures[\"root\"])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In particular, the `structure_id_path` key contains a list 
description of the path in the hierarchy up to a particular region, and can be used for queries on the hierarchy." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[997, 8, 567]" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.structures[\"CH\"][\"structure_id_path\"]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can use the `bg_atlas.get_structure_descendants` and `bg_atlas.get_structure_ancestors` methods to explore the hierarchy:" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['VISC1', 'VISC2/3', 'VISC4', 'VISC5', 'VISC6a', 'VISC6b']" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.get_structure_descendants(\"VISC\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['root', 'grey', 'CH', 'CTX', 'CTXpl', 'Isocortex', 'VISC']" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.get_structure_ancestors(\"VISC6a\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "**NOTE**: \n", + "the levels of the hierarchy depends on the underlying atlas, so we cannot ensure the goodness and consistency of their hierarchy three.\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There is an higher level description of the structures hierarchy that is built using the [treelib](https://treelib.readthedocs.io/en/latest/) package, and is available as: " + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 10, + 
"metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.structures.tree" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For most applications though the methods described above and the list path of each region should be enough to query the hierarchy without additional layers of complication." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.3 Region masks" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `get_structure_mask` method returns a mask volume where all voxels belonging to an area or to the descendants of that area are non zero. All other voxels are zero. We will generate the structure mask for primary visual cortex to see how this works. \n", + "\n", + "Primary visual cortex (`VISp`) has an ID value of `385` but no voxels in the annotation image actually have that value:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'acronym': 'VISp',\n", + " 'id': 385,\n", + " 'mesh': None,\n", + " 'mesh_filename': PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/385.obj'),\n", + " 'name': 'Primary visual area',\n", + " 'rgb_triplet': [8, 133, 140],\n", + " 'structure_id_path': [997, 8, 567, 688, 695, 315, 669, 385]}\n" + ] + } + ], + "source": [ + "pprint(bg_atlas.structures[\"VISp\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# No voxels in the annotation volume are labelled as being VISp\n", + "(bg_atlas.annotation==385).sum()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The reason no VISp voxels exist is because the annotation volume is segmented more finely. 
In this case `VISp` is divided into cortical layers and it is IDs associated with these layers that are present in the annotation volume." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['VISp1', 'VISp2/3', 'VISp4', 'VISp5', 'VISp6a', 'VISp6b']" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# The descendants (children) of VISp are cortical layers\n", + "bg_atlas.get_structure_descendants(\"VISp\")" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "VISp1\t593\n", + "VISp2/3\t821\n", + "VISp4\t721\n", + "VISp5\t778\n", + "VISp6a\t33\n", + "VISp6b\t305\n" + ] + } + ], + "source": [ + "# The IDs associated with each layer in primary visual cortex\n", + "layers = bg_atlas.get_structure_descendants(\"VISp\")\n", + "layer_ids = [bg_atlas.structures[this_layer]['id'] for this_layer in layers]\n", + "\n", + "for (this_layer, this_id) in zip(layers, layer_ids):\n", + " print(\"%s\\t%s\" % (this_layer, this_id))\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These IDs are indeed present in the annotation volume:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1565" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# For example, we find over a thousand voxels associated with primary visual cortex layer 6\n", + "# in the annotation volume\n", + "(bg_atlas.annotation==778).sum()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "So lets use `get_structure_mask` to return a mask volume that retains only `VISp`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "mask_VISp = bg_atlas.get_structure_mask('VISp')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", + "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", + " axs[i].imshow(mask_VISp.max(i), cmap=\"gray\")\n", + " axs[i].set_title(f\"{plane.capitalize()} view\")\n", + " axs[i].set_ylabel(labels[0])\n", + " axs[i].set_xlabel(labels[1])\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `root` node encompases the whole brain and we can use this to provide a background image for the above area." + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "mask_root = bg_atlas.get_structure_mask('root')\n", + "\n", + "# The mask images have pixel values equal to the ID of the parent area, so we change these for\n", + "# plotting purposes. 
\n", + "mask_root[mask_root>0]=5\n", + "mask_VISp[mask_VISp>0]=2\n", + "mask_VISp_root = mask_VISp + mask_root" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "space = bg_atlas.space\n", + "\n", + "f, axs = plt.subplots(1,3, figsize=(12, 3))\n", + "for i, (plane, labels) in enumerate(zip(space.sections, space.axis_labels)):\n", + " axs[i].imshow(mask_VISp_root.max(i), cmap=\"gray\")\n", + " axs[i].set_title(f\"{plane.capitalize()} view\")\n", + " axs[i].set_ylabel(labels[0])\n", + " axs[i].set_xlabel(labels[1])\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1.3 Regions meshes" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we need to access the structure meshes, we can either query for the file (e.g., if we need to load the file through some library like `vedo`):" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PosixPath('/home/rob/.brainglobe/allen_mouse_100um_v1.2/meshes/567.obj')" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.meshfile_from_structure(\"CH\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Or directly obtain the mesh, as a mesh object of the `meshio` library:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\n", + " Number of points: 56703\n", + " Number of cells:\n", + " triangle: 112948\n", + " Point data: obj:vn\n", + " Cell data: obj:group_ids" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "bg_atlas.mesh_from_structure(\"CH\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2 Query the `BrainGlobeAtlas`" + ] + }, + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "### 2.0 Query for structures:" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A very convenient feature of the `BrainGlobeAtlas` API is the simplicity of querying for the identity of the structure or the hemisphere at a given location, either from stack indexes or space coordinates, and even cutting the hierarchy at some higher level:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "By index: CP\n", + "By coordinates: CP\n", + "Higher hierarchy level: CH\n" + ] + } + ], + "source": [ + "# Ask for identity of some indexes in the stack:\n", + "print(\"By index:\", bg_atlas.structure_from_coords((50, 40, 30), \n", + " as_acronym=True))\n", + "\n", + "# Now give coordinates in microns\n", + "print(\"By coordinates:\", bg_atlas.structure_from_coords((5000, 4000, 3000), \n", + " as_acronym=True, \n", + " microns=True))\n", + "\n", + "# Now cut hierarchy at some level\n", + "print(\"Higher hierarchy level:\", bg_atlas.structure_from_coords((5000, 4000, 3000), \n", + " as_acronym=True, \n", + " microns=True, \n", + " hierarchy_lev=2))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1 Query for hemispheres" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "A very similar method can be used for hemispheres. 
0 correspond to outside the brain, a,d 1 and 2 to left and right hemispheres - but we can just ask for the side name instead of the number:" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "By index: 2\n", + "By coordinates: 2\n", + "By : 2\n" + ] + } + ], + "source": [ + "# Ask for identity of some indexes in the stack:\n", + "print(\"By index:\", bg_atlas.hemisphere_from_coords((50, 40, 30)))\n", + "\n", + "# Now give coordinates in microns\n", + "print(\"By coordinates:\", bg_atlas.hemisphere_from_coords((5000, 4000, 3000), microns=True))\n", + "\n", + "# Now print side string\n", + "print(\"By :\", bg_atlas.hemisphere_from_coords((5000, 4000, 3000), microns=True))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.10" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file From c055cb464c2e2963129f204d560c258bc8905f15 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:38:46 +0000 Subject: [PATCH 093/103] Update manifest --- MANIFEST.in | 5 +++++ pyproject.toml | 8 +------- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 0b37b229..4e46edeb 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,4 +4,9 @@ include README.md include requirements.txt exclude .codecov.yml +exclude .pre-commit-config.yaml + +graft brainglobe_atlasapi *.py + +prune tests prune tutorials diff --git a/pyproject.toml b/pyproject.toml index 
1cbeb3ff..247ca257 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,13 +67,7 @@ line-length = 79 [tool.setuptools_scm] [tool.check-manifest] -ignore = [ - "*.yaml", - "tox.ini", - "tests/*", - "tests/test_unit/*", - "tests/test_integration/*", -] +ignore = ["*.yaml", "tox.ini", "tests/*"] [tool.ruff] line-length = 79 From d96b91ea9f3e2101e285e01f3a2ba947a5f6e730 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 11:42:13 +0000 Subject: [PATCH 094/103] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- tutorials/Atlas API usage.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tutorials/Atlas API usage.ipynb b/tutorials/Atlas API usage.ipynb index 2881460a..5bbfe1c3 100644 --- a/tutorials/Atlas API usage.ipynb +++ b/tutorials/Atlas API usage.ipynb @@ -1712,4 +1712,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} From 7ca6f63f9fbd353f5e1ba196bdb20ebfa1050888 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:44:57 +0000 Subject: [PATCH 095/103] Pre-commit pass --- brainglobe_atlasapi/update_atlases.py | 8 +++++--- pyproject.toml | 4 +++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/brainglobe_atlasapi/update_atlases.py b/brainglobe_atlasapi/update_atlases.py index d7ba910f..7a2b7f2a 100644 --- a/brainglobe_atlasapi/update_atlases.py +++ b/brainglobe_atlasapi/update_atlases.py @@ -37,7 +37,8 @@ def update_atlas(atlas_name, force=False): # Delete atlas folder rprint( - f"[b][magenta2]brainglobe_atlasapi: updating {atlas.atlas_name}[/magenta2][/b]" + "[b][magenta2]brainglobe_atlasapi: " + f"updating {atlas.atlas_name}[/magenta2][/b]" ) fld = atlas.brainglobe_dir / atlas.local_full_name shutil.rmtree(fld) @@ -52,8 +53,9 @@ def update_atlas(atlas_name, force=False): # Check that everything went well rprint( - 
f"[b][magenta2]brainglobe_atlasapi: {atlas.atlas_name} updated to version: " - + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]" + "[b][magenta2]brainglobe_atlasapi: " + f"{atlas.atlas_name} updated to version: " + f"{_version_str_from_tuple(atlas.remote_version)}[/magenta2][/b]" ) diff --git a/pyproject.toml b/pyproject.toml index 247ca257..8ed01086 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,9 +72,11 @@ ignore = ["*.yaml", "tox.ini", "tests/*"] [tool.ruff] line-length = 79 exclude = ["__init__.py", "build", ".eggs"] -select = ["I", "E", "F"] fix = true +[tool.ruff.lint] +select = ["I", "E", "F"] + [tool.tox] legacy_tox_ini = """ # For more information about tox, see https://tox.readthedocs.io/en/latest/ From b195cb63c7b8b3d7f766d4ee61bd035bcda9b814 Mon Sep 17 00:00:00 2001 From: viktorpm <50667179+viktorpm@users.noreply.github.com> Date: Mon, 12 Feb 2024 11:48:06 +0000 Subject: [PATCH 096/103] fixing small bug: validation_functions function argument as variable instead of all_validation_functions (#119) --- bg_atlasgen/validate_atlases.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index 56b76180..b9568bfc 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -154,7 +154,7 @@ def validate_atlas(atlas_name, version, validation_functions): validation_results = {atlas_name: []} - for i, validation_function in enumerate(all_validation_functions): + for i, validation_function in enumerate(validation_functions): try: validation_function(BrainGlobeAtlas(atlas_name)) validation_results[atlas_name].append( From eb37112f91d188f78b0326cd4126c05e5b1d16ee Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Mon, 12 Feb 2024 11:51:31 +0000 Subject: [PATCH 097/103] Bump NIU action versions --- .github/workflows/test_and_deploy.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/test_and_deploy.yml b/.github/workflows/test_and_deploy.yml index 47bb8da4..8cd7a77a 100644 --- a/.github/workflows/test_and_deploy.yml +++ b/.github/workflows/test_and_deploy.yml @@ -13,7 +13,7 @@ jobs: linting: runs-on: ubuntu-latest steps: - - uses: neuroinformatics-unit/actions/lint@v1 + - uses: neuroinformatics-unit/actions/lint@v2 manifest: runs-on: ubuntu-latest @@ -36,7 +36,7 @@ jobs: python-version: "3.9" steps: - - uses: neuroinformatics-unit/actions/test@v1 + - uses: neuroinformatics-unit/actions/test@v2 with: python-version: ${{ matrix.python-version }} @@ -51,7 +51,7 @@ jobs: ) runs-on: ubuntu-latest steps: - - uses: neuroinformatics-unit/actions/build_sdist_wheels@v1 + - uses: neuroinformatics-unit/actions/build_sdist_wheels@v2 upload_all: name: Publish build distributions From dd314e7076283759dc98e4f7a8085536ac65bbfe Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Wed, 14 Feb 2024 09:47:26 +0000 Subject: [PATCH 098/103] Move conftest back to top-level directory --- tests/{atlasapi => }/conftest.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{atlasapi => }/conftest.py (100%) diff --git a/tests/atlasapi/conftest.py b/tests/conftest.py similarity index 100% rename from tests/atlasapi/conftest.py rename to tests/conftest.py From 8da538212c705d6a15ec49f0e0bb8ff186dc0c57 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Wed, 14 Feb 2024 10:45:25 +0000 Subject: [PATCH 099/103] Refactor atlasgen into a submodule --- .pre-commit-config.yaml | 7 ----- MANIFEST.in | 1 - README.md | 11 ++++---- .../atlas_generation}/__init__.py | 0 .../atlas_scripts/__init__.py | 0 .../atlas_scripts/admba_3d_dev_mouse.py | 25 +++++++++++------- .../atlas_scripts/allen_cord.py | 22 +++++++++------- .../atlas_scripts/allen_mouse.py | 4 +-- .../atlas_scripts/azba_zfish.py | 8 +++--- .../atlas_scripts/example_mouse.py | 2 +- .../atlas_scripts/humanatlas.py | 6 ++--- 
.../kim_developmental_ccf_mouse.py | 11 +++++--- .../atlas_scripts/kim_mouse.py | 16 +++++++----- .../atlas_scripts/mpin_zfish.py | 6 ++--- .../atlas_scripts/osten_mouse.py | 11 +++++--- .../atlas_scripts/perens_lsfm_mouse.py | 14 +++++----- .../atlas_scripts/princeton_mouse.py | 11 +++++--- .../atlas_scripts/template_script.py | 2 +- .../atlas_scripts/whs_sd_rat.py | 11 +++++--- .../atlas_generation}/main_script.py | 26 +++++++++++++------ .../atlas_generation}/mesh_utils.py | 4 ++- .../atlas_generation}/metadata_utils.py | 8 +++--- .../atlas_generation}/stacks.py | 3 ++- .../structure_json_to_csv.py | 0 .../atlas_generation}/structures.py | 4 +-- .../atlas_generation}/validate_atlases.py | 9 ++++--- .../atlas_generation}/volume_utils.py | 0 .../atlas_generation}/wrapup.py | 18 +++++++------ .../test_validation.py | 8 +++--- 29 files changed, 142 insertions(+), 106 deletions(-) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/__init__.py (100%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/__init__.py (100%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/admba_3d_dev_mouse.py (94%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/allen_cord.py (93%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/allen_mouse.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/azba_zfish.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/example_mouse.py (97%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/humanatlas.py (98%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/kim_developmental_ccf_mouse.py (97%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/kim_mouse.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/mpin_zfish.py (97%) rename {bg_atlasgen => 
brainglobe_atlasapi/atlas_generation}/atlas_scripts/osten_mouse.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/perens_lsfm_mouse.py (97%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/princeton_mouse.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/template_script.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/atlas_scripts/whs_sd_rat.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/main_script.py (82%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/mesh_utils.py (99%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/metadata_utils.py (94%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/stacks.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/structure_json_to_csv.py (100%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/structures.py (95%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/validate_atlases.py (96%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/volume_utils.py (100%) rename {bg_atlasgen => brainglobe_atlasapi/atlas_generation}/wrapup.py (93%) rename tests/{test_unit => atlasgen}/test_validation.py (89%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74f2506c..3117de90 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,10 +23,3 @@ repos: rev: 24.1.1 hooks: - id: black - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.8.0 - hooks: - - id: mypy - additional_dependencies: - - types-setuptools - - types-requests diff --git a/MANIFEST.in b/MANIFEST.in index f0094ea3..4e46edeb 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,7 +7,6 @@ exclude .codecov.yml exclude .pre-commit-config.yaml graft brainglobe_atlasapi *.py -graft bg_atlasgen *.py prune tests prune tutorials diff --git a/README.md b/README.md index e52c20b4..00a18a4a 100644 --- a/README.md +++ 
b/README.md @@ -161,19 +161,20 @@ If you find the BrainGlobe Atlas API useful, please cite the paper in your work: --- -# brainglobe-atlasgen +# Atlas Generation and Adding a New Atlas -For full instructions to add a new BrainGlobe atlas, please see [here](https://brainglobe.info/documentation/bg-atlasapi/adding-a-new-atlas.html). +For full instructions to add a new BrainGlobe atlas, please see [here](https://brainglobe.info/documentation/brainglobe_atlasapi/adding-a-new-atlas.html). -This source code Utilities and scripts for the generation of cleaned-up data for the `bg-atlasapi` module. +The `brainglobe_atlasapi.atlas_generation` submodule contains code for the generation of cleaned-up data, for the main `brainglobe_atlasapi` module. +This code was previously the `bg-atlasgen` module. ## To contribute 1. Fork this repo 2. Clone your repo -3. Run `git clone https://github.com/USERNAME/bg-atlasgen` +3. Run `git clone https://github.com/brainglobe/brainglobe-atlasapi` 4. Install an editable version of the package; by running `pip install -e .` within the cloned directory -5. Create a script to package your atlas, and place into `brainglobe_atlasgen/atlas_scripts`. Please see other scripts for examples. +5. Create a script to package your atlas, and place into `brainglobe_atlasapi/atlas_generation/atlas_scripts`. Please see other scripts for examples. Your script should contain everything required to run. The raw data should be hosted on a publicly accessible repository so that anyone can run the script to recreate the atlas. 
diff --git a/bg_atlasgen/__init__.py b/brainglobe_atlasapi/atlas_generation/__init__.py similarity index 100% rename from bg_atlasgen/__init__.py rename to brainglobe_atlasapi/atlas_generation/__init__.py diff --git a/bg_atlasgen/atlas_scripts/__init__.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/__init__.py similarity index 100% rename from bg_atlasgen/atlas_scripts/__init__.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/__init__.py diff --git a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/admba_3d_dev_mouse.py similarity index 94% rename from bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/admba_3d_dev_mouse.py index 4cacdb39..de923763 100644 --- a/bg_atlasgen/atlas_scripts/admba_3d_dev_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/admba_3d_dev_mouse.py @@ -11,13 +11,16 @@ import numpy as np import pandas as pd -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track from skimage import io -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = True @@ -182,7 +185,8 @@ def create_mesh_dict(structures, meshes_dir_path): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) return meshes_dict, structures_with_mesh @@ -195,9 +199,10 @@ class AtlasConfig: species: str atlas_link: str atlas_file_url: str - #: Input orientation in 3-letter 
notation using the NumPy system with origin - #: at top left corner of first plane. Axis 0 = front to back, 1 = top to - #: bottom, 2 = left to right. Output orientation will be ASR. + #: Input orientation in 3-letter notation using the NumPy system with + #: origin at top left corner of first plane. + #: Axis 0 = front to back, 1 = top to bottom, 2 = left to right. + #: Output orientation will be ASR. orientation: str #: Resolution to match the output orientation of ASR. resolution: Tuple[float, float, float] @@ -315,7 +320,9 @@ def create_atlas( resolution=(16, 16, 20), citation="Young et al. 2021, https://doi.org/10.7554/eLife.61408", root_id=15564, - atlas_packager="Pradeep Rajasekhar, WEHI, Australia, rajasekhardotp@wehidotedudotau; David Young, UCSF, United States, davedotyoung@ucsfdotedu", + atlas_packager="Pradeep Rajasekhar, WEHI, Australia, " + "rajasekhardotp@wehidotedudotau; David Young, UCSF, " + "United States, davedotyoung@ucsfdotedu", ) # E13.5 atlas, with updated name and URLs diff --git a/bg_atlasgen/atlas_scripts/allen_cord.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_cord.py similarity index 93% rename from bg_atlasgen/atlas_scripts/allen_cord.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_cord.py index 97563573..a2c80ea9 100644 --- a/bg_atlasgen/atlas_scripts/allen_cord.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_cord.py @@ -10,20 +10,19 @@ import numpy as np import pandas as pd import tifffile - -# import sys -# sys.path.append("./") -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from loguru import logger from rich.progress import track -from bg_atlasgen.mesh_utils import ( +# import sys +# sys.path.append("./") +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, inspect_meshes_folder, ) -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from 
brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = True TEST = False @@ -121,13 +120,15 @@ def create_meshes(download_dir_path, structures, annotated_volume, root_id): nodes = list(tree.nodes.values()) if TEST: logger.info( - "Creating atlas in test mode: selecting 10 random regions for mesh creation" + "Creating atlas in test mode: selecting 10 " + "random regions for mesh creation" ) nodes = choices(nodes, k=10) if PARALLEL: print( - f"Creating {tree.size()} meshes in parallel with {mp.cpu_count() - 2} CPU cores" + f"Creating {tree.size()} meshes in parallel with " + f"{mp.cpu_count() - 2} CPU cores" ) pool = mp.Pool(mp.cpu_count() - 2) @@ -204,7 +205,8 @@ def create_mesh_dict(structures, meshes_dir_path): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) return meshes_dict, structures_with_mesh diff --git a/bg_atlasgen/atlas_scripts/allen_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_mouse.py similarity index 96% rename from bg_atlasgen/atlas_scripts/allen_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_mouse.py index 8d903bbe..cc28a2ef 100644 --- a/bg_atlasgen/atlas_scripts/allen_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/allen_mouse.py @@ -5,11 +5,11 @@ from allensdk.api.queries.ontologies_api import OntologiesApi from allensdk.api.queries.reference_space_api import ReferenceSpaceApi from allensdk.core.reference_space_cache import ReferenceSpaceCache -from bg_atlasapi import descriptors from requests import exceptions from tqdm import tqdm -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import descriptors +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data def 
create_atlas(working_dir, resolution): diff --git a/bg_atlasgen/atlas_scripts/azba_zfish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py similarity index 96% rename from bg_atlasgen/atlas_scripts/azba_zfish.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py index 6aa44252..1db22537 100644 --- a/bg_atlasgen/atlas_scripts/azba_zfish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py @@ -17,15 +17,15 @@ import numpy as np import tifffile -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from bg_atlasgen.mesh_utils import ( +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, ) -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # Disable for debugging mesh creation diff --git a/bg_atlasgen/atlas_scripts/example_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/example_mouse.py similarity index 97% rename from bg_atlasgen/atlas_scripts/example_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/example_mouse.py index 57fb25dc..447e08a4 100644 --- a/bg_atlasgen/atlas_scripts/example_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/example_mouse.py @@ -8,7 +8,7 @@ from requests import exceptions from tqdm import tqdm -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data def create_atlas(working_dir, resolution): diff --git a/bg_atlasgen/atlas_scripts/humanatlas.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py similarity index 98% rename from bg_atlasgen/atlas_scripts/humanatlas.py rename to 
brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py index 93aa6eb1..782c6b9e 100644 --- a/bg_atlasgen/atlas_scripts/humanatlas.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py @@ -8,18 +8,18 @@ import treelib import urllib3 from allensdk.core.structure_tree import StructureTree -from bg_atlasapi.structure_tree_util import get_structures_tree from brainio import brainio from rich.progress import track # import sys # sys.path.append("./") -from bg_atlasgen.mesh_utils import ( +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, inspect_meshes_folder, ) -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree def prune_tree(tree): diff --git a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py similarity index 97% rename from bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py index df309df0..c4a64f2c 100644 --- a/bg_atlasgen/atlas_scripts/kim_developmental_ccf_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py @@ -9,13 +9,16 @@ import imio import numpy as np import pandas as pd -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import 
get_structures_tree PARALLEL = True # disable parallel mesh extraction for easier debugging diff --git a/bg_atlasgen/atlas_scripts/kim_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py similarity index 96% rename from bg_atlasgen/atlas_scripts/kim_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py index 569a1039..dbe9f8a8 100644 --- a/bg_atlasgen/atlas_scripts/kim_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py @@ -10,16 +10,18 @@ import pandas as pd import tifffile from allensdk.core.reference_space_cache import ReferenceSpaceCache - -# import sys -# sys.path.append("./") -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +# import sys +# sys.path.append("./") +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # disable parallel mesh extraction for easier debugging diff --git a/bg_atlasgen/atlas_scripts/mpin_zfish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py similarity index 97% rename from bg_atlasgen/atlas_scripts/mpin_zfish.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py index a09bf4a5..9e385aa9 100644 --- a/bg_atlasgen/atlas_scripts/mpin_zfish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py @@ -8,11 +8,11 @@ import numpy as np import requests from allensdk.core.structure_tree import StructureTree -from bg_atlasapi.utils import retrieve_over_http from scipy.ndimage import binary_dilation, binary_erosion, binary_fill_holes 
from tifffile import imread -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.utils import retrieve_over_http BASE_URL = r"https://fishatlas.neuro.mpg.de" @@ -140,7 +140,7 @@ def create_atlas(working_dir, resolution): ) # meshes from the website and stacks do not have the same orientation. - # Therefore, flip axes of the stacks so that bg-space reorientation is used on + # Therefore, flip axes of the stacks so that brainglobe-space reorientation is used on # the meshes: annotation_stack = annotation_stack.swapaxes(0, 2) hemispheres_stack = hemispheres_stack.swapaxes(0, 2) diff --git a/bg_atlasgen/atlas_scripts/osten_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py similarity index 96% rename from bg_atlasgen/atlas_scripts/osten_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py index 1079fd72..a0863042 100644 --- a/bg_atlasgen/atlas_scripts/osten_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py @@ -10,13 +10,16 @@ import pandas as pd import tifffile from allensdk.core.reference_space_cache import ReferenceSpaceCache -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # disable parallel mesh extraction for easier debugging diff --git a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py 
b/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py similarity index 97% rename from bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py index 8f4b4202..3634b5d7 100644 --- a/bg_atlasgen/atlas_scripts/perens_lsfm_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py @@ -9,14 +9,16 @@ import numpy as np import pandas as pd import SimpleITK as sitk - -# from allensdk.core.reference_space_cache import ReferenceSpaceCache -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +# from allensdk.core.reference_space_cache import ReferenceSpaceCache +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False # disable parallel mesh extraction for easier debugging diff --git a/bg_atlasgen/atlas_scripts/princeton_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py similarity index 96% rename from bg_atlasgen/atlas_scripts/princeton_mouse.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py index fa5f8ccd..22167cde 100644 --- a/bg_atlasgen/atlas_scripts/princeton_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py @@ -10,13 +10,16 @@ import numpy as np import pandas as pd import tifffile -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track from scipy.ndimage import zoom -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from 
bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = False diff --git a/bg_atlasgen/atlas_scripts/template_script.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/template_script.py similarity index 96% rename from bg_atlasgen/atlas_scripts/template_script.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/template_script.py index 3a967ff7..9b9229a7 100644 --- a/bg_atlasgen/atlas_scripts/template_script.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/template_script.py @@ -4,7 +4,7 @@ __version__ = "0" # will be used to set minor version of the atlas -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data def create_atlas(working_dir, resolution): diff --git a/bg_atlasgen/atlas_scripts/whs_sd_rat.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py similarity index 96% rename from bg_atlasgen/atlas_scripts/whs_sd_rat.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py index 9fe976d2..3c5ed805 100644 --- a/bg_atlasgen/atlas_scripts/whs_sd_rat.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py @@ -9,12 +9,15 @@ import imio import numpy as np import xmltodict -from bg_atlasapi import utils -from bg_atlasapi.structure_tree_util import get_structures_tree from rich.progress import track -from bg_atlasgen.mesh_utils import Region, create_region_mesh -from bg_atlasgen.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi import utils +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import 
wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree PARALLEL = True diff --git a/bg_atlasgen/main_script.py b/brainglobe_atlasapi/atlas_generation/main_script.py similarity index 82% rename from bg_atlasgen/main_script.py rename to brainglobe_atlasapi/atlas_generation/main_script.py index a9e1ffad..6eb010b7 100644 --- a/bg_atlasgen/main_script.py +++ b/brainglobe_atlasapi/atlas_generation/main_script.py @@ -6,11 +6,14 @@ from importlib import import_module from pathlib import Path -from bg_atlasapi.utils import atlas_name_from_repr, atlas_repr_from_name +import brainglobe_atlasapi.atlas_generation from git import Repo from git.exc import GitCommandError -import bg_atlasgen +from brainglobe_atlasapi.utils import ( + atlas_name_from_repr, + atlas_repr_from_name, +) # Main dictionary specifying which atlases to generate # and with which resolutions: @@ -71,7 +74,9 @@ def delete_folder(path): atlases_repr[repr.pop("name")] = repr # Major version is given by version of the atlas_gen module: - bg_atlasgen_version = bg_atlasgen.__version__ + brainglobe_atlasapi.atlas_generation_version = ( + brainglobe_atlasapi.atlas_generation.__version__ + ) # Path to the scripts to generate the atlases: atlas_gen_path = Path(__file__).parent @@ -81,11 +86,16 @@ def delete_folder(path): commit_log = "Updated: " for name, resolutions in GENERATION_DICT.items(): status = atlases_repr[name] - module = import_module(f"bg_atlasgen.atlas_scripts.{name}") + module = import_module( + f"brainglobe_atlasapi.atlas_generation.atlas_scripts.{name}" + ) script_version = module.__version__ - if bg_atlasgen_version > status["major_vers"] or ( - bg_atlasgen_version == status["major_vers"] + if brainglobe_atlasapi.atlas_generation_version > status[ + "major_vers" + ] or ( + brainglobe_atlasapi.atlas_generation_version + == status["major_vers"] and script_version > status["minor_vers"] ): # Loop over all resolutions: @@ -105,8 +115,8 @@ def delete_folder(path): 
# Update config file with new version: k = atlas_name_from_repr(name, resolution) - conf["atlases"][k] = str( - f"{bg_atlasgen_version}.{script_version}" + conf["brainglobe_atlasapi.atlas_generation"] = str( + f"{brainglobe_atlasapi.atlas_generation_version}.{script_version}" ) with open(repo_path / "last_versions.conf", "w") as f: conf.write(f) diff --git a/bg_atlasgen/mesh_utils.py b/brainglobe_atlasapi/atlas_generation/mesh_utils.py similarity index 99% rename from bg_atlasgen/mesh_utils.py rename to brainglobe_atlasapi/atlas_generation/mesh_utils.py index 2320edf7..d700aa73 100644 --- a/bg_atlasgen/mesh_utils.py +++ b/brainglobe_atlasapi/atlas_generation/mesh_utils.py @@ -21,7 +21,9 @@ import scipy from loguru import logger -from bg_atlasgen.volume_utils import create_masked_array +from brainglobe_atlasapi.atlas_generation.volume_utils import ( + create_masked_array, +) # ---------------------------------------------------------------------------- # # MESH CREATION # diff --git a/bg_atlasgen/metadata_utils.py b/brainglobe_atlasapi/atlas_generation/metadata_utils.py similarity index 94% rename from bg_atlasgen/metadata_utils.py rename to brainglobe_atlasapi/atlas_generation/metadata_utils.py index 57b125ca..66b84487 100644 --- a/bg_atlasgen/metadata_utils.py +++ b/brainglobe_atlasapi/atlas_generation/metadata_utils.py @@ -9,11 +9,13 @@ from datetime import datetime import requests -from bg_atlasapi import descriptors -from bg_atlasapi.structure_tree_util import get_structures_tree from requests.exceptions import ConnectionError, InvalidURL, MissingSchema -from bg_atlasgen.structure_json_to_csv import convert_structure_json_to_csv +from brainglobe_atlasapi import descriptors +from brainglobe_atlasapi.atlas_generation.structure_json_to_csv import ( + convert_structure_json_to_csv, +) +from brainglobe_atlasapi.structure_tree_util import get_structures_tree def generate_metadata_dict( diff --git a/bg_atlasgen/stacks.py 
b/brainglobe_atlasapi/atlas_generation/stacks.py similarity index 96% rename from bg_atlasgen/stacks.py rename to brainglobe_atlasapi/atlas_generation/stacks.py index 34c5e985..72bc630b 100644 --- a/bg_atlasgen/stacks.py +++ b/brainglobe_atlasapi/atlas_generation/stacks.py @@ -1,5 +1,6 @@ import tifffile -from bg_atlasapi import descriptors + +from brainglobe_atlasapi import descriptors def write_stack(stack, filename): diff --git a/bg_atlasgen/structure_json_to_csv.py b/brainglobe_atlasapi/atlas_generation/structure_json_to_csv.py similarity index 100% rename from bg_atlasgen/structure_json_to_csv.py rename to brainglobe_atlasapi/atlas_generation/structure_json_to_csv.py diff --git a/bg_atlasgen/structures.py b/brainglobe_atlasapi/atlas_generation/structures.py similarity index 95% rename from bg_atlasgen/structures.py rename to brainglobe_atlasapi/atlas_generation/structures.py index 4266988a..e4cff4b9 100644 --- a/bg_atlasgen/structures.py +++ b/brainglobe_atlasapi/atlas_generation/structures.py @@ -1,5 +1,5 @@ -from bg_atlasapi.descriptors import STRUCTURE_TEMPLATE as STEMPLATE -from bg_atlasapi.structure_tree_util import get_structures_tree +from brainglobe_atlasapi.descriptors import STRUCTURE_TEMPLATE as STEMPLATE +from brainglobe_atlasapi.structure_tree_util import get_structures_tree def check_struct_consistency(structures): diff --git a/bg_atlasgen/validate_atlases.py b/brainglobe_atlasapi/atlas_generation/validate_atlases.py similarity index 96% rename from bg_atlasgen/validate_atlases.py rename to brainglobe_atlasapi/atlas_generation/validate_atlases.py index b9568bfc..c4c4e232 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/brainglobe_atlasapi/atlas_generation/validate_atlases.py @@ -5,14 +5,15 @@ from pathlib import Path import numpy as np -from bg_atlasapi import BrainGlobeAtlas -from bg_atlasapi.config import get_brainglobe_dir -from bg_atlasapi.list_atlases import ( + +from brainglobe_atlasapi import BrainGlobeAtlas +from 
brainglobe_atlasapi.config import get_brainglobe_dir +from brainglobe_atlasapi.list_atlases import ( get_all_atlases_lastversions, get_atlases_lastversions, get_local_atlas_version, ) -from bg_atlasapi.update_atlases import update_atlas +from brainglobe_atlasapi.update_atlases import update_atlas def validate_atlas_files(atlas: BrainGlobeAtlas): diff --git a/bg_atlasgen/volume_utils.py b/brainglobe_atlasapi/atlas_generation/volume_utils.py similarity index 100% rename from bg_atlasgen/volume_utils.py rename to brainglobe_atlasapi/atlas_generation/volume_utils.py diff --git a/bg_atlasgen/wrapup.py b/brainglobe_atlasapi/atlas_generation/wrapup.py similarity index 93% rename from bg_atlasgen/wrapup.py rename to brainglobe_atlasapi/atlas_generation/wrapup.py index c82d1b3e..9dcc91ba 100644 --- a/bg_atlasgen/wrapup.py +++ b/brainglobe_atlasapi/atlas_generation/wrapup.py @@ -3,28 +3,30 @@ import tarfile from pathlib import Path -import bg_space as bgs +import brainglobe_space as bgs import meshio as mio import tifffile -from bg_atlasapi import descriptors -from bg_atlasapi.utils import atlas_name_from_repr -import bg_atlasgen -from bg_atlasgen.metadata_utils import ( +import brainglobe_atlasapi.atlas_generation +from brainglobe_atlasapi import descriptors +from brainglobe_atlasapi.atlas_generation.metadata_utils import ( create_metadata_files, generate_metadata_dict, ) -from bg_atlasgen.stacks import ( +from brainglobe_atlasapi.atlas_generation.stacks import ( save_annotation, save_hemispheres, save_reference, save_secondary_reference, ) -from bg_atlasgen.structures import check_struct_consistency +from brainglobe_atlasapi.atlas_generation.structures import ( + check_struct_consistency, +) +from brainglobe_atlasapi.utils import atlas_name_from_repr # This should be changed every time we make changes in the atlas # structure: -ATLAS_VERSION = bg_atlasgen.__version__ +ATLAS_VERSION = brainglobe_atlasapi.atlas_generation.__version__ def wrapup_atlas_from_data( diff --git 
a/tests/test_unit/test_validation.py b/tests/atlasgen/test_validation.py similarity index 89% rename from tests/test_unit/test_validation.py rename to tests/atlasgen/test_validation.py index 5fcf9f09..9a513ce0 100644 --- a/tests/test_unit/test_validation.py +++ b/tests/atlasgen/test_validation.py @@ -2,14 +2,14 @@ import numpy as np import pytest -from bg_atlasapi import BrainGlobeAtlas -from bg_atlasapi.config import get_brainglobe_dir -from bg_atlasgen.validate_atlases import ( +from brainglobe_atlasapi import BrainGlobeAtlas +from brainglobe_atlasapi.atlas_generation.validate_atlases import ( _assert_close, validate_atlas_files, validate_mesh_matches_image_extents, ) +from brainglobe_atlasapi.config import get_brainglobe_dir @pytest.fixture @@ -41,7 +41,7 @@ def test_validate_mesh_matches_image_extents(atlas): def test_validate_mesh_matches_image_extents_negative(mocker, atlas): flipped_annotation_image = np.transpose(atlas.annotation) mocker.patch( - "bg_atlasapi.BrainGlobeAtlas.annotation", + "brainglobe_atlasapi.BrainGlobeAtlas.annotation", new_callable=mocker.PropertyMock, return_value=flipped_annotation_image, ) From ab6432dac81fca6d4facde833cb8624ed4cbaab6 Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Wed, 14 Feb 2024 11:16:29 +0000 Subject: [PATCH 100/103] pre-commit pass --- .../atlas_scripts/azba_zfish.py | 25 ++- .../atlas_scripts/humanatlas.py | 204 +++--------------- .../kim_developmental_ccf_mouse.py | 74 ++++--- .../atlas_scripts/kim_mouse.py | 24 ++- .../atlas_scripts/mpin_zfish.py | 27 ++- .../atlas_scripts/osten_mouse.py | 24 ++- .../atlas_scripts/perens_lsfm_mouse.py | 35 +-- .../atlas_scripts/princeton_mouse.py | 6 +- .../atlas_scripts/whs_sd_rat.py | 6 +- .../atlas_generation/main_script.py | 2 +- .../atlas_generation/mesh_utils.py | 41 ++-- .../atlas_generation/structures.py | 3 +- .../atlas_generation/validate_atlases.py | 24 ++- .../atlas_generation/volume_utils.py | 19 +- .../atlas_generation/wrapup.py | 33 +-- 
15 files changed, 238 insertions(+), 309 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py index 1db22537..b2e021db 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/azba_zfish.py @@ -1,10 +1,10 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- """ -Script to generate a Brainglobe compatible atlas object for the Adult Zebrafish Brain Atlas (AZBA) +Script to generate a Brainglobe compatible atlas object +for the Adult Zebrafish Brain Atlas (AZBA) @author: Kailyn Fields, kailyn.fields@wayne.edu - """ __version__ = "1" @@ -70,7 +70,8 @@ def create_atlas(working_dir, resolution): meshes_dir_path = atlas_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) - # adding topro image as additional reference file, main reference file is autofl + # adding topro image as additional reference file, + # main reference file is autofl topro = tifffile.imread(reference_topro) ADDITIONAL_REFERENCES = {"TO-PRO": topro} @@ -86,7 +87,8 @@ def create_atlas(working_dir, resolution): for row in zfishDictReader: hierarchy.append(row) - # make string to int and list of int conversions in 'id', 'structure_id_path', and 'rgb_triplet' key values + # make string to int and list of int conversions in + # 'id', 'structure_id_path', and 'rgb_triplet' key values for i in range(0, len(hierarchy)): hierarchy[i]["id"] = int(hierarchy[i]["id"]) for j in range(0, len(hierarchy)): @@ -101,8 +103,10 @@ def create_atlas(working_dir, resolution): except ValueError: hierarchy[k]["rgb_triplet"] = [255, 255, 255] - # remove clear label (id 0) from hierarchy. ITK-Snap uses this to label unlabeled areas, but this convention - # interferes with the root mask generation and is unnecessary for this application + # remove clear label (id 0) from hierarchy. 
+ # ITK-Snap uses this to label unlabeled areas, + # but this convention interferes with the root mask generation + # and is unnecessary for this application hierarchy.remove(hierarchy[1]) # use tifffile to read annotated file @@ -111,7 +115,8 @@ def create_atlas(working_dir, resolution): print(f"Saving atlas data at {atlas_path}") tree = get_structures_tree(hierarchy) print( - f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + f"Number of brain regions: {tree.size()}, " + f"max tree depth: {tree.depth()}" ) # generate binary mask for mesh creation @@ -201,10 +206,12 @@ def create_atlas(working_dir, resolution): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # import reference file with tifffile so it can be read in wrapup_atlas_from_data + # import reference file with tifffile so + # it can be read in wrapup_atlas_from_data reference = tifffile.imread(reference_file) # inspect_meshes_folder(meshes_dir_path) # wrap up atlas file diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py index 782c6b9e..a575bfa3 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/humanatlas.py @@ -57,9 +57,9 @@ def prune_tree(tree): PARALLEL = False # disable parallel mesh extraction for easier debugging TEST = False - # ---------------------------------------------------------------------------- # - # PREP METADATA # - # ---------------------------------------------------------------------------- # + # ----------------- # + # PREP METADATA # + # ----------------- # RES_UM = 500 VERSION = 1 ATLAS_NAME = "allen_human" @@ -68,12 +68,13 @@ def prune_tree(tree): CITATION = "Ding et al 2016, https://doi.org/10.1002/cne.24080" ORIENTATION = "ipr" - 
# ---------------------------------------------------------------------------- # - # PREP FILEPATHS # - # ---------------------------------------------------------------------------- # + # ------------------ # + # PREP FILEPATHS # + # ------------------ # data_fld = Path( - r"D:\Dropbox (UCL - SWC)\Rotation_vte\Anatomy\Atlases\atlasesforbrainrender\AllenHuman" + r"D:\Dropbox (UCL - SWC)\Rotation_vte\Anatomy" + r"\Atlases\atlasesforbrainrender\AllenHuman" ) annotations_image = data_fld / "annotation.nii" @@ -95,9 +96,9 @@ def prune_tree(tree): uncompr_atlas_path = temp_path / ATLAS_NAME uncompr_atlas_path.mkdir(exist_ok=True) - # ---------------------------------------------------------------------------- # - # GET TEMPLATE # - # ---------------------------------------------------------------------------- # + # ---------------- # + # GET TEMPLATE # + # ---------------- # annotation = brainio.load_any(annotations_image) # shape (394, 466, 378) anatomy = brainio.load_any(anatomy_image) # shape (394, 466, 378) @@ -107,9 +108,9 @@ def prune_tree(tree): # show(Volume(root_annotation), axes=1) - # ---------------------------------------------------------------------------- # - # STRUCTURES HIERARCHY # - # ---------------------------------------------------------------------------- # + # ------------------------ # + # STRUCTURES HIERARCHY # + # ------------------------ # # Download structure tree ######################### @@ -125,7 +126,8 @@ def prune_tree(tree): data = json.loads(r.data.decode("utf-8"))["msg"] structures = pd.read_json(json.dumps(data)) - # Create empty list and collect all regions traversing the regions hierarchy: + # Create empty list and collect all regions + # traversing the regions hierarchy: regions_list = [] for i, region in structures.iterrows(): @@ -149,16 +151,17 @@ def prune_tree(tree): ) ROOT_ID = regions_list[0]["id"] - # ---------------------------------------------------------------------------- # - # CREATE MESHES # - # 
---------------------------------------------------------------------------- # + # ----------------- # + # CREATE MESHES # + # ----------------- # print(f"Saving atlas data at {uncompr_atlas_path}") meshes_dir_path = uncompr_atlas_path / "meshes" meshes_dir_path.mkdir(exist_ok=True) tree = get_structures_tree(regions_list) print( - f"Number of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + f"Number of brain regions: {tree.size()}, " + f"max tree depth: {tree.depth()}" ) # Mark which tree elements are in the annotation volume @@ -177,7 +180,8 @@ def prune_tree(tree): # Remove nodes for which no mesh can be created tree = prune_tree(tree) print( - f"After pruning: # of brain regions: {tree.size()}, max tree depth: {tree.depth()}" + f"After pruning: # of brain regions: {tree.size()}, " + f"max tree depth: {tree.depth()}" ) # Mesh creation @@ -209,7 +213,8 @@ def prune_tree(tree): ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # error with returning results from pool.map but we don't care + pass else: print("Starting mesh creation") @@ -267,12 +272,13 @@ def prune_tree(tree): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # ---------------------------------------------------------------------------- # - # WRAP UP # - # ---------------------------------------------------------------------------- # + # ----------- # + # WRAP UP # + # ----------- # # Wrap up, compress, and remove file: print("Finalising atlas") @@ -294,153 +300,3 @@ def prune_tree(tree): cleanup_files=False, compress=True, ) - - -# ---------------------------------------------------------------------------- # -# OLD CODE # -# ---------------------------------------------------------------------------- # - -# # Create meshes -# ############### -# meshes_dir = 
uncompr_atlas_path / descriptors.MESHES_DIRNAME -# meshes_dir.mkdir(exist_ok=True) - -# unique_values, unique_counts = np.unique( -# annotation_whole, return_counts=True -# ) -# voxel_counts = dict(zip(unique_values, unique_counts)) -# if 0 in voxel_counts: -# del voxel_counts[0] -# structures.set_index("id", inplace=True) - -# # Create root first -# root = [s for s in regions_list if s["acronym"] == "root"][0] -# root_idx = root["id"] -# root_volume = volume_utils.create_masked_array( -# annotation_whole, 0, greater_than=True -# ) -# savepath = meshes_dir / f'{root["id"]}.obj' -# if not savepath.exists(): -# root_mesh = mesh_utils.extract_mesh_from_mask( -# root_volume, savepath, smooth=False, decimate=True -# ) -# else: -# root_mesh = load(str(savepath)) - -# # Asses mesh extraction quality -# # mesh_utils.compare_mesh_and_volume(root_mesh, root_volume) - -# # ? Create meshes for leaf nodes -# start = time.time() -# pool = mp.Pool(mp.cpu_count() - 2) -# try: -# pool.map( -# create_structure_mesh, -# [ -# (structures, annotation_whole, meshes_dir, a) -# for a in voxel_counts -# ], -# ) -# except mp.pool.MaybeEncodingError: -# pass # error with returning results from pool.map but we don't care -# print( -# f"Creating meshes for {len(voxel_counts)} structures took: {round(time.time() - start, 3)}s" -# ) - -# # Show which regions were represented in the annotated volume -# regions_with_mesh = [structures.loc[a, "acronym"] for a in voxel_counts] - -# tree = StructureTree(regions_list).get_structures_tree() - -# for key, node in tree.nodes.items(): -# if node.tag in regions_with_mesh: -# has_mesh = True -# else: -# has_mesh = False -# node.data = Region(has_mesh) - -# # Remove regions that are children to the ones that which -# # were represented in the volume or were -# # at least some of their children had a mesh -# tree = prune_tree(tree) - -# # ? 
extract meshes for non leaf regions -# id_to_acronym_map = {s["id"]: s["acronym"] for s in regions_list} -# voxel_to_acro = {a: structures.loc[a, "acronym"] for a in voxel_counts} -# acronym_to_voxel = {v: k for k, v in voxel_to_acro.items()} -# non_leaf_nodes = [ -# s -# for s in regions_list -# if s["acronym"] != "root" and s["id"] not in voxel_counts -# ] - -# start = time.time() -# pool = mp.Pool(mp.cpu_count() - 2) -# try: -# pool.map( -# create_nonleaf_structure_mesh, -# [ -# ( -# nonleaf, -# meshes_dir, -# regions_list, -# id_to_acronym_map, -# acronym_to_voxel, -# annotation_whole, -# ) -# for nonleaf in non_leaf_nodes -# ], -# ) -# except mp.pool.MaybeEncodingError: -# pass # error with returning results from pool.map but we don't care -# print( -# f"Creating meshes for {len(non_leaf_nodes)} structures took: {round(time.time() - start, 3)}s" -# ) - -# # ? Fill in more of the regions that don't have mesh yet -# for repeat in range(4): -# for idx, node in tree.nodes.items(): -# savepath = meshes_dir / f"{idx}.obj" -# if not savepath.exists(): -# region = [r for r in regions_list if r["id"] == idx][0] -# args = ( -# region, -# meshes_dir, -# regions_list, -# id_to_acronym_map, -# acronym_to_voxel, -# annotation_whole, -# ) -# create_nonleaf_structure_mesh(args) - -# # Update tree and check that everyone got a mesh -# for idx, node in tree.nodes.items(): -# savepath = meshes_dir / f"{idx}.obj" -# if savepath.exists(): -# node.data.has_mesh = True - -# tree.show(data_property="has_mesh") - -# print( -# f"\n\nTotal number of structures left in tree: {tree.size()} - max depth: {tree.depth()}" -# ) - -# tree_regions = [node.identifier for k, node in tree.nodes.items()] -# pruned_regions_list = [r for r in regions_list if r["id"] in tree_regions] - -# # save regions list json: -# with open(uncompr_atlas_path / descriptors.STRUCTURES_FILENAME, "w") as f: -# json.dump(pruned_regions_list, f) - -# # Wrap up, compress, and remove file: -# 
##################################### -# wrapup_atlas_from_dir( -# uncompr_atlas_path, -# CITATION, -# ATLAS_LINK, -# SPECIES, -# (RES_UM,) * 3, -# cleanup_files=False, -# compress=True, -# root=root_idx, -# ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py index c4a64f2c..a02067cf 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_developmental_ccf_mouse.py @@ -70,7 +70,10 @@ def create_atlas( ATLAS_NAME = f"kim_dev_mouse_{reference_key}" SPECIES = "Mus musculus" ATLAS_LINK = "https://data.mendeley.com/datasets/2svx788ddf/1" - CITATION = "Kim, Yongsoo (2022), “KimLabDevCCFv001”, Mendeley Data, V1, doi: 10.17632/2svx788ddf.1" + CITATION = ( + "Kim, Yongsoo (2022), “KimLabDevCCFv001”, Mendeley Data, " + "V1, doi: 10.17632/2svx788ddf.1" + ) ORIENTATION = "asl" ROOT_ID = 99999999 ANNOTATIONS_RES_UM = 10 @@ -108,9 +111,9 @@ def create_atlas( atlas_files_dir / "KimLabDevCCFv001" / "10um" / reference_filename ) - # ---------------------------------------------------------------------------- # - # GET TEMPLATE # - # ---------------------------------------------------------------------------- # + # ---------------- # + # GET TEMPLATE # + # ---------------- # # Load (and possibly downsample) annotated volume: scaling = ANNOTATIONS_RES_UM / resolution @@ -122,9 +125,9 @@ def create_atlas( annotated_volume, (scaling, scaling, scaling), order=0, prefilter=False ) - # ---------------------------------------------------------------------------- # - # STRUCTURES HIERARCHY # - # ---------------------------------------------------------------------------- # + # ------------------------ # + # STRUCTURES HIERARCHY # + # ------------------------ # # Parse region names & hierarchy df = pd.read_csv(structures_file) @@ -156,9 +159,9 @@ def create_atlas( 
with open(download_dir_path / "structures.json", "w") as f: json.dump(structures, f) - # ---------------------------------------------------------------------------- # - # Create Meshes # - # ---------------------------------------------------------------------------- # + # ----------------- # + # Create Meshes # + # ----------------- # print(f"Saving atlas data at {download_dir_path}") @@ -210,7 +213,8 @@ def create_atlas( ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # error with returning results from pool.map but we don't care + pass else: for node in track( tree.nodes.values(), @@ -256,12 +260,13 @@ def create_atlas( meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # ---------------------------------------------------------------------------- # - # WRAP UP # - # ---------------------------------------------------------------------------- # + # ----------- # + # WRAP UP # + # ----------- # # Wrap up, compress, and remove file: print("Finalising atlas") @@ -290,14 +295,16 @@ def create_atlas( if __name__ == "__main__": """ - This atlas is too large to package into a single atlas. Hence it is split - with one atlas per reference. To avoid re-generating the meshes for each creation, - the script should be run once with mesh_creation = 'generate'. This will generate - the standard template atlas with the meshes. For the rest of the references, - use mesh_creation = 'copy' and set the existing_mesh_dir_path - to the previously-generated meshes. - - Note the decimate fraction is set to 0.04 to further reduce size of this large atlas. + This atlas is too large to package into a single atlas. + Hence it is split with one atlas per reference. 
+ To avoid re-generating the meshes for each creation, + the script should be run once with mesh_creation = 'generate'. + This will generate the standard template atlas with the meshes. + For the rest of the references, use mesh_creation = 'copy', + and set the existing_mesh_dir_path to the previously-generated meshes. + + Note the decimate fraction is set to 0.04 + to further reduce size of this large atlas. """ resolution = 10 # some resolution, in microns (10, 25, 50, 100) @@ -319,13 +326,20 @@ def create_atlas( # for all other atlases additional_references = { - "idisco": "KimLabDevCCFv001_iDiscoLSFM2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_a0": "KimLabDevCCFv001_P56_MRI-a02CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_adc": "KimLabDevCCFv001_P56_MRI-adc2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_dwi": "KimLabDevCCFv001_P56_MRI-dwi2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_fa": "KimLabDevCCFv001_P56_MRI-fa2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_mtr": "KimLabDevCCFv001_P56_MRI-MTR2CCF_avgTemplate_ASL_Oriented_10um.nii.gz", - "mri_t2": "KimLabDevCCFv001_P56_MRI-T22CCF_avgTemplate_ASL_Oriented_10um.nii.gz", + "idisco": "KimLabDevCCFv001_iDiscoLSFM2CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_a0": "KimLabDevCCFv001_P56_MRI-a02CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_adc": "KimLabDevCCFv001_P56_MRI-adc2CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_dwi": "KimLabDevCCFv001_P56_MRI-dwi2CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_fa": "KimLabDevCCFv001_P56_MRI-fa2CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_mtr": "KimLabDevCCFv001_P56_MRI-MTR2CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", + "mri_t2": "KimLabDevCCFv001_P56_MRI-T22CCF_" + "avgTemplate_ASL_Oriented_10um.nii.gz", } existing_mesh_dir_path = bg_root_dir / "downloads" / "meshes" diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py 
b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py index dbe9f8a8..b1e39778 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/kim_mouse.py @@ -56,9 +56,9 @@ def create_atlas(working_dir, resolution): structures_file = atlas_files_dir / "kim_atlas" / "structures.csv" annotations_file = atlas_files_dir / "kim_atlas" / "annotation.tiff" - # ---------------------------------------------------------------------------- # - # GET TEMPLATE # - # ---------------------------------------------------------------------------- # + # ---------------- # + # GET TEMPLATE # + # ---------------- # # Load (and possibly downsample) annotated volume: scaling = ANNOTATIONS_RES_UM / resolution @@ -82,9 +82,9 @@ def create_atlas(working_dir, resolution): template_volume, _ = spacecache.get_template_volume() print("Download completed...") - # ---------------------------------------------------------------------------- # - # STRUCTURES HIERARCHY # - # ---------------------------------------------------------------------------- # + # ------------------------ # + # STRUCTURES HIERARCHY # + # ------------------------ # # Parse region names & hierarchy # ############################## @@ -156,7 +156,8 @@ def create_atlas(working_dir, resolution): ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # error with returning results from pool.map but we don't care + pass else: for node in track( tree.nodes.values(), @@ -202,12 +203,13 @@ def create_atlas(working_dir, resolution): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # ---------------------------------------------------------------------------- # - # WRAP UP # - # 
---------------------------------------------------------------------------- # + # ----------- # + # WRAP UP # + # ----------- # # Wrap up, compress, and remove file: print("Finalising atlas") diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py index 9e385aa9..a2558aa5 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/mpin_zfish.py @@ -19,7 +19,11 @@ def download_line_stack(bg_root_dir, tg_line_name): """Utility function to download a line from its name.""" - reference_url = f"{BASE_URL}/media/brain_browser/Lines/{tg_line_name}/AverageData/Tiff_File/Average_{tg_line_name}.zip" + reference_url = ( + f"{BASE_URL}/media/brain_browser/Lines/" + f"{tg_line_name}/AverageData/Tiff_File/" + f"Average_{tg_line_name}.zip" + ) out_file_path = bg_root_dir / f"{tg_line_name}.zip" retrieve_over_http(reference_url, out_file_path) with zipfile.ZipFile(out_file_path, "r") as zip_ref: @@ -140,8 +144,8 @@ def create_atlas(working_dir, resolution): ) # meshes from the website and stacks do not have the same orientation. - # Therefore, flip axes of the stacks so that brainglobe-space reorientation is used on - # the meshes: + # Therefore, flip axes of the stacks so that brainglobe-space + # reorientation is used on the meshes: annotation_stack = annotation_stack.swapaxes(0, 2) hemispheres_stack = hemispheres_stack.swapaxes(0, 2) reference_stack = reference_stack.swapaxes(0, 2) @@ -149,11 +153,13 @@ def create_atlas(working_dir, resolution): k: v.swapaxes(0, 2) for k, v in additional_references.items() } - # Improve the annotation by defining a region that encompasses the whole brain but - # not the eyes. This will be aside from the official hierarchy: + # Improve the annotation by defining a region that encompasses + # the whole brain but not the eyes. 
+ # This will be aside from the official hierarchy: BRAIN_ID = 2 # add this as not defined in the source - # Ugly padding required not to have border artefacts in the binary operations: + # Ugly padding required not to have border + # artefacts in the binary operations: shape_stack = list(annotation_stack.shape) pad = 100 @@ -194,7 +200,8 @@ def create_atlas(working_dir, resolution): "structure_id_path": [ROOT_ID], "acronym": "root", "files": { - "file_3D": "/media/Neurons_database/Brain_and_regions/Brains/Outline/Outline_new.txt" + "file_3D": "/media/Neurons_database/Brain_and_regions" + "/Brains/Outline/Outline_new.txt" }, "color": "#ffffff", } @@ -202,7 +209,8 @@ def create_atlas(working_dir, resolution): # Go through the regions hierarchy and create the structure path entry: add_path_inplace(structures_dict) - # Create empty list and collect all regions traversing the regions hierarchy: + # Create empty list and collect all regions + # traversing the regions hierarchy: structures_list = [] meshes_dict = {} collect_all_inplace( @@ -219,7 +227,8 @@ def create_atlas(working_dir, resolution): } structures_list.append(brain_struct_entry) - # Use recalculated meshes that are smoothed with Blender and uploaded in G-Node: + # Use recalculated meshes that are smoothed + # with Blender and uploaded in G-Node: for sid in [ROOT_ID, BRAIN_ID]: meshes_dict[sid] = extracted_dir / f"{sid}.stl" diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py index a0863042..d5254623 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/osten_mouse.py @@ -54,9 +54,9 @@ def create_atlas(working_dir, resolution): structures_file = atlas_files_dir / "osten_atlas" / "structures.csv" annotations_file = atlas_files_dir / "osten_atlas" / "annotation.tiff" - # 
---------------------------------------------------------------------------- # - # GET TEMPLATE # - # ---------------------------------------------------------------------------- # + # ---------------- # + # GET TEMPLATE # + # ---------------- # # Load (and possibly downsample) annotated volume: scaling = ANNOTATIONS_RES_UM / resolution @@ -80,9 +80,9 @@ def create_atlas(working_dir, resolution): template_volume, _ = spacecache.get_template_volume() print("Download completed...") - # ---------------------------------------------------------------------------- # - # STRUCTURES HIERARCHY # - # ---------------------------------------------------------------------------- # + # ------------------------ # + # STRUCTURES HIERARCHY # + # ------------------------ # # Parse region names & hierarchy # ############################## @@ -153,7 +153,8 @@ def create_atlas(working_dir, resolution): ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # error with returning results from pool.map but we don't care + pass else: for node in track( tree.nodes.values(), @@ -199,12 +200,13 @@ def create_atlas(working_dir, resolution): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # ---------------------------------------------------------------------------- # - # WRAP UP # - # ---------------------------------------------------------------------------- # + # ----------- # + # WRAP UP # + # ----------- # # Wrap up, compress, and remove file: print("Finalising atlas") diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py index 3634b5d7..0037afd9 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py +++ 
b/brainglobe_atlasapi/atlas_generation/atlas_scripts/perens_lsfm_mouse.py @@ -23,7 +23,6 @@ PARALLEL = False # disable parallel mesh extraction for easier debugging -# %% ### Additional functions ##################################################### @@ -36,11 +35,13 @@ def get_id_from_acronym(df, acronym): get_id_from_acronym(df, acronym) Args: - df (pandas dataframe) : atlas table file [see atlas.load_table()] - acronym (string or list of strings) : brain region acronym(s) + df (pandas dataframe): + atlas table file [see atlas.load_table()] + acronym (string or list of strings): brain region acronym(s) Returns: - ID (int or list of ints) : brain region ID(s) corresponding to input acronym(s) + ID (int or list of ints): + brain region ID(s) corresponding to input acronym(s) """ # create as list if necessary @@ -67,11 +68,12 @@ def get_acronym_from_id(df, ID): get_acronym_from_ID(df, acronym) Args: - df (pandas dataframe) : atlas table dataframe [see atlas.load_table()] - ID (int or list of int) : brain region ID(s) + df (pandas dataframe): atlas table dataframe [see atlas.load_table()] + ID (int or list of int): brain region ID(s) Returns: - acronym (string or list of strings) : brain region acronym(s) corresponding to input ID(s) + acronym (string or list of strings): + brain region acronym(s) corresponding to input ID(s) """ # create as list if necessary @@ -164,7 +166,6 @@ def create_atlas(working_dir, resolution): destination_path.unlink() - # structures_file = atlas_files_dir / "LSFM-mouse-brain-atlas-master" / "LSFM_atlas_files" / "ARA2_annotation_info.csv" structures_file = ( atlas_files_dir / "LSFM-mouse-brain-atlas-master" @@ -195,9 +196,9 @@ def create_atlas(working_dir, resolution): print("Download completed...") - # ---------------------------------------------------------------------------- # - # STRUCTURES HIERARCHY # - # ---------------------------------------------------------------------------- # + # ------------------------ # + # STRUCTURES 
HIERARCHY # + # ------------------------ # # Parse region names & hierarchy # ############################## @@ -269,7 +270,8 @@ def create_atlas(working_dir, resolution): ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # error with returning results from pool.map but we don't care + pass else: for node in track( tree.nodes.values(), @@ -313,12 +315,13 @@ def create_atlas(working_dir, resolution): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) - # ---------------------------------------------------------------------------- # - # WRAP UP # - # ---------------------------------------------------------------------------- # + # ----------- # + # WRAP UP # + # ----------- # # Wrap up, compress, and remove file: print("Finalising atlas") diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py index 22167cde..35c7c59b 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/princeton_mouse.py @@ -107,7 +107,8 @@ def add_parent_id(child_id): lambda x: json.loads(x) ) - # order dataframe and convert to list of dictionaries specifying parameters for each area + # order dataframe and convert to list of dictionaries + # specifying parameters for each area structures = structures[ ["acronym", "id", "name", "structure_id_path", "rgb_triplet"] ] @@ -162,7 +163,8 @@ def add_parent_id(child_id): ], ) except mp.pool.MaybeEncodingError: - pass # error with returning results from pool.map but we don't care + # Error with returning results from pool.map, but we don't care + pass else: for node in track( tree.nodes.values(), diff --git 
a/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py index 3c5ed805..dcf8a3e3 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/whs_sd_rat.py @@ -189,7 +189,8 @@ def create_mesh_dict(structures, meshes_dir_path): meshes_dict[s["id"]] = mesh_path print( - f"In the end, {len(structures_with_mesh)} structures with mesh are kept" + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" ) return meshes_dict, structures_with_mesh @@ -257,7 +258,8 @@ def create_atlas(working_dir): else: node = tree.nodes[structure["id"]] print( - f"{node.tag} not found in annotation volume, removing from list of structures..." + f"{node.tag} not found in annotation volume, " + "removing from list of structures..." ) structures = existing_structures tree = get_structures_tree(structures) diff --git a/brainglobe_atlasapi/atlas_generation/main_script.py b/brainglobe_atlasapi/atlas_generation/main_script.py index 6eb010b7..b76d47c8 100644 --- a/brainglobe_atlasapi/atlas_generation/main_script.py +++ b/brainglobe_atlasapi/atlas_generation/main_script.py @@ -6,10 +6,10 @@ from importlib import import_module from pathlib import Path -import brainglobe_atlasapi.atlas_generation from git import Repo from git.exc import GitCommandError +import brainglobe_atlasapi.atlas_generation from brainglobe_atlasapi.utils import ( atlas_name_from_repr, atlas_repr_from_name, diff --git a/brainglobe_atlasapi/atlas_generation/mesh_utils.py b/brainglobe_atlasapi/atlas_generation/mesh_utils.py index d700aa73..3f5fc3dd 100644 --- a/brainglobe_atlasapi/atlas_generation/mesh_utils.py +++ b/brainglobe_atlasapi/atlas_generation/mesh_utils.py @@ -25,9 +25,9 @@ create_masked_array, ) -# ---------------------------------------------------------------------------- # -# MESH CREATION # -# 
---------------------------------------------------------------------------- # +# ----------------- # +# MESH CREATION # +# ----------------- # def region_mask_from_annotation( @@ -96,12 +96,14 @@ def extract_mesh_from_mask( number of iterations of closing morphological operation. set to None to avoid applying morphological operations decimate_fraction: float in range [0, 1]. - What fraction of the original number of vertices is to be kept. E.g. .5 means that - 50% of the vertices are kept, the others are removed + What fraction of the original number of vertices is to be kept. + EG .5 means that 50% of the vertices are kept, + the others are removed. tol: float - parameter for decimation, larger values correspond to more aggressive decimation. - E.g. 0.02 -> points that are closer than 2% of the size of the meshe's bounding box are - identified and removed (only one is kep) + parameter for decimation, with larger values corresponding + to more aggressive decimation. + EG 0.02 -> points that are closer than 2% of the size of the mesh's + bounding box are identified and removed (only one is kept). extract_largest: bool If True only the largest region are extracted. 
It can cause issues for bilateral regions as only one will remain @@ -121,7 +123,8 @@ def extract_mesh_from_mask( # Check volume argument if np.min(volume) > 0 or np.max(volume) < 1: raise ValueError( - "Argument volume should be a binary mask with only 0s and 1s when passing a np.ndarray" + "Argument volume should be a binary mask with only " + "0s and 1s when passing a np.ndarray" ) # Apply morphological transformations @@ -137,7 +140,8 @@ def extract_mesh_from_mask( mesh = volume.clone().isosurface(value=threshold).cap() else: print( - "The marching cubes algorithm might be rotated compared to your volume data" + "The marching cubes algorithm might be rotated " + "compared to your volume data" ) # Apply marching cubes and save to .obj if mcubes_smooth: @@ -181,9 +185,11 @@ def create_region_mesh(args): meshes_dir_path: pathlib Path object with folder where meshes are saved tree: treelib.Tree with hierarchical structures information node: tree's node corresponding to the region who's mesh is being created - labels: list of unique label annotations in annotated volume (list(np.unique(annotated_volume))) + labels: list of unique label annotations in annotated volume, + (list(np.unique(annotated_volume))) annotated_volume: 3d numpy array with annotaed volume - ROOT_ID: int, id of root structure (mesh creation is a bit more refined for that) + ROOT_ID: int, + id of root structure (mesh creation is a bit more refined for that) """ # Split arguments logger.debug(f"Creating mesh for region {args[1].identifier}") @@ -241,17 +247,18 @@ def create_region_mesh(args): class Region(object): """ - Class used to add metadata to treelib.Tree during atlas creation. Using this - means that you can then filter tree nodes depending on wether or not they have a mesh/label + Class used to add metadata to treelib.Tree during atlas creation. 
+ Using this means that you can then filter tree nodes depending on + whether or not they have a mesh/label """ def __init__(self, has_label): self.has_label = has_label -# ---------------------------------------------------------------------------- # -# MESH INSPECTION # -# ---------------------------------------------------------------------------- # +# ------------------- # +# MESH INSPECTION # +# ------------------- # def compare_mesh_and_volume(mesh, volume): """ Creates and interactive vedo diff --git a/brainglobe_atlasapi/atlas_generation/structures.py b/brainglobe_atlasapi/atlas_generation/structures.py index e4cff4b9..1abb2b89 100644 --- a/brainglobe_atlasapi/atlas_generation/structures.py +++ b/brainglobe_atlasapi/atlas_generation/structures.py @@ -47,7 +47,8 @@ def get_structure_children(structures, region, use_tree=False): if "id" not in region.keys() or "structure_id_path" not in region.keys(): raise ValueError( - 'Incomplete structures dicts, need both "id" and "structure_id_path"' + "Incomplete structures dicts, " + "need both 'id' and 'structure_id_path'" ) if not use_tree: diff --git a/brainglobe_atlasapi/atlas_generation/validate_atlases.py b/brainglobe_atlasapi/atlas_generation/validate_atlases.py index c4c4e232..2c47ff78 100644 --- a/brainglobe_atlasapi/atlas_generation/validate_atlases.py +++ b/brainglobe_atlasapi/atlas_generation/validate_atlases.py @@ -44,12 +44,16 @@ def validate_atlas_files(atlas: BrainGlobeAtlas): def _assert_close(mesh_coord, annotation_coord, pixel_size, diff_tolerance=10): """ Helper function to check if the mesh and the annotation coordinate - are closer to each other than an arbitrary tolerance value times the pixel size. + are closer to each other than an arbitrary tolerance value + times the pixel size. + The default tolerance value is 10. 
""" assert abs(mesh_coord - annotation_coord) <= diff_tolerance * pixel_size, ( - f"Mesh coordinate {mesh_coord} and annotation coordinate {annotation_coord}", - f"differ by more than {diff_tolerance} times pixel size {pixel_size}", + f"Mesh coordinate {mesh_coord} and " + f"annotation coordinate {annotation_coord}", + f"differ by more than {diff_tolerance} " + f"times pixel size {pixel_size}", ) return True @@ -67,7 +71,8 @@ def validate_mesh_matches_image_extents(atlas: BrainGlobeAtlas): y_min, y_max = np.min(y_range), np.max(y_range) x_min, x_max = np.min(x_range), np.max(x_range) - # minimum and maximum values of the annotation image scaled by the atlas resolution + # minimum and maximum values of the annotation image + # scaled by the atlas resolution z_min_scaled, z_max_scaled = z_min * resolution[0], z_max * resolution[0] y_min_scaled, y_max_scaled = y_min * resolution[1], y_max * resolution[1] x_min_scaled, x_max_scaled = x_min * resolution[2], x_max * resolution[2] @@ -112,7 +117,10 @@ def check_additional_references(atlas: BrainGlobeAtlas): def validate_mesh_structure_pairs(atlas: BrainGlobeAtlas): - """Ensure mesh files (.obj) exist for each expected structure in the atlas.""" + """ + Ensure mesh files (.obj) exist for each expected structure + in the atlas. + """ ids_from_bg_atlas_api = list(atlas.structures.keys()) atlas_path = ( @@ -139,8 +147,10 @@ def validate_mesh_structure_pairs(atlas: BrainGlobeAtlas): if len(in_mesh_not_bg) or len(in_bg_not_mesh): raise AssertionError( - f"Structures with ID {in_bg_not_mesh} are in the atlas, but don't have a corresponding mesh file; " - f"Structures with IDs {in_mesh_not_bg} have a mesh file, but are not accessible through the atlas." + f"Structures with ID {in_bg_not_mesh} are in the atlas, " + "but don't have a corresponding mesh file; " + f"Structures with IDs {in_mesh_not_bg} have a mesh file, " + "but are not accessible through the atlas." 
) diff --git a/brainglobe_atlasapi/atlas_generation/volume_utils.py b/brainglobe_atlasapi/atlas_generation/volume_utils.py index a00c04e6..af4904ad 100644 --- a/brainglobe_atlasapi/atlas_generation/volume_utils.py +++ b/brainglobe_atlasapi/atlas_generation/volume_utils.py @@ -1,5 +1,6 @@ """ - Code useful for dealing with volumetric data (e.g. allen annotation volume for the mouse atlas) + Code useful for dealing with volumetric data + (e.g. allen annotation volume for the mouse atlas) extracting surfaces from volumetric data .... """ @@ -66,13 +67,17 @@ def create_masked_array(volume, label, greater_than=False): def load_labelled_volume(data, vmin=0, alpha=1, **kwargs): """ Load volume image from .nrrd file. - It assume that voxels with value = 0 are empty while voxels with values > 0 - are labelles (e.g. to indicate the location of a brain region in a reference atlas) - :param data: str, path to file with volume data or 3d numpy array - :param vmin: float, values below this numner will be assigned an alpha=0 and not be visualized - :param **kwargs: kwargs to pass to the Volume class from vedo - :param alpha: float in range [0, 1], transparency [for the part of volume with value > vmin] + Assume that voxels with value = 0 are empty while voxels with values > 0 + are labels. + (EG to indicate the location of a brain region in a reference atlas) + + :param data: str, path to file with volume data or 3d numpy array. + :param vmin: float, values below this number will be assigned an alpha=0 + and not be visualized. + :param **kwargs: kwargs to pass to the Volume class from vedo. + :param alpha: float in [0,1], + transparency [for the part of volume with value > vmin]. 
""" # Load/check volumetric data if isinstance(data, str): # load from file diff --git a/brainglobe_atlasapi/atlas_generation/wrapup.py b/brainglobe_atlasapi/atlas_generation/wrapup.py index 9dcc91ba..ef0f9f6d 100644 --- a/brainglobe_atlasapi/atlas_generation/wrapup.py +++ b/brainglobe_atlasapi/atlas_generation/wrapup.py @@ -69,36 +69,45 @@ def wrapup_atlas_from_data( resolution : tuple Three elements tuple, resolution on three axes orientation : - Orientation of the original atlas (tuple describing origin for BGSpace). + Orientation of the original atlas + (tuple describing origin for BGSpace). root_id : Id of the root element of the atlas. reference_stack : str or Path or numpy array - Reference stack for the atlas. If str or Path, will be read with tifffile. + Reference stack for the atlas. + If str or Path, will be read with tifffile. annotation_stack : str or Path or numpy array - Annotation stack for the atlas. If str or Path, will be read with tifffile. + Annotation stack for the atlas. + If str or Path, will be read with tifffile. structures_list : list of dict List of valid dictionary for structures. meshes_dict : dict - dict of meshio-compatible mesh file paths in the form {sruct_id: meshpath} + dict of meshio-compatible mesh file paths in the form + {sruct_id: meshpath} working_dir : str or Path obj Path where the atlas folder and compressed file will be generated. atlas_packager : str or None - Credit for those responsible for converting the atlas into the BrainGlobe - format. + Credit for those responsible for converting the atlas + into the BrainGlobe format. hemispheres_stack : str or Path or numpy array, optional - Hemisphere stack for the atlas. If str or Path, will be read with tifffile. - If none is provided, atlas is assumed to be symmetric + Hemisphere stack for the atlas. + If str or Path, will be read with tifffile. + If none is provided, atlas is assumed to be symmetric. 
cleanup_files : bool, optional (Default value = False) compress : bool, optional (Default value = True) scale_meshes: bool, optional - (Default values = False). If True the meshes points are scaled by the resolution - to ensure that they are specified in microns, regardless of the atlas resolution. + (Default values = False). + If True the meshes points are scaled by the resolution + to ensure that they are specified in microns, + regardless of the atlas resolution. additional_references: dict, optional - (Default value = empty dict). Dictionary with secondary reference stacks. + (Default value = empty dict). + Dictionary with secondary reference stacks. additional_metadata: dict, optional - (Default value = empty dict). Additional metadata to write to metadata.json + (Default value = empty dict). + Additional metadata to write to metadata.json """ # If no hemisphere file is given, assume the atlas is symmetric: From 76a513a1a6c53d59854ea2c6ba7ae7e04c0b364b Mon Sep 17 00:00:00 2001 From: viktorpm <50667179+viktorpm@users.noreply.github.com> Date: Wed, 21 Feb 2024 16:31:13 +0000 Subject: [PATCH 101/103] test functions for validate_mesh_structure_pairs (#114) * first test functions for validate_mesh_structure_pairs * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * adding docstring * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * writing docstrings to explain the behaviour of the test functions * validate_atlases.py: updated validate_mesh_structure_pairs function, test_validation.py: updated tests * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * spliting validate_mesh_structure_pairs function, checking mesh files and structures separately * writing tests for catch_missing_mesh_files and catch_missing_structures validation functions * creating an atlas with a missing structure to test 
catch_missing_structures function * Update tests/test_unit/test_validation.py Co-authored-by: Alessandro Felder --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Alessandro Felder --- bg_atlasgen/validate_atlases.py | 44 ++++++++++++++++++++------- tests/test_unit/test_validation.py | 48 ++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 10 deletions(-) diff --git a/bg_atlasgen/validate_atlases.py b/bg_atlasgen/validate_atlases.py index b9568bfc..6a0d7c69 100644 --- a/bg_atlasgen/validate_atlases.py +++ b/bg_atlasgen/validate_atlases.py @@ -110,8 +110,9 @@ def check_additional_references(atlas: BrainGlobeAtlas): pass -def validate_mesh_structure_pairs(atlas: BrainGlobeAtlas): - """Ensure mesh files (.obj) exist for each expected structure in the atlas.""" +def catch_missing_mesh_files(atlas: BrainGlobeAtlas): + """Checks if all the structures in the atlas have a corresponding mesh file""" + ids_from_bg_atlas_api = list(atlas.structures.keys()) atlas_path = ( @@ -126,19 +127,41 @@ def validate_mesh_structure_pairs(atlas: BrainGlobeAtlas): if file.endswith(".obj") ] - in_mesh_not_bg = [] - for id in ids_from_mesh_files: - if id not in ids_from_bg_atlas_api: - in_mesh_not_bg.append(id) - in_bg_not_mesh = [] for id in ids_from_bg_atlas_api: if id not in ids_from_mesh_files: in_bg_not_mesh.append(id) - if len(in_mesh_not_bg) or len(in_bg_not_mesh): + if len(in_bg_not_mesh) != 0: + raise AssertionError( + f"Structures with IDs {in_bg_not_mesh} are in the atlas, but don't have a corresponding mesh file." 
+ ) + + +def catch_missing_structures(atlas: BrainGlobeAtlas): + """Checks if all the mesh files in the atlas folder are listed as a structure in the atlas""" + + ids_from_bg_atlas_api = list(atlas.structures.keys()) + + atlas_path = ( + Path(get_brainglobe_dir()) + / f"{atlas.atlas_name}_v{get_local_atlas_version(atlas.atlas_name)}" + ) + obj_path = Path(atlas_path / "meshes") + + ids_from_mesh_files = [ + int(Path(file).stem) + for file in os.listdir(obj_path) + if file.endswith(".obj") + ] + + in_mesh_not_bg = [] + for id in ids_from_mesh_files: + if id not in ids_from_bg_atlas_api: + in_mesh_not_bg.append(id) + + if len(in_mesh_not_bg) != 0: raise AssertionError( - f"Structures with ID {in_bg_not_mesh} are in the atlas, but don't have a corresponding mesh file; " f"Structures with IDs {in_mesh_not_bg} have a mesh file, but are not accessible through the atlas." ) @@ -176,7 +199,8 @@ def validate_atlas(atlas_name, version, validation_functions): open_for_visual_check, validate_checksum, check_additional_references, - validate_mesh_structure_pairs, + catch_missing_mesh_files, + catch_missing_structures, ] valid_atlases = [] diff --git a/tests/test_unit/test_validation.py b/tests/test_unit/test_validation.py index 5fcf9f09..bcffe25e 100644 --- a/tests/test_unit/test_validation.py +++ b/tests/test_unit/test_validation.py @@ -7,6 +7,8 @@ from bg_atlasgen.validate_atlases import ( _assert_close, + catch_missing_mesh_files, + catch_missing_structures, validate_atlas_files, validate_mesh_matches_image_extents, ) @@ -34,6 +36,17 @@ def atlas_with_bad_reference_file(): os.rename(bad_name, good_name) +@pytest.fixture +def atlas_with_missing_structure(): + atlas = BrainGlobeAtlas("osten_mouse_100um") + modified_structures = atlas.structures.copy() + modified_structures.pop(688) + + modified_atlas = BrainGlobeAtlas("osten_mouse_100um") + modified_atlas.structures = modified_structures + return modified_atlas + + def test_validate_mesh_matches_image_extents(atlas): assert 
validate_mesh_matches_image_extents(atlas) @@ -69,3 +82,38 @@ def test_assert_close_negative(): AssertionError, match="differ by more than 10 times pixel size" ): _assert_close(99.5, 30, 2) + + +def test_catch_missing_mesh_files(atlas): + """ + Tests if catch_missing_mesh_files function raises an error, + when there is at least one structure in the atlas that doesn't have + a corresponding obj file. + + Expected behaviour: + True for "allen_mouse_10um" (structure 545 doesn't have an obj file): fails + the validation function, raises an error --> no output from this test function + """ + + with pytest.raises( + AssertionError, + match=r"Structures with IDs \[.*?\] are in the atlas, but don't have a corresponding mesh file.", + ): + catch_missing_mesh_files(atlas) + + +def test_catch_missing_structures(atlas_with_missing_structure): + """ + Tests if catch_missing_structures function raises an error, + when there is at least one orphan obj file (doesn't have a corresponding structure in the atlas) + + Expected behaviour: + Currently no atlas fails the validation function this way so the [] is always empty + --> this test function should always raise an error + """ + + with pytest.raises( + AssertionError, + match=r"Structures with IDs \[.*?\] have a mesh file, but are not accessible through the atlas.", + ): + catch_missing_structures(atlas_with_missing_structure) From c9d913683e8bf3aca5a2e64db84dd98c3a4364eb Mon Sep 17 00:00:00 2001 From: willGraham01 <1willgraham@gmail.com> Date: Fri, 23 Feb 2024 10:19:42 +0000 Subject: [PATCH 102/103] Revert "Prepare for merge and rename (#198)" This reverts commit 9d5317db83020cb0df06aee0ca2e236aa84aa2df. --- README.md | 5 ----- bg_atlasapi/__init__.py | 9 --------- pyproject.toml | 2 +- 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/README.md b/README.md index 3a0c127d..58fe0931 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,3 @@ -# This package has moved! 
- -This package has been renamed to `brainglobe-atlasapi`. -To continue receiving updates, please switch over to using [the new package](https://github.com/brainglobe/brainglobe-atlasapi). - # BG-atlasAPI [![Python Version](https://img.shields.io/pypi/pyversions/bg-atlasapi.svg)](https://pypi.org/project/bg-atlasapi) diff --git a/bg_atlasapi/__init__.py b/bg_atlasapi/__init__.py index b8433a59..30bf062a 100644 --- a/bg_atlasapi/__init__.py +++ b/bg_atlasapi/__init__.py @@ -1,12 +1,3 @@ -from warnings import warn - -warn( - "This package has been renamed. " - "To continue receiving updates, please use brainglobe-atlasapi instead of this package. " - "https://github.com/brainglobe/brainglobe-atlasapi", - DeprecationWarning, -) - from importlib.metadata import PackageNotFoundError, metadata try: diff --git a/pyproject.toml b/pyproject.toml index 0153ebc0..3d91ac3a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,5 +96,5 @@ python = [testenv] extras = dev -commands = pytest -v --color=yes --cov=bg_atlasapi --cov-report=xml -W ignore::DeprecationWarning +commands = pytest -v --color=yes --cov=bg_atlasapi --cov-report=xml """ From 33498a0d32f2970aac4e44741be98e53ba7aa0f6 Mon Sep 17 00:00:00 2001 From: Will Graham <32364977+willGraham01@users.noreply.github.com> Date: Fri, 23 Feb 2024 11:21:20 +0000 Subject: [PATCH 103/103] Update pyproject.toml Co-authored-by: Alessandro Felder --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d9ddcf32..786914d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "brainglobe-atlasapi" -description = "A lightweight python module to interact with atlases for systems neuroscience, and for generating atlases for the API." +description = "A lightweight python module to interact with and generate atlases for systems neuroscience." readme = "README.md" license = { file = "LICENSE" } authors = [