From 6ca66b2d1ac7cb8899a8a76f3111e2b9b108fe68 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Thu, 6 Jun 2024 15:48:16 +0100 Subject: [PATCH 01/11] first draft of symmetrical 50um blackcap atlas --- .../atlas_scripts/blackcap.py | 256 ++++++++++++++++++ 1 file changed, 256 insertions(+) create mode 100644 brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py new file mode 100644 index 00000000..8af6e02e --- /dev/null +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -0,0 +1,256 @@ +__version__ = "0" + +import glob as glob +import os +import time +from pathlib import Path + +import numpy as np +import pandas as pd +from brainglobe_utils.IO.image import load_nii +from rich.progress import track +from scipy import ndimage +from skimage.filters.rank import median +from skimage.morphology import ball + +from brainglobe_atlasapi.atlas_generation.mesh_utils import ( + Region, + create_region_mesh, +) +from brainglobe_atlasapi.atlas_generation.wrapup import wrapup_atlas_from_data +from brainglobe_atlasapi.structure_tree_util import get_structures_tree + + +def create_atlas(working_dir, resolution): + ATLAS_NAME = "oldenburg_blackcap" + SPECIES = "Sylvia atricapilla" + ATLAS_LINK = "https://uol.de/en/ibu/animal-navigation" + CITATION = "unpublished" + ATLAS_FILE_URL = "https://uol.de/en/ibu/animal-navigation" # noqa + ORIENTATION = "asr" + ROOT_ID = 999 + ATLAS_PACKAGER = "BrainGlobe Developers, hello@brainglobe.info" + ADDITIONAL_METADATA = {} + + # setup folder for downloading + + atlas_path = Path( + "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-50um_n-18/for_atlas/" + ) + + structures_file = atlas_path / "Label_description_BC74white_16_02_2024.txt" + annotations_file = ( + atlas_path / "sub-BC74_res-50um_labels_aligned-to-reference.nii.gz" + ) + reference_file = atlas_path / "reference_res-50um_image.nii.gz" + reference_mask_file = atlas_path / "reference_res-50um_mask-4reg.nii.gz" + meshes_dir_path = Path.home() / "blackcap-meshes" + + try: + os.mkdir(meshes_dir_path) + except FileExistsError: + "mesh folder already exists" + + # Read structures file + print("Reading structures file") + with open( + structures_file, mode="r", encoding="utf-8-sig" + ) as blackcap_file: + structure_data = pd.read_csv( + blackcap_file, + delimiter="\s+", + comment="#", + names=["IDX", "R", "G", "B", "A", "VIS", "MESH_VIS", "LABEL"], + ) + + # replace Clear Label (first row) with a root entry + structure_data["LABEL"].iloc[0] = "root" + structure_data["R"].iloc[0] = 255 + structure_data["G"].iloc[0] = 255 + structure_data["B"].iloc[0] = 255 + structure_data["IDX"].iloc[0] = 999 + + structure_data.rename(columns={"IDX": "id"}, inplace=True) + structure_data.rename(columns={"LABEL": "acronym"}, inplace=True) + structure_data["name"] = structure_data.apply( + lambda row: row["acronym"], axis=1 + ) + structure_data["rgb_triplet"] = structure_data.apply( + lambda row: [str(row["R"]), str(row["G"]), str(row["B"])], axis=1 + ) + structure_data["structure_id_path"] = structure_data.apply( + lambda row: [row["id"]] if row["id"] == 999 else [999, row["id"]], + axis=1, + ) + + # drop columns we don't need + structure_data.drop( + columns=["A", "VIS", "MESH_VIS", "R", "G", "B"], inplace=True + ) + + structure_data_list = [] + for _, row in structure_data.iterrows(): + structure_data_list.append( + { + 
"id": row["id"], + "rgb_triplet": row["rgb_triplet"], + # "parent_structure_id": row["parent_structure_id"], + "name": row["name"], + "structure_id_path": row["structure_id_path"], + "acronym": row["acronym"], + } + ) + + tree = get_structures_tree(structure_data_list) + print( + f"Number of brain regions: {tree.size()}, " + f"max tree depth: {tree.depth()}" + ) + print(tree) + print(f"Saving atlas data at {atlas_path}") + + # use tifffile to read annotated file + annotated_volume = load_nii(annotations_file, as_array=True).astype( + np.uint8 + ) + + # remove unconnected components + label_im, nb_labels = ndimage.label( + annotated_volume + ) # not to be confused with our labels + sizes = ndimage.sum(annotated_volume > 0, label_im, range(nb_labels + 1)) + mask = sizes > 1000 + annotated_volume *= mask[label_im] + + # naive forcing symmetry + extent_LR = annotated_volume.shape[2] + half_image = extent_LR // 2 + + flipped_first_half = np.flip( + annotated_volume[:, :, 0:half_image], axis=2 + ).copy() + flipped_second_half = np.flip( + annotated_volume[:, :, half_image - 1 : -1], axis=2 + ).copy() + + annotated_volume[:, :, 0:half_image] = np.minimum( + annotated_volume[:, :, 0:half_image], flipped_second_half + ) + annotated_volume[:, :, half_image - 1 : -1] = np.minimum( + annotated_volume[:, :, half_image - 1 : -1], flipped_first_half + ) + + # smooth annotations + annotated_volume = median( + annotated_volume, ball(3), mask=annotated_volume > 0 + ) + + # keep only annotations in mask + brain_mask = load_nii(reference_mask_file, as_array=True).astype(np.uint16) + annotated_volume *= brain_mask + + # rescale reference volume into int16 range + reference_volume = load_nii(reference_file, as_array=True) + dmin = np.min(reference_volume) + dmax = np.max(reference_volume) + drange = dmax - dmin + dscale = (2**16 - 1) / drange + reference_volume = (reference_volume - dmin) * dscale + reference_volume = reference_volume.astype(np.uint16) + + # generate binary mask for mesh creation + labels = np.unique(annotated_volume).astype(np.int_) + for key, node in tree.nodes.items(): + if key in labels: + is_label = True + else: + is_label = False + + node.data = Region(is_label) + + # mesh creation + closing_n_iters = 2 + decimate_fraction = 0.3 + smooth = True + + start = time.time() + + for node in track( + tree.nodes.values(), + total=tree.size(), + description="Creating meshes", + ): + create_region_mesh( + ( + meshes_dir_path, + node, + tree, + labels, + annotated_volume, + ROOT_ID, + closing_n_iters, + decimate_fraction, + smooth, + ) + ) + + print( + "Finished mesh extraction in : ", + round((time.time() - start) / 60, 2), + " minutes", + ) + + # create meshes dict + meshes_dict = dict() + structures_with_mesh = [] + for s in structure_data_list: + # check if a mesh was created + mesh_path = meshes_dir_path / f"{s['id']}.obj" + if not mesh_path.exists(): + print(f"No mesh file exists for: {s}, ignoring it.") + continue + else: + # check that the mesh actually exists and isn't empty + if mesh_path.stat().st_size < 512: + print(f"obj file for {s} is too small, ignoring it.") + continue + structures_with_mesh.append(s) + meshes_dict[s["id"]] = mesh_path + + print( + f"In the end, {len(structures_with_mesh)} " + "structures with mesh are kept" + ) + + output_filename = wrapup_atlas_from_data( + atlas_name=ATLAS_NAME, + atlas_minor_version=__version__, + citation=CITATION, + atlas_link=ATLAS_LINK, + species=SPECIES, + resolution=resolution, + orientation=ORIENTATION, + root_id=999, + 
reference_stack=reference_volume, + annotation_stack=annotated_volume, + structures_list=structure_data_list, + meshes_dict=meshes_dict, + scale_meshes=True, + working_dir=working_dir, + hemispheres_stack=None, + cleanup_files=False, + compress=True, + atlas_packager=ATLAS_PACKAGER, + additional_metadata=ADDITIONAL_METADATA, + ) + + return output_filename + + +if __name__ == "__main__": + res = 50, 50, 50 + home = str(Path.home()) + bg_root_dir = Path.home() / "bg-atlasgen" + bg_root_dir.mkdir(exist_ok=True, parents=True) + + create_atlas(bg_root_dir, res) From 64921880d46c88dbf7616ceb386d678d8b4c1772 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Thu, 18 Jul 2024 11:24:50 +0100 Subject: [PATCH 02/11] switch to 25um data --- .../atlas_generation/atlas_scripts/blackcap.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 8af6e02e..f5029592 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -35,15 +35,15 @@ def create_atlas(working_dir, resolution): # setup folder for downloading atlas_path = Path( - "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-50um_n-18/for_atlas/" + "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-25um_n-18/for_atlas/" ) structures_file = atlas_path / "Label_description_BC74white_16_02_2024.txt" annotations_file = ( - atlas_path / "sub-BC74_res-50um_labels_aligned-to-reference.nii.gz" + atlas_path / "sub-BC74_res-25um_labels_aligned-to-reference.nii.gz" ) - reference_file = atlas_path / "reference_res-50um_image.nii.gz" - reference_mask_file = atlas_path / "reference_res-50um_mask-4reg.nii.gz" + reference_file = atlas_path / "reference_res-25um_image.nii.gz" + reference_mask_file = atlas_path / "reference_res-25um_mask-4reg.nii.gz" meshes_dir_path = Path.home() / "blackcap-meshes" try: @@ -248,7 +248,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": - res = 50, 50, 50 + res = 25, 25, 25 home = str(Path.home()) bg_root_dir = Path.home() / "bg-atlasgen" bg_root_dir.mkdir(exist_ok=True, parents=True) From 3101c1027886f77f8927d0d2585f2701eaa9883e Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Mon, 4 Nov 2024 09:58:01 +0000 Subject: [PATCH 03/11] polish blackcap for Oldenburg visit --- .../atlas_generation/annotation_utils.py | 75 +++++++++ .../_combine_blackcap_annotations.py | 62 +++++++ .../_halve_blackcap_reference.py | 24 +++ .../atlas_scripts/blackcap.py | 153 +++++++----------- validate_atlas_script.py | 94 +++++++++++ 5 files changed, 312 insertions(+), 96 deletions(-) create mode 100644 brainglobe_atlasapi/atlas_generation/annotation_utils.py create mode 100644 brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py create mode 100644 brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py create mode 100644 validate_atlas_script.py diff --git a/brainglobe_atlasapi/atlas_generation/annotation_utils.py b/brainglobe_atlasapi/atlas_generation/annotation_utils.py new file mode 100644 index 00000000..e6ad6671 --- /dev/null +++ b/brainglobe_atlasapi/atlas_generation/annotation_utils.py @@ -0,0 +1,75 @@ +"""Helper functions to read annotation metadata from common formats""" + +from pathlib import Path + + +def split_label_text(name: str) 
-> str: + if name.endswith(")"): + name, acronym = name.split("(") + name = name[:-1] # ignore trailing space + acronym = acronym[:-1] # ignore trailing ) + else: + acronym = name[0] + return name, acronym + + +def read_itk_labels(path: Path) -> dict: + labels = [] + with open(path) as labels_file: + for line in labels_file: + if not line.startswith("#"): + raw_values = line.split(maxsplit=7) + id = int(raw_values[0]) + rgb = tuple((int(r) for r in raw_values[1:4])) + if raw_values[7][-1] == "\n": + raw_values[7] = raw_values[7][:-1] + label_text = raw_values[7][1:-1] + if label_text != "Clear Label": + name, acronym = split_label_text(label_text) + labels.append( + { + "id": id, + "name": name, + "rgb_triplet": rgb, + "acronym": acronym, + } + ) + return labels + + +ITK_SNAP_HEADER = """################################################ +# ITK-SnAP Label Description File +# File format: +# IDX -R- -G- -B- -A-- VIS MSH LABEL +# Fields: +# IDX: Zero-based index +# -R-: Red color component (0..255) +# -G-: Green color component (0..255) +# -B-: Blue color component (0..255) +# -A-: Label transparency (0.00 .. 1.00) +# VIS: Label visibility (0 or 1) +# IDX: Label mesh visibility (0 or 1) +# LABEL: Label description +################################################ +""" + +ITK_CLEAR_LABEL = '0 0 0 0 0 0 0 "Clear Label"\n' + + +def write_itk_labels(path: Path, labels): + with open(path, "w") as labels_file: + labels_file.write(ITK_SNAP_HEADER) + labels_file.write(ITK_CLEAR_LABEL) + for label in labels: + labels_file.write( + f"{label['id']} {label['rgb_triplet'][0]} {label['rgb_triplet'][1]} {label['rgb_triplet'][2]} 1.00 1 1 \"{label['name'] + ' (' + label['acronym']+')'}\"\n" + ) + + +if __name__ == "__main__": + path = Path.home() / "Downloads" / "corrected_LabelMainBrainAreas_SW.txt" + labels = read_itk_labels(path) + [print(label) for label in labels] + write_itk_labels( + Path.home() / "Downloads" / "test-writing.txt", labels=labels + ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py new file mode 100644 index 00000000..4fa8ceda --- /dev/null +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py @@ -0,0 +1,62 @@ +import csv +from pathlib import Path + +import numpy as np +from brainglobe_utils.IO.image import load_nii, save_any + +from brainglobe_atlasapi.atlas_generation.annotation_utils import ( + read_itk_labels, + write_itk_labels, +) + +if __name__ == "__main__": + # setup paths + annotations_root = Path( + "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-25um_n-18_average-trimean/for_atlas/annotations-right-hemisphere/" + ) + main_annotation_path = ( + annotations_root / "corrected_AnnotationsMainBrainAreas_SW.nii.gz" + ) + small_annotation_path = annotations_root / "corrected_smallareas_SW.nii.gz" + main_labels_path = ( + annotations_root / "corrected_LabelMainBrainAreas_SW.txt" + ) + small_labels_path = annotations_root / "corrected_smallareas_SW.txt" + small_to_main_csv = annotations_root / "hierarchy_annotat1_annotat2.csv" + + # combine annotation images + main_annotation_image = load_nii(main_annotation_path, as_array=True) + small_annotation_image = load_nii(small_annotation_path, as_array=True) + small_annotation_image *= ( + 10 # avoid label clashes, there are 6 main labels + ) + small_annotation_image = small_annotation_image.astype(np.uint8) + 
combined_annotation_image = main_annotation_image.copy() + + small_to_main_map = {} + with open(small_to_main_csv, mode="r") as file: + reader = csv.reader(file) + next(reader) # Skip the header + for row in reader: + print(row) + if row[3]: + small_to_main_map[10 * int(row[3])] = int(row[1]) + for small, main in small_to_main_map.items(): + is_main_value = main_annotation_image == main + is_small_value = small_annotation_image == small + combined_indices = is_main_value * is_small_value + combined_annotation_image[combined_indices] = small + save_any( + combined_annotation_image, + annotations_root / "combined_annotations.nii.gz", + ) + + # combine label data + main_annotation_labels = read_itk_labels(main_labels_path) + small_annotation_labels = read_itk_labels(small_labels_path) + for label in small_annotation_labels: + label["id"] = int(10 * label["id"]) + combined_labels = main_annotation_labels + small_annotation_labels + write_itk_labels( + annotations_root / "combined_label_descriptions.txt", combined_labels + ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py new file mode 100644 index 00000000..5dd7d40c --- /dev/null +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py @@ -0,0 +1,24 @@ +from pathlib import Path + +from brainglobe_utils.IO.image import load_nii, save_any + +# note that this messes with the orientation in the nifti header +# so we need to manually correct with ITK snap! + +if __name__ == "__main__": + # make hemi-template + template_root = Path( + "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-25um_n-18_average-trimean/for_atlas/" + ) + template_path = template_root / "reference_res-25um_image.nii.gz" + # rescale reference volume into int16 range + reference_volume = load_nii(template_path, as_array=True) + + extent_LR = reference_volume.shape[2] + half_image = extent_LR // 2 + + right_hemi_template = reference_volume[:, :, 0:half_image] + save_any( + right_hemi_template, + template_root / "reference_res-25_hemi-right_image.nii.gz", + ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index f5029592..7e45b6c2 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -1,18 +1,18 @@ __version__ = "0" +import csv import glob as glob import os import time from pathlib import Path import numpy as np -import pandas as pd from brainglobe_utils.IO.image import load_nii from rich.progress import track -from scipy import ndimage -from skimage.filters.rank import median -from skimage.morphology import ball +from brainglobe_atlasapi.atlas_generation.annotation_utils import ( + read_itk_labels, +) from brainglobe_atlasapi.atlas_generation.mesh_utils import ( Region, create_region_mesh, @@ -34,16 +34,13 @@ def create_atlas(working_dir, resolution): # setup folder for downloading - atlas_path = Path( - "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-25um_n-18/for_atlas/" - ) + atlas_path = Path.home() / "data/for_atlas/" - structures_file = atlas_path / "Label_description_BC74white_16_02_2024.txt" + structures_file = atlas_path / "sw/Label_definitions_1124_SW.txt" annotations_file = ( - atlas_path / 
"sub-BC74_res-25um_labels_aligned-to-reference.nii.gz" + atlas_path / "sw/combined_annotations_sw_555v_smooth_cleaned.nii.gz" ) - reference_file = atlas_path / "reference_res-25um_image.nii.gz" - reference_mask_file = atlas_path / "reference_res-25um_mask-4reg.nii.gz" + reference_file = atlas_path / "sw/reference_res-25_hemi-right_image.nii.gz" meshes_dir_path = Path.home() / "blackcap-meshes" try: @@ -51,56 +48,40 @@ def create_atlas(working_dir, resolution): except FileExistsError: "mesh folder already exists" - # Read structures file - print("Reading structures file") - with open( - structures_file, mode="r", encoding="utf-8-sig" - ) as blackcap_file: - structure_data = pd.read_csv( - blackcap_file, - delimiter="\s+", - comment="#", - names=["IDX", "R", "G", "B", "A", "VIS", "MESH_VIS", "LABEL"], - ) - - # replace Clear Label (first row) with a root entry - structure_data["LABEL"].iloc[0] = "root" - structure_data["R"].iloc[0] = 255 - structure_data["G"].iloc[0] = 255 - structure_data["B"].iloc[0] = 255 - structure_data["IDX"].iloc[0] = 999 - - structure_data.rename(columns={"IDX": "id"}, inplace=True) - structure_data.rename(columns={"LABEL": "acronym"}, inplace=True) - structure_data["name"] = structure_data.apply( - lambda row: row["acronym"], axis=1 - ) - structure_data["rgb_triplet"] = structure_data.apply( - lambda row: [str(row["R"]), str(row["G"]), str(row["B"])], axis=1 - ) - structure_data["structure_id_path"] = structure_data.apply( - lambda row: [row["id"]] if row["id"] == 999 else [999, row["id"]], - axis=1, - ) - - # drop columns we don't need - structure_data.drop( - columns=["A", "VIS", "MESH_VIS", "R", "G", "B"], inplace=True + print("Reading structures files") + hierarchy_path = atlas_path / "sw/hierarchy_1124_SW.csv" + small_to_main_map = {} + with open(hierarchy_path, mode="r") as file: + reader = csv.reader(file) + next(reader) # Skip the header + for row in reader: + print(row) + if row[3]: + small_to_main_map[int(row[3])] = int(row[1]) + + structure_data_list = read_itk_labels(structures_file) + for structure in structure_data_list: + structure_id = structure["id"] + if structure_id in small_to_main_map.keys(): + structure["structure_id_path"] = [ + 999, + small_to_main_map[structure_id], + structure_id, + ] + else: + structure["structure_id_path"] = [999, structure_id] + + # append root structure + structure_data_list.append( + { + "id": 999, + "name": "root", + "acronym": "root", + "structure_id_path": [999], + "rgb_triplet": [255, 255, 255], + } ) - structure_data_list = [] - for _, row in structure_data.iterrows(): - structure_data_list.append( - { - "id": row["id"], - "rgb_triplet": row["rgb_triplet"], - # "parent_structure_id": row["parent_structure_id"], - "name": row["name"], - "structure_id_path": row["structure_id_path"], - "acronym": row["acronym"], - } - ) - tree = get_structures_tree(structure_data_list) print( f"Number of brain regions: {tree.size()}, " @@ -113,42 +94,12 @@ def create_atlas(working_dir, resolution): annotated_volume = load_nii(annotations_file, as_array=True).astype( np.uint8 ) - - # remove unconnected components - label_im, nb_labels = ndimage.label( - annotated_volume - ) # not to be confused with our labels - sizes = ndimage.sum(annotated_volume > 0, label_im, range(nb_labels + 1)) - mask = sizes > 1000 - annotated_volume *= mask[label_im] - - # naive forcing symmetry - extent_LR = annotated_volume.shape[2] - half_image = extent_LR // 2 - - flipped_first_half = np.flip( - annotated_volume[:, :, 0:half_image], axis=2 - ).copy() - 
flipped_second_half = np.flip( - annotated_volume[:, :, half_image - 1 : -1], axis=2 - ).copy() - - annotated_volume[:, :, 0:half_image] = np.minimum( - annotated_volume[:, :, 0:half_image], flipped_second_half - ) - annotated_volume[:, :, half_image - 1 : -1] = np.minimum( - annotated_volume[:, :, half_image - 1 : -1], flipped_first_half + # Append a mirror image of the reference volume along axis 2 + mirrored_volume = np.flip(annotated_volume, axis=2) + annotated_volume = np.concatenate( + (annotated_volume, mirrored_volume), axis=2 ) - # smooth annotations - annotated_volume = median( - annotated_volume, ball(3), mask=annotated_volume > 0 - ) - - # keep only annotations in mask - brain_mask = load_nii(reference_mask_file, as_array=True).astype(np.uint16) - annotated_volume *= brain_mask - # rescale reference volume into int16 range reference_volume = load_nii(reference_file, as_array=True) dmin = np.min(reference_volume) @@ -157,11 +108,21 @@ def create_atlas(working_dir, resolution): dscale = (2**16 - 1) / drange reference_volume = (reference_volume - dmin) * dscale reference_volume = reference_volume.astype(np.uint16) + # Append a mirror image of the reference volume along axis 2 + mirrored_volume = np.flip(reference_volume, axis=2) + reference_volume = np.concatenate( + (reference_volume, mirrored_volume), axis=2 + ) + + has_label = annotated_volume > 0 + reference_volume *= has_label # generate binary mask for mesh creation labels = np.unique(annotated_volume).astype(np.int_) for key, node in tree.nodes.items(): - if key in labels: + if ( + key in labels or key == 1 + ): # Pallium == 1 needs mesh but has no own voxels is_label = True else: is_label = False @@ -170,7 +131,7 @@ def create_atlas(working_dir, resolution): # mesh creation closing_n_iters = 2 - decimate_fraction = 0.3 + decimate_fraction = 0.6 # higher = more triangles smooth = True start = time.time() @@ -250,7 +211,7 @@ def create_atlas(working_dir, resolution): if __name__ == "__main__": res = 25, 25, 25 home = str(Path.home()) - bg_root_dir = Path.home() / "bg-atlasgen" + bg_root_dir = Path.home() / "brainglobe_workingdir" bg_root_dir.mkdir(exist_ok=True, parents=True) create_atlas(bg_root_dir, res) diff --git a/validate_atlas_script.py b/validate_atlas_script.py new file mode 100644 index 00000000..cc3cbfaa --- /dev/null +++ b/validate_atlas_script.py @@ -0,0 +1,94 @@ +import shutil +from pathlib import Path + +import napari +from brainrender_napari.napari_atlas_representation import ( + NapariAtlasRepresentation, +) +from napari.viewer import Viewer + +from brainglobe_atlasapi import BrainGlobeAtlas +from brainglobe_atlasapi.atlas_generation.validate_atlases import ( + catch_missing_mesh_files, + catch_missing_structures, + open_for_visual_check, + validate_additional_references, + validate_atlas_files, + validate_checksum, + validate_image_dimensions, +) + +all_validation_functions = [ + validate_atlas_files, + # validate_mesh_matches_image_extents, + open_for_visual_check, + validate_checksum, + validate_image_dimensions, + validate_additional_references, + catch_missing_mesh_files, + catch_missing_structures, +] + + +# adapt this code block for newly packaged atlases +brainglobe_dir = Path.home() / ".brainglobe/" +working_dir = Path.home() / "brainglobe_workingdir/" +atlas_name = "oldenburg_blackcap" +resolution = 25 +minor_version = 0 + +# nothing below this needs changing + +# make sure we have the latest packaged version in .brainglobe +# by replacing it with the working_dir version if needed 
+atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" +source_dir = working_dir / atlas_name_with_version +destination_dir = brainglobe_dir / atlas_name_with_version +if destination_dir.exists() and destination_dir.is_dir(): + shutil.rmtree(destination_dir) +assert source_dir.exists(), f"{source_dir} doesn't exist" +if source_dir.exists(): + shutil.copytree(source_dir, destination_dir) +assert destination_dir.exists(), f"{destination_dir} doesn't exist" + +# run validation functions on the new atlas +atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") +validation_results = {atlas_name: []} + +for i, validation_function in enumerate(all_validation_functions): + try: + validation_function(atlas) + validation_results[atlas_name].append( + (validation_function.__name__, None, str("Pass")) + ) + except AssertionError as error: + validation_results[atlas_name].append( + (validation_function.__name__, str(error), str("Fail")) + ) + +# print validation results and open napari for a visual check +# in napari, we should see three layers: +# - the annotation +# - the reference image (visibility turned off by default) +# - the root mesh + +failed_validations = [ + (result[0], result[1]) + for result in validation_results[atlas_name] + if result[2] == "Fail" +] +if failed_validations: + print("Failed validations:") + for failed in failed_validations: + print(failed) +else: + print(f"{atlas_name} is a valid atlas") + +viewer = Viewer() +viewer.dims.ndisplay = 3 +napari_atlas = NapariAtlasRepresentation(atlas, viewer) +napari_atlas.add_structure_to_viewer("root") +napari_atlas.add_structure_to_viewer("P") +napari_atlas.add_to_viewer() + +napari.run() From 32792fff540f6d4d9b9ef8e416a57477b3cea54b Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 29 Nov 2024 17:28:23 +0000 Subject: [PATCH 04/11] polish for new detailed annotations paths now on ceph magneto areas yet to come --- .../atlas_generation/annotation_utils.py | 44 ++++++++++++++++++- .../atlas_scripts/blackcap.py | 21 ++++++--- 2 files changed, 58 insertions(+), 7 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/annotation_utils.py b/brainglobe_atlasapi/atlas_generation/annotation_utils.py index e6ad6671..715c444f 100644 --- a/brainglobe_atlasapi/atlas_generation/annotation_utils.py +++ b/brainglobe_atlasapi/atlas_generation/annotation_utils.py @@ -1,7 +1,11 @@ -"""Helper functions to read annotation metadata from common formats""" +"""Helper functions to read annotation metadata from common formats, +or manipulate annotations.""" from pathlib import Path +import numpy as np +from scipy.ndimage import generic_filter + def split_label_text(name: str) -> str: if name.endswith(")"): @@ -62,10 +66,46 @@ def write_itk_labels(path: Path, labels): labels_file.write(ITK_CLEAR_LABEL) for label in labels: labels_file.write( - f"{label['id']} {label['rgb_triplet'][0]} {label['rgb_triplet'][1]} {label['rgb_triplet'][2]} 1.00 1 1 \"{label['name'] + ' (' + label['acronym']+')'}\"\n" + f"{label['id']} " + f"{label['rgb_triplet'][0]} " + f"{label['rgb_triplet'][1]} " + f"{label['rgb_triplet'][2]} 1.00 1 1 " + f"\"{label['name'] + ' (' + label['acronym']+')'}\"\n" ) +def modal_filter_ignore_zeros(window): + """ + Compute the mode of the window ignoring zero values. 
+ """ + # Remove zeros from the window + non_zero_values = window[window != 0] + if len(non_zero_values) == 0: + return 0 # If all values are zero, return 0 + # Compute the mode (most common value) + values, counts = np.unique(non_zero_values, return_counts=True) + return values[np.argmax(counts)] + + +def apply_modal_filter(image, filter_size=3): + """ + Apply a modal filter to the image, ignoring zero neighbors. + + Parameters: + image (ndarray): Input image as a 2D NumPy array. + filter_size (int): Size of the filtering window (must be odd). + + Returns: + ndarray: Filtered image. + """ + # Apply the modal filter using a sliding window + filtered_image = generic_filter( + image, function=modal_filter_ignore_zeros, size=filter_size + ) + return filtered_image + + +# TODO turn into test if __name__ == "__main__": path = Path.home() / "Downloads" / "corrected_LabelMainBrainAreas_SW.txt" labels = read_itk_labels(path) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 7e45b6c2..5d856d34 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -34,13 +34,24 @@ def create_atlas(working_dir, resolution): # setup folder for downloading - atlas_path = Path.home() / "data/for_atlas/" + atlas_path = Path( + "/media/ceph/margrie/sweiler/AnalyzedData/" + "Bird_brain_project/black_cap/final_annotations24/" + ) - structures_file = atlas_path / "sw/Label_definitions_1124_SW.txt" + structures_file = ( + atlas_path + / "Detailed annotations_281124/DetailedLabels_281124_fix.txt" + ) annotations_file = ( - atlas_path / "sw/combined_annotations_sw_555v_smooth_cleaned.nii.gz" + atlas_path + / "Detailed annotations_281124/DetailedAnnotations_281124.nii.gz" + ) + reference_file = ( + atlas_path + / "Detailed annotations_281124" + / "reference_res-25_hemi-right_IMAGE.nii.gz" ) - reference_file = atlas_path / "sw/reference_res-25_hemi-right_image.nii.gz" meshes_dir_path = Path.home() / "blackcap-meshes" try: @@ -49,7 +60,7 @@ def create_atlas(working_dir, resolution): "mesh folder already exists" print("Reading structures files") - hierarchy_path = atlas_path / "sw/hierarchy_1124_SW.csv" + hierarchy_path = atlas_path / "hierarchy_291124_fix.csv" small_to_main_map = {} with open(hierarchy_path, mode="r") as file: reader = csv.reader(file) From 9375b1ee9efd252b1146563e1e09e6d144ce9dd7 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Mon, 2 Dec 2024 12:21:35 +0000 Subject: [PATCH 05/11] combined magneto+detailed areas, more filtering and polishing --- .../atlas_generation/annotation_utils.py | 37 +----- .../_combine_blackcap_annotations.py | 7 +- ...kcap_magneto_and_anatomical_annotations.py | 123 ++++++++++++++++++ .../atlas_scripts/blackcap.py | 51 +++++--- 4 files changed, 160 insertions(+), 58 deletions(-) create mode 100644 brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py diff --git a/brainglobe_atlasapi/atlas_generation/annotation_utils.py b/brainglobe_atlasapi/atlas_generation/annotation_utils.py index 715c444f..2cab7dba 100644 --- a/brainglobe_atlasapi/atlas_generation/annotation_utils.py +++ b/brainglobe_atlasapi/atlas_generation/annotation_utils.py @@ -1,11 +1,7 @@ -"""Helper functions to read annotation metadata from common formats, -or manipulate annotations.""" +"""Helper functions to read annotation metadata from common formats.""" from pathlib import Path 
-import numpy as np -from scipy.ndimage import generic_filter - def split_label_text(name: str) -> str: if name.endswith(")"): @@ -74,37 +70,6 @@ def write_itk_labels(path: Path, labels): ) -def modal_filter_ignore_zeros(window): - """ - Compute the mode of the window ignoring zero values. - """ - # Remove zeros from the window - non_zero_values = window[window != 0] - if len(non_zero_values) == 0: - return 0 # If all values are zero, return 0 - # Compute the mode (most common value) - values, counts = np.unique(non_zero_values, return_counts=True) - return values[np.argmax(counts)] - - -def apply_modal_filter(image, filter_size=3): - """ - Apply a modal filter to the image, ignoring zero neighbors. - - Parameters: - image (ndarray): Input image as a 2D NumPy array. - filter_size (int): Size of the filtering window (must be odd). - - Returns: - ndarray: Filtered image. - """ - # Apply the modal filter using a sliding window - filtered_image = generic_filter( - image, function=modal_filter_ignore_zeros, size=filter_size - ) - return filtered_image - - # TODO turn into test if __name__ == "__main__": path = Path.home() / "Downloads" / "corrected_LabelMainBrainAreas_SW.txt" diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py index 4fa8ceda..d0a0e478 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py @@ -1,3 +1,6 @@ +"""One-off script to reproduce how we combined the first version of +the annotations provided by the bird anatomists""" + import csv from pathlib import Path @@ -12,7 +15,9 @@ if __name__ == "__main__": # setup paths annotations_root = Path( - "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap/templates/template_sym_res-25um_n-18_average-trimean/for_atlas/annotations-right-hemisphere/" + "/media/ceph-neuroinformatics/neuroinformatics/atlas-forge/BlackCap" + / "templates/template_sym_res-25um_n-18_average-trimean/for_atlas" + / "annotations-right-hemisphere/" ) main_annotation_path = ( annotations_root / "corrected_AnnotationsMainBrainAreas_SW.nii.gz" diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py new file mode 100644 index 00000000..a5f0665f --- /dev/null +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py @@ -0,0 +1,123 @@ +"""One-off script to reproduce how we combined +the second version of the annotations provided by the bird anatomists""" + +from pathlib import Path + +from brainglobe_utils.IO.image import load_nii, save_any +from scipy.ndimage import generic_filter + + +def modal_filter_ignore_zeros(window): + """ + Compute the mode of the window ignoring zero values. + """ + # Remove zeros from the window + non_zero_values = window[window != 0] + if len(non_zero_values) == 0: + return 0 # If all values are zero, return 0 + # Compute the mode (most common value) + values, counts = np.unique(non_zero_values, return_counts=True) + return values[np.argmax(counts)] + + +def apply_modal_filter(image, filter_size=3): + """ + Apply a modal filter to the image, ignoring zero neighbors. + + Parameters: + image (ndarray): Input image as a 2D NumPy array. 
+ filter_size (int): Size of the filtering window (must be odd). + + Returns: + ndarray: Filtered image. + """ + # Apply the modal filter using a sliding window + filtered_image = generic_filter( + image, function=modal_filter_ignore_zeros, size=filter_size + ) + return filtered_image + + +if __name__ == "__main__": + atlas_path = Path( + "/media/ceph/margrie/sweiler/AnalyzedData/" + "Bird_brain_project/black_cap/final_annotations24/" + ) + + detailed_annotations_file = ( + atlas_path + / "Detailed annotations_281124/DetailedAnnotations_281124.nii.gz" + ) + + magneto_annotations_file = ( + atlas_path + / "Magnetic Brain Areas_281124/magnetic_annotation_281124.nii.gz" + ) + + # open magneto and detailed image + detailed_annotations = load_nii(detailed_annotations_file, as_array=True) + magneto_annotations = load_nii(magneto_annotations_file, as_array=True) + + # make a numpy copy of detailed image + combined_annotations = detailed_annotations.copy() + + ## pons magnetic areas + # make detailed 180 where detailed is 5 and magneto is 180 + # make detailed 190 where detailed is 5 and magneto is 190 + # make detailed 10 where detailed is 5 and magneto is 18 + # make detailed 17 where detailed is 5 and magneto is 17 + import numpy as np + + assert np.any( + np.logical_and(detailed_annotations == 5, magneto_annotations == 18) + ) + combined_annotations[ + np.logical_and(detailed_annotations == 5, magneto_annotations == 18) + ] = 18 + combined_annotations[ + np.logical_and(detailed_annotations == 5, magneto_annotations == 19) + ] = 19 + combined_annotations[ + np.logical_and(detailed_annotations == 5, magneto_annotations == 10) + ] = 10 + combined_annotations[ + np.logical_and(detailed_annotations == 5, magneto_annotations == 17) + ] = 17 + + ## Cluster N + # make detailed 305 where detailed is 30 and magneto is 5 + # make detailed 505 where detailed is 50 and magneto is 5 + # make detailed 605 where detailed is 60 and magneto is 5 + # make detailed 905 where detailed is 90 and magneto is 5 + combined_annotations[ + np.logical_and(detailed_annotations == 30, magneto_annotations == 5) + ] = 305 + combined_annotations[ + np.logical_and(detailed_annotations == 50, magneto_annotations == 5) + ] = 505 + combined_annotations[ + np.logical_and(detailed_annotations == 60, magneto_annotations == 5) + ] = 605 + combined_annotations[ + np.logical_and(detailed_annotations == 90, magneto_annotations == 5) + ] = 905 + + ## NFT + # make detailed 402 where detailed is 40 and magneto is 20 + combined_annotations[ + np.logical_and(detailed_annotations == 40, magneto_annotations == 20) + ] = 402 + + ## smooth labels preserving background-foreground boundary + filter_size = 3 + combined_annotations = apply_modal_filter( + combined_annotations, filter_size=filter_size + ) + + # save as nifti + save_any( + combined_annotations, + atlas_path + / "CombinedBrainAreas_291124" + / f"combined_annotations_modal-{filter_size}.nii.gz", + ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 5d856d34..5f7c0799 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -9,6 +9,7 @@ import numpy as np from brainglobe_utils.IO.image import load_nii from rich.progress import track +from scipy.ndimage import binary_erosion from brainglobe_atlasapi.atlas_generation.annotation_utils import ( read_itk_labels, @@ -40,16 +41,15 @@ def 
create_atlas(working_dir, resolution): ) structures_file = ( - atlas_path - / "Detailed annotations_281124/DetailedLabels_281124_fix.txt" + atlas_path / "CombinedBrainAreas_291124/combined_labels.txt" ) annotations_file = ( atlas_path - / "Detailed annotations_281124/DetailedAnnotations_281124.nii.gz" + / "CombinedBrainAreas_291124/combined_annotations_modal-3.nii.gz" ) reference_file = ( atlas_path - / "Detailed annotations_281124" + / "CombinedBrainAreas_291124" / "reference_res-25_hemi-right_IMAGE.nii.gz" ) meshes_dir_path = Path.home() / "blackcap-meshes" @@ -60,29 +60,34 @@ def create_atlas(working_dir, resolution): "mesh folder already exists" print("Reading structures files") - hierarchy_path = atlas_path / "hierarchy_291124_fix.csv" - small_to_main_map = {} + hierarchy_path = ( + atlas_path / "CombinedBrainAreas_291124/combined_structures.csv" + ) + structure_to_parent_map = {} with open(hierarchy_path, mode="r") as file: reader = csv.reader(file) next(reader) # Skip the header for row in reader: - print(row) - if row[3]: - small_to_main_map[int(row[3])] = int(row[1]) + structure_to_parent_map[int(row[1])] = [ + int(parent) for parent in row[2].split(",") + ] structure_data_list = read_itk_labels(structures_file) for structure in structure_data_list: structure_id = structure["id"] - if structure_id in small_to_main_map.keys(): - structure["structure_id_path"] = [ - 999, - small_to_main_map[structure_id], - structure_id, - ] - else: - structure["structure_id_path"] = [999, structure_id] + structure["structure_id_path"] = structure_to_parent_map[structure_id] - # append root structure + # append root and pallium structures, which don't have their own voxels + # and are therefore not in itk file + structure_data_list.append( + { + "id": 1, + "name": "Pallium", + "acronym": "P", + "structure_id_path": [999, 1], + "rgb_triplet": [0, 200, 100], + } + ) structure_data_list.append( { "id": 999, @@ -103,7 +108,7 @@ def create_atlas(working_dir, resolution): # use tifffile to read annotated file annotated_volume = load_nii(annotations_file, as_array=True).astype( - np.uint8 + np.uint16 ) # Append a mirror image of the reference volume along axis 2 mirrored_volume = np.flip(annotated_volume, axis=2) @@ -126,7 +131,11 @@ def create_atlas(working_dir, resolution): ) has_label = annotated_volume > 0 + # annotations are a bit too wide + # erode by two pixels + has_label = binary_erosion(has_label, iterations=4) reference_volume *= has_label + annotated_volume *= has_label # generate binary mask for mesh creation labels = np.unique(annotated_volume).astype(np.int_) @@ -141,9 +150,9 @@ def create_atlas(working_dir, resolution): node.data = Region(is_label) # mesh creation - closing_n_iters = 2 + closing_n_iters = 1 decimate_fraction = 0.6 # higher = more triangles - smooth = True + smooth = False start = time.time() From 191b19dff0dbf5d2c178ba8f7fc7621ae0eb5218 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Mon, 2 Dec 2024 16:33:18 +0000 Subject: [PATCH 06/11] tweaks for Dominik to double-check --- ...ckcap_magneto_and_anatomical_annotations.py | 18 +++++++++++++----- .../atlas_generation/atlas_scripts/blackcap.py | 10 +++------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py index a5f0665f..fa81150e 100644 --- 
a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py @@ -4,7 +4,7 @@ from pathlib import Path from brainglobe_utils.IO.image import load_nii, save_any -from scipy.ndimage import generic_filter +from scipy.ndimage import binary_erosion, generic_filter def modal_filter_ignore_zeros(window): @@ -95,9 +95,6 @@ def apply_modal_filter(image, filter_size=3): combined_annotations[ np.logical_and(detailed_annotations == 50, magneto_annotations == 5) ] = 505 - combined_annotations[ - np.logical_and(detailed_annotations == 60, magneto_annotations == 5) - ] = 605 combined_annotations[ np.logical_and(detailed_annotations == 90, magneto_annotations == 5) ] = 905 @@ -114,10 +111,21 @@ def apply_modal_filter(image, filter_size=3): combined_annotations, filter_size=filter_size ) + # annotations are a bit too wide + # erode by a few pixels + # but not on mid-sagittal plane! + erosions = 3 + has_label = combined_annotations > 0 + mirrored_has_label = np.flip(has_label, axis=2) + has_label = np.concatenate((has_label, mirrored_has_label), axis=2) + has_label = binary_erosion(has_label, iterations=erosions) + has_label = has_label[:, :, : has_label.shape[2] // 2] + combined_annotations *= has_label + # save as nifti save_any( combined_annotations, atlas_path / "CombinedBrainAreas_291124" - / f"combined_annotations_modal-{filter_size}.nii.gz", + / f"combined_annotations_modal-{filter_size}_eroded-{erosions}.nii.gz", ) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 5f7c0799..61ed2a29 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -9,7 +9,6 @@ import numpy as np from brainglobe_utils.IO.image import load_nii from rich.progress import track -from scipy.ndimage import binary_erosion from brainglobe_atlasapi.atlas_generation.annotation_utils import ( read_itk_labels, @@ -45,7 +44,8 @@ def create_atlas(working_dir, resolution): ) annotations_file = ( atlas_path - / "CombinedBrainAreas_291124/combined_annotations_modal-3.nii.gz" + / "CombinedBrainAreas_291124" + / "combined_annotations_modal-3_eroded-3.nii.gz" ) reference_file = ( atlas_path @@ -61,7 +61,7 @@ def create_atlas(working_dir, resolution): print("Reading structures files") hierarchy_path = ( - atlas_path / "CombinedBrainAreas_291124/combined_structures.csv" + atlas_path / "CombinedBrainAreas_291124" / "combined_structures.csv" ) structure_to_parent_map = {} with open(hierarchy_path, mode="r") as file: @@ -131,11 +131,7 @@ def create_atlas(working_dir, resolution): ) has_label = annotated_volume > 0 - # annotations are a bit too wide - # erode by two pixels - has_label = binary_erosion(has_label, iterations=4) reference_volume *= has_label - annotated_volume *= has_label # generate binary mask for mesh creation labels = np.unique(annotated_volume).astype(np.int_) From db9caeca4ce269b08d1f963f50f21c0d17855aef Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 6 Dec 2024 13:28:21 +0000 Subject: [PATCH 07/11] packaged bird with DH checked annotations --- .../atlas_scripts/blackcap.py | 22 +++++-------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py 
b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 61ed2a29..7073c917 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -36,22 +36,12 @@ def create_atlas(working_dir, resolution): atlas_path = Path( "/media/ceph/margrie/sweiler/AnalyzedData/" - "Bird_brain_project/black_cap/final_annotations24/" + "Bird_brain_project/black_cap/DH_checked/" ) - structures_file = ( - atlas_path / "CombinedBrainAreas_291124/combined_labels.txt" - ) - annotations_file = ( - atlas_path - / "CombinedBrainAreas_291124" - / "combined_annotations_modal-3_eroded-3.nii.gz" - ) - reference_file = ( - atlas_path - / "CombinedBrainAreas_291124" - / "reference_res-25_hemi-right_IMAGE.nii.gz" - ) + structures_file = atlas_path / "labels051224.txt" + annotations_file = atlas_path / "annotations_051224.nii.gz" + reference_file = atlas_path / "reference_res-25_hemi-right_IMAGE.nii.gz" meshes_dir_path = Path.home() / "blackcap-meshes" try: @@ -60,9 +50,7 @@ def create_atlas(working_dir, resolution): "mesh folder already exists" print("Reading structures files") - hierarchy_path = ( - atlas_path / "CombinedBrainAreas_291124" / "combined_structures.csv" - ) + hierarchy_path = atlas_path / "combined_structures.csv" structure_to_parent_map = {} with open(hierarchy_path, mode="r") as file: reader = csv.reader(file) From ff7cfee550b54039909a5825e1be39a5bb5fc3d0 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 6 Dec 2024 13:31:06 +0000 Subject: [PATCH 08/11] remove validation script (to be integrated in separate PR) --- validate_atlas_script.py | 94 ---------------------------------------- 1 file changed, 94 deletions(-) delete mode 100644 validate_atlas_script.py diff --git a/validate_atlas_script.py b/validate_atlas_script.py deleted file mode 100644 index cc3cbfaa..00000000 --- a/validate_atlas_script.py +++ /dev/null @@ -1,94 +0,0 @@ -import shutil -from pathlib import Path - -import napari -from brainrender_napari.napari_atlas_representation import ( - NapariAtlasRepresentation, -) -from napari.viewer import Viewer - -from brainglobe_atlasapi import BrainGlobeAtlas -from brainglobe_atlasapi.atlas_generation.validate_atlases import ( - catch_missing_mesh_files, - catch_missing_structures, - open_for_visual_check, - validate_additional_references, - validate_atlas_files, - validate_checksum, - validate_image_dimensions, -) - -all_validation_functions = [ - validate_atlas_files, - # validate_mesh_matches_image_extents, - open_for_visual_check, - validate_checksum, - validate_image_dimensions, - validate_additional_references, - catch_missing_mesh_files, - catch_missing_structures, -] - - -# adapt this code block for newly packaged atlases -brainglobe_dir = Path.home() / ".brainglobe/" -working_dir = Path.home() / "brainglobe_workingdir/" -atlas_name = "oldenburg_blackcap" -resolution = 25 -minor_version = 0 - -# nothing below this needs changing - -# make sure we have the latest packaged version in .brainglobe -# by replacing it with the working_dir version if needed -atlas_name_with_version = f"{atlas_name}_{resolution}um_v1.{minor_version}" -source_dir = working_dir / atlas_name_with_version -destination_dir = brainglobe_dir / atlas_name_with_version -if destination_dir.exists() and destination_dir.is_dir(): - shutil.rmtree(destination_dir) -assert source_dir.exists(), f"{source_dir} doesn't exist" -if source_dir.exists(): - shutil.copytree(source_dir, destination_dir) -assert destination_dir.exists(), 
f"{destination_dir} doesn't exist" - -# run validation functions on the new atlas -atlas = BrainGlobeAtlas(f"{atlas_name}_{resolution}um") -validation_results = {atlas_name: []} - -for i, validation_function in enumerate(all_validation_functions): - try: - validation_function(atlas) - validation_results[atlas_name].append( - (validation_function.__name__, None, str("Pass")) - ) - except AssertionError as error: - validation_results[atlas_name].append( - (validation_function.__name__, str(error), str("Fail")) - ) - -# print validation results and open napari for a visual check -# in napari, we should see three layers: -# - the annotation -# - the reference image (visibility turned off by default) -# - the root mesh - -failed_validations = [ - (result[0], result[1]) - for result in validation_results[atlas_name] - if result[2] == "Fail" -] -if failed_validations: - print("Failed validations:") - for failed in failed_validations: - print(failed) -else: - print(f"{atlas_name} is a valid atlas") - -viewer = Viewer() -viewer.dims.ndisplay = 3 -napari_atlas = NapariAtlasRepresentation(atlas, viewer) -napari_atlas.add_structure_to_viewer("root") -napari_atlas.add_structure_to_viewer("P") -napari_atlas.add_to_viewer() - -napari.run() From ef1e4b7540675ca2284f4c4d0d579f1ca69be8fa Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 6 Dec 2024 13:34:00 +0000 Subject: [PATCH 09/11] clearly label one-off scripts needed for reproducibility --- .../_combine_blackcap_annotations.py | 0 .../_combine_blackcap_magneto_and_anatomical_annotations.py | 0 .../_halve_blackcap_reference.py | 4 ++++ 3 files changed, 4 insertions(+) rename brainglobe_atlasapi/atlas_generation/atlas_scripts/{ => blackcap_reproducibility}/_combine_blackcap_annotations.py (100%) rename brainglobe_atlasapi/atlas_generation/atlas_scripts/{ => blackcap_reproducibility}/_combine_blackcap_magneto_and_anatomical_annotations.py (100%) rename brainglobe_atlasapi/atlas_generation/atlas_scripts/{ => blackcap_reproducibility}/_halve_blackcap_reference.py (89%) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_combine_blackcap_annotations.py similarity index 100% rename from brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_annotations.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_combine_blackcap_annotations.py diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_combine_blackcap_magneto_and_anatomical_annotations.py similarity index 100% rename from brainglobe_atlasapi/atlas_generation/atlas_scripts/_combine_blackcap_magneto_and_anatomical_annotations.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_combine_blackcap_magneto_and_anatomical_annotations.py diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_halve_blackcap_reference.py similarity index 89% rename from brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py rename to brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_halve_blackcap_reference.py index 5dd7d40c..8624d5d5 100644 --- 
a/brainglobe_atlasapi/atlas_generation/atlas_scripts/_halve_blackcap_reference.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap_reproducibility/_halve_blackcap_reference.py @@ -1,3 +1,7 @@ +"""One-off script to reproduce how we halved the symmetric blackcap template +to simplify annotation. +""" + from pathlib import Path from brainglobe_utils.IO.image import load_nii, save_any From 42af7db3d486b6ff599f0ee7632f53acd9ea2507 Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Fri, 6 Dec 2024 16:40:24 +0000 Subject: [PATCH 10/11] move blackcap packaging to pooch and GIN --- .../atlas_scripts/blackcap.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 7073c917..92b0376f 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -7,6 +7,7 @@ from pathlib import Path import numpy as np +import pooch from brainglobe_utils.IO.image import load_nii from rich.progress import track @@ -32,16 +33,17 @@ def create_atlas(working_dir, resolution): ATLAS_PACKAGER = "BrainGlobe Developers, hello@brainglobe.info" ADDITIONAL_METADATA = {} - # setup folder for downloading - - atlas_path = Path( - "/media/ceph/margrie/sweiler/AnalyzedData/" - "Bird_brain_project/black_cap/DH_checked/" + gin_url = "https://gin.g-node.org/BrainGlobe/blackcap_materials/raw/master/blackcap_materials.zip" + atlas_path = pooch.retrieve( + gin_url, known_hash=None, processor=pooch.Unzip(), progressbar=True ) - structures_file = atlas_path / "labels051224.txt" - annotations_file = atlas_path / "annotations_051224.nii.gz" - reference_file = atlas_path / "reference_res-25_hemi-right_IMAGE.nii.gz" + hierarchy_path = atlas_path[0] # "combined_structures.csv" + reference_file = atlas_path[ + 1 + ] # "reference_res-25_hemi-right_IMAGE.nii.gz" + structures_file = atlas_path[2] # "labels051224.txt" + annotations_file = atlas_path[3] # "annotations_051224.nii.gz" meshes_dir_path = Path.home() / "blackcap-meshes" try: @@ -50,7 +52,6 @@ def create_atlas(working_dir, resolution): "mesh folder already exists" print("Reading structures files") - hierarchy_path = atlas_path / "combined_structures.csv" structure_to_parent_map = {} with open(hierarchy_path, mode="r") as file: reader = csv.reader(file) @@ -92,7 +93,6 @@ def create_atlas(working_dir, resolution): f"max tree depth: {tree.depth()}" ) print(tree) - print(f"Saving atlas data at {atlas_path}") # use tifffile to read annotated file annotated_volume = load_nii(annotations_file, as_array=True).astype( From 33d3c8c8d45bbec80ba82817bbcdeb173f6e482e Mon Sep 17 00:00:00 2001 From: alessandrofelder Date: Tue, 10 Dec 2024 14:54:12 +0000 Subject: [PATCH 11/11] update blackcap minor version --- brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py index 92b0376f..a4935339 100644 --- a/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py +++ b/brainglobe_atlasapi/atlas_generation/atlas_scripts/blackcap.py @@ -1,4 +1,4 @@ -__version__ = "0" +__version__ = "1" import csv import glob as glob
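
For reference, a minimal sketch (outside the patches above) of how the packaged atlas can be loaded for a quick check once blackcap.py has been run and its output copied into ~/.brainglobe, as the removed validate_atlas_script.py did. The atlas name and 25 um resolution follow ATLAS_NAME and res in blackcap.py; metadata, structures, annotation and reference are standard BrainGlobeAtlas attributes:

    from brainglobe_atlasapi import BrainGlobeAtlas

    # load the locally packaged atlas by its "<name>_<resolution>um" key
    atlas = BrainGlobeAtlas("oldenburg_blackcap_25um")

    # quick sanity checks on the packaged data
    print(atlas.metadata["resolution"])    # expected: [25, 25, 25]
    print(atlas.structures["root"]["id"])  # ROOT_ID used above is 999
    print(atlas.annotation.shape, atlas.reference.shape)  # should match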