Skip to content

Commit

Permalink
testing and fixing outdated atlas generation scripts (#249)
Browse files Browse the repository at this point in the history
* first commit, quick fixes of old syntax to try running it on the HPC

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* humanatlas.py: uses `from brainio import brainio`; the brainio package has to be installed manually

* fixing azba_zfish issue: replace the removed `smoothLaplacian` call with `smooth` in mesh_utils.py

* fixing kim_developmental_ccf_mouse.py issue: update deprecated pandas syntax (`DataFrame.append` removed; use `pd.concat`) and DataFrame handling

* fixing allen_cord.py: updating source URL

* adding packages to dependencies

* removing unnecessary comments

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
viktorpm and pre-commit-ci[bot] authored Mar 1, 2024
1 parent c733fe9 commit 3c8a581
Show file tree
Hide file tree
Showing 6 changed files with 14 additions and 13 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -226,9 +226,7 @@ def create_atlas(
if isinstance(working_dir, str):
working_dir = Path(working_dir)
# Generated atlas path:
working_dir = (
working_dir / "brainglobe_workingdir" / atlas_config.atlas_name
)
working_dir = working_dir / "admba_3d_dev_mouse" / atlas_config.atlas_name
working_dir.mkdir(exist_ok=True, parents=True)

download_dir_path = working_dir / "downloads"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,10 +221,7 @@ def create_atlas(working_dir):
ORIENTATION = "asr"
RESOLUTION = (20, 10, 10)
ROOT_ID = 250
ATLAS_FILE_URL = (
"https://md-datasets-cache-zipfiles-prod.s3.eu-west-1."
"amazonaws.com/4rrggzv5d5-1.zip"
)
ATLAS_FILE_URL = "https://prod-dcd-datasets-cache-zipfiles.s3.eu-west-1.amazonaws.com/4rrggzv5d5-1.zip"
ATLAS_PACKAGER = "MetaCell LLC, Ltd."

download_dir_path = working_dir / "downloads"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,10 @@ def create_atlas(
df = pd.read_csv(structures_file)
clean_up_df_entries(df)

df.loc[len(df)] = ["root", ROOT_ID, "root", ROOT_ID]
df.append(["root", ROOT_ID, "root", ROOT_ID])
new_row = pd.DataFrame(
[["root", ROOT_ID, "root", ROOT_ID]], columns=df.columns
)
df = pd.concat([df, new_row], ignore_index=True)

id_dict = dict(zip(df["ID"], df["Parent ID"]))

Expand Down
4 changes: 2 additions & 2 deletions brainglobe_atlasapi/atlas_generation/mesh_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,10 +158,10 @@ def extract_mesh_from_mask(
mesh = mesh.extractLargestRegion()

# decimate
mesh.decimate(decimate_fraction, method="pro")
mesh.decimate_pro(decimate_fraction)

if smooth:
mesh.smoothLaplacian()
mesh.smooth()

if obj_filepath is not None:
write(mesh, str(obj_filepath))
Expand Down
4 changes: 2 additions & 2 deletions brainglobe_atlasapi/atlas_generation/volume_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

import os

import imio
import brainglobe_utils.image_io as image_io
import numpy as np


Expand Down Expand Up @@ -85,7 +85,7 @@ def load_labelled_volume(data, vmin=0, alpha=1, **kwargs):
raise FileNotFoundError(f"Volume data file {data} not found")

try:
data = imio.load_any(data)
data = image_io.load_any(data)
except Exception as e:
raise ValueError(
f"Could not load volume data from file: {data} - {e}"
Expand Down
4 changes: 4 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,10 @@ dependencies = [
"treelib",
"vedo",
"xmltodict",
"scikit-image",
"brainio",
"brainglobe-utils",

]
dynamic = ["version"]

Expand Down

0 comments on commit 3c8a581

Please sign in to comment.