Merge branch 'main' into decouple-server-init

k-dominik authored Dec 9, 2024
2 parents 3ec629b + 0cb3535 commit 242d6b3
Showing 10 changed files with 36 additions and 20 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
  auto-activate-base: false
  activate-environment: tiktorch-server-env
  environment-file: environment.yml
- channel-priority: flexible
+ channel-priority: strict
  miniforge-variant: Miniforge3
  - name: Get the latest commit hash and target ref
  run: |
@@ -51,7 +51,7 @@ jobs:
  auto-activate-base: false
  activate-environment: tiktorch-server-env
  environment-file: environment.yml
- channel-priority: flexible
+ channel-priority: strict
  miniforge-variant: Miniforge3
  - name: conda diagnostics
  run: |
@@ -89,7 +89,7 @@ jobs:
  with:
  auto-update-conda: true
  auto-activate-base: true
- channel-priority: flexible
+ channel-priority: strict
  miniforge-variant: Miniforge3
  - name: install common conda dependencies
  run: conda install -n base -c conda-forge conda-build setuptools_scm -y
@@ -133,7 +133,7 @@ jobs:
  with:
  auto-update-conda: true
  auto-activate-base: true
- channel-priority: flexible
+ channel-priority: strict
  miniforge-variant: Miniforge3
  - name: install common conda dependencies
  run: conda install -n base -c conda-forge conda-build setuptools_scm -y
1 change: 1 addition & 0 deletions .gitignore
@@ -8,3 +8,4 @@ tiktorch/__pycache/
  /#wrapper.py#
  /.#wrapper.py#
  .py~
+ .vscode
2 changes: 1 addition & 1 deletion Makefile
@@ -1,7 +1,7 @@
  SHELL=/bin/bash
  ROOT_DIR:=$(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
  TIKTORCH_ENV_NAME ?= tiktorch-server-env
- SUBMODULES = ./vendor/core-bioimage-io-python ./vendor/spec-bioimage-io
+ SUBMODULES = ./vendor/spec-bioimage-io ./vendor/core-bioimage-io-python

  protos:
  python -m grpc_tools.protoc -I./proto --python_out=tiktorch/proto/ --grpc_python_out=tiktorch/proto/ ./proto/*.proto
27 changes: 21 additions & 6 deletions environment.yml
@@ -1,21 +1,21 @@
  name: tiktorch-server-env
  channels:
- - ilastik-forge
  - pytorch
+ - ilastik-forge
  - conda-forge
+ - nodefaults
  dependencies:
  # - bioimage.spec via submodule
  # - bioimage.core via submodule
  - python 3.9.*
- - numpy
+ - numpy >=1.21,<2
  - grpcio=1.44 # protobuf 5 requires protoc version > 3.19.0 that requires grpcio >= 1.44
  - marshmallow-union
  - marshmallow=3.12.*
  - marshmallow-jsonschema
  - protobuf
  - pyyaml=5.3.*
  - requests
  - ruamel.yaml
  - scikit-learn
  - scipy
  - typing-extensions
@@ -32,17 +32,32 @@ dependencies:
  - cpuonly
  # - cudatoolkit >=10.2
  # - cudnn
- # - tochvision
+ - torchvision

  # tensorflow (1.14 is the latest 1.x version on cf)
  # so far we don't have any 2.x models in the model zoo
  # tensorflow skipped for now, as it conflicts with grpcio version 1.41
  # - tensorflow >=2.9,<3.0

  # convenient to use bioiamgeio.core tools
  - imageio
+ # bioimageio.spec / bioimageio.core dependencies:
+ - annotated-types >=0.5.0,<1
+ - email_validator
+ - h5py
+ - imageio >=2.10
+ - loguru
+ - packaging >=17.0
+ - pooch >=1.5,<2
+ - pydantic >=2.7.0,<2.10
+ - pydantic-core
+ - pydantic-settings >=2.5
+ - python-dateutil
+ - rich
+ - ruyaml
+ - tifffile
+ - tqdm
+ - typer
+ - zipp

  # dev stuff
  - pytest
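The tightened pins above are easy to get wrong when updating an existing environment in place. A minimal sanity check, assuming the tiktorch-server-env environment is active (this snippet is illustrative, not part of the commit):

# Illustrative only: verify a few of the pins declared in environment.yml.
from importlib.metadata import version

import numpy
from packaging.version import Version

assert Version(numpy.__version__) < Version("2"), "numpy must stay below 2"
for dist, lo, hi in [("pydantic", "2.7.0", "2.10"), ("pooch", "1.5", "2")]:
    v = Version(version(dist))
    assert Version(lo) <= v < Version(hi), f"{dist} {v} outside [{lo}, {hi})"
print("pins look consistent")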
5 changes: 3 additions & 2 deletions setup.py
@@ -27,11 +27,12 @@
  ],
  packages=find_packages(exclude=["tests"]), # Required
  install_requires=[
- "bioimageio.spec==0.5.3.2",
- "bioimageio.core==0.6.8",
+ "bioimageio.spec==0.5.3.3",
+ "bioimageio.core==0.6.10",
  "grpcio>=1.31",
  "numpy<2", # pytorch 2.2.2-py3.9_0 for macos is compiled with numpy 1.*
  "protobuf",
+ "pydantic>=2.7.0,<2.10",
  "pyyaml",
  "xarray",
  ],
4 changes: 2 additions & 2 deletions tests/test_converters.py
@@ -16,11 +16,11 @@
  from tiktorch.proto import inference_pb2


- def _numpy_to_pb_tensor(arr):
+ def _numpy_to_pb_tensor(arr, tensor_id: str = "dummy_tensor_name"):
      """
      Makes sure that tensor was serialized/deserialized
      """
-     tensor = numpy_to_pb_tensor(arr)
+     tensor = numpy_to_pb_tensor(tensor_id, arr)
      parsed = inference_pb2.Tensor()
      parsed.ParseFromString(tensor.SerializeToString())
      return parsed
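For context, a sketch of how the extended helper might be exercised with an explicit id (a hypothetical test, not part of this commit; it relies on _numpy_to_pb_tensor as defined above, and "my_tensor" is an arbitrary example id):

import numpy as np

def test_tensor_id_round_trip():
    arr = np.arange(6, dtype=np.uint8).reshape(2, 3)
    parsed = _numpy_to_pb_tensor(arr, tensor_id="my_tensor")
    assert parsed.tensorId == "my_tensor"
    assert parsed.dtype == "uint8"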
4 changes: 2 additions & 2 deletions tiktorch/converters.py
@@ -34,12 +34,12 @@ def sample_to_pb_tensors(sample: Sample) -> List[inference_pb2.Tensor]:
      return [xarray_to_pb_tensor(tensor_id, res_tensor.data) for tensor_id, res_tensor in sample.members.items()]


- def numpy_to_pb_tensor(array: np.ndarray, axistags=None) -> inference_pb2.Tensor:
+ def numpy_to_pb_tensor(tensor_id: str, array: np.ndarray, axistags=None) -> inference_pb2.Tensor:
      if axistags:
          shape = [inference_pb2.NamedInt(size=dim, name=name) for dim, name in zip(array.shape, axistags)]
      else:
          shape = [inference_pb2.NamedInt(size=dim) for dim in array.shape]
-     return inference_pb2.Tensor(dtype=str(array.dtype), shape=shape, buffer=bytes(array))
+     return inference_pb2.Tensor(tensorId=tensor_id, dtype=str(array.dtype), shape=shape, buffer=bytes(array))


  def xarray_to_pb_tensor(tensor_id: str, array: xr.DataArray) -> inference_pb2.Tensor:
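Taken together, the new signature means callers pass the tensor id alongside the array, and the id survives serialization. A minimal sketch, assuming tiktorch and its generated protos are importable; "input0" and the axis names are arbitrary examples:

import numpy as np

from tiktorch.converters import numpy_to_pb_tensor
from tiktorch.proto import inference_pb2

arr = np.zeros((2, 3), dtype=np.float32)
pb = numpy_to_pb_tensor("input0", arr, axistags=["y", "x"])
assert pb.tensorId == "input0"

# Round trip, mirroring the test helper above:
parsed = inference_pb2.Tensor()
parsed.ParseFromString(pb.SerializeToString())
assert parsed.tensorId == "input0" and parsed.dtype == "float32"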
1 change: 0 additions & 1 deletion tiktorch/server/session/process.py
@@ -9,7 +9,6 @@
  from bioimageio.core import PredictionPipeline, Tensor, create_prediction_pipeline
  from bioimageio.spec import InvalidDescr, load_description
  from bioimageio.spec.model import v0_5
- from bioimageio.spec.model.v0_5 import BatchAxis

  from tiktorch import log
  from tiktorch.rpc import Shutdown
