Merge branch 'galaxyproject:main' into integrate-notification-system
heisner-tillman authored Jun 30, 2023
2 parents 0aabd18 + d81b013 commit f40475e
Showing 5 changed files with 352 additions and 0 deletions.
1 change: 1 addition & 0 deletions .github/workflows/test.yaml
@@ -33,6 +33,7 @@ jobs:
        tox_env: [py37]
        galaxy_version:
          - dev
          - release_23.1
          - release_23.0
          - release_22.05
          - release_22.01
96 changes: 96 additions & 0 deletions bioblend/_tests/TestGalaxyToolContainerResolution.py
@@ -0,0 +1,96 @@
"""
Test functions in bioblend.galaxy.tool_dependencies
"""
from . import (
    GalaxyTestBase,
    test_util,
)


class TestGalaxyContainerResolution(GalaxyTestBase.GalaxyTestBase):
    @test_util.skip_unless_galaxy("release_22.05")
    def test_get_container_resolvers(self):
        container_resolvers = self.gi.container_resolution.get_container_resolvers()
        assert isinstance(container_resolvers, list)
        assert len(container_resolvers) > 0
        assert isinstance(container_resolvers[0], dict)
        assert container_resolvers[0]["model_class"] == "ExplicitContainerResolver"
        assert container_resolvers[0]["resolver_type"] == "explicit"
        assert container_resolvers[0]["can_uninstall_dependencies"] is False
        assert container_resolvers[0]["builds_on_resolution"] is False

    @test_util.skip_unless_galaxy("release_22.05")
    def test_show_container_resolver(self):
        container_resolver = self.gi.container_resolution.show_container_resolver(0)
        print(container_resolver)
        assert isinstance(container_resolver, dict)
        assert container_resolver["model_class"] == "ExplicitContainerResolver"
        assert container_resolver["resolver_type"] == "explicit"
        assert container_resolver["can_uninstall_dependencies"] is False
        assert container_resolver["builds_on_resolution"] is False

    @test_util.skip_unless_galaxy("release_22.05")
    def test_resolve(self):
        tool = self.gi.container_resolution.resolve(tool_id="CONVERTER_parquet_to_csv")
        print(tool)
        assert isinstance(tool, dict)

        tool_requirements_only = self.gi.container_resolution.resolve(
            tool_id="CONVERTER_parquet_to_csv", requirements_only=True
        )
        assert isinstance(tool_requirements_only, dict)

    @test_util.skip_unless_galaxy("release_22.05")
    def test_resolve_toolbox(self):
        toolbox = self.gi.container_resolution.resolve_toolbox()
        assert isinstance(toolbox, list)
        assert len(toolbox) > 0
        assert isinstance(toolbox[0], dict)

        toolbox_by_tool_ids = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]["tool_id"]])
        assert isinstance(toolbox_by_tool_ids, list)
        assert len(toolbox_by_tool_ids) == 1
        assert isinstance(toolbox_by_tool_ids[0], dict)

        toolbox_by_resolver_type = self.gi.container_resolution.resolve_toolbox(resolver_type="mulled")
        assert isinstance(toolbox_by_resolver_type, list)
        assert len(toolbox_by_resolver_type) > 0
        assert isinstance(toolbox_by_resolver_type[0], dict)
        assert len(toolbox) == len(toolbox_by_resolver_type)
        for tool in toolbox_by_resolver_type:
            print(tool)
            assert (
                tool["status"]["dependency_type"] is None
                or tool["status"]["container_resolver"]["resolver_type"] == "mulled"
            )

        toolbox_by_container_type = self.gi.container_resolution.resolve_toolbox(container_type="docker")
        assert isinstance(toolbox_by_container_type, list)
        assert len(toolbox_by_container_type) > 0
        assert isinstance(toolbox_by_container_type[0], dict)
        assert len(toolbox) == len(toolbox_by_container_type)
        for tool in toolbox_by_container_type:
            assert tool["status"]["dependency_type"] is None or tool["status"]["dependency_type"] == "docker"
            assert (
                tool["status"]["dependency_type"] is None or tool["status"]["container_description"]["type"] == "docker"
            )

        toolbox_requirements_only = self.gi.container_resolution.resolve_toolbox(requirements_only=True)
        assert isinstance(toolbox_requirements_only, list)
        assert len(toolbox_requirements_only) > 0
        assert isinstance(toolbox_requirements_only[0], dict)
        assert len(toolbox) == len(toolbox_requirements_only)

        # TODO unless containers are available this may fallback to conda by default?
        # depending on Galaxy's config
        # toolbox_by_index = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]['tool_id']], index=0, install=True)
        # assert isinstance(toolbox_by_index, list)
        # assert len(toolbox_by_index) > 0
        # assert isinstance(toolbox_by_index[0], dict)

    # TODO unless containers are available this may fallback to conda by default?
    # depending on Galaxy's config
    # def test_resolve_toolbox_with_install(self):
    #     toolbox = self.gi.container_resolution.resolve_toolbox_with_install(tool_ids=[])
    #     assert isinstance(toolbox, list)
    #     assert len(toolbox) == 0
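
The toolbox assertions above translate directly into an exploratory script. A minimal sketch, assuming a reachable Galaxy server (the URL and API key are placeholders, and the results depend entirely on that server's container resolver configuration):

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="your-api-key")

# Resolve every tool in the toolbox against Docker container resolvers only
toolbox = gi.container_resolution.resolve_toolbox(container_type="docker")

resolved = [t for t in toolbox if t["status"]["dependency_type"] == "docker"]
unresolved = [t for t in toolbox if t["status"]["dependency_type"] is None]
print(f"{len(resolved)} tools resolved to a Docker container, {len(unresolved)} unresolved")

# Each resolved entry carries the container description the tests above inspect
for tool in resolved[:5]:
    print(tool["tool_id"], tool["status"]["container_description"]["identifier"])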
2 changes: 2 additions & 0 deletions bioblend/galaxy/__init__.py
@@ -5,6 +5,7 @@

from bioblend.galaxy import (
    config,
    container_resolution,
    dataset_collections,
    datasets,
    datatypes,
@@ -94,6 +95,7 @@ def __init__(
        self.toolshed = toolshed.ToolShedClient(self)
        self.toolShed = self.toolshed  # historical alias
        self.config = config.ConfigClient(self)
        self.container_resolution = container_resolution.ContainerResolutionClient(self)
        self.visual = visual.VisualClient(self)
        self.quotas = quotas.QuotaClient(self)
        self.groups = groups.GroupsClient(self)
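
With the client registered on GalaxyInstance, container resolution is available as a regular attribute. A minimal sketch, assuming a placeholder URL and API key:

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="your-api-key")
# The new client sits alongside the existing ones such as gi.config and gi.tools
resolvers = gi.container_resolution.get_container_resolvers()
print(resolvers)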
238 changes: 238 additions & 0 deletions bioblend/galaxy/container_resolution/__init__.py
@@ -0,0 +1,238 @@
"""
Contains interactions dealing with Galaxy container resolvers.
Works only with Galaxy > 22.01
"""
from typing import (
List,
Optional,
)

from bioblend.galaxy.client import Client


class ContainerResolutionClient(Client):
module = "container_resolvers"

    def get_container_resolvers(self) -> list:
        """
        List the container resolvers.

        :rtype: list
        :return: list of container resolvers.
          For example::

            [{'builds_on_resolution': False,
              'can_uninstall_dependencies': False,
              'model_class': 'CachedExplicitSingularityContainerResolver',
              'resolver_type': 'cached_explicit_singularity'},
             {'builds_on_resolution': False,
              'can_uninstall_dependencies': False,
              'model_class': 'CachedMulledSingularityContainerResolver',
              'resolver_type': 'cached_mulled_singularity'},
             {'builds_on_resolution': False,
              'can_uninstall_dependencies': False,
              'model_class': 'MulledSingularityContainerResolver',
              'resolver_type': 'mulled_singularity'}]
        """
        url = self._make_url()
        return self._get(url=url)

    def show_container_resolver(self, index: int) -> dict:
        """
        Show a container resolver.

        :type index: int
        :param index: index of the container resolver with respect to
          the container resolvers config file

        :rtype: dict
        :return: dict of properties of the given container resolver.
          For example::

            {'builds_on_resolution': False,
             'can_uninstall_dependencies': False,
             'model_class': 'CachedMulledSingularityContainerResolver',
             'resolver_type': 'cached_mulled_singularity'}
        """
        url = f"{self._make_url()}/{index}"
        return self._get(url=url)

    def resolve(
        self,
        tool_id: str,
        index: Optional[int] = None,
        resolver_type: Optional[str] = None,
        container_type: Optional[str] = None,
        requirements_only: bool = False,
        install: bool = False,
    ) -> dict:
        """
        Resolve the requirements of a tool against the specified container resolvers.

        :type tool_id: str
        :param tool_id: id of the tool to resolve against containers

        :type index: int
        :param index: index of the container resolver with respect to
          the container resolvers config file

        :type resolver_type: str
        :param resolver_type: restrict to the specified resolver type

        :type container_type: str
        :param container_type: restrict to the specified container type

        :type requirements_only: bool
        :param requirements_only: ignore tool containers and properties and
          search based on the tool requirements only; set to True to mimic the
          default behavior of the tool dependency API

        :type install: bool
        :param install: allow installation of new containers (for build_mulled
          containers) the way job resolution will operate; defaults to False

        :rtype: dict
        :return: dict describing the resolution.
          For example::

            {'requirements': [{'name': 'pyarrow', 'specs': [], 'type': 'package', 'version': '4.0.1'}],
             'status': {
                 'cacheable': False,
                 'container_description': {'identifier': 'quay.io/biocontainers/pyarrow:4.0.1', 'resolve_dependencies': False, 'shell': '/bin/bash', 'type': 'docker'},
                 'container_resolver': {'builds_on_resolution': False, 'can_uninstall_dependencies': False, 'model_class': 'MulledDockerContainerResolver', 'resolver_type': 'mulled'},
                 'dependency_type': 'docker',
                 ...
             },
             'tool_id': 'CONVERTER_parquet_to_csv'}
        """
        params = {}
        if tool_id:
            params["tool_id"] = tool_id
        if resolver_type:
            params["resolver_type"] = resolver_type
        if container_type:
            params["container_type"] = container_type
        params["requirements_only"] = str(requirements_only)
        params["install"] = str(install)
        if index is not None:
            url = "/".join((self._make_url(), str(index), "resolve"))
        else:
            url = "/".join((self._make_url(), "resolve"))
        return self._get(url=url, params=params)

    def resolve_toolbox(
        self,
        index: Optional[int] = None,
        tool_ids: Optional[List[str]] = None,
        resolver_type: Optional[str] = None,
        container_type: Optional[str] = None,
        requirements_only: bool = False,
        install: bool = False,
    ) -> list:
        """
        Apply ``resolve()`` to each tool in the toolbox and return the results as a list.
        See the documentation of ``resolve()`` for a description of the parameters that
        can be consumed and of the resulting items.

        :type index: int
        :param index: index of the container resolver with respect to
          the container resolvers config file

        :type tool_ids: list
        :param tool_ids: tool ids to filter the toolbox on

        :type resolver_type: str
        :param resolver_type: restrict to the specified resolver type

        :type container_type: str
        :param container_type: restrict to the specified container type

        :type requirements_only: bool
        :param requirements_only: ignore tool containers and properties and
          search based on the tool requirements only; set to True to mimic the
          default behavior of the tool dependency API

        :type install: bool
        :param install: allow installation of new containers (for build_mulled
          containers) the way job resolution will operate; defaults to False

        :rtype: list
        :return: list of dicts describing the resolution for each tool.
          For example::

            [{'tool_id': 'upload1', 'status': {'model_class': 'NullDependency', 'dependency_type': None, 'exact': True, 'name': None, 'version': None, 'cacheable': False}, 'requirements': []}, ...]
        """
        params = {}
        if tool_ids:
            params["tool_ids"] = ",".join(tool_ids)
        if resolver_type:
            params["resolver_type"] = resolver_type
        if container_type:
            params["container_type"] = container_type
        params["requirements_only"] = str(requirements_only)
        params["install"] = str(install)
        if index is not None:
            url = "/".join((self._make_url(), str(index), "toolbox"))
        else:
            url = "/".join((self._make_url(), "toolbox"))
        return self._get(url=url, params=params)

    def resolve_toolbox_with_install(
        self,
        index: Optional[int] = None,
        tool_ids: Optional[List[str]] = None,
        resolver_type: Optional[str] = None,
        container_type: Optional[str] = None,
        requirements_only: bool = False,
    ) -> list:
        """
        Resolve dependencies like ``resolve_toolbox()``, but allow building and
        installing new containers.

        :type index: int
        :param index: index of the container resolver with respect to
          the container resolvers config file

        :type tool_ids: list
        :param tool_ids: tool ids to filter the toolbox on

        :type resolver_type: str
        :param resolver_type: restrict to the specified resolver type

        :type container_type: str
        :param container_type: restrict to the specified container type

        :type requirements_only: bool
        :param requirements_only: ignore tool containers and properties and
          search based on the tool requirements only; set to True to mimic the
          default behavior of the tool dependency API

        :rtype: list of dicts
        :return: dictified descriptions of the dependencies, with attribute
          ``dependency_type: None`` if no match was found.
          For example::

            [{'requirements': [{'name': 'canu',
                                'specs': [],
                                'type': 'package',
                                'version': '2.2'}],
              'status': {'cacheable': False,
                         'container_description': {'identifier': 'docker://quay.io/biocontainers/canu:2.2--ha47f30e_0',
                                                   'resolve_dependencies': False,
                                                   'shell': '/bin/bash',
                                                   'type': 'singularity'},
                         'container_resolver': {'builds_on_resolution': False,
                                                'can_uninstall_dependencies': False,
                                                'model_class': 'MulledSingularityContainerResolver',
                                                'resolver_type': 'mulled_singularity'},
                         'dependency_type': 'singularity',
                         'environment_path': 'docker://quay.io/biocontainers/canu:2.2--ha47f30e_0',
                         'exact': True,
                         'model_class': 'ContainerDependency',
                         'name': None,
                         'version': None},
              'tool_id': 'toolshed.g2.bx.psu.edu/repos/bgruening/canu/canu/2.2+galaxy0'}]
        """
        params = {}
        if tool_ids:
            params["tool_ids"] = ",".join(tool_ids)
        if resolver_type:
            params["resolver_type"] = resolver_type
        if container_type:
            params["container_type"] = container_type
        params["requirements_only"] = str(requirements_only)
        if index is not None:
            url = "/".join((self._make_url(), str(index), "toolbox", "install"))
        else:
            url = "/".join((self._make_url(), "toolbox", "install"))
        return self._post(url=url, payload=params)
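
Putting the new client together, a short usage sketch. This is illustrative only: the Galaxy URL, API key and tool id are placeholders, and the resolvers returned depend on the server's container resolvers configuration:

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="your-api-key")

# Inspect which container resolvers the server is configured with
for i, resolver in enumerate(gi.container_resolution.get_container_resolvers()):
    print(i, resolver["resolver_type"], resolver["model_class"])

# Show a single resolver by its index in the config file
first_resolver = gi.container_resolution.show_container_resolver(0)

# Resolve one tool's requirements against all resolvers, without installing anything
resolution = gi.container_resolution.resolve(tool_id="CONVERTER_parquet_to_csv")
print(resolution["status"].get("dependency_type"))

# Restrict resolution to Docker containers and to the tool requirements only
resolution = gi.container_resolution.resolve(
    tool_id="CONVERTER_parquet_to_csv",
    container_type="docker",
    requirements_only=True,
)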
15 changes: 15 additions & 0 deletions bioblend/galaxy/tools/__init__.py
@@ -333,6 +333,7 @@ def run_tool(
        tool_id: str,
        tool_inputs: Union[InputsBuilder, dict],
        input_format: Literal["21.01", "legacy"] = "legacy",
        data_manager_mode: Optional[Literal["populate", "dry_run", "bundle"]] = None,
    ) -> Dict[str, Any]:
        """
        Runs tool specified by ``tool_id`` in history indicated
@@ -344,6 +345,16 @@
        :type tool_id: str
        :param tool_id: ID of the tool to be run

        :type data_manager_mode: str
        :param data_manager_mode: Possible values are 'populate', 'dry_run' and 'bundle'.
          'populate' is the default behavior for data manager tools and results in tool data table
          files being updated after the data manager job completes.
          'dry_run' will skip any processing after the data manager job completes.
          'bundle' will create a data manager bundle that can be imported on other Galaxy servers.

        :type tool_inputs: dict
        :param tool_inputs: dictionary of input datasets and parameters
          for the tool (see below)
@@ -416,6 +427,10 @@ def run_tool(
payload["inputs"] = tool_inputs.to_dict()
else:
payload["inputs"] = tool_inputs

if data_manager_mode:
payload["data_manager_mode"] = data_manager_mode

return self._post(payload)

def upload_file(
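
The new ``data_manager_mode`` parameter is simply forwarded in the tool-execution payload. A hedged sketch of driving a data manager tool in dry-run mode; the history id, tool id and inputs below are placeholders, not values from this commit:

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url="https://galaxy.example.org", key="your-api-key")

# Hypothetical ids: use a history and a data manager tool that exist on your server
history_id = "0123456789abcdef"
tool_id = "data_manager_fetch_genome_all_fasta_dbkey"

# 'dry_run' skips the post-job processing, so no tool data table files are updated
result = gi.tools.run_tool(
    history_id=history_id,
    tool_id=tool_id,
    tool_inputs={"dbkey": "?", "sequence_name": "test"},  # illustrative inputs
    data_manager_mode="dry_run",
)
print(result["jobs"][0]["id"])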
