From a157f495479d1b1c7a151f170d4bca9503da251c Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 30 Apr 2024 18:55:29 +0100 Subject: [PATCH 01/21] Enforce ``gi`` param is not None when instantiating ``DatasetContainer`` objects --- bioblend/galaxy/objects/wrappers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bioblend/galaxy/objects/wrappers.py b/bioblend/galaxy/objects/wrappers.py index a6f576c88..ff035458b 100644 --- a/bioblend/galaxy/objects/wrappers.py +++ b/bioblend/galaxy/objects/wrappers.py @@ -1218,6 +1218,7 @@ def __init__( :type content_infos: list of :class:`ContentInfo` :param content_infos: info objects for the container's contents """ + assert gi is not None super().__init__(c_dict, gi=gi) if content_infos is None: content_infos = [] From d3df34f7ae663e410ed5ef010ea3c463a0134d51 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 30 Apr 2024 15:50:06 +0100 Subject: [PATCH 02/21] Update ruff settings for 0.2.0 deprecations --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f3e5dc627..10a0010d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,8 +11,10 @@ target-version = ['py38'] isort = true [tool.ruff] -select = ["E", "F", "B", "UP"] target-version = "py38" + +[tool.ruff.lint] +select = ["E", "F", "B", "UP"] # Exceptions: # B9 flake8-bugbear opinionated warnings # E501 is line length (delegated to black) From 9cff461a0243df29c25d8126141b10338b8530c0 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 30 Apr 2024 19:06:11 +0100 Subject: [PATCH 03/21] Enable flake8-comprehensions ruff rules See https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4 --- bioblend/_tests/TestGalaxyHistories.py | 2 +- .../galaxy/dataset_collections/__init__.py | 48 ++++++++++--------- bioblend/galaxy/datasets/__init__.py | 6 +-- bioblend/galaxy/histories/__init__.py | 14 +++--- bioblend/galaxy/tools/inputs.py | 2 +- bioblend/toolshed/repositories/__init__.py | 2 +- pyproject.toml | 2 +- 7 files changed, 40 insertions(+), 36 deletions(-) diff --git a/bioblend/_tests/TestGalaxyHistories.py b/bioblend/_tests/TestGalaxyHistories.py index a088916fb..c53292201 100644 --- a/bioblend/_tests/TestGalaxyHistories.py +++ b/bioblend/_tests/TestGalaxyHistories.py @@ -91,7 +91,7 @@ def test_get_histories(self): # Test keys: check that fields requested are returned histories_with_keys = self.gi.histories.get_histories(keys=["id", "user_id", "size"]) - assert {key for key in histories_with_keys[0]} >= {"id", "user_id", "size"} + assert set(histories_with_keys[0]) >= {"id", "user_id", "size"} # TODO: check whether deleted history is returned correctly # At the moment, get_histories() returns only not-deleted histories diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index 970698595..c375201dc 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -45,17 +45,21 @@ def add(self, element: Union["CollectionElement", "SimpleElement"]) -> "HasEleme class CollectionDescription(HasElements): def to_dict(self) -> Dict[str, Union[str, List]]: - return dict(name=self.name, collection_type=self.type, element_identifiers=[e.to_dict() for e in self.elements]) + return { + "name": self.name, + "collection_type": self.type, + "element_identifiers": [e.to_dict() for e in self.elements], + } class CollectionElement(HasElements): def to_dict(self) -> Dict[str, Union[str, List]]: - return dict( - 
src="new_collection", - name=self.name, - collection_type=self.type, - element_identifiers=[e.to_dict() for e in self.elements], - ) + return { + "src": "new_collection", + "name": self.name, + "collection_type": self.type, + "element_identifiers": [e.to_dict() for e in self.elements], + } class SimpleElement: @@ -69,33 +73,33 @@ def to_dict(self) -> Dict[str, str]: class HistoryDatasetElement(SimpleElement): def __init__(self, name: str, id: str) -> None: super().__init__( - dict( - name=name, - src="hda", - id=id, - ) + { + "name": name, + "src": "hda", + "id": id, + } ) class HistoryDatasetCollectionElement(SimpleElement): def __init__(self, name: str, id: str) -> None: super().__init__( - dict( - name=name, - src="hdca", - id=id, - ) + { + "name": name, + "src": "hdca", + "id": id, + } ) class LibraryDatasetElement(SimpleElement): def __init__(self, name: str, id: str) -> None: super().__init__( - dict( - name=name, - src="ldda", - id=id, - ) + { + "name": name, + "src": "ldda", + "id": id, + } ) diff --git a/bioblend/galaxy/datasets/__init__.py b/bioblend/galaxy/datasets/__init__.py index ec150aa98..91bb4295a 100644 --- a/bioblend/galaxy/datasets/__init__.py +++ b/bioblend/galaxy/datasets/__init__.py @@ -56,9 +56,9 @@ def show_dataset(self, dataset_id: str, hda_ldda: HdaLdda = "hda") -> Dict[str, :rtype: dict :return: Information about the HDA or LDDA """ - params = dict( - hda_ldda=hda_ldda, - ) + params = { + "hda_ldda": hda_ldda, + } return self._get(id=dataset_id, params=params) def _initiate_download( diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index bcad72e74..aa02b14ed 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -735,13 +735,13 @@ def create_dataset_collection( else: collection_description_dict = collection_description - payload = dict( - name=collection_description_dict["name"], - type="dataset_collection", - collection_type=collection_description_dict["collection_type"], - element_identifiers=collection_description_dict["element_identifiers"], - copy_elements=copy_elements, - ) + payload = { + "name": collection_description_dict["name"], + "type": "dataset_collection", + "collection_type": collection_description_dict["collection_type"], + "element_identifiers": collection_description_dict["element_identifiers"], + "copy_elements": copy_elements, + } return self._post(payload, id=history_id, contents=True) def delete_history(self, history_id: str, purge: bool = False) -> Dict[str, Any]: diff --git a/bioblend/galaxy/tools/inputs.py b/bioblend/galaxy/tools/inputs.py index 51d5c0679..99524f242 100644 --- a/bioblend/galaxy/tools/inputs.py +++ b/bioblend/galaxy/tools/inputs.py @@ -64,7 +64,7 @@ def __init__(self, value: Any) -> None: class DatasetParam(Param): def __init__(self, value: Union[Dict[str, str], str], src: str = "hda") -> None: if not isinstance(value, dict): - value = dict(src=src, id=value) + value = {"src": src, "id": value} super().__init__(value) diff --git a/bioblend/toolshed/repositories/__init__.py b/bioblend/toolshed/repositories/__init__.py index c00063830..19d4943dc 100644 --- a/bioblend/toolshed/repositories/__init__.py +++ b/bioblend/toolshed/repositories/__init__.py @@ -126,7 +126,7 @@ def search_repositories( 'page_size': '2', 'total_results': '64'} """ - params = dict(q=q, page=page, page_size=page_size) + params = {"q": q, "page": page, "page_size": page_size} return self._get(params=params) def show_repository( diff --git a/pyproject.toml 
b/pyproject.toml index 10a0010d6..239491fc4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ isort = true target-version = "py38" [tool.ruff.lint] -select = ["E", "F", "B", "UP"] +select = ["E", "F", "B", "C4", "UP"] # Exceptions: # B9 flake8-bugbear opinionated warnings # E501 is line length (delegated to black) From 015f229a5858e7338e815c1b89141c04aa9ea94d Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 30 Apr 2024 19:18:02 +0100 Subject: [PATCH 04/21] Enable flake8-logging-format and flake8-implicit-str-concat ruff rules --- bioblend/galaxy/dataset_collections/__init__.py | 6 +++++- bioblend/galaxy/datasets/__init__.py | 2 +- bioblend/galaxy/invocations/__init__.py | 4 +++- bioblend/galaxy/jobs/__init__.py | 2 +- bioblend/galaxy/objects/galaxy_instance.py | 2 +- bioblend/galaxyclient.py | 2 +- pyproject.toml | 2 +- 7 files changed, 13 insertions(+), 7 deletions(-) diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index c375201dc..69c46fa80 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -219,7 +219,11 @@ def wait_for_dataset_collection( return dataset_collection if time_left > 0: log.info( - f"The dataset collection {dataset_collection_id} has {len(terminal_states)} out of {len(states)} datasets in a terminal state. Will wait {time_left} more s" + "The dataset collection %s has %s out of %s datasets in a terminal state. Will wait %s more s", + dataset_collection_id, + len(terminal_states), + len(states), + time_left, ) time.sleep(min(time_left, interval)) time_left -= interval diff --git a/bioblend/galaxy/datasets/__init__.py b/bioblend/galaxy/datasets/__init__.py index 91bb4295a..6f889465c 100644 --- a/bioblend/galaxy/datasets/__init__.py +++ b/bioblend/galaxy/datasets/__init__.py @@ -435,7 +435,7 @@ def wait_for_dataset( raise Exception(f"Dataset {dataset_id} is in terminal state {state}") return dataset if time_left > 0: - log.info(f"Dataset {dataset_id} is in non-terminal state {state}. Will wait {time_left} more s") + log.info("Dataset %s is in non-terminal state %s. Will wait %s more s", dataset_id, state, time_left) time.sleep(min(time_left, interval)) time_left -= interval else: diff --git a/bioblend/galaxy/invocations/__init__.py b/bioblend/galaxy/invocations/__init__.py index fa6e023ab..7321c5c20 100644 --- a/bioblend/galaxy/invocations/__init__.py +++ b/bioblend/galaxy/invocations/__init__.py @@ -449,7 +449,9 @@ def wait_for_invocation( raise Exception(f"Invocation {invocation_id} is in terminal state {state}") return invocation if time_left > 0: - log.info(f"Invocation {invocation_id} is in non-terminal state {state}. Will wait {time_left} more s") + log.info( + "Invocation %s is in non-terminal state %s. Will wait %s more s", invocation_id, state, time_left + ) time.sleep(min(time_left, interval)) time_left -= interval else: diff --git a/bioblend/galaxy/jobs/__init__.py b/bioblend/galaxy/jobs/__init__.py index c90b04ce4..9b1c97243 100644 --- a/bioblend/galaxy/jobs/__init__.py +++ b/bioblend/galaxy/jobs/__init__.py @@ -518,7 +518,7 @@ def wait_for_job( raise Exception(f"Job {job_id} is in terminal state {state}") return job if time_left > 0: - log.info(f"Job {job_id} is in non-terminal state {state}. Will wait {time_left} more s") + log.info("Job %s is in non-terminal state %s. 
Will wait %s more s", job_id, state, time_left) time.sleep(min(time_left, interval)) time_left -= interval else: diff --git a/bioblend/galaxy/objects/galaxy_instance.py b/bioblend/galaxy/objects/galaxy_instance.py index d4baa81d3..470d7a11c 100644 --- a/bioblend/galaxy/objects/galaxy_instance.py +++ b/bioblend/galaxy/objects/galaxy_instance.py @@ -104,7 +104,7 @@ def poll(ds_list: Iterable[wrappers.Dataset]) -> List[wrappers.Dataset]: if not ds.state: self.log.warning("Dataset %s has an empty state", ds.id) elif ds.state not in TERMINAL_STATES: - self.log.info(f"Dataset {ds.id} is in non-terminal state {ds.state}") + self.log.info("Dataset %s is in non-terminal state %s", ds.id, ds.state) pending.append(ds) return pending diff --git a/bioblend/galaxyclient.py b/bioblend/galaxyclient.py index 6d58e0e37..559c48cb5 100644 --- a/bioblend/galaxyclient.py +++ b/bioblend/galaxyclient.py @@ -54,7 +54,7 @@ def __init__( found_scheme = None # Try to guess the scheme, starting from the more secure for scheme in ("https://", "http://"): - log.warning(f"Missing scheme in url, trying with {scheme}") + log.warning("Missing scheme in url, trying with %s", scheme) with contextlib.suppress(requests.RequestException): r = requests.get( scheme + url, diff --git a/pyproject.toml b/pyproject.toml index 239491fc4..fe855cc91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ isort = true target-version = "py38" [tool.ruff.lint] -select = ["E", "F", "B", "C4", "UP"] +select = ["E", "F", "B", "C4", "G", "ISC", "UP"] # Exceptions: # B9 flake8-bugbear opinionated warnings # E501 is line length (delegated to black) From 7d46e886f7905c6370e54bbc2aae708fb666fa77 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 30 Apr 2024 18:39:36 +0100 Subject: [PATCH 05/21] Fix ``InvocationClient.get_invocation_biocompute_object()`` method on upcoming Galaxy 24.1 xref: https://github.com/galaxyproject/galaxy/pull/16645 Also: - Set `enable_celery_tasks` for tests (needed for BioCompute objects export in Galaxy 24.1) - Add ``wait`` parameter to ``HistoryClient.delete_dataset()`` and ``HistoryDatasetAssociation.delete()`` methods (needed for testing purged datasets when Celery tasks are enabled). - Refactor methods that wait for something to make use of a new generic ``bioblend.wait_on()`` function. They now all raise ``TimeoutException``. --- CHANGELOG.md | 9 +++ bioblend/__init__.py | 38 +++++++++ bioblend/_tests/TestGalaxyDatasets.py | 11 +-- bioblend/_tests/TestGalaxyHistories.py | 2 +- bioblend/_tests/TestGalaxyJobs.py | 1 - bioblend/_tests/TestGalaxyObjects.py | 2 +- bioblend/_tests/TestGalaxyTools.py | 1 - bioblend/_tests/template_galaxy.yml | 1 + .../galaxy/dataset_collections/__init__.py | 36 +++------ bioblend/galaxy/datasets/__init__.py | 38 ++++----- bioblend/galaxy/histories/__init__.py | 22 ++++- bioblend/galaxy/invocations/__init__.py | 81 ++++++++++++++----- bioblend/galaxy/jobs/__init__.py | 24 +++--- bioblend/galaxy/libraries/__init__.py | 36 +++------ bioblend/galaxy/objects/wrappers.py | 6 +- 15 files changed, 182 insertions(+), 126 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e60056a46..7036e4e76 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,9 @@ * Dropped support for Python 3.7. Added support for Python 3.12. Added support for Galaxy releases 23.2 and 24.0. +* Added ``wait`` parameter to ``HistoryClient.delete_dataset()`` and + ``HistoryDatasetAssociation.delete()`` methods. + * Dropped broken ``deleted`` parameter of ``DatasetClient.show_dataset()``. 
* Parameters after ``password`` in the ``__init__()`` method of the @@ -12,8 +15,14 @@ * Classes defined in ``bioblend.galaxy.objects.wrappers`` are no more re-exported by ``bioblend.galaxy.objects``. +* ``DatasetTimeoutException`` and ``DatasetCollectionTimeoutException`` are now + aliases for ``TimeoutException`` instead of subclasses. + * Added support for the new "cancelling" invocation state. +* Fixed ``InvocationClient.get_invocation_biocompute_object()`` method on + upcoming Galaxy 24.1 . + ### BioBlend v1.2.0 - 2023-06-30 * Dropped support for Galaxy releases 17.09-19.01. Added support for Galaxy diff --git a/bioblend/__init__.py b/bioblend/__init__.py index 2a526756b..d0062462a 100644 --- a/bioblend/__init__.py +++ b/bioblend/__init__.py @@ -2,8 +2,11 @@ import logging import logging.config import os +import time from typing import ( + Callable, Optional, + TypeVar, Union, ) @@ -116,3 +119,38 @@ def __str__(self) -> str: class TimeoutException(Exception): pass + + +class NotReady(Exception): + pass + + +T = TypeVar("T") + + +def wait_on(func: Callable[[], T], maxwait: float = 60, interval: float = 3) -> T: + """ + Wait until a function returns without raising a NotReady exception + + :param func: function to wait on. It should accept no parameters. + + :param maxwait: Total time (in seconds) to wait for the function to return + without raising a NotReady exception. After this time, a + ``TimeoutException`` will be raised. + + :param interval: Time (in seconds) to wait between 2 consecutive checks. + """ + assert maxwait >= 0 + assert interval > 0 + + time_left = maxwait + while True: + try: + return func() + except NotReady as e: + if time_left > 0: + log.info("%s. Will wait %s more s", e, time_left) + time.sleep(min(time_left, interval)) + time_left -= interval + else: + raise TimeoutException(f"{e} after {maxwait} s") diff --git a/bioblend/_tests/TestGalaxyDatasets.py b/bioblend/_tests/TestGalaxyDatasets.py index 8123bed8e..714ebbbc7 100644 --- a/bioblend/_tests/TestGalaxyDatasets.py +++ b/bioblend/_tests/TestGalaxyDatasets.py @@ -24,7 +24,6 @@ def setUp(self): def tearDown(self): self.gi.histories.delete_history(self.history_id, purge=True) - @test_util.skip_unless_galaxy("release_19.05") def test_show_nonexistent_dataset(self): with pytest.raises(ConnectionError): self.gi.datasets.show_dataset("nonexistent_id") @@ -65,25 +64,21 @@ def test_download_dataset(self): f.flush() assert f.read() == expected_contents - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets(self): datasets = self.gi.datasets.get_datasets() dataset_ids = [dataset["id"] for dataset in datasets] assert self.dataset_id in dataset_ids - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_history(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id) assert len(datasets) == 1 - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_limit_offset(self): datasets = self.gi.datasets.get_datasets(limit=1) assert len(datasets) == 1 datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1) assert datasets == [] - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_name(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Pasted Entry") assert len(datasets) == 1 @@ -143,7 +138,6 @@ def test_get_datasets_visible(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=False) assert len(datasets) == 0 - @test_util.skip_unless_galaxy("release_19.05") def 
test_get_datasets_ordering(self): self.dataset_id2 = self._test_dataset(self.history_id, contents=self.dataset_contents) self.gi.datasets.wait_for_dataset(self.dataset_id2) @@ -156,7 +150,6 @@ def test_get_datasets_ordering(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-asc") assert datasets[0]["id"] == self.dataset_id - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_deleted(self): deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) assert deleted_datasets == [] @@ -165,11 +158,10 @@ def test_get_datasets_deleted(self): assert len(deleted_datasets) == 1 purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) assert purged_datasets == [] - self.gi.histories.delete_dataset(self.history_id, self.dataset_id, purge=True) + self.gi.histories.delete_dataset(self.history_id, self.dataset_id, purge=True, wait=True) purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) assert len(purged_datasets) == 1 - @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_tool_id_and_tag(self): cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="cat1") assert cat1_datasets == [] @@ -189,7 +181,6 @@ def test_wait_for_dataset(self): self.gi.histories.delete_history(history_id, purge=True) - @test_util.skip_unless_galaxy("release_19.05") def test_dataset_permissions(self): admin_user_id = self.gi.users.get_current_user()["id"] username = test_util.random_string() diff --git a/bioblend/_tests/TestGalaxyHistories.py b/bioblend/_tests/TestGalaxyHistories.py index c53292201..160b6eb5c 100644 --- a/bioblend/_tests/TestGalaxyHistories.py +++ b/bioblend/_tests/TestGalaxyHistories.py @@ -194,7 +194,7 @@ def test_delete_dataset(self): def test_purge_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) - self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True) + self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True, wait=True) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) assert dataset["deleted"] assert dataset["purged"] diff --git a/bioblend/_tests/TestGalaxyJobs.py b/bioblend/_tests/TestGalaxyJobs.py index a43c6d7d3..4b52ac55f 100644 --- a/bioblend/_tests/TestGalaxyJobs.py +++ b/bioblend/_tests/TestGalaxyJobs.py @@ -144,7 +144,6 @@ def test_rerun_and_remap(self): assert last_dataset["id"] == history_contents[2]["id"] self._wait_and_verify_dataset(last_dataset["id"], b"line 1\tline 1\n") - @test_util.skip_unless_galaxy("release_19.05") @test_util.skip_unless_tool("random_lines1") def test_get_common_problems(self): job_id = self._run_tool()["jobs"][0]["id"] diff --git a/bioblend/_tests/TestGalaxyObjects.py b/bioblend/_tests/TestGalaxyObjects.py index efcfa00c1..69eece87a 100644 --- a/bioblend/_tests/TestGalaxyObjects.py +++ b/bioblend/_tests/TestGalaxyObjects.py @@ -966,7 +966,7 @@ def test_dataset_delete(self): assert not self.ds.purged def test_dataset_purge(self): - self.ds.delete(purge=True) + self.ds.delete(purge=True, wait=True) assert self.ds.deleted assert self.ds.purged diff --git a/bioblend/_tests/TestGalaxyTools.py b/bioblend/_tests/TestGalaxyTools.py index 722dcfa2d..2b3892a3c 100644 --- a/bioblend/_tests/TestGalaxyTools.py +++ b/bioblend/_tests/TestGalaxyTools.py @@ -125,7 +125,6 @@ def test_run_cat1(self): # TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6, # and 7 8 9. 
- @test_util.skip_unless_galaxy("release_19.05") @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_dependency_install(self): installed_dependencies = self.gi.tools.install_dependencies("CONVERTER_fasta_to_bowtie_color_index") diff --git a/bioblend/_tests/template_galaxy.yml b/bioblend/_tests/template_galaxy.yml index 54f62d0d6..75bed0c8b 100644 --- a/bioblend/_tests/template_galaxy.yml +++ b/bioblend/_tests/template_galaxy.yml @@ -17,3 +17,4 @@ galaxy: master_api_key: $BIOBLEND_GALAXY_MASTER_API_KEY enable_quotas: true cleanup_job: onsuccess + enable_celery_tasks: true diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index 69c46fa80..addd84a32 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -1,5 +1,4 @@ import logging -import time from typing import ( Any, Dict, @@ -11,7 +10,9 @@ from bioblend import ( CHUNK_SIZE, + NotReady, TimeoutException, + wait_on, ) from bioblend.galaxy.client import Client from bioblend.galaxy.datasets import TERMINAL_STATES @@ -176,9 +177,8 @@ def wait_for_dataset_collection( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the dataset - states in the dataset collection to become terminal. If not - all datasets are in a terminal state within this time, a - ``DatasetCollectionTimeoutException`` will be raised. + states in the dataset collection to become terminal. After this time, + a ``TimeoutException`` will be raised. :type interval: float :param interval: Time (in seconds) to wait between two consecutive checks. @@ -200,12 +200,9 @@ def wait_for_dataset_collection( :rtype: dict :return: Details of the given dataset collection. """ - assert maxwait >= 0 - assert interval > 0 assert 0 <= proportion_complete <= 1 - time_left = maxwait - while True: + def check_and_get_dataset_collection() -> Dict[str, Any]: dataset_collection = self.show_dataset_collection(dataset_collection_id) states = [elem["object"]["state"] for elem in dataset_collection["elements"]] terminal_states = [state for state in states if state in TERMINAL_STATES] @@ -217,24 +214,15 @@ def wait_for_dataset_collection( proportion = len(terminal_states) / len(states) if proportion >= proportion_complete: return dataset_collection - if time_left > 0: - log.info( - "The dataset collection %s has %s out of %s datasets in a terminal state. 
Will wait %s more s", - dataset_collection_id, - len(terminal_states), - len(states), - time_left, - ) - time.sleep(min(time_left, interval)) - time_left -= interval - else: - raise DatasetCollectionTimeoutException( - f"Less than {proportion_complete * 100}% of datasets in the dataset collection is in a terminal state after {maxwait} s" - ) + raise NotReady( + f"The dataset collection {dataset_collection_id} has only {proportion * 100}% of datasets in a terminal state" + ) + + return wait_on(check_and_get_dataset_collection, maxwait=maxwait, interval=interval) -class DatasetCollectionTimeoutException(TimeoutException): - pass +# Unused, for backward compatibility +DatasetCollectionTimeoutException = TimeoutException __all__ = ( diff --git a/bioblend/galaxy/datasets/__init__.py b/bioblend/galaxy/datasets/__init__.py index 6f889465c..f164fddeb 100644 --- a/bioblend/galaxy/datasets/__init__.py +++ b/bioblend/galaxy/datasets/__init__.py @@ -5,7 +5,6 @@ import logging import os import shlex -import time import warnings from typing import ( Any, @@ -21,8 +20,12 @@ from requests import Response -import bioblend -from bioblend import TimeoutException +from bioblend import ( + CHUNK_SIZE, + NotReady, + TimeoutException, + wait_on, +) from bioblend.galaxy.client import Client if TYPE_CHECKING: @@ -144,8 +147,8 @@ def download_dataset( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the dataset state to - become terminal. If the dataset state is not terminal within this - time, a ``DatasetTimeoutException`` will be thrown. + become terminal. After this time, a ``TimeoutException`` will be + raised. :rtype: bytes or str :return: If a ``file_path`` argument is not provided, returns the file @@ -180,7 +183,7 @@ def download_dataset( file_local_path = file_path with open(file_local_path, "wb") as fp: - for chunk in r.iter_content(chunk_size=bioblend.CHUNK_SIZE): + for chunk in r.iter_content(chunk_size=CHUNK_SIZE): if chunk: fp.write(chunk) @@ -411,8 +414,7 @@ def wait_for_dataset( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the dataset state to - become terminal. If the dataset state is not terminal within this - time, a ``DatasetTimeoutException`` will be raised. + become terminal. After this time, a ``TimeoutException`` will be raised. :type interval: float :param interval: Time (in seconds) to wait between 2 consecutive checks. @@ -423,25 +425,17 @@ def wait_for_dataset( :rtype: dict :return: Details of the given dataset. """ - assert maxwait >= 0 - assert interval > 0 - time_left = maxwait - while True: + def check_and_get_dataset() -> Dict[str, Any]: dataset = self.show_dataset(dataset_id) state = dataset["state"] if state in TERMINAL_STATES: if check and state != "ok": raise Exception(f"Dataset {dataset_id} is in terminal state {state}") return dataset - if time_left > 0: - log.info("Dataset %s is in non-terminal state %s. 
Will wait %s more s", dataset_id, state, time_left) - time.sleep(min(time_left, interval)) - time_left -= interval - else: - raise DatasetTimeoutException( - f"Dataset {dataset_id} is still in non-terminal state {state} after {maxwait} s" - ) + raise NotReady(f"Dataset {dataset_id} is in non-terminal state {state}") + + return wait_on(check_and_get_dataset, maxwait=maxwait, interval=interval) class DatasetStateException(Exception): @@ -452,5 +446,5 @@ class DatasetStateWarning(UserWarning): pass -class DatasetTimeoutException(TimeoutException): - pass +# Unused, just for backward compatibility +DatasetTimeoutException = TimeoutException diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index aa02b14ed..b1a2da726 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -21,7 +21,11 @@ ) import bioblend -from bioblend import ConnectionError +from bioblend import ( + ConnectionError, + NotReady, + wait_on, +) from bioblend.galaxy.client import Client from bioblend.galaxy.dataset_collections import CollectionDescription from bioblend.util import attach_file @@ -403,7 +407,7 @@ def show_history( params["keys"] = ",".join(keys) return self._get(id=history_id, contents=contents, params=params) - def delete_dataset(self, history_id: str, dataset_id: str, purge: bool = False) -> None: + def delete_dataset(self, history_id: str, dataset_id: str, purge: bool = False, wait: bool = False) -> None: """ Mark corresponding dataset as deleted. @@ -416,6 +420,8 @@ def delete_dataset(self, history_id: str, dataset_id: str, purge: bool = False) :type purge: bool :param purge: if ``True``, also purge (permanently delete) the dataset + :param wait: Whether to wait for the dataset to be purged. 
+ :rtype: None :return: None @@ -426,9 +432,17 @@ def delete_dataset(self, history_id: str, dataset_id: str, purge: bool = False) """ url = "/".join((self._make_url(history_id, contents=True), dataset_id)) payload = {} - if purge is True: - payload["purge"] = purge + if purge: + payload["purge"] = True self._delete(payload=payload, url=url) + if purge and wait: + + def check_dataset_purged() -> None: + dataset = self.show_dataset(history_id, dataset_id) + if not dataset["purged"]: + raise NotReady(f"Dataset {dataset_id} in library {history_id} is not purged") + + wait_on(check_dataset_purged) def delete_dataset_collection(self, history_id: str, dataset_collection_id: str) -> None: """ diff --git a/bioblend/galaxy/invocations/__init__.py b/bioblend/galaxy/invocations/__init__.py index 7321c5c20..3d32113e1 100644 --- a/bioblend/galaxy/invocations/__init__.py +++ b/bioblend/galaxy/invocations/__init__.py @@ -3,7 +3,6 @@ """ import logging -import time from typing import ( Any, Dict, @@ -14,7 +13,9 @@ from bioblend import ( CHUNK_SIZE, - TimeoutException, + ConnectionError, + NotReady, + wait_on, ) from bioblend.galaxy.client import Client from bioblend.galaxy.workflows import InputsBy @@ -400,18 +401,66 @@ def get_invocation_report_pdf(self, invocation_id: str, file_path: str, chunk_si for chunk in r.iter_content(chunk_size): outf.write(chunk) - def get_invocation_biocompute_object(self, invocation_id: str) -> Dict[str, Any]: + # TODO: Move to a new ``bioblend.galaxy.short_term_storage`` module + def _wait_for_short_term_storage( + self, storage_request_id: str, maxwait: float = 60, interval: float = 3 + ) -> Dict[str, Any]: + """ + Wait until a short term storage request is ready + + :type storage_request_id: str + :param storage_request_id: Storage request ID to wait for. + + :type maxwait: float + :param maxwait: Total time (in seconds) to wait for the storage request + to become ready. After this time, a ``TimeoutException`` will be + raised. + + :type interval: float + :param interval: Time (in seconds) to wait between 2 consecutive checks. + + :rtype: dict + :return: The short term storage request. + """ + url = f"{self.gi.url}/short_term_storage/{storage_request_id}" + is_ready_url = f"{url}/ready" + + def check_and_get_short_term_storage() -> Dict[str, Any]: + if self._get(url=is_ready_url): + return self._get(url=url) + raise NotReady(f"Storage request {storage_request_id} is not ready") + + return wait_on(check_and_get_short_term_storage, maxwait=maxwait, interval=interval) + + def get_invocation_biocompute_object(self, invocation_id: str, maxwait: float = 1200) -> Dict[str, Any]: """ Get a BioCompute object for an invocation. :type invocation_id: str :param invocation_id: Encoded workflow invocation ID + :type maxwait: float + :param maxwait: Total time (in seconds) to wait for the BioCompute + object to become ready. After this time, a ``TimeoutException`` will + be raised. 
+ :rtype: dict :return: The BioCompute object """ - url = self._make_url(invocation_id) + "/biocompute" - return self._get(url=url) + url = self._make_url(invocation_id) + "/prepare_store_download" + payload = {"model_store_format": "bco.json"} + try: + psd = self._post(url=url, payload=payload) + except ConnectionError as e: + if e.status_code not in (400, 404): + raise + # Galaxy release_22.05 and earlier + url = self._make_url(invocation_id) + "/biocompute" + return self._get(url=url) + else: + storage_request_id = psd["storage_request_id"] + url = f"{self.gi.url}/short_term_storage/{storage_request_id}/ready" + return self._wait_for_short_term_storage(storage_request_id, maxwait=maxwait) def wait_for_invocation( self, invocation_id: str, maxwait: float = 12000, interval: float = 3, check: bool = True @@ -424,8 +473,8 @@ def wait_for_invocation( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the invocation state - to become terminal. If the invocation state is not terminal within - this time, a ``TimeoutException`` will be raised. + to become terminal. After this time, a ``TimeoutException`` will be + raised. :type interval: float :param interval: Time (in seconds) to wait between 2 consecutive checks. @@ -437,27 +486,17 @@ def wait_for_invocation( :rtype: dict :return: Details of the workflow invocation. """ - assert maxwait >= 0 - assert interval > 0 - time_left = maxwait - while True: + def check_and_get_invocation() -> Dict[str, Any]: invocation = self.gi.invocations.show_invocation(invocation_id) state = invocation["state"] if state in INVOCATION_TERMINAL_STATES: if check and state != "scheduled": raise Exception(f"Invocation {invocation_id} is in terminal state {state}") return invocation - if time_left > 0: - log.info( - "Invocation %s is in non-terminal state %s. Will wait %s more s", invocation_id, state, time_left - ) - time.sleep(min(time_left, interval)) - time_left -= interval - else: - raise TimeoutException( - f"Invocation {invocation_id} is still in non-terminal state {state} after {maxwait} s" - ) + raise NotReady(f"Invocation {invocation_id} is in non-terminal state {state}") + + return wait_on(check_and_get_invocation, maxwait=maxwait, interval=interval) def _invocation_step_url(self, invocation_id: str, step_id: str) -> str: return "/".join((self._make_url(invocation_id), "steps", step_id)) diff --git a/bioblend/galaxy/jobs/__init__.py b/bioblend/galaxy/jobs/__init__.py index 9b1c97243..4403d368b 100644 --- a/bioblend/galaxy/jobs/__init__.py +++ b/bioblend/galaxy/jobs/__init__.py @@ -3,7 +3,6 @@ """ import logging -import time from typing import ( Any, Dict, @@ -13,7 +12,10 @@ TYPE_CHECKING, ) -from bioblend import TimeoutException +from bioblend import ( + NotReady, + wait_on, +) from bioblend.galaxy.client import Client if TYPE_CHECKING: @@ -494,8 +496,8 @@ def wait_for_job( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the job state to - become terminal. If the job state is not terminal within this time, a - ``TimeoutException`` will be raised. + become terminal. After this time, a ``TimeoutException`` will be + raised. :type interval: float :param interval: Time (in seconds) to wait between 2 consecutive checks. @@ -506,20 +508,14 @@ def wait_for_job( :rtype: dict :return: Details of the given job. 
""" - assert maxwait >= 0 - assert interval > 0 - time_left = maxwait - while True: + def check_and_get_job() -> Dict[str, Any]: job = self.show_job(job_id) state = job["state"] if state in JOB_TERMINAL_STATES: if check and state != "ok": raise Exception(f"Job {job_id} is in terminal state {state}") return job - if time_left > 0: - log.info("Job %s is in non-terminal state %s. Will wait %s more s", job_id, state, time_left) - time.sleep(min(time_left, interval)) - time_left -= interval - else: - raise TimeoutException(f"Job {job_id} is still in non-terminal state {state} after {maxwait} s") + raise NotReady(f"Job {job_id} is in non-terminal state {state}") + + return wait_on(check_and_get_job, maxwait=maxwait, interval=interval) diff --git a/bioblend/galaxy/libraries/__init__.py b/bioblend/galaxy/libraries/__init__.py index eb89ff157..e718b36c9 100644 --- a/bioblend/galaxy/libraries/__init__.py +++ b/bioblend/galaxy/libraries/__init__.py @@ -3,7 +3,6 @@ """ import logging -import time from typing import ( Any, Dict, @@ -13,11 +12,12 @@ TYPE_CHECKING, ) -from bioblend.galaxy.client import Client -from bioblend.galaxy.datasets import ( - DatasetTimeoutException, - TERMINAL_STATES, +from bioblend import ( + NotReady, + wait_on, ) +from bioblend.galaxy.client import Client +from bioblend.galaxy.datasets import TERMINAL_STATES from bioblend.util import attach_file if TYPE_CHECKING: @@ -174,8 +174,8 @@ def wait_for_dataset( :type maxwait: float :param maxwait: Total time (in seconds) to wait for the dataset state to - become terminal. If the dataset state is not terminal within this - time, a ``DatasetTimeoutException`` will be thrown. + become terminal. After this time, a ``TimeoutException`` will be + raised. :type interval: float :param interval: Time (in seconds) to wait between 2 consecutive checks. @@ -184,29 +184,15 @@ def wait_for_dataset( :return: A dictionary containing information about the dataset in the library """ - assert maxwait >= 0 - assert interval > 0 - time_left = maxwait - while True: + def check_and_get_library_dataset() -> Dict[str, Any]: dataset = self.show_dataset(library_id, dataset_id) state = dataset["state"] if state in TERMINAL_STATES: return dataset - if time_left > 0: - log.info( - "Dataset %s in library %s is in non-terminal state %s. Will wait %i more s", - dataset_id, - library_id, - state, - time_left, - ) - time.sleep(min(time_left, interval)) - time_left -= interval - else: - raise DatasetTimeoutException( - f"Dataset {dataset_id} in library {library_id} is still in non-terminal state {state} after {maxwait} s" - ) + raise NotReady(f"Dataset {dataset_id} in library {library_id} is in non-terminal state {state}") + + return wait_on(check_and_get_library_dataset, maxwait=maxwait, interval=interval) def show_folder(self, library_id: str, folder_id: str) -> Dict[str, Any]: """ diff --git a/bioblend/galaxy/objects/wrappers.py b/bioblend/galaxy/objects/wrappers.py index ff035458b..6ffaa058d 100644 --- a/bioblend/galaxy/objects/wrappers.py +++ b/bioblend/galaxy/objects/wrappers.py @@ -1025,19 +1025,21 @@ def update(self, **kwargs: Any) -> "HistoryDatasetAssociation": self.__init__(res, self.container, gi=self.gi) # type: ignore[misc] return self - def delete(self, purge: bool = False) -> None: + def delete(self, purge: bool = False, wait: bool = False) -> None: """ Delete this history dataset. :type purge: bool :param purge: if ``True``, also purge (permanently delete) the dataset + :param wait: Whether to wait for the dataset to be purged. + .. 
note:: The ``purge`` option works only if the Galaxy instance has the ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ - self.gi.gi.histories.delete_dataset(self.container.id, self.id, purge=purge) + self.gi.gi.histories.delete_dataset(self.container.id, self.id, purge=purge, wait=wait) self.container.refresh() self.refresh() From f6774ea8fdf313f60d6c1b5f66258ca958f792ea Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 1 May 2024 14:52:56 +0100 Subject: [PATCH 06/21] Fix for ruff 0.3.0 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index fdb0e1db4..0eb7b0508 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ passenv = [testenv:lint] commands = - ruff . + ruff check . flake8 . black --check --diff . isort --check --diff . From b245c40cef50cb6e52f1e113e0352e60573df429 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 1 May 2024 15:12:38 +0100 Subject: [PATCH 07/21] Update action versions --- .github/workflows/deploy.yaml | 2 +- .github/workflows/lint.yaml | 16 ++++++++-------- .github/workflows/test.yaml | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 0ade1c765..d836b89e8 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -4,7 +4,7 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: '3.12' diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 0760bc6ee..044fe8ace 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -10,11 +10,11 @@ jobs: matrix: python-version: ['3.8', '3.12'] steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install tox - run: python -m pip install 'tox>=1.8.0' - - name: Lint - run: tox -e lint + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install tox + run: python -m pip install 'tox>=1.8.0' + - name: Lint + run: tox -e lint diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e9c3ba588..9f9a6c199 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -59,7 +59,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Cache pip dir - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/pip key: pip-cache-${{ matrix.tox_env }}-${{ matrix.galaxy_version }} From 009d621d17bb1c53dfb4263fc2d0bcbb2e64849b Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Thu, 2 May 2024 12:09:53 +0200 Subject: [PATCH 08/21] test purging of users - after deletion - direct (i.e. 
without deletion) --- bioblend/_tests/TestGalaxyUsers.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/bioblend/_tests/TestGalaxyUsers.py b/bioblend/_tests/TestGalaxyUsers.py index 6288dbcc0..152db52b5 100644 --- a/bioblend/_tests/TestGalaxyUsers.py +++ b/bioblend/_tests/TestGalaxyUsers.py @@ -52,12 +52,17 @@ def test_create_local_user(self): # test a BioBlend GalaxyInstance can be created using username+password user_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, email=new_user_email, password=password) assert user_gi.users.get_current_user()["email"] == new_user_email - # test deletion + # test deletion and purging if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(new_user["id"]) assert deleted_user["email"] == new_user_email assert deleted_user["deleted"] + purged_user = self.gi.users.delete_user(new_user["id"], purge=True) + assert purged_user["email"] == new_user_email + assert purged_user["deleted"] + assert purged_user["purged"] + def test_get_current_user(self): user = self.gi.users.get_current_user() assert user["id"] is not None @@ -84,8 +89,11 @@ def test_update_user(self): assert updated_user["username"] == updated_username assert updated_user["email"] == updated_user_email + # delete user after test (if possile), also tests purging without prior deletion if self.gi.config.get_config()["allow_user_deletion"]: - self.gi.users.delete_user(new_user_id) + purged_user = self.gi.users.delete_user(new_user_id, purge=True) + assert purged_user["deleted"] + assert purged_user["purged"] def test_get_user_apikey(self): # Test getting the API key of the current user, which surely has one From e02b0d96e2bce7a793d618de55192673c9f49c52 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Thu, 2 May 2024 11:32:25 +0200 Subject: [PATCH 09/21] Fix and extend tests for deleting and purging users --- bioblend/_tests/TestGalaxyUsers.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/bioblend/_tests/TestGalaxyUsers.py b/bioblend/_tests/TestGalaxyUsers.py index 152db52b5..a833fcec0 100644 --- a/bioblend/_tests/TestGalaxyUsers.py +++ b/bioblend/_tests/TestGalaxyUsers.py @@ -23,10 +23,9 @@ def test_show_user(self): # assert user["nice_total_disk_usage"] == current_user["nice_total_disk_usage"] # assert user["total_disk_usage"] == current_user["total_disk_usage"] + @test_util.skip_unless_galaxy("release_19.09") # for user purging def test_create_remote_user(self): # WARNING: only admins can create users! - # WARNING: Users cannot be purged through the Galaxy API, so execute - # this test only on a disposable Galaxy instance! if not self.gi.config.get_config()["use_remote_user"]: self.skipTest("This Galaxy instance is not configured to use remote users") new_user_email = "newuser@example.org" @@ -36,11 +35,17 @@ def test_create_remote_user(self): deleted_user = self.gi.users.delete_user(user["id"]) assert deleted_user["email"] == new_user_email assert deleted_user["deleted"] + assert not deleted_user["purged"] + purged_user = self.gi.users.delete_user(user["id"], purge=True) + # email is redacted when purging a user + assert purged_user["email"] != new_user_email + assert purged_user["deleted"] + assert purged_user["purged"] + + @test_util.skip_unless_galaxy("release_19.09") # for user purging def test_create_local_user(self): # WARNING: only admins can create users! 
- # WARNING: Users cannot be purged through the Galaxy API, so execute - # this test only on a disposable Galaxy instance! if self.gi.config.get_config()["use_remote_user"]: self.skipTest("This Galaxy instance is not configured to use local users") new_username = test_util.random_string() @@ -55,11 +60,15 @@ def test_create_local_user(self): # test deletion and purging if self.gi.config.get_config()["allow_user_deletion"]: deleted_user = self.gi.users.delete_user(new_user["id"]) + assert deleted_user["username"] == new_username assert deleted_user["email"] == new_user_email assert deleted_user["deleted"] + assert not deleted_user["purged"] purged_user = self.gi.users.delete_user(new_user["id"], purge=True) - assert purged_user["email"] == new_user_email + # username and email are redacted when purging a user + assert purged_user["username"] != new_username + assert purged_user["email"] != new_user_email assert purged_user["deleted"] assert purged_user["purged"] @@ -71,10 +80,9 @@ def test_get_current_user(self): assert user["nice_total_disk_usage"] is not None assert user["total_disk_usage"] is not None + @test_util.skip_unless_galaxy("release_19.09") # for user purging def test_update_user(self): # WARNING: only admins can create users! - # WARNING: Users cannot be purged through the Galaxy API, so execute - # this test only on a disposable Galaxy instance! if self.gi.config.get_config()["use_remote_user"]: self.skipTest("This Galaxy instance is not configured to use local users") new_username = test_util.random_string() @@ -89,8 +97,9 @@ def test_update_user(self): assert updated_user["username"] == updated_username assert updated_user["email"] == updated_user_email - # delete user after test (if possile), also tests purging without prior deletion + # delete and purge user after test (if possile) if self.gi.config.get_config()["allow_user_deletion"]: + self.gi.users.delete_user(new_user_id) purged_user = self.gi.users.delete_user(new_user_id, purge=True) assert purged_user["deleted"] assert purged_user["purged"] From 803b30d0b8cf7228cfa0bed2de12c3400d0b7520 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 10 May 2024 17:45:57 +0100 Subject: [PATCH 10/21] Restore position of ``all`` parameter in ``HistoryClient.get_histories()`` --- CHANGELOG.md | 14 +++++++++++++- bioblend/galaxy/histories/__init__.py | 26 +++++++++++++------------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7036e4e76..613c1f0e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,19 @@ for Galaxy releases 23.2 and 24.0. * Added ``wait`` parameter to ``HistoryClient.delete_dataset()`` and - ``HistoryDatasetAssociation.delete()`` methods. + BioBlend.objects ``HistoryDatasetAssociation.delete()`` methods. + +* Added ``create_time_min``, ``create_time_max``, ``update_time_min``, + ``update_time_max``, ``view``, ``keys``, ``limit`` and ``offset`` parameters + to ``HistoryClient.get_histories()`` (thanks to + [cat-bro](https://github.com/cat-bro)). + +* Added ``create_time_min``, ``create_time_max``, ``update_time_min`` and + ``update_time_max`` parameters to ``HistoryClient.get_published_histories()`` + (thanks to [cat-bro](https://github.com/cat-bro)). + +* Added ``keys`` parameter to ``HistoryClient.show_history()`` (thanks to + [cat-bro](https://github.com/cat-bro)). * Dropped broken ``deleted`` parameter of ``DatasetClient.show_dataset()``. 
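For reference, a minimal sketch of how the filtering parameters listed above can be combined; the instance URL, API key and timestamp below are placeholders, not part of this change:

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url="https://galaxy.example.org", key="<API key>")
    # Histories updated since 2024, returning only the requested fields,
    # at most 10 at a time.
    histories = gi.histories.get_histories(
        update_time_min="2024-01-01T00:00:00",
        keys=["id", "name", "update_time"],
        limit=10,
        offset=0,
    )
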
diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index b1a2da726..93e04fa47 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -93,11 +93,11 @@ def _get_histories( filter_user_published: Optional[bool] = None, get_all_published: bool = False, slug: Optional[str] = None, + all: Optional[bool] = False, create_time_min: Optional[str] = None, create_time_max: Optional[str] = None, update_time_min: Optional[str] = None, update_time_max: Optional[str] = None, - all: Optional[bool] = False, view: Optional[Literal["summary", "detailed"]] = None, keys: Optional[List[str]] = None, limit: Optional[int] = None, @@ -118,6 +118,8 @@ def _get_histories( if slug is not None: params.setdefault("q", []).append("slug") params.setdefault("qv", []).append(slug) + if all: + params["all"] = True if create_time_min: params.setdefault("q", []).append("create_time-ge") params.setdefault("qv", []).append(create_time_min) @@ -130,8 +132,6 @@ def _get_histories( if update_time_max: params.setdefault("q", []).append("update_time-le") params.setdefault("qv", []).append(update_time_max) - if all: - params["all"] = True if view: params["view"] = view if keys: @@ -155,11 +155,11 @@ def get_histories( deleted: bool = False, published: Optional[bool] = None, slug: Optional[str] = None, + all: Optional[bool] = False, create_time_min: Optional[str] = None, create_time_max: Optional[str] = None, update_time_min: Optional[str] = None, update_time_max: Optional[str] = None, - all: Optional[bool] = False, view: Optional[Literal["summary", "detailed"]] = None, keys: Optional[List[str]] = None, limit: Optional[int] = None, @@ -186,6 +186,11 @@ def get_histories( :type slug: str :param slug: History slug to filter on + :type all: bool + :param all: Whether to include histories from other users. This + parameter works only on Galaxy 20.01 or later and can be specified + only if the user is a Galaxy admin. + :type create_time_min: str :param create_time_min: Return histories created after the provided time and date, which should be formatted as ``YYYY-MM-DDTHH-MM-SS``. @@ -202,11 +207,6 @@ def get_histories( :param update_time_max: Return histories last updated before the provided time and date, which should be formatted as ``YYYY-MM-DDTHH-MM-SS``. - :type all: bool - :param all: Whether to include histories from other users. This - parameter works only on Galaxy 20.01 or later and can be specified - only if the user is a Galaxy admin. - :type view: str :param view: Options are 'summary' or 'detailed'. This defaults to 'summary'. Setting view to 'detailed' results in a larger number of fields returned. 
@@ -239,14 +239,14 @@ def get_histories( get_all_published=False, slug=slug, all=all, - view=view, - keys=keys, - limit=limit, - offset=offset, create_time_min=create_time_min, create_time_max=create_time_max, update_time_min=update_time_min, update_time_max=update_time_max, + view=view, + keys=keys, + limit=limit, + offset=offset, ) def get_published_histories( From c103e7f74692ff70a1d3caa3a15e4620f998dcba Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 12 May 2024 18:52:36 +0100 Subject: [PATCH 11/21] Skip ToolShed tests if BIOBLEND_TOOLSHED_URL is down --- bioblend/_tests/test_util.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/bioblend/_tests/test_util.py b/bioblend/_tests/test_util.py index 5cb584fd4..cdf1e8eeb 100644 --- a/bioblend/_tests/test_util.py +++ b/bioblend/_tests/test_util.py @@ -10,6 +10,8 @@ Optional, ) +import requests + import bioblend.galaxy NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test." @@ -19,6 +21,14 @@ def random_string(length: int = 8) -> str: return "".join(random.choice(string.ascii_lowercase) for _ in range(length)) +def is_site_up(url: str) -> bool: + try: + response = requests.get(url, timeout=10) + return response.status_code == 200 + except Exception: + return False + + def skip_unless_toolshed() -> Callable: """Decorate tests with this to skip the test if a URL for a ToolShed to run the tests is not provided. @@ -27,6 +37,9 @@ def skip_unless_toolshed() -> Callable: return unittest.skip( "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL (e.g. to https://testtoolshed.g2.bx.psu.edu/ ) to run this test." ) + toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"] + if not is_site_up(toolshed_url): + return unittest.skip(f"Configured ToolShed [{toolshed_url}] appears to be down") return lambda f: f From 62fcbc39f1a4bcba4e88d0c4b6bbf551261b66a2 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 10 May 2024 17:48:19 +0100 Subject: [PATCH 12/21] Release 1.3.0 --- CHANGELOG.md | 9 ++++++++- bioblend/__init__.py | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 613c1f0e6..d1a774710 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,12 @@ -### BioBlend v +### BioBlend v1.3.0 - 2024-05-12 * Dropped support for Python 3.7. Added support for Python 3.12. Added support for Galaxy releases 23.2 and 24.0. +* Added ``copy_elements`` parameter to + ``HistoryClient.create_dataset_collection()`` and BioBlend.objects + ``History.create_dataset_collection()`` methods. + * Added ``wait`` parameter to ``HistoryClient.delete_dataset()`` and BioBlend.objects ``HistoryDatasetAssociation.delete()`` methods. @@ -35,6 +39,9 @@ * Fixed ``InvocationClient.get_invocation_biocompute_object()`` method on upcoming Galaxy 24.1 . +* * Improvements to linting and tests (thanks to + [Matthias Bernt](https://github.com/bernt-matthias)). + ### BioBlend v1.2.0 - 2023-06-30 * Dropped support for Galaxy releases 17.09-19.01. 
Added support for Galaxy diff --git a/bioblend/__init__.py b/bioblend/__init__.py index d0062462a..7f906a98e 100644 --- a/bioblend/__init__.py +++ b/bioblend/__init__.py @@ -16,7 +16,7 @@ ) # Current version of the library -__version__ = "1.2.0" +__version__ = "1.3.0" # default chunk size (in bytes) for reading remote data try: From 49a5fd668402affec68a2d1132561d80feb61114 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 12 May 2024 21:01:01 +0100 Subject: [PATCH 13/21] Fix typo --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1a774710..b0eeb94a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +### BioBlend v + + ### BioBlend v1.3.0 - 2024-05-12 * Dropped support for Python 3.7. Added support for Python 3.12. Added support @@ -39,7 +42,7 @@ * Fixed ``InvocationClient.get_invocation_biocompute_object()`` method on upcoming Galaxy 24.1 . -* * Improvements to linting and tests (thanks to +* Improvements to linting and tests (thanks to [Matthias Bernt](https://github.com/bernt-matthias)). ### BioBlend v1.2.0 - 2023-06-30 From c2ff1c3c92e60eaa6814be868e67b55bb21de314 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Sun, 12 May 2024 21:03:46 +0100 Subject: [PATCH 14/21] Drop universal wheel config Since Python 2 is not supported any more, a pure Python wheel is more appropriate. --- setup.cfg | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 85e05232e..3e3cdecbd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,3 @@ -[bdist_wheel] -universal = 1 - [flake8] exclude = .eggs @@ -63,6 +60,7 @@ strict_equality = True warn_redundant_casts = True warn_unused_ignores = True warn_unreachable = True + [mypy-bioblend._tests.*] disallow_untyped_defs = False # Allow testing that a function return value is None From a11a57f9bbc14f1f7d988d3b33a07459f9cc0191 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 21 May 2024 09:35:35 +0100 Subject: [PATCH 15/21] Add support for Galaxy release_24.1 --- .github/workflows/test.yaml | 1 + CHANGELOG.md | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 9f9a6c199..ab1c00485 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -33,6 +33,7 @@ jobs: tox_env: [py38] galaxy_version: - dev + - release_24.1 - release_24.0 - release_23.2 - release_23.1 diff --git a/CHANGELOG.md b/CHANGELOG.md index b0eeb94a6..f2e07fdaa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ### BioBlend v +* Added support for Galaxy release 24.1. 
### BioBlend v1.3.0 - 2024-05-12 From 37c6f05e0f0637a92b9f5fd2868776e6c88b3e9c Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sat, 1 Jun 2024 12:19:52 +0200 Subject: [PATCH 16/21] add test of get_histories: limit and offset --- bioblend/_tests/TestGalaxyHistories.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bioblend/_tests/TestGalaxyHistories.py b/bioblend/_tests/TestGalaxyHistories.py index 160b6eb5c..53f39ae32 100644 --- a/bioblend/_tests/TestGalaxyHistories.py +++ b/bioblend/_tests/TestGalaxyHistories.py @@ -70,6 +70,15 @@ def test_get_histories(self): all_histories = self.gi.histories.get_histories() assert len(all_histories) > 0 + # Test limit and offset + first = self.gi.histories.get_histories(limit=1) + others = self.gi.histories.get_histories(offset=1) + assert len(others) > 0 # guess the test makes more sense with at least 2 histories + assert [h["id"] for h in all_histories] == [h["id"] for h in first] + [h["id"] for h in others] + + out_of_limit = self.gi.histories.get_histories(offset=1000000) + assert out_of_limit == [] + # Check whether id is present, when searched by name histories = self.gi.histories.get_histories(name=self.default_history_name) assert len([h for h in histories if h["id"] == self.history["id"]]) == 1 From 6a1b47de0cfcd37fa390e557367835e696c5e04e Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sat, 1 Jun 2024 12:46:44 +0200 Subject: [PATCH 17/21] fix docs for offset --- bioblend/galaxy/histories/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index 93e04fa47..7e817b85b 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -218,8 +218,8 @@ def get_histories( :param limit: How many items to return (upper bound). :type offset: int - :param offset: skip the first ( offset - 1 ) items and begin returning - at the Nth item. + :param offset: skip the first (offset) items and begin returning + at item at index offset (i.e. start with the element offset+1). :rtype: list :return: List of history dicts. 
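A small pagination sketch matching the ``offset`` semantics documented above; the connection details are placeholders:

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url="https://galaxy.example.org", key="<API key>")
    page_size = 25
    offset = 0
    all_histories = []
    while True:
        # Skip the first `offset` histories and return at most `page_size` more.
        page = gi.histories.get_histories(limit=page_size, offset=offset)
        if not page:
            break
        all_histories.extend(page)
        offset += page_size
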
From dd1dacfd20e766aa603edf0d1376ba8ef30598a2 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sat, 1 Jun 2024 15:52:07 +0200 Subject: [PATCH 18/21] fix linter issue --- bioblend/galaxy/dataset_collections/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index addd84a32..e1dff9b20 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -33,7 +33,7 @@ def __init__( self.name = name self.type = type if isinstance(elements, dict): - self.elements: List[Union["CollectionElement", "SimpleElement"]] = [ + self.elements: List[Union[CollectionElement, SimpleElement]] = [ HistoryDatasetElement(name=key, id=value) for key, value in elements.values() ] elif elements: From bc58a96acb4e1d17029f3068832d05b231c55107 Mon Sep 17 00:00:00 2001 From: M Bernt Date: Sat, 1 Jun 2024 19:43:38 +0200 Subject: [PATCH 19/21] Improved wording Co-authored-by: Nicola Soranzo --- bioblend/galaxy/histories/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index 7e817b85b..6efc6f204 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -219,7 +219,7 @@ def get_histories( :type offset: int :param offset: skip the first (offset) items and begin returning - at item at index offset (i.e. start with the element offset+1). + items at index offset (i.e. start with the element offset+1). :rtype: list :return: List of history dicts. From e37fa02b0cc0d4d00093068c66f546bd3ebe769c Mon Sep 17 00:00:00 2001 From: M Bernt Date: Sat, 1 Jun 2024 21:44:22 +0200 Subject: [PATCH 20/21] Only assume 1 history in the 2nd batch Co-authored-by: Nicola Soranzo --- bioblend/_tests/TestGalaxyHistories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/_tests/TestGalaxyHistories.py b/bioblend/_tests/TestGalaxyHistories.py index 53f39ae32..b1ae681b9 100644 --- a/bioblend/_tests/TestGalaxyHistories.py +++ b/bioblend/_tests/TestGalaxyHistories.py @@ -73,7 +73,7 @@ def test_get_histories(self): # Test limit and offset first = self.gi.histories.get_histories(limit=1) others = self.gi.histories.get_histories(offset=1) - assert len(others) > 0 # guess the test makes more sense with at least 2 histories + assert len(first) == 1 assert [h["id"] for h in all_histories] == [h["id"] for h in first] + [h["id"] for h in others] out_of_limit = self.gi.histories.get_histories(offset=1000000) From 1c13d6afe22189f003ebf962bdadd0fa4c6f76d7 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Sun, 2 Jun 2024 07:28:47 +0200 Subject: [PATCH 21/21] test if there is user["deleted"] --- bioblend/_tests/TestGalaxyUsers.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bioblend/_tests/TestGalaxyUsers.py b/bioblend/_tests/TestGalaxyUsers.py index a833fcec0..c58e75f41 100644 --- a/bioblend/_tests/TestGalaxyUsers.py +++ b/bioblend/_tests/TestGalaxyUsers.py @@ -11,6 +11,7 @@ def test_get_users(self): for user in users: assert user["id"] is not None assert user["email"] is not None + assert user["deleted"] is not None def test_show_user(self): current_user = self.gi.users.get_current_user()