diff --git a/CHANGELOG.md b/CHANGELOG.md
index e7b802447..cdb2df115 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,9 @@
   ``GalaxyClient``, ``GalaxyInstance`` and ``ToolShedInstance`` classes are
   now keyword-only.
 
+* Classes defined in ``bioblend.galaxy.objects.wrappers`` are no longer
+  re-exported by ``bioblend.galaxy.objects``.
+
 ### BioBlend v1.2.0 - 2023-06-30
 
 * Dropped support for Galaxy releases 17.09-19.01. Added support for Galaxy
diff --git a/bioblend/galaxy/objects/__init__.py b/bioblend/galaxy/objects/__init__.py
index 1b4061ba2..55ed6fe96 100644
--- a/bioblend/galaxy/objects/__init__.py
+++ b/bioblend/galaxy/objects/__init__.py
@@ -1,2 +1,3 @@
 from .galaxy_instance import GalaxyInstance  # noqa: F401
-from .wrappers import *  # noqa: F401,F403
+
+__all__ = ("GalaxyInstance",)
diff --git a/bioblend/galaxy/objects/wrappers.py b/bioblend/galaxy/objects/wrappers.py
index b747149ed..a6f576c88 100644
--- a/bioblend/galaxy/objects/wrappers.py
+++ b/bioblend/galaxy/objects/wrappers.py
@@ -1827,6 +1827,8 @@ class DatasetContainerPreview(Wrapper):
         "deleted",
         "name",
     )
+    deleted: bool
+    name: str
 
 
 class LibraryPreview(DatasetContainerPreview):
@@ -1872,6 +1874,12 @@ class WorkflowPreview(Wrapper):
         "show_in_tool_panel",
         "tags",
     )
+    deleted: bool
+    name: str
+    owner: str
+    published: bool
+    show_in_tool_panel: bool
+    tags: List[str]
 
 
 class InvocationPreview(Wrapper):
@@ -1884,7 +1892,6 @@ class InvocationPreview(Wrapper):
 
     BASE_ATTRS = Wrapper.BASE_ATTRS + (
         "history_id",
-        "id",
         "state",
         "update_time",
         "uuid",
diff --git a/docs/examples/objects/common.py b/docs/examples/objects/common.py
index 02557e95f..66894f371 100644
--- a/docs/examples/objects/common.py
+++ b/docs/examples/objects/common.py
@@ -1,4 +1,12 @@
-def get_one(iterable):
+from typing import (
+    Iterable,
+    TypeVar,
+)
+
+T = TypeVar("T")
+
+
+def get_one(iterable: Iterable[T]) -> T:
     seq = list(iterable)
     assert len(seq) == 1
     return seq[0]
diff --git a/docs/examples/objects/small.py b/docs/examples/objects/small.py
index 79890ebe3..abe21c50c 100644
--- a/docs/examples/objects/small.py
+++ b/docs/examples/objects/small.py
@@ -42,13 +42,15 @@
 history_name = "get_col output"
 params = {"Cut1": {"columnList": "c2"}}
 print(f"Running workflow: {wf.name} [{wf.id}]")
-outputs, out_hist = wf.run(input_map, history_name, params=params, wait=True)
+inv = wf.invoke(input_map, params=params, history=history_name, inputs_by="name")
+out_hist = gi.histories.get(inv.history_id)
+inv.wait()
 print("Job has finished")
 assert out_hist.name == history_name
 print(f"Output history: {out_hist.name} [{out_hist.id}]")
 
 # Save results to local disk
-out_ds = get_one([_ for _ in outputs if _.name == "Cut on data 1"])
-with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as f:
-    out_ds.download(f)
-print(f'Output downloaded to "{f.name}"')
+out_ds = get_one(out_hist.get_datasets(name="Cut on data 1"))
+with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as tmp_f:
+    out_ds.download(tmp_f)
+print(f'Output downloaded to "{tmp_f.name}"')
diff --git a/docs/examples/objects/w2_bacterial_reseq.py b/docs/examples/objects/w2_bacterial_reseq.py
index 6d7c5942a..332a9be83 100644
--- a/docs/examples/objects/w2_bacterial_reseq.py
+++ b/docs/examples/objects/w2_bacterial_reseq.py
@@ -56,7 +56,8 @@
 
 # Run the workflow on a new history with the selected datasets as inputs
 
-outputs, out_hist = iw.run(input_map, h, params=params)
+inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
+out_hist = gi.histories.get(inv.history_id)
 assert out_hist.name == history_name
 
 print(f"Running workflow: {iw.name} [{iw.id}]")
diff --git a/docs/examples/objects/w3_bacterial_denovo.py b/docs/examples/objects/w3_bacterial_denovo.py
index 1f81594fc..ae92ed827 100644
--- a/docs/examples/objects/w3_bacterial_denovo.py
+++ b/docs/examples/objects/w3_bacterial_denovo.py
@@ -1,6 +1,10 @@
 import json
 import os
 import sys
+from typing import (
+    Any,
+    Dict,
+)
 
 from common import get_one  # noqa:I100,I201
 
@@ -51,7 +55,7 @@
 lengths = {"19", "23", "29"}
 ws_ids = iw.tool_labels_to_ids["velveth"]
 assert len(ws_ids) == len(lengths)
-params = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)}
+params: Dict[str, Any] = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)}
 
 # Set the "ins_length" runtime parameter to the same value for the 3
 # "velvetg" steps
@@ -70,7 +74,8 @@
 
 # Run the workflow on a new history with the selected datasets as inputs
 
-outputs, out_hist = iw.run(input_map, h, params=params)
+inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
+out_hist = gi.histories.get(inv.history_id)
 assert out_hist.name == history_name
 
 print(f"Running workflow: {iw.name} [{iw.id}]")
diff --git a/docs/examples/objects/w5_galaxy_api.py b/docs/examples/objects/w5_galaxy_api.py
index d5f19897c..b4419e7c2 100644
--- a/docs/examples/objects/w5_galaxy_api.py
+++ b/docs/examples/objects/w5_galaxy_api.py
@@ -2,44 +2,41 @@
 import os
 import sys
 
+import requests
+
 # This example, provided for comparison with w5_metagenomics.py,
 # contains the code required to run the metagenomics workflow
 # *without* BioBlend.
 
 URL = os.getenv("GALAXY_URL", "https://orione.crs4.it").rstrip("/")
 API_URL = f"{URL}/api"
-API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
-if API_KEY == "YOUR_API_KEY":
+API_KEY = os.getenv("GALAXY_API_KEY")
+if not API_KEY:
     sys.exit("API_KEY not set, see the README.txt file")
-
-# Clone the galaxy git repository and replace
-# YOUR_GALAXY_PATH with the clone's local path in the following code, e.g.:
-# cd /tmp
-# git clone https://github.com/galaxyproject/galaxy
-# GALAXY_PATH = '/tmp/galaxy'
-
-GALAXY_PATH = "YOUR_GALAXY_PATH"
-sys.path.insert(1, os.path.join(GALAXY_PATH, "scripts/api"))
-import common  # noqa: E402,I100,I202
+headers = {"Content-Type": "application/json", "x-api-key": API_KEY}
 
 # Select "W5 - Metagenomics" from published workflows
 workflow_name = "W5 - Metagenomics"
-workflows = common.get(API_KEY, f"{API_URL}/workflows?show_published=True")
-w = [_ for _ in workflows if _["published"] and _["name"] == workflow_name]
-assert len(w) == 1
-w = w[0]
+r = requests.get(f"{API_URL}/workflows", params={"show_published": True}, headers=headers)
+workflows = r.json()
+filtered_workflows = [_ for _ in workflows if _["published"] and _["name"] == workflow_name]
+assert len(filtered_workflows) == 1
+w = filtered_workflows[0]
 
 # Import the workflow to user space
 data = {"workflow_id": w["id"]}
-iw = common.post(API_KEY, f"{API_URL}/workflows/import", data)
-iw_details = common.get(API_KEY, f"{API_URL}/workflows/{iw['id']}")
+r = requests.post(f"{API_URL}/workflows/import", data=json.dumps(data), headers=headers)
+iw = r.json()
+r = requests.get(f"{API_URL}/workflows/{iw['id']}", headers=headers)
+iw_details = r.json()
 
 # Select the "Orione SupMat" library
 library_name = "Orione SupMat"
-libraries = common.get(API_KEY, f"{API_URL}/libraries")
+r = requests.get(f"{API_URL}/libraries", headers=headers)
+libraries = r.json()
 filtered_libraries = [_ for _ in libraries if _["name"] == library_name]
 assert len(filtered_libraries) == 1
 library = filtered_libraries[0]
@@ -47,16 +44,17 @@
 
 # Select the "/Metagenomics/MetagenomicsDataset.fq" dataset
 ds_name = "/Metagenomics/MetagenomicsDataset.fq"
-contents = common.get(API_KEY, f"{API_URL}/libraries/{library['id']}/contents")
-ld = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name]
-assert len(ld) == 1
-ld = ld[0]
+r = requests.get(f"{API_URL}/libraries/{library['id']}/contents", headers=headers)
+contents = r.json()
+filtered_contents = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name]
+assert len(filtered_contents) == 1
+ld = filtered_contents[0]
 
 # Select the blastn step
-ws = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]]
-assert len(ws) == 1
-ws = ws[0]
+filtered_wf_steps = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]]
+assert len(filtered_wf_steps) == 1
+ws = filtered_wf_steps[0]
 tool_id = ws["tool_id"]
 
 # Get (a copy of) the parameters dict for the selected step
@@ -78,7 +76,8 @@
 input_step_id = iw_details["inputs"].keys()[0]
 data["ds_map"] = {input_step_id: {"src": "ld", "id": ld["id"]}}
 data["history"] = history_name
-r_dict = common.post(API_KEY, f"{API_URL}/workflows", data)
+r = requests.post(f"{API_URL}/workflows", data=json.dumps(data), headers=headers)
+r_dict = r.json()
 
 print(f"Running workflow: {iw['name']} [{iw['id']}]")
 print(f"Output history: {history_name} [{r_dict['history']}]")
diff --git a/docs/examples/objects/w5_metagenomics.py b/docs/examples/objects/w5_metagenomics.py
index 1866201ab..53bc4e823 100644
--- a/docs/examples/objects/w5_metagenomics.py
+++ b/docs/examples/objects/w5_metagenomics.py
@@ -52,7 +52,8 @@
 params = {tool_id: {"db_opts": json.loads(ws_parameters["db_opts"])}}
 params[tool_id]["db_opts"]["database"] = "16SMicrobial-20131106"
 
-outputs, out_hist = iw.run(input_map, h, params=params)
+inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
+out_hist = gi.histories.get(inv.history_id)
 assert out_hist.name == history_name
 
 print(f"Running workflow: {iw.name} [{iw.id}]")
diff --git a/tox.ini b/tox.ini
index ddc413b9a..fdb0e1db4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -21,7 +21,7 @@ commands =
     flake8 .
     black --check --diff .
     isort --check --diff .
-    mypy bioblend/
+    mypy bioblend/ docs/examples/
 deps =
     black
     flake8
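
Usage note on the ``Workflow.run()`` replacement applied throughout the
examples above: ``Workflow.invoke()`` returns an ``Invocation`` wrapper
instead of an ``(outputs, history)`` pair, so the output history is looked up
via ``inv.history_id`` and waiting for completion is explicit. A minimal
sketch of the pattern (``gi``, ``wf`` and ``input_map`` are assumed to be an
existing ``GalaxyInstance``, ``Workflow`` and input mapping; the history name
is made up):

    inv = wf.invoke(input_map, history="invoke output", inputs_by="name")
    out_hist = gi.histories.get(inv.history_id)  # history created for this run
    inv.wait()  # block until the invocation reaches a terminal state
    results = out_hist.get_datasets()

Per the CHANGELOG entry above, wrapper classes also now need an explicit
import, e.g. ``from bioblend.galaxy.objects.wrappers import Workflow`` rather
than ``from bioblend.galaxy.objects import Workflow``.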
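The ``w5_galaxy_api.py`` changes drop the legacy ``scripts/api/common.py``
helpers in favor of plain ``requests`` calls authenticated with an
``x-api-key`` header. A condensed sketch of that call pattern (the server URL
is a placeholder; ``GALAXY_API_KEY`` must be set in the environment):

    import json
    import os

    import requests

    API_URL = "https://example.org/api"  # placeholder Galaxy server
    headers = {"Content-Type": "application/json", "x-api-key": os.environ["GALAXY_API_KEY"]}

    # GET with query parameters, e.g. list published workflows
    r = requests.get(f"{API_URL}/workflows", params={"show_published": True}, headers=headers)
    r.raise_for_status()  # fail early on HTTP errors
    workflows = r.json()

    # POST with a JSON-encoded body, e.g. import a workflow into user space
    payload = {"workflow_id": workflows[0]["id"]}
    r = requests.post(f"{API_URL}/workflows/import", data=json.dumps(payload), headers=headers)
    imported = r.json()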