From 5cb528fba6af95fe31e6d1a80447bbf42927832a Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 9 Nov 2021 01:44:27 +0100 Subject: [PATCH 01/32] Skip tool dependendency installation testing in Galaxy < 19.05 Conda installation is too old and broken in previous releases. Also: - Merge test_tool_dependency_uninstall into test_tool_dependency_install --- bioblend/_tests/TestGalaxyTools.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bioblend/_tests/TestGalaxyTools.py b/bioblend/_tests/TestGalaxyTools.py index 988ac6e78..1bb9f9cc2 100644 --- a/bioblend/_tests/TestGalaxyTools.py +++ b/bioblend/_tests/TestGalaxyTools.py @@ -130,9 +130,13 @@ def test_run_cat1(self): # TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6, # and 7 8 9. + @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_tool('CONVERTER_fasta_to_bowtie_color_index') def test_tool_dependency_install(self): installed_dependencies = self.gi.tools.install_dependencies('CONVERTER_fasta_to_bowtie_color_index') self.assertTrue(any(True for d in installed_dependencies if d.get('name') == 'bowtie' and d.get('dependency_type') == 'conda'), f"installed_dependencies is {installed_dependencies}") + status = self.gi.tools.uninstall_dependencies('CONVERTER_fasta_to_bowtie_color_index') + self.assertEqual(status[0]['model_class'], 'NullDependency', status) @test_util.skip_unless_tool('CONVERTER_fasta_to_bowtie_color_index') def test_tool_requirements(self): @@ -154,10 +158,6 @@ def test_get_citations(self): citations = self.gi.tools.get_citations('sra_source') self.assertEqual(len(citations), 2) - def test_tool_dependency_uninstall(self): - status = self.gi.tools.uninstall_dependencies('CONVERTER_fasta_to_bowtie_color_index') - self.assertEqual(status[0]['model_class'], 'NullDependency') - def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey="?"): self.assertEqual(len(tool_output["outputs"]), 1) output = tool_output['outputs'][0] From cd471d1fcaf7e01f0ae0315cdc593b3ca7c2381c Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 14 Dec 2021 10:52:03 +0000 Subject: [PATCH 02/32] Update Python version for Galaxy >=22.01 --- .github/workflows/test.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c99bba1bd..c54283c77 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -91,9 +91,11 @@ jobs: release_20.0* ) galaxy_python_version=3.5 ;; - release_21.0* | dev ) + release_21.0* ) galaxy_python_version=3.6 ;; + release_22.0* | dev ) + galaxy_python_version=3.7 esac echo "::set-output name=galaxy_python_version::$galaxy_python_version" - name: Set up Python for Galaxy From a8a60e816c7b08d69fe2612517905b31673bd4ad Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 14 Dec 2021 11:06:25 +0000 Subject: [PATCH 03/32] Cancel in-progress concurrent builds --- .github/workflows/lint.yaml | 3 +++ .github/workflows/test.yaml | 3 +++ 2 files changed, 6 insertions(+) diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 54b989be8..bfa65d1e3 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -1,5 +1,8 @@ name: Lint on: [push, pull_request] +concurrency: + group: lint-${{ github.ref }} + cancel-in-progress: true jobs: lint: runs-on: ubuntu-latest diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c54283c77..0ec055ed7 100644 --- a/.github/workflows/test.yaml +++ 
b/.github/workflows/test.yaml @@ -5,6 +5,9 @@ on: schedule: # Run at midnight UTC every Tuesday - cron: '0 0 * * 2' +concurrency: + group: test-${{ github.ref }} + cancel-in-progress: true jobs: test: if: github.event_name != 'schedule' || github.repository_owner == 'galaxyproject' From 85f80412fa918131513bcddfc5d789b5db4050a9 Mon Sep 17 00:00:00 2001 From: Joshi Date: Wed, 15 Dec 2021 22:55:38 -0500 Subject: [PATCH 04/32] tool build function included --- bioblend/galaxy/tools/__init__.py | 36 +++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index 5390a0bda..a8fe9a496 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -189,6 +189,42 @@ def show_tool(self, tool_id, io_details=False, link_details=False): params['link_details'] = link_details return self._get(id=tool_id, params=params) + def build_tool(self, tool_id, inputs=None, tool_version=None, history_id=None): + + """ + Get updated tool parameters given tool. + + :type inputs: dict + :param inputs: dictionary of tool parameters + + :type tool_id: str + :param tool_id: id of the requested tool + + :type history_id str + :param history_id: id of the requested history + + :type tool_version str + :param tool_version: version of the requested tool + + :rtype: dict + :return: Returns a tool model including dynamic parameters and updated values, repeats block etc. + """ + params = {} + + if inputs: + params['inputs'] = inputs + + if tool_version: + params['tool_version'] = tool_version + + if history_id: + params['history_id'] = history_id + + url = '/'.join((self.gi.url, 'tools', tool_id, 'build')) + + + return self._post(payload=params, url=url) + def run_tool(self, history_id, tool_id, tool_inputs, input_format='legacy'): """ Runs tool specified by ``tool_id`` in history indicated From 700eb94106a16a060f44c309c696ca79052427b9 Mon Sep 17 00:00:00 2001 From: Joshi Date: Wed, 15 Dec 2021 22:57:52 -0500 Subject: [PATCH 05/32] function name changed --- bioblend/galaxy/tools/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index a8fe9a496..17f9ac601 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -189,7 +189,7 @@ def show_tool(self, tool_id, io_details=False, link_details=False): params['link_details'] = link_details return self._get(id=tool_id, params=params) - def build_tool(self, tool_id, inputs=None, tool_version=None, history_id=None): + def build(self, tool_id, inputs=None, tool_version=None, history_id=None): """ Get updated tool parameters given tool. From 255dd4453b9512b31f8fb4be3f4a5f5815422f1a Mon Sep 17 00:00:00 2001 From: Joshi Date: Wed, 15 Dec 2021 23:28:56 -0500 Subject: [PATCH 06/32] help section fixed --- bioblend/galaxy/tools/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index 17f9ac601..e56aa133b 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -192,7 +192,7 @@ def show_tool(self, tool_id, io_details=False, link_details=False): def build(self, tool_id, inputs=None, tool_version=None, history_id=None): """ - Get updated tool parameters given tool. + Get updated tool parameters of a given tool. 
:type inputs: dict :param inputs: dictionary of tool parameters From 6e0591d56b9f2cca1b883564bdaa6c50b5986e1a Mon Sep 17 00:00:00 2001 From: Joshi Date: Thu, 16 Dec 2021 10:13:27 -0500 Subject: [PATCH 07/32] flake8 formating issues fixed --- bioblend/galaxy/tools/__init__.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index e56aa133b..62f68fd45 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -195,7 +195,7 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): Get updated tool parameters of a given tool. :type inputs: dict - :param inputs: dictionary of tool parameters + :param inputs: dictionary of tool parameters :type tool_id: str :param tool_id: id of the requested tool @@ -204,7 +204,7 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): :param history_id: id of the requested history :type tool_version str - :param tool_version: version of the requested tool + :param tool_version: version of the requested tool :rtype: dict :return: Returns a tool model including dynamic parameters and updated values, repeats block etc. @@ -212,17 +212,16 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): params = {} if inputs: - params['inputs'] = inputs + params['inputs'] = inputs if tool_version: - params['tool_version'] = tool_version + params['tool_version'] = tool_version if history_id: - params['history_id'] = history_id + params['history_id'] = history_id url = '/'.join((self.gi.url, 'tools', tool_id, 'build')) - return self._post(payload=params, url=url) def run_tool(self, history_id, tool_id, tool_inputs, input_format='legacy'): From 07ecdc55c049a1232be934648cd4ec975c4fdc92 Mon Sep 17 00:00:00 2001 From: Joshi Date: Fri, 17 Dec 2021 16:50:21 -0500 Subject: [PATCH 08/32] new test added --- bioblend/_tests/TestGalaxyTools.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/bioblend/_tests/TestGalaxyTools.py b/bioblend/_tests/TestGalaxyTools.py index 1bb9f9cc2..f3a5b6561 100644 --- a/bioblend/_tests/TestGalaxyTools.py +++ b/bioblend/_tests/TestGalaxyTools.py @@ -165,3 +165,9 @@ def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey expected_contents = open(fn, "rb").read() self._wait_and_verify_dataset(output["id"], expected_contents) self.assertEqual(output["genome_build"], expected_dbkey) + + @test_util.skip_unless_tool("random_lines1") + def test_get_tool_model(self): + history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"] + tool_model = self.gi.tools.build(tool_id="random_lines1", history_id=history_id) + self.assertEqual(len(tool_model['inputs']), 3) From d24ec0a6898436b8ac871b2d029b59b9cc7a0111 Mon Sep 17 00:00:00 2001 From: Joshi Date: Sat, 18 Dec 2021 13:45:18 -0500 Subject: [PATCH 09/32] parameters description and test for buid function added --- bioblend/galaxy/tools/__init__.py | 175 +++++++++++++++++++++++++++++- 1 file changed, 173 insertions(+), 2 deletions(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index 62f68fd45..579f001ba 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -192,10 +192,19 @@ def show_tool(self, tool_id, io_details=False, link_details=False): def build(self, tool_id, inputs=None, tool_version=None, history_id=None): """ - Get updated tool parameters of a given tool. 
+ This method returns the tool model, which includes an updated input parameter array for the given tool, + based on user-defined "inputs". :type inputs: dict - :param inputs: dictionary of tool parameters + :param inputs: (optional) inputs for the payload. Possible values are the + default 'legacy' (where inputs nested inside conditionals + or repeats are identified with e.g. '|') + or '21.01' (where inputs inside conditionals or repeats are nested elements). + For example:: + + {"num_lines": "1", + "input": {"values": [{"src": "hda", "id": "4d366c1196c36d18"}]}, + "seed_source|seed_source_selector": "no_seed",} :type tool_id: str :param tool_id: id of the requested tool @@ -208,6 +217,168 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): :rtype: dict :return: Returns a tool model including dynamic parameters and updated values, repeats block etc. + For example: + + {"model_class": "Tool", + "id": "random_lines1", + "name": "Select random lines", + "version": "2.0.2", + "description": "from a file", + "labels": [], + "edam_operations": [], + "edam_topics": [], + "hidden": "", + "is_workflow_compatible": True, + "xrefs": [], + "config_file": "/Users/joshij/galaxy/tools/filters/randomlines.xml", + "panel_section_id": "textutil", + "panel_section_name": "Text Manipulation", + "form_style": "regular", + "inputs": [ + { + "model_class": "IntegerToolParameter", + "name": "num_lines", + "argument": None, + "type": "integer", + "label": "Randomly select", + "help": "lines", + "refresh_on_change": False, + "min": None, + "max": None, + "optional": False, + "hidden": False, + "is_dynamic": False, + "value": "1", + "area": False, + "datalist": [], + "default_value": "1", + "text_value": "1", + }, + { + "model_class": "DataToolParameter", + "name": "input", + "argument": None, + "type": "data", + "label": "from", + "help": "", + "refresh_on_change": True, + "optional": False, + "hidden": False, + "is_dynamic": False, + "value": {"values": [{"id": "4d366c1196c36d18", "src": "hda"}]}, + "extensions": ["txt"], + "edam": {"edam_formats": ["format_2330"], "edam_data": ["data_0006"]}, + "multiple": False, + "options": { + "hda": [ + { + "id": "4d366c1196c36d18", + "hid": 4, + "name": "non_ACPs.fasta", + "tags": [], + "src": "hda", + "keep": False, + } + ], + "hdca": [ + { + "id": "1cd8e2f6b131e891", + "hid": 8, + "name": "data 55 and data 56 (as list) (with implicit datatype conversion)", + "tags": [], + "src": "hdca", + "keep": False, + }, + { + "id": "f597429621d6eb2b", + "hid": 3, + "name": "data 55 and data 56 (as list) (with implicit datatype conversion)", + "tags": [], + "src": "hdca", + "keep": False, + }, + ], + }, + "default_value": {"values": [{"id": "4d366c1196c36d18", "src": "hda"}]}, + "text_value": "No dataset.", + }, + { + "model_class": "Conditional", + "name": "seed_source", + "type": "conditional", + "cases": [ + {"model_class": "ConditionalWhen", "value": "no_seed", "inputs": []}, + { + "model_class": "ConditionalWhen", + "value": "set_seed", + "inputs": [ + { + "model_class": "TextToolParameter", + "name": "seed", + "argument": None, + "type": "text", + "label": "Random seed", + "help": "", + "refresh_on_change": False, + "optional": False, + "hidden": False, + "is_dynamic": False, + "value": "", + "area": False, + "datalist": [], + "default_value": "", + "text_value": "Empty.", + } + ], + }, + ], + "test_param": { + "model_class": "SelectToolParameter", + "name": "seed_source_selector", + "argument": None, + "type": "select", + "label": "Set a random seed", 
+ "help": "", + "refresh_on_change": True, + "optional": False, + "hidden": False, + "is_dynamic": False, + "value": "no_seed", + "options": [ + ["Don't set seed", "no_seed", True], + ["Set seed", "set_seed", False], + ], + "display": None, + "multiple": False, + "textable": False, + "text_value": "Don't set seed", + }, + }, + ], + "help": '

What it does\n\nThis tool selects N random lines from a file, with no repeats, and preserving ordering.\n\nExample\n\nInput File:\n\nchr7  56632  56652   D17003_CTCF_R6  310\nchr7  56736  56756   D17003_CTCF_R7  354\nchr7  56761  56781   D17003_CTCF_R4  220\nchr7  56772  56792   D17003_CTCF_R7  372\nchr7  56775  56795   D17003_CTCF_R4  207\n\nSelecting 2 random lines might return this:\n\nchr7  56736  56756   D17003_CTCF_R7  354\nchr7  56775  56795   D17003_CTCF_R4  207
\n', + "citations": False, + "sharable_url": None, + "message": "", + "warnings": "", + "versions": ["2.0.2"], + "requirements": [], + "errors": {}, + "tool_errors": None, + "state_inputs": { + "num_lines": "1", + "input": {"values": [{"id": "4d366c1196c36d18", "src": "hda"}]}, + "seed_source": {"seed_source_selector": "no_seed", "__current_case__": 0}, + }, + "job_id": None, + "job_remap": None, + "history_id": "c9468fdb6dc5c5f1", + "display": True, + "action": "/tool_runner/index", + "license": None, + "creator": None, + "method": "post", + "enctype": "application/x-www-form-urlencoded",} + """ params = {} From edbc8c002fbd7718db92cdf77d5029ba89c9b7d2 Mon Sep 17 00:00:00 2001 From: Simon Bray Date: Thu, 23 Dec 2021 11:39:31 +0100 Subject: [PATCH 10/32] small modifications to docstring --- bioblend/galaxy/tools/__init__.py | 135 +++++------------------------- 1 file changed, 21 insertions(+), 114 deletions(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index 579f001ba..cac8ad2ee 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -190,36 +190,42 @@ def show_tool(self, tool_id, io_details=False, link_details=False): return self._get(id=tool_id, params=params) def build(self, tool_id, inputs=None, tool_version=None, history_id=None): - """ This method returns the tool model, which includes an updated input parameter array for the given tool, based on user-defined "inputs". :type inputs: dict - :param inputs: (optional) inputs for the payload. Possible values are the - default 'legacy' (where inputs nested inside conditionals - or repeats are identified with e.g. '|') - or '21.01' (where inputs inside conditionals or repeats are nested elements). + :param inputs: (optional) inputs for the payload. For example:: - {"num_lines": "1", - "input": {"values": [{"src": "hda", "id": "4d366c1196c36d18"}]}, - "seed_source|seed_source_selector": "no_seed",} + { + "num_lines": "1", + "input": { + "values": [ + { + "src": "hda", + "id": "4d366c1196c36d18" + } + ] + }, + "seed_source|seed_source_selector": "no_seed", + } :type tool_id: str :param tool_id: id of the requested tool - :type history_id str + :type history_id: str :param history_id: id of the requested history - :type tool_version str + :type tool_version: str :param tool_version: version of the requested tool :rtype: dict :return: Returns a tool model including dynamic parameters and updated values, repeats block etc. 
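A minimal usage sketch for the new ``build()`` method follows (illustrative only: the Galaxy URL and API key are assumptions, while ``random_lines1`` is the tool exercised by the test added in this series)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url="http://localhost:8080", key="<your-api-key>")  # hypothetical server and key
    history_id = gi.histories.create_history(name="build() example")["id"]
    # Ask Galaxy to build the tool model of 'random_lines1' in the context of this history
    tool_model = gi.tools.build(tool_id="random_lines1", history_id=history_id)
    print(len(tool_model["inputs"]))  # the test added above expects 3 input parameters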
- For example: + For example:: - {"model_class": "Tool", + { + "model_class": "Tool", "id": "random_lines1", "name": "Select random lines", "version": "2.0.2", @@ -254,108 +260,8 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): "default_value": "1", "text_value": "1", }, - { - "model_class": "DataToolParameter", - "name": "input", - "argument": None, - "type": "data", - "label": "from", - "help": "", - "refresh_on_change": True, - "optional": False, - "hidden": False, - "is_dynamic": False, - "value": {"values": [{"id": "4d366c1196c36d18", "src": "hda"}]}, - "extensions": ["txt"], - "edam": {"edam_formats": ["format_2330"], "edam_data": ["data_0006"]}, - "multiple": False, - "options": { - "hda": [ - { - "id": "4d366c1196c36d18", - "hid": 4, - "name": "non_ACPs.fasta", - "tags": [], - "src": "hda", - "keep": False, - } - ], - "hdca": [ - { - "id": "1cd8e2f6b131e891", - "hid": 8, - "name": "data 55 and data 56 (as list) (with implicit datatype conversion)", - "tags": [], - "src": "hdca", - "keep": False, - }, - { - "id": "f597429621d6eb2b", - "hid": 3, - "name": "data 55 and data 56 (as list) (with implicit datatype conversion)", - "tags": [], - "src": "hdca", - "keep": False, - }, - ], - }, - "default_value": {"values": [{"id": "4d366c1196c36d18", "src": "hda"}]}, - "text_value": "No dataset.", - }, - { - "model_class": "Conditional", - "name": "seed_source", - "type": "conditional", - "cases": [ - {"model_class": "ConditionalWhen", "value": "no_seed", "inputs": []}, - { - "model_class": "ConditionalWhen", - "value": "set_seed", - "inputs": [ - { - "model_class": "TextToolParameter", - "name": "seed", - "argument": None, - "type": "text", - "label": "Random seed", - "help": "", - "refresh_on_change": False, - "optional": False, - "hidden": False, - "is_dynamic": False, - "value": "", - "area": False, - "datalist": [], - "default_value": "", - "text_value": "Empty.", - } - ], - }, - ], - "test_param": { - "model_class": "SelectToolParameter", - "name": "seed_source_selector", - "argument": None, - "type": "select", - "label": "Set a random seed", - "help": "", - "refresh_on_change": True, - "optional": False, - "hidden": False, - "is_dynamic": False, - "value": "no_seed", - "options": [ - ["Don't set seed", "no_seed", True], - ["Set seed", "set_seed", False], - ], - "display": None, - "multiple": False, - "textable": False, - "text_value": "Don't set seed", - }, - }, ], - "help": '

What it does\n\nThis tool selects N random lines from a file, with no repeats, and preserving ordering.\n\nExample\n\nInput File:\n\nchr7  56632  56652   D17003_CTCF_R6  310\nchr7  56736  56756   D17003_CTCF_R7  354\nchr7  56761  56781   D17003_CTCF_R4  220\nchr7  56772  56792   D17003_CTCF_R7  372\nchr7  56775  56795   D17003_CTCF_R4  207\n\nSelecting 2 random lines might return this:\n\nchr7  56736  56756   D17003_CTCF_R7  354\nchr7  56775  56795   D17003_CTCF_R4  207
\n', + "help": 'This tool selects N random lines from a file, with no repeats, and preserving ordering.', "citations": False, "sharable_url": None, "message": "", @@ -377,7 +283,8 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): "license": None, "creator": None, "method": "post", - "enctype": "application/x-www-form-urlencoded",} + "enctype": "application/x-www-form-urlencoded", + } """ params = {} From c96dc6cf9b777a6ec8e04ad39693d7b0fc4e304c Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 11 Jan 2022 11:58:46 +0000 Subject: [PATCH 11/32] Add `order_by` parameter to `JobsClient.get_jobs()` method Fix frenquently failing test `TestGalaxyJobs.test_get_jobs_with_filtering`: ``` jobs = self.gi.jobs.get_jobs(history_id=self.history_id) self.assertEqual(len(jobs), 3) job1_id = jobs[1]['id'] jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1) self.assertEqual(len(jobs), 1) > self.assertEqual(jobs[0]['id'], job1_id) E AssertionError: '6fb17d0cc6e8fae5' != '5114a2a207b7caff' ``` by retrieving job by create time instead of update time. --- bioblend/_tests/TestGalaxyJobs.py | 8 ++++---- bioblend/galaxy/jobs/__init__.py | 8 +++++++- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/bioblend/_tests/TestGalaxyJobs.py b/bioblend/_tests/TestGalaxyJobs.py index 82ea6e94e..d04c29d71 100644 --- a/bioblend/_tests/TestGalaxyJobs.py +++ b/bioblend/_tests/TestGalaxyJobs.py @@ -74,10 +74,10 @@ def test_get_jobs_with_filtering(self): self.gi.invocations.wait_for_invocation(invocation1['id']) self.gi.invocations.wait_for_invocation(invocation2['id']) - jobs = self.gi.jobs.get_jobs(history_id=self.history_id) - self.assertEqual(len(jobs), 3) - job1_id = jobs[1]['id'] - jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1) + all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by='create_time') + self.assertEqual(len(all_jobs), 3) + job1_id = all_jobs[1]['id'] + jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by='create_time') self.assertEqual(len(jobs), 1) self.assertEqual(jobs[0]['id'], job1_id) jobs = self.gi.jobs.get_jobs(invocation_id=invocation1['id']) diff --git a/bioblend/galaxy/jobs/__init__.py b/bioblend/galaxy/jobs/__init__.py index 201152ddd..1da72bbfd 100644 --- a/bioblend/galaxy/jobs/__init__.py +++ b/bioblend/galaxy/jobs/__init__.py @@ -26,7 +26,7 @@ def __init__(self, galaxy_instance): def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None, workflow_id=None, user_id=None, date_range_min=None, date_range_max=None, - limit=500, offset=0, user_details=False): + limit=500, offset=0, user_details=False, order_by=None): """ Get all jobs, or select a subset by specifying optional arguments for filtering (e.g. a state). @@ -73,6 +73,10 @@ def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None :param user_details: If ``True`` and the user is an admin, add the user email to each returned job dictionary. + :type order_by: str + :param order_by: Whether to order jobs by ``create_time`` or + ``update_time`` (the default). + :rtype: list of dict :return: Summary information for each selected job. 
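A short sketch of the new ``order_by`` parameter in use, mirroring the updated ``test_get_jobs_with_filtering`` test (the Galaxy URL, API key and history id are assumptions)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance(url="http://localhost:8080", key="<your-api-key>")  # hypothetical server and key
    history_id = "<id of a history that already contains a few finished jobs>"
    all_jobs = gi.jobs.get_jobs(history_id=history_id, order_by="create_time")
    # With a stable ordering, a one-job page at offset 1 returns the second-oldest job
    page = gi.jobs.get_jobs(history_id=history_id, limit=1, offset=1, order_by="create_time")
    assert page[0]["id"] == all_jobs[1]["id"]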
For example:: @@ -118,6 +122,8 @@ def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None params['date_range_max'] = date_range_max if user_details: params['user_details'] = user_details + if order_by: + params['order_by'] = order_by return self._get(params=params) def show_job(self, job_id, full_details=False): From 99ea9dae70dcda803a6465aa6ecae6f4ab39aef7 Mon Sep 17 00:00:00 2001 From: Marius van den Beek Date: Mon, 31 Jan 2022 19:42:01 +0100 Subject: [PATCH 12/32] Run tests against 22.01 as well (#417) --- .github/workflows/test.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 0ec055ed7..c705ad93f 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -35,6 +35,7 @@ jobs: tox_env: [py36] galaxy_version: - dev + - release_22.01 - release_21.09 - release_21.05 - release_21.01 From bfd8d33133f107acbc31777d759ef9bb5b3effb2 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 8 Apr 2022 14:57:20 +0100 Subject: [PATCH 13/32] Drop support for EOL Python 3.6 --- .github/workflows/lint.yaml | 2 +- .github/workflows/test.yaml | 7 ++----- ABOUT.rst | 2 +- CHANGELOG.md | 3 ++- README.rst | 2 +- run_bioblend_tests.sh | 4 ++-- setup.cfg | 3 +-- tox.ini | 2 +- 8 files changed, 11 insertions(+), 14 deletions(-) diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index bfa65d1e3..89b53e5f1 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.6'] + python-version: ['3.7'] steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c705ad93f..5499b6bdf 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,7 +32,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest] - tox_env: [py36] + tox_env: [py37] galaxy_version: - dev - release_22.01 @@ -59,13 +59,10 @@ jobs: - os: ubuntu-latest tox_env: py38 galaxy_version: dev - - os: ubuntu-latest - tox_env: py37 - galaxy_version: dev # Cannot test on macOS because service containers are not supported # yet: https://github.community/t/github-actions-services-available-on-others-vms/16916 # - os: macos-latest - # tox_env: py36 + # tox_env: py37 # galaxy_version: dev steps: - uses: actions/checkout@v2 diff --git a/ABOUT.rst b/ABOUT.rst index d1caf4c6f..0b925d0d5 100644 --- a/ABOUT.rst +++ b/ABOUT.rst @@ -3,7 +3,7 @@ interacting with `Galaxy`_ and `CloudMan`_ APIs. BioBlend is supported and tested on: -- Python 3.6, 3.7, 3.8, 3.9 and 3.10 +- Python 3.7, 3.8, 3.9 and 3.10 - Galaxy release_17.09 and later. BioBlend's goal is to make it easier to script and automate the running of diff --git a/CHANGELOG.md b/CHANGELOG.md index c20d796f0..a6dfc1cda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ ### BioBlend v - unreleased -* Added support for Python 3.10. Added support for Galaxy release 21.09. +* Dropped support for Python 3.6. Added support for Python 3.10. Added support + for Galaxy release 21.09 and 22.01. * Added ``get_extra_files()`` method to ``HistoryClient``. diff --git a/README.rst b/README.rst index 5959b5925..fd76fbc3a 100644 --- a/README.rst +++ b/README.rst @@ -16,7 +16,7 @@ APIs. BioBlend is supported and tested on: -- Python 3.6, 3.7, 3.8, 3.9 and 3.10 +- Python 3.7, 3.8, 3.9 and 3.10 - Galaxy release_17.09 and later. 
Full docs are available at https://bioblend.readthedocs.io/ with a quick library diff --git a/run_bioblend_tests.sh b/run_bioblend_tests.sh index 654ddedf2..a37f9d61b 100755 --- a/run_bioblend_tests.sh +++ b/run_bioblend_tests.sh @@ -14,7 +14,7 @@ Options: -p PORT Port to use for the Galaxy server. Defaults to 8080. -e TOX_ENV - Work against specified tox environments. Defaults to py36. + Work against specified tox environments. Defaults to py37. -t BIOBLEND_TESTS Subset of tests to run, e.g. 'tests/TestGalaxyObjects.py::TestHistory::test_create_delete' . Defaults @@ -31,7 +31,7 @@ get_abs_dirname () { cd "$1" && pwd } -e_val=py36 +e_val=py37 GALAXY_PORT=8080 while getopts 'hcg:e:p:t:r:' option; do case $option in diff --git a/setup.cfg b/setup.cfg index 77fe276de..e8dcd7511 100644 --- a/setup.cfg +++ b/setup.cfg @@ -21,7 +21,6 @@ classifiers = License :: OSI Approved :: MIT License Operating System :: OS Independent Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 @@ -54,7 +53,7 @@ install_requires = requests>=2.20.0 requests-toolbelt>=0.5.1,!=0.9.0 packages = find: -python_requires = >=3.6 +python_requires = >=3.7 [options.entry_points] console_scripts = diff --git a/tox.ini b/tox.ini index 6e58fe24d..ec8845db8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = lint, py36 +envlist = lint, py37 [testenv] commands = From 2437524579d75eb2633fd85c27c2cb1b8a8b76fe Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 8 Apr 2022 16:08:52 +0100 Subject: [PATCH 14/32] BioBlend.objects: Fail if multiple libraries/histories/workflows match when deleting by name instead of deleting them all. Also, don't log the same message before raising an exception. --- CHANGELOG.md | 3 ++ bioblend/_tests/TestGalaxyObjects.py | 33 ++++++++++++---- bioblend/galaxy/objects/client.py | 59 ++++++++++++++-------------- 3 files changed, 58 insertions(+), 37 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a6dfc1cda..de6c9daad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,9 @@ * Added ``get_extra_files()`` method to ``HistoryClient``. +* BioBlend.objects: Fail if multiple libraries/histories/workflows match when + deleting by name, instead of deleting them all. 
+ * Improvements to type annotations, tests and documentation ### BioBlend v0.16.0 - 2021-06-13 diff --git a/bioblend/_tests/TestGalaxyObjects.py b/bioblend/_tests/TestGalaxyObjects.py index a582bc0b9..739903d27 100644 --- a/bioblend/_tests/TestGalaxyObjects.py +++ b/bioblend/_tests/TestGalaxyObjects.py @@ -614,27 +614,44 @@ def ids(seq): def test_delete_libraries_by_name(self): self._test_delete_by_name('libraries') + self._test_delete_by_ambiguous_name('libraries') def test_delete_histories_by_name(self): self._test_delete_by_name('histories') + self._test_delete_by_ambiguous_name('histories') def test_delete_workflows_by_name(self): self._test_delete_by_name('workflows') + self._test_delete_by_ambiguous_name('workflows') def _test_delete_by_name(self, obj_type): obj_gi_client = getattr(self.gi, obj_type) - create, del_kwargs = self._normalized_functions( - obj_type) + create, del_kwargs = self._normalized_functions(obj_type) name = f"test_{uuid.uuid4().hex}" - objs = [create(name) for _ in range(2)] - final_name = objs[0].name - prevs = [_ for _ in obj_gi_client.get_previews(name=final_name) if not _.deleted] - self.assertEqual(len(prevs), len(objs)) - del_kwargs['name'] = final_name + create(name) + prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] + self.assertEqual(len(prevs), 1) + del_kwargs["name"] = name obj_gi_client.delete(**del_kwargs) - prevs = [_ for _ in obj_gi_client.get_previews(name=final_name) if not _.deleted] + prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] self.assertEqual(len(prevs), 0) + def _test_delete_by_ambiguous_name(self, obj_type): + obj_gi_client = getattr(self.gi, obj_type) + create, del_kwargs = self._normalized_functions(obj_type) + name = f"test_{uuid.uuid4().hex}" + objs = [create(name) for _ in range(2)] + prevs = [_ for _ in obj_gi_client.get_previews(name=name) if not _.deleted] + self.assertEqual(len(prevs), len(objs)) + del_kwargs["name"] = name + with self.assertRaises(ValueError): + obj_gi_client.delete(**del_kwargs) + # Cleanup + del del_kwargs["name"] + for prev in prevs: + del_kwargs["id_"] = prev.id + obj_gi_client.delete(**del_kwargs) + class TestLibrary(GalaxyObjectsTestBase): # just something that can be expected to be always up diff --git a/bioblend/galaxy/objects/client.py b/bioblend/galaxy/objects/client.py index af34c4530..71ffca1ea 100644 --- a/bioblend/galaxy/objects/client.py +++ b/bioblend/galaxy/objects/client.py @@ -58,32 +58,33 @@ def list(self) -> list: """ pass - def _select_ids(self, id_=None, name=None): + def _select_id(self, id_=None, name=None): """ - Return the id list that corresponds to the given id or name info. + Return the id that corresponds to the given id or name info. 
""" if id_ is None and name is None: - self._error('neither id nor name provided', err_type=TypeError) + raise ValueError('Neither id nor name provided') if id_ is not None and name is not None: - self._error('both id and name provided', err_type=TypeError) + raise ValueError('Both id and name provided') if id_ is None: - return [_.id for _ in self.get_previews(name=name)] + id_list = [_.id for _ in self.get_previews(name=name)] + if len(id_list) > 1: + raise ValueError("Ambiguous name") + if not id_list: + raise ValueError("name not found") + return id_list[0] else: - return [id_] - - def _error(self, msg, err_type=RuntimeError): - self.log.error(msg) - raise err_type(msg) + return id_ def _get_dict(self, meth_name, reply): if reply is None: - self._error(f"{meth_name}: no reply") + raise RuntimeError(f"{meth_name}: no reply") elif isinstance(reply, Mapping): return reply try: return reply[0] except (TypeError, IndexError): - self._error(f'{meth_name}: unexpected reply: {reply!r}') + raise RuntimeError(f'{meth_name}: unexpected reply: {reply!r}') class ObjDatasetContainerClient(ObjClient): @@ -97,7 +98,7 @@ def _get_container(self, id_, ctype): cdict['id'] = id_ # overwrite unencoded id c_infos = show_f(id_, contents=True) if not isinstance(c_infos, Sequence): - self._error(f'{show_fname}: unexpected reply: {c_infos!r}') + raise RuntimeError(f'{show_fname}: unexpected reply: {c_infos!r}') c_infos = [ctype.CONTENT_INFO_TYPE(_) for _ in c_infos] return ctype(cdict, content_infos=c_infos, gi=self.obj_gi) @@ -158,16 +159,16 @@ def delete(self, id_=None, name=None): """ Delete the library with the given id or name. - Note that the same name can map to multiple libraries. + Fails if multiple libraries have the specified name. .. warning:: Deleting a data library is irreversible - all of the data from the library will be permanently deleted. """ - for id_ in self._select_ids(id_=id_, name=name): - res = self.gi.libraries.delete_library(id_) - if not isinstance(res, Mapping): - self._error(f'delete_library: unexpected reply: {res!r}') + id_ = self._select_id(id_=id_, name=name) + res = self.gi.libraries.delete_library(id_) + if not isinstance(res, Mapping): + raise RuntimeError(f'delete_library: unexpected reply: {res!r}') class ObjHistoryClient(ObjDatasetContainerClient): @@ -220,7 +221,7 @@ def delete(self, id_=None, name=None, purge=False): """ Delete the history with the given id or name. - Note that the same name can map to multiple histories. + Fails if multiple histories have the same name. :type purge: bool :param purge: if ``True``, also purge (permanently delete) the history @@ -230,10 +231,10 @@ def delete(self, id_=None, name=None, purge=False): ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. 
""" - for id_ in self._select_ids(id_=id_, name=name): - res = self.gi.histories.delete_history(id_, purge=purge) - if not isinstance(res, Mapping): - self._error(f'delete_history: unexpected reply: {res!r}') + id_ = self._select_id(id_=id_, name=name) + res = self.gi.histories.delete_history(id_, purge=purge) + if not isinstance(res, Mapping): + raise RuntimeError(f'delete_history: unexpected reply: {res!r}') class ObjWorkflowClient(ObjClient): @@ -263,7 +264,7 @@ def import_new(self, src, publish=False): try: wf_dict = json.loads(src) except (TypeError, ValueError): - self._error(f'src not supported: {src!r}') + raise ValueError(f'src not supported: {src!r}') wf_info = self.gi.workflows.import_workflow_dict(wf_dict, publish) return self.get(wf_info['id']) @@ -315,16 +316,16 @@ def delete(self, id_=None, name=None): """ Delete the workflow with the given id or name. - Note that the same name can map to multiple workflows. + Fails if multiple workflows have the specified name. .. warning:: Deleting a workflow is irreversible - all of the data from the workflow will be permanently deleted. """ - for id_ in self._select_ids(id_=id_, name=name): - res = self.gi.workflows.delete_workflow(id_) - if not isinstance(res, str): - self._error(f"delete_workflow: unexpected reply: {res!r}") + id_ = self._select_id(id_=id_, name=name) + res = self.gi.workflows.delete_workflow(id_) + if not isinstance(res, str): + raise RuntimeError(f"delete_workflow: unexpected reply: {res!r}") class ObjInvocationClient(ObjClient): From de55247036236da3827007cdc02ae6f6404377ad Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 8 Apr 2022 18:38:36 +0100 Subject: [PATCH 15/32] Update test scripts for upcoming Galaxy 22.05 --- .github/workflows/deploy.yaml | 6 +-- .github/workflows/lint.yaml | 4 +- .github/workflows/test.yaml | 31 +++-------- bioblend/_tests/template_galaxy.yml | 18 +++++++ run_bioblend_tests.sh | 82 +++++++++++++++++++++++------ 5 files changed, 95 insertions(+), 46 deletions(-) create mode 100644 bioblend/_tests/template_galaxy.yml diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index bf3e04bd9..a222a5a33 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -4,8 +4,8 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 with: python-version: '3.10' - name: Install dependencies @@ -18,7 +18,7 @@ jobs: twine check dist/* - name: Publish to PyPI if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && github.repository_owner == 'galaxyproject' - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.PYPI_PASSWORD }} diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 89b53e5f1..512422a0c 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -10,8 +10,8 @@ jobs: matrix: python-version: ['3.7'] steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} - name: Install tox diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 5499b6bdf..5a620e69f 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -65,7 +65,7 @@ jobs: # tox_env: py37 # galaxy_version: dev steps: - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v3 - name: Cache pip dir uses: actions/cache@v2 with: @@ -75,7 +75,7 @@ jobs: id: get_bioblend_python_version run: echo "::set-output name=bioblend_python_version::$(echo "${{ matrix.tox_env }}" | sed -e 's/^py\([3-9]\)\([0-9]\+\)/\1.\2/')" - name: Set up Python for BioBlend - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ steps.get_bioblend_python_version.outputs.bioblend_python_version }} - name: Install tox @@ -100,7 +100,7 @@ jobs: esac echo "::set-output name=galaxy_python_version::$galaxy_python_version" - name: Set up Python for Galaxy - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ steps.get_galaxy_python_version.outputs.galaxy_python_version }} - name: Run tests @@ -112,28 +112,11 @@ jobs: # Create a PostgreSQL database for Galaxy. The default SQLite3 database makes test fail randomly because of "database locked" error. createdb -U postgres galaxy # Install Galaxy - wget https://github.com/galaxyproject/galaxy/archive/${{ matrix.galaxy_version }}.tar.gz - tar xvzf ${{ matrix.galaxy_version }}.tar.gz | tail - cd galaxy-${{ matrix.galaxy_version }} - export GALAXY_DIR=$PWD - export GALAXY_PYTHON=python${{ steps.get_galaxy_python_version.outputs.galaxy_python_version }} - export GALAXY_VERSION=${{ matrix.galaxy_version }} - # Export GALAXY_CONFIG_FILE environment variable to be used by run_galaxy.sh - export GALAXY_CONFIG_FILE=config/galaxy.ini - # Export BIOBLEND_ environment variables to be used in BioBlend tests - export BIOBLEND_GALAXY_MASTER_API_KEY=$(LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 32) - export BIOBLEND_GALAXY_USER_EMAIL=${USER}@localhost.localdomain + GALAXY_DIR=galaxy-${{ matrix.galaxy_version }} + git clone --depth=1 -b ${{ matrix.galaxy_version }} https://github.com/galaxyproject/galaxy $GALAXY_DIR export DATABASE_CONNECTION=postgresql://postgres:@localhost/galaxy - eval "echo \"$(cat "${{ github.workspace }}/tests/template_galaxy.ini")\"" > "$GALAXY_CONFIG_FILE" - # Update psycopg2 requirement to a version compatible with glibc 2.26 for Galaxy releases 16.01-18.01, see https://github.com/psycopg/psycopg2-wheels/issues/2 - sed -i.bak -e 's/psycopg2==2.6.1/psycopg2==2.7.3.1/' lib/galaxy/dependencies/conditional-requirements.txt - # Start Galaxy and wait for successful server start - export GALAXY_SKIP_CLIENT_BUILD=1 - GALAXY_RUN_ALL=1 "${{ github.workspace }}/run_galaxy.sh" --daemon --wait - export BIOBLEND_GALAXY_URL=http://localhost:8080 - cd "${{ github.workspace }}" - tox -e ${{ matrix.tox_env }} + ./run_bioblend_tests.sh -g $GALAXY_DIR -v python${{ steps.get_galaxy_python_version.outputs.galaxy_python_version }} -e ${{ matrix.tox_env }} - name: The job has failed if: ${{ failure() }} run: | - cat galaxy-${{ matrix.galaxy_version }}/main.log + cat galaxy-${{ matrix.galaxy_version }}/*.log diff --git a/bioblend/_tests/template_galaxy.yml b/bioblend/_tests/template_galaxy.yml new file mode 100644 index 000000000..7ad8ba02f --- /dev/null +++ b/bioblend/_tests/template_galaxy.yml @@ -0,0 +1,18 @@ +gravity: + gunicorn: + bind: localhost:${GALAXY_PORT:-8080} + +galaxy: + managed_config_dir: ${TEMP_DIR:-${GALAXY_DIR}}/config + data_dir: ${TEMP_DIR:-${GALAXY_DIR}}/database + database_connection: $DATABASE_CONNECTION + tool_config_file: ${GALAXY_DIR}/config/tool_conf.xml.sample,${TEMP_DIR:-${GALAXY_DIR}}/config/shed_tool_conf.xml,${GALAXY_DIR}/test/functional/tools/samples_tool_conf.xml + # Don't use $TEMP_DIR for tool_dependency_dir to save time on local testing 
+ tool_dependency_dir: ${GALAXY_DIR}/database/dependencies + allow_path_paste: true + admin_users: $BIOBLEND_GALAXY_USER_EMAIL + allow_user_deletion: true + enable_beta_workflow_modules: true + master_api_key: $BIOBLEND_GALAXY_MASTER_API_KEY + enable_quotas: true + cleanup_job: onsuccess diff --git a/run_bioblend_tests.sh b/run_bioblend_tests.sh index a37f9d61b..cb73d44b0 100755 --- a/run_bioblend_tests.sh +++ b/run_bioblend_tests.sh @@ -21,6 +21,8 @@ Options: to all tests. -r GALAXY_REV Branch or commit of the local Galaxy git repository to checkout. + -v GALAXY_PYTHON + Python to use for the Galaxy virtual environment. -c Force removal of the temporary directory created for Galaxy, even if some test failed." @@ -33,7 +35,7 @@ get_abs_dirname () { e_val=py37 GALAXY_PORT=8080 -while getopts 'hcg:e:p:t:r:' option; do +while getopts 'hcg:e:p:t:r:v:' option; do case $option in h) show_help exit;; @@ -43,6 +45,7 @@ while getopts 'hcg:e:p:t:r:' option; do p) GALAXY_PORT=$OPTARG;; t) t_val=$OPTARG;; r) r_val=$OPTARG;; + v) GALAXY_PYTHON=$OPTARG;; *) show_help exit 1;; esac @@ -56,15 +59,16 @@ fi # Install BioBlend BIOBLEND_DIR=$(get_abs_dirname "$(dirname "$0")") -cd "${BIOBLEND_DIR}" -if [ ! -d .venv ]; then - virtualenv -p python3 .venv +if ! command -v tox >/dev/null; then + cd "${BIOBLEND_DIR}" + if [ ! -d .venv ]; then + virtualenv -p python3 .venv + fi + . .venv/bin/activate + python3 -m pip install --upgrade "tox>=1.8.0" fi -. .venv/bin/activate -python3 setup.py install -python3 -m pip install --upgrade "tox>=1.8.0" -# Setup Galaxy +# Setup Galaxy version cd "${GALAXY_DIR}" if [ -n "${r_val}" ]; then # Update repository (may change the sample files or the list of eggs) @@ -83,24 +87,63 @@ else ;; esac fi + +# Setup Galaxy virtualenv +if [ -n "${GALAXY_PYTHON}" ]; then + if [ ! -d .venv ]; then + virtualenv -p "${GALAXY_PYTHON}" .venv + fi + export GALAXY_PYTHON +fi + # Setup Galaxy master API key and admin user TEMP_DIR=$(mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir') echo "Created temporary directory $TEMP_DIR" -mkdir "$TEMP_DIR/config" +mkdir "${TEMP_DIR}/config" "${TEMP_DIR}/database" printf "\n\n\n" "$TEMP_DIR/shed_tools" > "$TEMP_DIR/config/shed_tool_conf.xml" -# Export GALAXY_CONFIG_FILE environment variable to be used by run_galaxy.sh -export GALAXY_CONFIG_FILE="$TEMP_DIR/config/galaxy.ini" # Export BIOBLEND_ environment variables to be used in BioBlend tests -BIOBLEND_GALAXY_MASTER_API_KEY=$(LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 32) -export BIOBLEND_GALAXY_MASTER_API_KEY +export BIOBLEND_GALAXY_MASTER_API_KEY=$(LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 32) export BIOBLEND_GALAXY_USER_EMAIL="${USER}@localhost.localdomain" -DATABASE_CONNECTION="sqlite:///$TEMP_DIR/universe.sqlite?isolation_level=IMMEDIATE" -eval "echo \"$(cat "${BIOBLEND_DIR}/tests/template_galaxy.ini")\"" > "$GALAXY_CONFIG_FILE" +DATABASE_CONNECTION=${DATABASE_CONNECTION:-"sqlite:///${TEMP_DIR}/database/universe.sqlite?isolation_level=IMMEDIATE"} # Update psycopg2 requirement to a version compatible with glibc 2.26 for Galaxy releases 16.01-18.01, see https://github.com/psycopg/psycopg2-wheels/issues/2 sed -i.bak -e 's/psycopg2==2.6.1/psycopg2==2.7.3.1/' lib/galaxy/dependencies/conditional-requirements.txt # Start Galaxy and wait for successful server start export GALAXY_SKIP_CLIENT_BUILD=1 -GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon --wait +if grep -q wait_arg_set run.sh ; then + # Galaxy 22.01 or earlier. 
+ # Export GALAXY_CONFIG_FILE environment variable to be used by run_galaxy.sh + export GALAXY_CONFIG_FILE="${TEMP_DIR}/config/galaxy.ini" + eval "echo \"$(cat "${BIOBLEND_DIR}/tests/template_galaxy.ini")\"" > "${GALAXY_CONFIG_FILE}" + GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon --wait +else + # Galaxy is controlled via gravity, paste/uwsgi are replaced by gunicorn + # and the `--wait` option does not work any more. + # Export GALAXY_CONFIG_FILE environment variable to be used by run.sh + export GALAXY_CONFIG_FILE="${TEMP_DIR}/config/galaxy.yml" + eval "echo \"$(cat "${BIOBLEND_DIR}/tests/template_galaxy.yml")\"" > "${GALAXY_CONFIG_FILE}" + export GRAVITY_STATE_DIR="${TEMP_DIR}/database/gravity" + ./run.sh --daemon + if ! .venv/bin/galaxyctl -h > /dev/null; then + echo 'galaxyctl status not working' + exit 1 + fi + while true; do + sleep 1 + if .venv/bin/galaxyctl status | grep -q 'gunicorn.*RUNNING'; then + break + else + echo 'gunicorn not running yet' + fi + done + while true; do + sleep 1 + if grep -q "serving on http://127.0.0.1:${GALAXY_PORT}" "${GRAVITY_STATE_DIR}/log/gunicorn.log"; then + break + else + echo 'Galaxy not serving yet' + fi + done +fi export BIOBLEND_GALAXY_URL=http://localhost:${GALAXY_PORT} # Run the tests @@ -115,8 +158,13 @@ exit_code=$? deactivate # Stop Galaxy +echo 'Stopping Galaxy' cd "${GALAXY_DIR}" -GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon stop +if grep -q wait_arg_set run.sh ; then + GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon stop +else + ./run.sh --daemon stop +fi # Remove temporary directory if -c is specified or if all tests passed if [ -n "${c_val}" ] || [ $exit_code -eq 0 ]; then rm -rf "$TEMP_DIR" From 3b429266ac6d2fc6ee00a5d1ca6107f6940e1c20 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Mon, 11 Apr 2022 18:52:56 +0100 Subject: [PATCH 16/32] Fix ``TestGalaxyDatasets.test_get_datasets_limit_offset()`` test for Galaxy 22.01 ``limit`` needs to be >=1 . 
--- bioblend/_tests/TestGalaxyDatasets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bioblend/_tests/TestGalaxyDatasets.py b/bioblend/_tests/TestGalaxyDatasets.py index b39eabb11..27cf90967 100644 --- a/bioblend/_tests/TestGalaxyDatasets.py +++ b/bioblend/_tests/TestGalaxyDatasets.py @@ -66,8 +66,8 @@ def test_get_datasets_history(self): @test_util.skip_unless_galaxy('release_19.05') def test_get_datasets_limit_offset(self): - datasets = self.gi.datasets.get_datasets(limit=0) - self.assertEqual(datasets, []) + datasets = self.gi.datasets.get_datasets(limit=1) + self.assertEqual(len(datasets), 1) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1) self.assertEqual(datasets, []) From 95a49936aa74e6368dba852aca09e43544eb98de Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 13 Apr 2022 17:51:38 +0100 Subject: [PATCH 17/32] Fix ``TestGalaxyHistories::test_import_history()`` test on Galaxy 22.05 Fix `bioblend.ConnectionError: Unexpected HTTP status code: 500: Internal Server Error` error caused by the following traceback on upcoming Galaxy 22.05 which uses FastAPI: ``` Traceback (most recent call last): File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/uvicorn/protocols/http/h11_impl.py", line 366, in run_asgi result = await app(self.scope, self.receive, self.send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/uvicorn/middleware/proxy_headers.py", line 75, in __call__ return await self.app(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/fastapi/applications.py", line 261, in __call__ await super().__call__(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/applications.py", line 112, in __call__ await self.middleware_stack(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 181, in __call__ raise exc File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/middleware/errors.py", line 159, in __call__ await self.app(scope, receive, _send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette_context/middleware/raw_middleware.py", line 96, in __call__ await self.app(scope, receive, send_wrapper) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/middleware/base.py", line 63, in __call__ response = await self.dispatch_func(request, call_next) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/lib/galaxy/webapps/galaxy/fast_app.py", line 103, in add_x_frame_options response = await call_next(request) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/middleware/base.py", line 44, in call_next raise app_exc File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/middleware/base.py", line 34, in coro await self.app(scope, request.receive, send_stream.send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/exceptions.py", line 82, in __call__ raise exc File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/exceptions.py", line 71, in __call__ await self.app(scope, receive, sender) File 
"/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/fastapi/middleware/asyncexitstack.py", line 21, in __call__ raise e File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/fastapi/middleware/asyncexitstack.py", line 18, in __call__ await self.app(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/routing.py", line 656, in __call__ await route.handle(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/routing.py", line 259, in handle await self.app(scope, receive, send) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/starlette/routing.py", line 61, in app response = await func(request) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/fastapi/routing.py", line 217, in app solved_result = await solve_dependencies( File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/.venv/lib/python3.8/site-packages/fastapi/dependencies/utils.py", line 527, in solve_dependencies solved = await call(**sub_values) File "/usr/users/ga002/soranzon/software/nsoranzo_galaxy/lib/galaxy/webapps/galaxy/api/__init__.py", line 332, in _as_form return cls(**data) File "pydantic/main.py", line 331, in pydantic.main.BaseModel.__init__ pydantic.error_wrappers.ValidationError: 1 validation error for CreateHistoryFormData archive_type value is not a valid enumeration member; permitted: 'url', 'file' (type=type_error.enum; enum_values=[, ]) ``` --- bioblend/galaxyclient.py | 4 ++-- run_bioblend_tests.sh | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/bioblend/galaxyclient.py b/bioblend/galaxyclient.py index d6b681c2d..7e04f833d 100644 --- a/bioblend/galaxyclient.py +++ b/bioblend/galaxyclient.py @@ -109,7 +109,7 @@ def my_dumps(d): not of type ``FileStream``. """ for k, v in d.items(): - if not isinstance(v, FileStream): + if not isinstance(v, (FileStream, str, bytes)): d[k] = json.dumps(v) return d @@ -117,9 +117,9 @@ def my_dumps(d): # leveraging the requests-toolbelt library if any files have # been attached. if files_attached: - payload = my_dumps(payload) if params: payload.update(params) + payload = my_dumps(payload) payload = MultipartEncoder(fields=payload) headers = self.json_headers.copy() headers['Content-Type'] = payload.content_type diff --git a/run_bioblend_tests.sh b/run_bioblend_tests.sh index cb73d44b0..122251036 100755 --- a/run_bioblend_tests.sh +++ b/run_bioblend_tests.sh @@ -155,7 +155,6 @@ else tox -e "${e_val}" fi exit_code=$? -deactivate # Stop Galaxy echo 'Stopping Galaxy' From 8ab1ee7969a54d6342687e4f5fbb70cd9d64b2d6 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Thu, 14 Apr 2022 13:43:30 +0100 Subject: [PATCH 18/32] Some type annotations --- bioblend/galaxy/client.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/bioblend/galaxy/client.py b/bioblend/galaxy/client.py index 574323912..fc0b1d1f6 100644 --- a/bioblend/galaxy/client.py +++ b/bioblend/galaxy/client.py @@ -6,6 +6,7 @@ """ import time +from typing import Optional import requests @@ -16,6 +17,8 @@ class Client: + # The `module` attribute needs to be defined in subclasses + module: str # Class variables that configure GET request retries. 
Note that since these # are class variables their values are shared by all Client instances -- @@ -76,7 +79,7 @@ def __init__(self, galaxy_instance): """ self.gi = galaxy_instance - def _make_url(self, module_id=None, deleted=False, contents=False): + def _make_url(self, module_id: Optional[str] = None, deleted: bool = False, contents: bool = False): """ Compose a URL based on the provided arguments. @@ -92,16 +95,23 @@ def _make_url(self, module_id=None, deleted=False, contents=False): ``/api/libraries//contents`` """ c_url = '/'.join((self.gi.url, self.module)) - if deleted is True: + if deleted: c_url = c_url + '/deleted' - if module_id is not None: + if module_id: c_url = '/'.join((c_url, module_id)) - if contents is True: + if contents: c_url = c_url + '/contents' return c_url - def _get(self, id=None, deleted=False, contents=None, url=None, - params=None, json=True): + def _get( + self, + id: Optional[str] = None, + deleted: bool = False, + contents: bool = False, + url: Optional[str] = None, + params=None, + json: bool = True, + ): """ Do a GET request, composing the URL from ``id``, ``deleted`` and ``contents``. Alternatively, an explicit ``url`` can be provided. From 86fa96f8cc9ae4223d064ac4cb5ec6aeee6e9340 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Thu, 14 Apr 2022 13:45:36 +0100 Subject: [PATCH 19/32] Move ``dataset_collection_id`` from query parameter to path --- bioblend/galaxy/dataset_collections/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index 2d16b03ec..28535b6ae 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -107,11 +107,10 @@ def show_dataset_collection(self, dataset_collection_id: str, :return: element view of the dataset collection """ params = { - 'id': dataset_collection_id, 'instance_type': instance_type, } url = self._make_url(module_id=dataset_collection_id) - return self._get(url=url, params=params) + return self._get(id=dataset_collection_id, url=url, params=params) def download_dataset_collection(self, dataset_collection_id: str, file_path: str) -> dict: """ From ffc622786b2ca9c36c60f383b4901b0f764ebf78 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Thu, 14 Apr 2022 14:38:38 +0100 Subject: [PATCH 20/32] Fix ``TestGalaxyDatasetCollections.test_show_dataset_collection()`` broken on Galaxy dev branch --- bioblend/_tests/TestGalaxyDatasetCollections.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/bioblend/_tests/TestGalaxyDatasetCollections.py b/bioblend/_tests/TestGalaxyDatasetCollections.py index 57658e2b2..b0838ca6d 100644 --- a/bioblend/_tests/TestGalaxyDatasetCollections.py +++ b/bioblend/_tests/TestGalaxyDatasetCollections.py @@ -131,7 +131,18 @@ def test_show_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionShow")["id"] dataset_collection1 = self._create_pair_in_history(history_id) dataset_collection2 = self.gi.dataset_collections.show_dataset_collection(dataset_collection1['id']) - self.assertEqual(dataset_collection1.keys(), dataset_collection2.keys()) + for key in ( + "collection_type", + "deleted", + "id", + "hid", + "history_content_type", + "history_id", + "name", + "url", + "visible", + ): + self.assertEqual(dataset_collection1[key], dataset_collection2[key]) for element1, element2 in zip(dataset_collection1['elements'], 
dataset_collection2['elements']): self.assertEqual(element1['id'], element2['id']) self.assertEqual(element1.keys(), element2.keys()) From 0e4d36717c3335b4cb382bb8907e16d302a35afb Mon Sep 17 00:00:00 2001 From: cat-bro Date: Tue, 3 May 2022 11:18:54 +1000 Subject: [PATCH 21/32] Add whoami to bioblend.galaxy.config --- bioblend/_tests/TestGalaxyConfig.py | 18 ++++++++++++++++++ bioblend/galaxy/config/__init__.py | 18 ++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 bioblend/_tests/TestGalaxyConfig.py diff --git a/bioblend/_tests/TestGalaxyConfig.py b/bioblend/_tests/TestGalaxyConfig.py new file mode 100644 index 000000000..d05fde07b --- /dev/null +++ b/bioblend/_tests/TestGalaxyConfig.py @@ -0,0 +1,18 @@ +from . import GalaxyTestBase + + +class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase): + def test_get_config(self): + response = self.gi.config.get_config() + self.assertTrue(isinstance(response, dict)) + self.assertTrue('brand' in response.keys()) + + def test_get_version(self): + response = self.gi.config.get_version() + self.assertTrue(isinstance(response, dict)) + self.assertTrue('version_major' in response.keys()) + + def test_whoami(self): + response = self.gi.config.whoami() + self.assertTrue(isinstance(response, dict)) + self.assertTrue('username' in response.keys()) diff --git a/bioblend/galaxy/config/__init__.py b/bioblend/galaxy/config/__init__.py index 2c0d7608d..fbe2b159e 100644 --- a/bioblend/galaxy/config/__init__.py +++ b/bioblend/galaxy/config/__init__.py @@ -48,3 +48,21 @@ def get_version(self): """ url = self.gi.url + '/version' return self._get(url=url) + + def whoami(self): + """ + Return information about the current authenticated user. + + :rtype: dict + :return: Information about current authenticated user + For example:: + {'active': True, + 'deleted': False, + 'email': 'jgillard83649163@student.unimelb.edu.au', + 'id': '4aaaaa85aacc9caa', + 'last_password_change': '2021-07-29T05:34:54.632345', + 'model_class': 'User', + 'username': 'julia'} + """ + url = '/'.join((self.gi.url, 'whoami')) + return self._get(url=url) From 01dd0b804d01544e3257b02ecb8bfd1e1a8f8d03 Mon Sep 17 00:00:00 2001 From: Simon Bray Date: Tue, 3 May 2022 11:53:26 +0200 Subject: [PATCH 22/32] fix ``TestGalaxyWorkflows.test_get_workflows()`` broken on Galaxy dev branch --- bioblend/_tests/TestGalaxyWorkflows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/_tests/TestGalaxyWorkflows.py b/bioblend/_tests/TestGalaxyWorkflows.py index cb945d399..311b6454e 100644 --- a/bioblend/_tests/TestGalaxyWorkflows.py +++ b/bioblend/_tests/TestGalaxyWorkflows.py @@ -173,7 +173,7 @@ def test_get_workflows(self): wf_list = [w for w in wfs_with_name if w['id'] == workflow['id']] self.assertEqual(len(wf_list), 1) wf_data = wf_list[0] - self.assertEqual(wf_data['url'], workflow['url']) + self.assertEqual(wf_data['create_time'], workflow['create_time']) def test_show_workflow(self): path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) From cca2b78725b61938a5e093879322a269e1d436cb Mon Sep 17 00:00:00 2001 From: Simon Bray Date: Tue, 3 May 2022 12:15:51 +0200 Subject: [PATCH 23/32] check create_time in ``TestGalaxyWorkflows.test_get_workflows()`` only for newer Galaxy versions --- bioblend/_tests/TestGalaxyWorkflows.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bioblend/_tests/TestGalaxyWorkflows.py b/bioblend/_tests/TestGalaxyWorkflows.py index 311b6454e..f5c3d6b3c 100644 --- a/bioblend/_tests/TestGalaxyWorkflows.py +++ 
b/bioblend/_tests/TestGalaxyWorkflows.py @@ -173,7 +173,10 @@ def test_get_workflows(self): wf_list = [w for w in wfs_with_name if w['id'] == workflow['id']] self.assertEqual(len(wf_list), 1) wf_data = wf_list[0] - self.assertEqual(wf_data['create_time'], workflow['create_time']) + if 'create_time' in workflow: # Galaxy >= 20.01 + self.assertEqual(wf_data['create_time'], workflow['create_time']) + else: # Galaxy <= 22.01 + self.assertEqual(wf_data['url'], workflow['url']) def test_show_workflow(self): path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) From 43a53b1f24b0941768c56e267cb18bc4c15faef5 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 3 May 2022 13:18:30 +0100 Subject: [PATCH 24/32] Uniform use of example.org domain for docs and tests --- bioblend/_tests/TestGalaxyDatasets.py | 2 +- bioblend/_tests/TestGalaxyUsers.py | 8 ++++---- bioblend/galaxy/users/__init__.py | 2 +- docs/api_docs/galaxy/docs.rst | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/bioblend/_tests/TestGalaxyDatasets.py b/bioblend/_tests/TestGalaxyDatasets.py index 27cf90967..fb309f103 100644 --- a/bioblend/_tests/TestGalaxyDatasets.py +++ b/bioblend/_tests/TestGalaxyDatasets.py @@ -176,7 +176,7 @@ def test_wait_for_dataset(self): @test_util.skip_unless_galaxy('release_19.05') def test_dataset_permissions(self): admin_user_id = self.gi.users.get_current_user()['id'] - user_id = self.gi.users.create_local_user('newuser3', 'newuser3@example.com', 'secret')['id'] + user_id = self.gi.users.create_local_user('newuser3', 'newuser3@example.org', 'secret')['id'] user_api_key = self.gi.users.create_user_apikey(user_id) anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None) user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key) diff --git a/bioblend/_tests/TestGalaxyUsers.py b/bioblend/_tests/TestGalaxyUsers.py index e32334ef8..2dfabaada 100644 --- a/bioblend/_tests/TestGalaxyUsers.py +++ b/bioblend/_tests/TestGalaxyUsers.py @@ -30,7 +30,7 @@ def test_create_remote_user(self): # this test only on a disposable Galaxy instance! if not self.gi.config.get_config()['use_remote_user']: self.skipTest('This Galaxy instance is not configured to use remote users') - new_user_email = 'newuser@example.com' + new_user_email = 'newuser@example.org' user = self.gi.users.create_remote_user(new_user_email) self.assertEqual(user['email'], new_user_email) if self.gi.config.get_config()['allow_user_deletion']: @@ -44,7 +44,7 @@ def test_create_local_user(self): # this test only on a disposable Galaxy instance! if self.gi.config.get_config()['use_remote_user']: self.skipTest('This Galaxy instance is not configured to use local users') - new_user_email = 'newuser@example.com' + new_user_email = 'newuser@example.org' username = 'newuser' password = 'secret' user = self.gi.users.create_local_user(username, new_user_email, password) @@ -73,12 +73,12 @@ def test_update_user(self): # this test only on a disposable Galaxy instance! 
if self.gi.config.get_config()['use_remote_user']: self.skipTest('This Galaxy instance is not configured to use local users') - new_user_email = 'newuser2@example.com' + new_user_email = 'newuser2@example.org' user = self.gi.users.create_local_user('newuser2', new_user_email, 'secret') self.assertEqual(user['username'], 'newuser2') self.assertEqual(user['email'], new_user_email) - updated_user_email = 'updateduser@example.com' + updated_user_email = 'updateduser@example.org' updated_username = 'updateduser' user_id = user['id'] self.gi.users.update_user(user_id, username=updated_username, email=updated_user_email) diff --git a/bioblend/galaxy/users/__init__.py b/bioblend/galaxy/users/__init__.py index 253ef0f3d..6e3548990 100644 --- a/bioblend/galaxy/users/__init__.py +++ b/bioblend/galaxy/users/__init__.py @@ -43,7 +43,7 @@ def get_users(self, deleted=False, f_email=None, f_name=None, f_any=None): :return: a list of dicts with user details. For example:: - [{'email': 'a_user@example.com', + [{'email': 'a_user@example.org', 'id': 'dda47097d9189f15', 'url': '/api/users/dda47097d9189f15'}] diff --git a/docs/api_docs/galaxy/docs.rst b/docs/api_docs/galaxy/docs.rst index b49d116b1..b3d48e8ed 100644 --- a/docs/api_docs/galaxy/docs.rst +++ b/docs/api_docs/galaxy/docs.rst @@ -361,11 +361,11 @@ Methods for managing users are grouped under ``GalaxyInstance.users.*``. User ma To get a list of users, call: >>> gi.users.get_users() - [{'email': 'userA@unimelb.edu.au', + [{'email': 'userA@example.org', 'id': '975a9ce09b49502a', 'quota_percent': None, 'url': '/api/users/975a9ce09b49502a'}, - {'email': 'userB@student.unimelb.edu.au', + {'email': 'userB@example.org', 'id': '0193a95acf427d2c', 'quota_percent': None, 'url': '/api/users/0193a95acf427d2c'}] From d3fcb298d1365ed409abd562c0006b38ff185c12 Mon Sep 17 00:00:00 2001 From: cat-bro Date: Tue, 3 May 2022 22:46:09 +1000 Subject: [PATCH 25/32] Update bioblend/galaxy/config/__init__.py Co-authored-by: Nicola Soranzo --- bioblend/galaxy/config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/config/__init__.py b/bioblend/galaxy/config/__init__.py index fbe2b159e..1997b45aa 100644 --- a/bioblend/galaxy/config/__init__.py +++ b/bioblend/galaxy/config/__init__.py @@ -58,7 +58,7 @@ def whoami(self): For example:: {'active': True, 'deleted': False, - 'email': 'jgillard83649163@student.unimelb.edu.au', + 'email': 'user@example.org', 'id': '4aaaaa85aacc9caa', 'last_password_change': '2021-07-29T05:34:54.632345', 'model_class': 'User', From 9324f8c29ec0f5800c5db5d17857ce8493dea43a Mon Sep 17 00:00:00 2001 From: cat-bro Date: Tue, 3 May 2022 22:46:24 +1000 Subject: [PATCH 26/32] Update bioblend/galaxy/config/__init__.py Co-authored-by: Nicola Soranzo --- bioblend/galaxy/config/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bioblend/galaxy/config/__init__.py b/bioblend/galaxy/config/__init__.py index 1997b45aa..6c8a91bc4 100644 --- a/bioblend/galaxy/config/__init__.py +++ b/bioblend/galaxy/config/__init__.py @@ -64,5 +64,5 @@ def whoami(self): 'model_class': 'User', 'username': 'julia'} """ - url = '/'.join((self.gi.url, 'whoami')) + url = self.gi.url + "/whoami" return self._get(url=url) From 9c8095c98a1af21970c60736b7995cdf465358f1 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Tue, 10 May 2022 00:31:55 +0100 Subject: [PATCH 27/32] Make ``test_refactor_workflow`` independent of the position of the added input step The test was broken by 
https://github.com/galaxyproject/galaxy/pull/13641 . --- bioblend/_tests/TestGalaxyWorkflows.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/bioblend/_tests/TestGalaxyWorkflows.py b/bioblend/_tests/TestGalaxyWorkflows.py index f5c3d6b3c..4f73e8d88 100644 --- a/bioblend/_tests/TestGalaxyWorkflows.py +++ b/bioblend/_tests/TestGalaxyWorkflows.py @@ -280,10 +280,8 @@ def test_refactor_workflow(self): path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) wf = self.gi.workflows.import_workflow_from_local_path(path) response = self.gi.workflows.refactor_workflow(wf['id'], actions, dry_run=True) - self.assertEqual(len(response), 3) - self.assertTrue('action_executions' in response) - self.assertTrue('workflow' in response) - self.assertTrue('dry_run' in response) - self.assertEqual(len(response['action_executions']), 2) - self.assertEqual(response['workflow']['steps']['0']['label'], 'bar') + self.assertEqual(len(response['action_executions']), len(actions)) self.assertEqual(response['dry_run'], True) + updated_steps = response['workflow']['steps'] + self.assertEqual(len(updated_steps), 4) + self.assertEqual({step['label'] for step in updated_steps.values()}, {'bar', None, 'Input 1', 'Input 2'}) From d676872fa791aa8a24d44f0e99df936d728d78c8 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Mon, 9 May 2022 19:35:04 +0100 Subject: [PATCH 28/32] Release 0.17.0 Also: - Using the deprecated ``history_id`` parameter of the ``HistoryClient.get_histories()`` method now raises a ``ValueError`` exception. - Docs fixes. --- ABOUT.rst | 2 +- CHANGELOG.md | 61 +++++++++++++++++-- README.rst | 2 +- bioblend/__init__.py | 2 +- bioblend/galaxy/config/__init__.py | 1 + .../galaxy/dataset_collections/__init__.py | 2 +- bioblend/galaxy/datasets/__init__.py | 4 +- bioblend/galaxy/histories/__init__.py | 38 +++++------- bioblend/galaxy/invocations/__init__.py | 2 +- bioblend/galaxy/jobs/__init__.py | 26 ++++---- bioblend/galaxy/libraries/__init__.py | 20 +++--- bioblend/galaxy/objects/client.py | 2 +- bioblend/galaxy/objects/wrappers.py | 19 +++--- bioblend/galaxy/tool_dependencies/__init__.py | 6 +- bioblend/galaxy/tools/__init__.py | 28 ++++++--- bioblend/galaxy/users/__init__.py | 6 +- docs/api_docs/galaxy/docs.rst | 4 +- 17 files changed, 137 insertions(+), 88 deletions(-) diff --git a/ABOUT.rst b/ABOUT.rst index 0b925d0d5..f32bd7431 100644 --- a/ABOUT.rst +++ b/ABOUT.rst @@ -4,7 +4,7 @@ interacting with `Galaxy`_ and `CloudMan`_ APIs. BioBlend is supported and tested on: - Python 3.7, 3.8, 3.9 and 3.10 -- Galaxy release_17.09 and later. +- Galaxy release 17.09 and later. BioBlend's goal is to make it easier to script and automate the running of Galaxy analyses, administering of a Galaxy server, and cloud infrastructure diff --git a/CHANGELOG.md b/CHANGELOG.md index de6c9daad..8e5c10423 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,14 +1,63 @@ -### BioBlend v - unreleased +### BioBlend v0.17.0 - 2022-05-09 * Dropped support for Python 3.6. Added support for Python 3.10. Added support for Galaxy release 21.09 and 22.01. +* Removed deprecated ``run_workflow()`` method of ``WorkflowClient``. + +* Using the deprecated ``history_id`` parameter of the + ``HistoryClient.get_histories()`` method now raises a ``ValueError`` + exception. + +* Made ``tool_inputs_update`` parameter of ``JobsClient.rerun_job()`` more + flexible. + +* Added ``whoami()`` method to ``ConfigClient`` (thanks to + [cat-bro](https://github.com/cat-bro)). 
+ * Added ``get_extra_files()`` method to ``HistoryClient``. +* Added ``build()`` and ``reload()`` methods to ``ToolClient`` (thanks to + [Jayadev Joshi](https://github.com/jaidevjoshi83) and + [cat-bro](https://github.com/cat-bro) respectively). + +* Added ``get_repositories()`` method to ``ToolShedCategoryClient`` (thanks to + [cat-bro](https://github.com/cat-bro)). + +* Added ``update_repository_metadata()`` method to ``ToolShedRepositoryClient``. + +* Added ``order_by`` parameter to ``JobsClient.get_jobs()`` method. + +* BioBlend.objects: Removed deprecated ``run()`` method of ``Workflow``. + * BioBlend.objects: Fail if multiple libraries/histories/workflows match when deleting by name, instead of deleting them all. -* Improvements to type annotations, tests and documentation +* BioBlend.objects: in ``HistoryDatasetAssociation.get_stream()``, wait for + the dataset to be ready. + +* BioBlend.objects: in ``Workflow.invoke()``, check that the workflow is mapped + and runnable before invoking, allow the ``inputs`` parameter to be an instance + of a ``Dataset`` subclass, and allow the ``history`` parameter to be the name + of a new history. + +* BioBlend.objects: Added new ``datasets`` and ``dataset_collections`` + attributes to ``GalaxyInstance`` objects, which are instances of the new + ``ObjDatasetClient`` and ``ObjDatasetCollectionClient`` respectively. + +* BioBlend.objects: Added ``refresh()``, ``get_outputs()`` and + ``get_output_collections()`` methods to ``InvocationStep``. + +* Fixed [error](https://github.com/galaxyproject/bioblend/issues/398) when + instantiating ``GalaxyInstance`` with ``email`` and ``password`` (reported by + [Peter Briggs](https://github.com/pjbriggs)). + +* Fixed parameter validation errors for POST requests with attached files on + upcoming Galaxy 22.05. + +* Code cleanups (thanks to [Martmists](https://github.com/Martmists-GH)). + +* Improvements to type annotations, tests and documentation. ### BioBlend v0.16.0 - 2021-06-13 @@ -280,7 +329,7 @@ * BioBlend.objects: added ``update()`` method to ``LibraryDataset`` (thanks to [Anthony Bretaudeau](https://github.com/abretaud)). -* Run tests with pytest instead of nose +* Run tests with pytest instead of nose. ### BioBlend v0.11.0 - 2018-04-18 @@ -413,7 +462,7 @@ * Added ``install_resolver_dependencies`` parameter to ``ToolShedClient.install_repository_revision()``, applicable for Galaxy - release_16.07 and later (thanks to + release 16.07 and later (thanks to [Marius van den Beek](https://github.com/mvdbeek)). * Improve ``DatasetClient.download_dataset()`` by downloading the dataset in @@ -529,7 +578,7 @@ * Project source moved to new URL - https://github.com/galaxyproject/bioblend * Huge improvements to automated testing, tests now run against Galaxy - release_14.02 and all later versions to ensure backward compatibility + release 14.02 and all later versions to ensure backward compatibility (see `.travis.yml` for details). * Many documentation improvements (thanks to @@ -553,7 +602,7 @@ deployments. * Made ``LibraryClient._get_root_folder_id()`` method safer and faster for - Galaxy release_13.06 and later. + Galaxy release 13.06 and later. * Deprecate and ignore invalid ``deleted`` parameter to ``WorkflowClient.get_workflows()``. diff --git a/README.rst b/README.rst index fd76fbc3a..1ac04f36d 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,7 @@ APIs. BioBlend is supported and tested on: - Python 3.7, 3.8, 3.9 and 3.10 -- Galaxy release_17.09 and later. +- Galaxy release 17.09 and later. 
Full docs are available at https://bioblend.readthedocs.io/ with a quick library overview also available in `ABOUT.rst <./ABOUT.rst>`_. diff --git a/bioblend/__init__.py b/bioblend/__init__.py index 0b16efdf6..cf6147807 100644 --- a/bioblend/__init__.py +++ b/bioblend/__init__.py @@ -8,7 +8,7 @@ ) # Current version of the library -__version__ = '0.16.0' +__version__ = '0.17.0' # default chunk size (in bytes) for reading remote data try: diff --git a/bioblend/galaxy/config/__init__.py b/bioblend/galaxy/config/__init__.py index 6c8a91bc4..92dde44c0 100644 --- a/bioblend/galaxy/config/__init__.py +++ b/bioblend/galaxy/config/__init__.py @@ -56,6 +56,7 @@ def whoami(self): :rtype: dict :return: Information about current authenticated user For example:: + {'active': True, 'deleted': False, 'email': 'user@example.org', diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index 28535b6ae..5909ae2e3 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -128,7 +128,7 @@ def download_dataset_collection(self, dataset_collection_id: str, file_path: str .. note:: This method downloads a ``zip`` archive for Galaxy 21.01 and later. For earlier versions of Galaxy this method downloads a ``tgz`` archive. - This method is only supported by Galaxy 18.01 or later. + This method works only on Galaxy 18.01 or later. """ url = self._make_url(module_id=dataset_collection_id) + '/download' r = self.gi.make_get_request(url, stream=True) diff --git a/bioblend/galaxy/datasets/__init__.py b/bioblend/galaxy/datasets/__init__.py index b40cd9fae..82480fbfb 100644 --- a/bioblend/galaxy/datasets/__init__.py +++ b/bioblend/galaxy/datasets/__init__.py @@ -320,7 +320,7 @@ def publish_dataset(self, dataset_id: str, published: bool = False): :return: Current roles for all available permission types. .. note:: - This method can only be used with Galaxy ``release_19.05`` or later. + This method works only on Galaxy 19.05 or later. """ payload: Dict[str, Any] = { 'action': 'remove_restrictions' if published else 'make_private' @@ -349,7 +349,7 @@ def update_permissions(self, dataset_id: str, access_ids: Optional[list] = None, :return: Current roles for all available permission types. .. note:: - This method can only be used with Galaxy ``release_19.05`` or later. + This method works only on Galaxy 19.05 or later. 
""" payload: Dict[str, Any] = { 'action': 'set_permissions' diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index e51cd5440..0ff1d7b82 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -5,7 +5,6 @@ import re import sys import time -import warnings import webbrowser from typing import List from urllib.parse import urljoin @@ -56,12 +55,10 @@ def import_history(self, file_path=None, url=None): return self._post(payload=payload, files_attached=file_path is not None) - def _get_histories(self, history_id=None, name=None, deleted=False, filter_user_published=None, get_all_published=False, slug=None): + def _get_histories(self, name=None, deleted=False, filter_user_published=None, get_all_published=False, slug=None): """ Hidden method to be used by both get_histories() and get_published_histories() """ - if history_id is not None and name is not None: - raise ValueError('Provide only one argument between name or history_id, but not both') assert not (filter_user_published is not None and get_all_published) params = {} @@ -78,10 +75,7 @@ def _get_histories(self, history_id=None, name=None, deleted=False, filter_user_ url = '/'.join((self._make_url(), 'published')) if get_all_published else None histories = self._get(url=url, params=params) - if history_id is not None: - history = next((_ for _ in histories if _['id'] == history_id), None) - histories = [history] if history is not None else [] - elif name is not None: + if name is not None: histories = [_ for _ in histories if _['name'] == name] return histories @@ -90,13 +84,6 @@ def get_histories(self, history_id=None, name=None, deleted=False, published=Non Get all histories, or select a subset by specifying optional arguments for filtering (e.g. a history name). - :type history_id: str - :param history_id: Encoded history ID to filter on - - .. deprecated:: 0.15.0 - To get details of a history for which you know the ID, use the much - more efficient :meth:`show_history` instead. - :type name: str :param name: History name to filter on. @@ -116,13 +103,16 @@ def get_histories(self, history_id=None, name=None, deleted=False, published=Non :rtype: list :return: List of history dicts. + + .. versionchanged:: 0.17.0 + Using the deprecated ``history_id`` parameter now raises a + ``ValueError`` exception. """ if history_id is not None: - warnings.warn( - 'The history_id parameter is deprecated, use the show_history() method to view details of a history for which you know the ID.', - category=FutureWarning + raise ValueError( + 'The history_id parameter has been removed, use the show_history() method to view details of a history for which you know the ID.', ) - return self._get_histories(history_id=history_id, name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug) + return self._get_histories(name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug) def get_published_histories(self, name=None, deleted=False, slug=None): """ @@ -217,9 +207,9 @@ def delete_dataset(self, history_id, dataset_id, purge=False): :return: None .. note:: - For the purge option to work, the Galaxy instance must have the - ``allow_user_dataset_purge`` option set to ``true`` in the - ``config/galaxy.yml`` configuration file. + The ``purge`` option works only if the Galaxy instance has the + ``allow_user_dataset_purge`` option set to ``true`` in the + ``config/galaxy.yml`` configuration file. 
""" url = '/'.join((self._make_url(history_id, contents=True), dataset_id)) payload = {} @@ -542,7 +532,7 @@ def delete_history(self, history_id, purge=False): purged). .. note:: - For the purge option to work, the Galaxy instance must have the + The ``purge`` option works only if the Galaxy instance has the ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ @@ -773,7 +763,7 @@ def get_extra_files(self, history_id: str, dataset_id: str) -> List[dict]: :return: List of extra files .. note:: - This method is only supported by Galaxy 19.01 or later. + This method works only on Galaxy 19.01 or later. """ url = '/'.join((self._make_url(history_id, contents=True), dataset_id, 'extra_files')) return self._get(url=url) diff --git a/bioblend/galaxy/invocations/__init__.py b/bioblend/galaxy/invocations/__init__.py index b02e13c9e..62ccd6e4e 100644 --- a/bioblend/galaxy/invocations/__init__.py +++ b/bioblend/galaxy/invocations/__init__.py @@ -194,7 +194,7 @@ def rerun_invocation(self, invocation_id: str, inputs_update: Optional[dict] = N :return: A dict describing the new workflow invocation. .. note:: - This method can only be used with Galaxy ``release_21.01`` or later. + This method works only on Galaxy 21.01 or later. """ invocation_details = self.show_invocation(invocation_id) workflow_id = invocation_details['workflow_id'] diff --git a/bioblend/galaxy/jobs/__init__.py b/bioblend/galaxy/jobs/__init__.py index 1da72bbfd..da2e7aaeb 100644 --- a/bioblend/galaxy/jobs/__init__.py +++ b/bioblend/galaxy/jobs/__init__.py @@ -97,8 +97,8 @@ def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None 'update_time': '2014-03-01T16:05:39.558458'}] .. note:: - The following filtering options can only be used with Galaxy ``release_21.05`` or later: - user_id, limit, offset, workflow_id, invocation_id + The following options work only on Galaxy 21.05 or later: ``user_id``, + ``limit``, ``offset``, ``workflow_id``, ``invocation_id``. """ params = { 'limit': limit, @@ -204,7 +204,7 @@ def rerun_job(self, job_id, remap=False, tool_inputs_update=None, history_id=Non :return: Information about outputs and the rerun job .. note:: - This method can only be used with Galaxy ``release_21.01`` or later. + This method works only on Galaxy 21.01 or later. """ job_rerun_params = self._build_for_rerun(job_id) job_inputs = job_rerun_params['state_inputs'] @@ -277,7 +277,7 @@ def search_jobs(self, tool_id: str, inputs: dict, state: Optional[str] = None) - ``inputs`` and ``state``. .. note:: - This method is only supported by Galaxy 18.01 or later. + This method works only on Galaxy 18.01 or later. """ job_info = { 'tool_id': tool_id, @@ -342,7 +342,7 @@ def report_error(self, job_id: str, dataset_id: str, message: str, email: str = :return: dict containing job error reply .. note:: - This method is only supported by Galaxy 20.01 or later. + This method works only on Galaxy 20.01 or later. """ payload = { "message": message, @@ -366,7 +366,7 @@ def get_common_problems(self, job_id: str) -> dict: :return: dict containing potential problems .. note:: - This method is only supported by Galaxy 19.05 or later. + This method works only on Galaxy 19.05 or later. """ url = self._make_url(module_id=job_id) + '/common_problems' return self._get(url=url) @@ -408,7 +408,7 @@ def resume_job(self, job_id: str) -> dict: :return: dict containing output dataset associations .. note:: - This method is only supported by Galaxy 18.09 or later. 
+ This method works only on Galaxy 18.09 or later. """ url = self._make_url(module_id=job_id) + '/resume' return self._put(url=url) @@ -425,8 +425,8 @@ def get_destination_params(self, job_id: str) -> dict: :return: Destination parameters for the given job .. note:: - This method is only supported by Galaxy 20.05 or later and requires - the user to be an admin. + This method works only on Galaxy 20.05 or later and if the user is a + Galaxy admin. """ url = self._make_url(module_id=job_id) + '/destination_params' return self._get(url=url) @@ -440,8 +440,8 @@ def show_job_lock(self) -> bool: :return: Status of the job lock .. note:: - This method is only supported by Galaxy 20.05 or later and requires - the user to be an admin. + This method works only on Galaxy 20.05 or later and if the user is a + Galaxy admin. """ url = self.gi.url + '/job_lock' response = self._get(url=url) @@ -457,8 +457,8 @@ def update_job_lock(self, active=False) -> bool: :return: Updated status of the job lock .. note:: - This method is only supported by Galaxy 20.05 or later and requires - the user to be an admin. + This method works only on Galaxy 20.05 or later and if the user is a + Galaxy admin. """ payload = { 'active': active, diff --git a/bioblend/galaxy/libraries/__init__.py b/bioblend/galaxy/libraries/__init__.py index 6690fa61c..8664e9e52 100644 --- a/bioblend/galaxy/libraries/__init__.py +++ b/bioblend/galaxy/libraries/__init__.py @@ -492,11 +492,6 @@ def upload_file_from_server(self, library_id, server_dir, folder_id=None, Upload all files in the specified subdirectory of the Galaxy library import directory to a library. - .. note:: - For this method to work, the Galaxy instance must have the - ``library_import_dir`` option configured in the ``config/galaxy.yml`` - configuration file. - :type library_id: str :param library_id: id of the library where to place the uploaded file @@ -539,6 +534,11 @@ def upload_file_from_server(self, library_id, server_dir, folder_id=None, :rtype: list :return: List with a single dictionary containing information about the LDDA + + .. note:: + This method works only if the Galaxy instance has the + ``library_import_dir`` option configured in the ``config/galaxy.yml`` + configuration file. """ return self._do_upload(library_id, server_dir=server_dir, folder_id=folder_id, file_type=file_type, @@ -555,11 +555,6 @@ def upload_from_galaxy_filesystem(self, library_id, filesystem_paths, folder_id= Upload a set of files already present on the filesystem of the Galaxy server to a library. - .. note:: - For this method to work, the Galaxy instance must have the - ``allow_path_paste`` option set to ``true`` in the - ``config/galaxy.yml`` configuration file. - :type library_id: str :param library_id: id of the library where to place the uploaded file @@ -600,6 +595,11 @@ def upload_from_galaxy_filesystem(self, library_id, filesystem_paths, folder_id= :rtype: list :return: List with a single dictionary containing information about the LDDA + + .. note:: + This method works only if the Galaxy instance has the + ``allow_path_paste`` option set to ``true`` in the + ``config/galaxy.yml`` configuration file. 
""" return self._do_upload(library_id, filesystem_paths=filesystem_paths, folder_id=folder_id, file_type=file_type, diff --git a/bioblend/galaxy/objects/client.py b/bioblend/galaxy/objects/client.py index 71ffca1ea..ad07a1f7c 100644 --- a/bioblend/galaxy/objects/client.py +++ b/bioblend/galaxy/objects/client.py @@ -227,7 +227,7 @@ def delete(self, id_=None, name=None, purge=False): :param purge: if ``True``, also purge (permanently delete) the history .. note:: - For the purge option to work, the Galaxy instance must have the + The ``purge`` option works only if the Galaxy instance has the ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ diff --git a/bioblend/galaxy/objects/wrappers.py b/bioblend/galaxy/objects/wrappers.py index 9dcbd6273..c6f6f8938 100644 --- a/bioblend/galaxy/objects/wrappers.py +++ b/bioblend/galaxy/objects/wrappers.py @@ -913,9 +913,9 @@ def delete(self, purge=False): :param purge: if ``True``, also purge (permanently delete) the dataset .. note:: - For the purge option to work, the Galaxy instance must have the - ``allow_user_dataset_purge`` option set to ``true`` in the - ``config/galaxy.yml`` configuration file. + The ``purge`` option works only if the Galaxy instance has the + ``allow_user_dataset_purge`` option set to ``true`` in the + ``config/galaxy.yml`` configuration file. """ self.gi.gi.histories.delete_dataset(self.container.id, self.id, purge=purge) self.container.refresh() @@ -1134,7 +1134,6 @@ def get_datasets(self, name=None): container .. note:: - when filtering library datasets by name, specify their full paths starting from the library's root folder, e.g., ``/seqdata/reads.fastq``. Full paths are available through @@ -1197,7 +1196,7 @@ def delete(self, purge=False): :param purge: if ``True``, also purge (permanently delete) the history .. note:: - For the purge option to work, the Galaxy instance must have the + The ``purge`` option works only if the Galaxy instance has the ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ @@ -1417,11 +1416,6 @@ def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwarg """ Upload data to this library from filesystem paths on the server. - .. note:: - For this method to work, the Galaxy instance must have the - ``allow_path_paste`` option set to ``true`` in the - ``config/galaxy.yml`` configuration file. - :type paths: str or :class:`~collections.abc.Iterable` of str :param paths: server-side file paths from which data should be read @@ -1434,6 +1428,11 @@ def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwarg :return: the dataset objects that represent the uploaded content See :meth:`.upload_data` for info on other params. + + .. note:: + This method works only if the Galaxy instance has the + ``allow_path_paste`` option set to ``true`` in the + ``config/galaxy.yml`` configuration file. """ fid = self._pre_upload(folder) if isinstance(paths, str): diff --git a/bioblend/galaxy/tool_dependencies/__init__.py b/bioblend/galaxy/tool_dependencies/__init__.py index d33a10013..773db2f3d 100644 --- a/bioblend/galaxy/tool_dependencies/__init__.py +++ b/bioblend/galaxy/tool_dependencies/__init__.py @@ -62,9 +62,9 @@ def summarize_toolbox(self, index=None, tool_ids=None, resolver_type=None, inclu 'tool_ids': ['vcf_to_maf_customtrack1']}] .. note:: - This method can only be used with Galaxy ``release_20.01`` or later and requires - the user to be an admin. 
It relies on an experimental API particularly tied to - the GUI and therefore is subject to breaking changes. + This method works only on Galaxy 20.01 or later and if the user is a + Galaxy admin. It relies on an experimental API particularly tied to + the GUI and therefore is subject to breaking changes. """ assert index_by in ['tools', 'requirements'], "index_by must be one of 'tools' or 'requirements'." params = { diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index cac8ad2ee..a38aa00e1 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -75,7 +75,6 @@ def _raw_get_tool(self, in_panel=None, trackster=None): def requirements(self, tool_id): """ Return the resolver status for a specific tool. - This functionality is available only to Galaxy admins. :type tool_id: str :param tool_id: id of the requested tool @@ -101,15 +100,19 @@ def requirements(self, tool_id): 'model_class': 'MergedCondaDependency', 'name': 'blast', 'version': '2.10.1'}] + + .. note:: + This method works only if the user is a Galaxy admin. """ url = self._make_url(tool_id) + '/requirements' return self._get(url=url) def reload(self, tool_id: str) -> dict: """ - Reload the specified tool in the toolbox. Any changes that have been made to the wrapper - since the tool was last reloaded will take effect. - This functionality is available only to Galaxy admins. + Reload the specified tool in the toolbox. + + Any changes that have been made to the wrapper since the tool was last + reloaded will take effect. :type tool_id: str :param tool_id: id of the requested tool @@ -118,9 +121,12 @@ def reload(self, tool_id: str) -> dict: :param: dict containing the id, name, and version of the reloaded tool. For example:: - {'message': {'name': 'Cutadapt', - 'id': 'toolshed.g2.bx.psu.edu/repos/lparsons/cutadapt/cutadapt/3.4+galaxy1', - 'version': '3.4+galaxy1'}} + {'message': {'id': 'toolshed.g2.bx.psu.edu/repos/lparsons/cutadapt/cutadapt/3.4+galaxy1', + 'name': 'Cutadapt', + 'version': '3.4+galaxy1'}} + + .. note:: + This method works only if the user is a Galaxy admin. """ url = self._make_url(tool_id) + '/reload' return self._put(url=url) @@ -142,13 +148,15 @@ def install_dependencies(self, tool_id): """ Install dependencies for a given tool via a resolver. This works only for Conda currently. - This functionality is available only to Galaxy admins. :type tool_id: str :param tool_id: id of the requested tool :rtype: dict :return: Tool requirement status + + .. note:: + This method works only if the user is a Galaxy admin. """ url = self._make_url(tool_id) + '/install_dependencies' return self._post(url=url) @@ -157,13 +165,15 @@ def uninstall_dependencies(self, tool_id: str) -> dict: """ Uninstall dependencies for a given tool via a resolver. This works only for Conda currently. - This functionality is available only to Galaxy admins. :type tool_id: str :param tool_id: id of the requested tool :rtype: dict :return: Tool requirement status + + .. note:: + This method works only if the user is a Galaxy admin. """ url = self._make_url(tool_id) + '/dependencies' return self._delete(url=url) diff --git a/bioblend/galaxy/users/__init__.py b/bioblend/galaxy/users/__init__.py index 6e3548990..36e27e395 100644 --- a/bioblend/galaxy/users/__init__.py +++ b/bioblend/galaxy/users/__init__.py @@ -77,7 +77,7 @@ def create_remote_user(self, user_email): Create a new Galaxy remote user. .. 
note:: - For this method to work, the Galaxy instance must have the + This method works only if the Galaxy instance has the ``allow_user_creation`` and ``use_remote_user`` options set to ``true`` in the ``config/galaxy.yml`` configuration file. Also note that setting ``use_remote_user`` will require an upstream @@ -99,7 +99,7 @@ def create_local_user(self, username, user_email, password): Create a new Galaxy local user. .. note:: - For this method to work, the Galaxy instance must have the + This method works only if the Galaxy instance has the ``allow_user_creation`` option set to ``true`` and ``use_remote_user`` option set to ``false`` in the ``config/galaxy.yml`` configuration file. @@ -153,7 +153,7 @@ def delete_user(self, user_id, purge=False): Delete a user. .. note:: - For this method to work, the Galaxy instance must have the + This method works only if the Galaxy instance has the ``allow_user_deletion`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. diff --git a/docs/api_docs/galaxy/docs.rst b/docs/api_docs/galaxy/docs.rst index b3d48e8ed..46af76828 100644 --- a/docs/api_docs/galaxy/docs.rst +++ b/docs/api_docs/galaxy/docs.rst @@ -281,8 +281,8 @@ Instead of using dictionaries directly, workflows can be exported to or imported .. Note:: If we export a workflow from one Galaxy instance and import it into another, Galaxy will only run it without modification if it has the same versions of the tool wrappers installed. This is to ensure reproducibility. Otherwise, we will need to manually update the workflow to use the new tool versions. -Invoke a Workflow -~~~~~~~~~~~~~~ +Invoke a workflow +~~~~~~~~~~~~~~~~~ To invoke a workflow, we need to tell Galaxy which datasets to use for which workflow inputs. We can use datasets from histories or data libraries. 
From f416b52a832e6bc4bd98bfad80595c64b21417a5 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 13 May 2022 18:00:50 +0100 Subject: [PATCH 29/32] Lint and test on minimum and maximum supported Python versions --- .github/workflows/lint.yaml | 2 +- .github/workflows/test.yaml | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 512422a0c..d9dc7319c 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7'] + python-version: ['3.7', '3.10'] steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v3 diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 5a620e69f..10a34304b 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -53,12 +53,6 @@ jobs: - os: ubuntu-latest tox_env: py310 galaxy_version: dev - - os: ubuntu-latest - tox_env: py39 - galaxy_version: dev - - os: ubuntu-latest - tox_env: py38 - galaxy_version: dev # Cannot test on macOS because service containers are not supported # yet: https://github.community/t/github-actions-services-available-on-others-vms/16916 # - os: macos-latest From 7bcd07db8392ac790d1b0b92f4a377945197e43d Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 13 May 2022 18:36:15 +0100 Subject: [PATCH 30/32] Format Python code with black and isort --- .isort.cfg | 10 + CHANGELOG.md | 6 +- bioblend/__init__.py | 6 +- bioblend/_tests/CloudmanTestBase.py | 51 +- bioblend/_tests/GalaxyTestBase.py | 7 +- bioblend/_tests/TestCloudmanLaunch.py | 41 +- bioblend/_tests/TestCloudmanMock.py | 37 +- bioblend/_tests/TestCloudmanServices.py | 22 +- bioblend/_tests/TestGalaxyConfig.py | 6 +- .../_tests/TestGalaxyDatasetCollections.py | 55 +- bioblend/_tests/TestGalaxyDatasets.py | 125 ++-- bioblend/_tests/TestGalaxyFolders.py | 64 +- bioblend/_tests/TestGalaxyGroups.py | 45 +- bioblend/_tests/TestGalaxyHistories.py | 138 ++--- bioblend/_tests/TestGalaxyInstance.py | 5 +- bioblend/_tests/TestGalaxyInvocations.py | 96 ++- bioblend/_tests/TestGalaxyJobs.py | 216 ++++--- bioblend/_tests/TestGalaxyLibraries.py | 152 ++--- bioblend/_tests/TestGalaxyObjects.py | 469 +++++++-------- bioblend/_tests/TestGalaxyQuotas.py | 55 +- bioblend/_tests/TestGalaxyRoles.py | 13 +- bioblend/_tests/TestGalaxyToolData.py | 11 +- bioblend/_tests/TestGalaxyToolDependencies.py | 22 +- bioblend/_tests/TestGalaxyToolInputs.py | 36 +- bioblend/_tests/TestGalaxyTools.py | 101 ++-- bioblend/_tests/TestGalaxyUsers.py | 94 +-- bioblend/_tests/TestGalaxyWorkflows.py | 235 ++++---- bioblend/_tests/test_util.py | 40 +- bioblend/cloudman/__init__.py | 239 ++++---- bioblend/cloudman/launch.py | 559 +++++++++--------- bioblend/config.py | 12 +- bioblend/galaxy/client.py | 20 +- bioblend/galaxy/config/__init__.py | 4 +- .../galaxy/dataset_collections/__init__.py | 98 +-- bioblend/galaxy/datasets/__init__.py | 128 ++-- bioblend/galaxy/datatypes/__init__.py | 8 +- bioblend/galaxy/folders/__init__.py | 26 +- bioblend/galaxy/forms/__init__.py | 2 +- bioblend/galaxy/ftpfiles/__init__.py | 2 +- bioblend/galaxy/genomes/__init__.py | 37 +- bioblend/galaxy/groups/__init__.py | 26 +- bioblend/galaxy/histories/__init__.py | 130 ++-- bioblend/galaxy/invocations/__init__.py | 109 ++-- bioblend/galaxy/jobs/__init__.py | 105 ++-- bioblend/galaxy/libraries/__init__.py | 271 +++++---- bioblend/galaxy/objects/client.py | 78 ++- bioblend/galaxy/objects/galaxy_instance.py | 10 +- 
bioblend/galaxy/objects/wrappers.py | 487 ++++++++------- bioblend/galaxy/quotas/__init__.py | 52 +- bioblend/galaxy/roles/__init__.py | 9 +- bioblend/galaxy/tool_data/__init__.py | 6 +- bioblend/galaxy/tool_dependencies/__init__.py | 28 +- bioblend/galaxy/tools/__init__.py | 56 +- bioblend/galaxy/tools/inputs.py | 6 +- bioblend/galaxy/toolshed/__init__.py | 54 +- bioblend/galaxy/users/__init__.py | 30 +- bioblend/galaxy/visual/__init__.py | 2 +- bioblend/galaxy/workflows/__init__.py | 91 +-- bioblend/galaxyclient.py | 21 +- bioblend/toolshed/__init__.py | 6 +- bioblend/toolshed/categories/__init__.py | 10 +- bioblend/toolshed/repositories/__init__.py | 133 +++-- bioblend/toolshed/tools/__init__.py | 2 +- bioblend/util/__init__.py | 8 +- docs/conf.py | 54 +- .../examples/cloudman_basic_usage_scenario.py | 4 +- docs/examples/create_user_get_api_key.py | 6 +- docs/examples/list_histories.py | 2 +- docs/examples/objects/small.py | 31 +- docs/examples/objects/w2_bacterial_reseq.py | 38 +- docs/examples/objects/w3_bacterial_denovo.py | 50 +- docs/examples/objects/w5_galaxy_api.py | 49 +- docs/examples/objects/w5_metagenomics.py | 29 +- docs/examples/run_imported_workflow.py | 36 +- docs/examples/start_cloudman.py | 57 +- pyproject.toml | 8 + setup.cfg | 7 +- tox.ini | 6 +- 78 files changed, 2811 insertions(+), 2589 deletions(-) create mode 100644 .isort.cfg diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 000000000..5d98509c2 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,10 @@ +[settings] +force_alphabetical_sort_within_sections=true +# Override force_grid_wrap value from profile=black, but black is still happy +force_grid_wrap=2 +# Same line length as for black +line_length=120 +no_lines_before=LOCALFOLDER +profile=black +reverse_relative=true +skip_gitignore=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e5c10423..aea77a191 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,11 @@ +### BioBlend v + +* Format Python code with black and isort. + ### BioBlend v0.17.0 - 2022-05-09 * Dropped support for Python 3.6. Added support for Python 3.10. Added support - for Galaxy release 21.09 and 22.01. + for Galaxy releases 21.09 and 22.01. * Removed deprecated ``run_workflow()`` method of ``WorkflowClient``. diff --git a/bioblend/__init__.py b/bioblend/__init__.py index cf6147807..e99ddb0b6 100644 --- a/bioblend/__init__.py +++ b/bioblend/__init__.py @@ -8,11 +8,12 @@ ) # Current version of the library -__version__ = '0.17.0' +__version__ = "0.17.0" # default chunk size (in bytes) for reading remote data try: import resource + CHUNK_SIZE = resource.getpagesize() except Exception: CHUNK_SIZE = 4096 @@ -47,7 +48,7 @@ def emit(self, record): # import logging # logging.basicConfig(filename="bioblend.log", level=logging.DEBUG) default_format_string = "%(asctime)s %(name)s [%(levelname)s]: %(message)s" -log = logging.getLogger('bioblend') +log = logging.getLogger("bioblend") log.addHandler(NullHandler()) init_logging() @@ -94,6 +95,7 @@ class ConnectionError(Exception): proxy server getting in the way of the request etc. 
@see: body attribute to see the content of the http response """ + def __init__(self, message, body=None, status_code=None): super().__init__(message) self.body = body diff --git a/bioblend/_tests/CloudmanTestBase.py b/bioblend/_tests/CloudmanTestBase.py index 750220ba7..71070b060 100644 --- a/bioblend/_tests/CloudmanTestBase.py +++ b/bioblend/_tests/CloudmanTestBase.py @@ -11,19 +11,18 @@ class CloudmanTestBase(unittest.TestCase): - @classmethod @test_util.skip_unless_cloudman() def setUpClass(cls): - if os.environ.get('BIOBLEND_CLOUD_TYPE') == 'EC2': - cls.access_key = os.environ['BIOBLEND_ACCESS_KEY'] - cls.secret_key = os.environ['BIOBLEND_SECRET_KEY'] - cls.cluster_name = 'Blend CloudMan' - cls.ami_id = os.environ['BIOBLEND_AMI_ID'] - cls.instance_type = 'm1.small' - cls.password = 'password' + if os.environ.get("BIOBLEND_CLOUD_TYPE") == "EC2": + cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] + cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] + cls.cluster_name = "Blend CloudMan" + cls.ami_id = os.environ["BIOBLEND_AMI_ID"] + cls.instance_type = "m1.small" + cls.password = "password" cls.cloud_metadata = Bunch( - id='1', # for compatibility w/ DB representation + id="1", # for compatibility w/ DB representation name="Amazon", cloud_type="ec2", bucket_default="cloudman", @@ -35,29 +34,31 @@ def setUpClass(cls): is_secure=True, s3_host="s3.amazonaws.com", s3_port="", - s3_conn_path='/') + s3_conn_path="/", + ) else: # Assume OpenStack/NeCTAR - cls.access_key = os.environ['BIOBLEND_ACCESS_KEY'] - cls.secret_key = os.environ['BIOBLEND_SECRET_KEY'] + cls.access_key = os.environ["BIOBLEND_ACCESS_KEY"] + cls.secret_key = os.environ["BIOBLEND_SECRET_KEY"] cls.cloud_metadata = Bunch( - id='-1', + id="-1", name="NeCTAR", - cloud_type='openstack', - bucket_default='cloudman-os', - region_name='melbourne', - region_endpoint='nova.rc.nectar.org.au', + cloud_type="openstack", + bucket_default="cloudman-os", + region_name="melbourne", + region_endpoint="nova.rc.nectar.org.au", ec2_port=8773, - ec2_conn_path='/services/Cloud', - cidr_range='115.146.92.0/22', + ec2_conn_path="/services/Cloud", + cidr_range="115.146.92.0/22", is_secure=True, - s3_host='swift.rc.nectar.org.au', + s3_host="swift.rc.nectar.org.au", s3_port=8888, - s3_conn_path='/') - cls.cluster_name = 'Blend CloudMan' - cls.ami_id = os.environ['BIOBLEND_AMI_ID'] - cls.instance_type = 'm1.small' - cls.password = 'password' + s3_conn_path="/", + ) + cls.cluster_name = "Blend CloudMan" + cls.ami_id = os.environ["BIOBLEND_AMI_ID"] + cls.instance_type = "m1.small" + cls.password = "password" @classmethod @test_util.skip_unless_cloudman() diff --git a/bioblend/_tests/GalaxyTestBase.py b/bioblend/_tests/GalaxyTestBase.py index efa54bbd5..f62fa7623 100644 --- a/bioblend/_tests/GalaxyTestBase.py +++ b/bioblend/_tests/GalaxyTestBase.py @@ -5,17 +5,16 @@ import bioblend.galaxy from . 
import test_util -bioblend.set_stream_logger('test', level='INFO') +bioblend.set_stream_logger("test", level="INFO") BIOBLEND_TEST_JOB_TIMEOUT = int(os.environ.get("BIOBLEND_TEST_JOB_TIMEOUT", "60")) @test_util.skip_unless_galaxy() class GalaxyTestBase(unittest.TestCase): - def setUp(self): - galaxy_key = os.environ['BIOBLEND_GALAXY_API_KEY'] - galaxy_url = os.environ['BIOBLEND_GALAXY_URL'] + galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] + galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] self.gi = bioblend.galaxy.GalaxyInstance(url=galaxy_url, key=galaxy_key) def _test_dataset(self, history_id, contents="1\t2\t3", **kwds): diff --git a/bioblend/_tests/TestCloudmanLaunch.py b/bioblend/_tests/TestCloudmanLaunch.py index b5d9d589e..feaa3873d 100644 --- a/bioblend/_tests/TestCloudmanLaunch.py +++ b/bioblend/_tests/TestCloudmanLaunch.py @@ -4,13 +4,18 @@ """ import contextlib -from bioblend.cloudman import CloudManConfig, CloudManInstance -from . import CloudmanTestBase, test_util +from bioblend.cloudman import ( + CloudManConfig, + CloudManInstance, +) +from . import ( + CloudmanTestBase, + test_util, +) @test_util.skip_unless_cloudman() class TestCloudmanLaunch(CloudmanTestBase.CloudmanTestBase): - def setUp(self): super().setUp() @@ -20,7 +25,15 @@ def test_validate_valid_config(self): """ # cfg = CloudManConfig(self.access_key, self.secret_key, self.cluster_name, self.ami_id, self.instance_type, self.password, cloud_metadata=self.cloud_metadata) cls = TestCloudmanLaunch - cfg = CloudManConfig(cls.access_key, cls.secret_key, cls.cluster_name, cls.ami_id, cls.instance_type, cls.password, cloud_metadata=cls.cloud_metadata) + cfg = CloudManConfig( + cls.access_key, + cls.secret_key, + cls.cluster_name, + cls.ami_id, + cls.instance_type, + cls.password, + cloud_metadata=cls.cloud_metadata, + ) result = cfg.validate() self.assertIsNone(result, "Validation did not return null to indicate success!") @@ -34,11 +47,25 @@ def test_validate_invalid_config(self): def test_launch_and_terminate(self): cls = TestCloudmanLaunch - cfg = CloudManConfig(cls.access_key, cls.secret_key, cls.cluster_name, cls.ami_id, cls.instance_type, cls.password, cloud_metadata=cls.cloud_metadata) + cfg = CloudManConfig( + cls.access_key, + cls.secret_key, + cls.cluster_name, + cls.ami_id, + cls.instance_type, + cls.password, + cloud_metadata=cls.cloud_metadata, + ) cmi = CloudManInstance.launch_instance(cfg) status = cmi.get_status() - self.assertNotEqual(status['cluster_status'], 'ERROR', "instance.get_status() returned ERROR. Should return a successful status!") + self.assertNotEqual( + status["cluster_status"], + "ERROR", + "instance.get_status() returned ERROR. Should return a successful status!", + ) with contextlib.suppress(Exception): # TODO: The terminate method is unpredictable! Needs fix. result = cmi.terminate(delete_cluster=True) - self.assertEqual(result['cluster_status'], 'SHUTDOWN', "Cluster should be in status SHUTDOWN after call to terminate!") + self.assertEqual( + result["cluster_status"], "SHUTDOWN", "Cluster should be in status SHUTDOWN after call to terminate!" 
+ ) diff --git a/bioblend/_tests/TestCloudmanMock.py b/bioblend/_tests/TestCloudmanMock.py index 414ce42c8..4064509b2 100644 --- a/bioblend/_tests/TestCloudmanMock.py +++ b/bioblend/_tests/TestCloudmanMock.py @@ -12,20 +12,19 @@ class TestCloudmanMock(unittest.TestCase): - def setUp(self): url = "http://127.0.0.1:42284" password = "password" self.cm = cloudman.CloudManInstance(url, password) -# def test_initialize(self): -# self.cm._make_get_request = MagicMock(return_value="{}") -# -# ## Set cluster type -# self.cm.initialize(type="Galaxy") -# -# params = {'startup_opt': 'Galaxy'} -# self.cm._make_get_request.assert_called_with("initialize_cluster", parameters=params) + # def test_initialize(self): + # self.cm._make_get_request = MagicMock(return_value="{}") + # + # ## Set cluster type + # self.cm.initialize(type="Galaxy") + # + # params = {'startup_opt': 'Galaxy'} + # self.cm._make_get_request.assert_called_with("initialize_cluster", parameters=params) def test_get_status(self): # Set return value of call @@ -40,7 +39,7 @@ def test_get_status(self): def test_get_nodes(self): # Set return value of call - self.cm._make_get_request = MagicMock(return_value={'instances': []}) + self.cm._make_get_request = MagicMock(return_value={"instances": []}) nodes = self.cm.get_nodes() self.assertIsNotNone(nodes) @@ -56,7 +55,7 @@ def test_add_nodes(self): self.assertIsNotNone(status) # Check that the correct URL was called - params = {'number_nodes': 10, 'instance_type': '', 'spot_price': ''} + params = {"number_nodes": 10, "instance_type": "", "spot_price": ""} self.cm._make_get_request.assert_called_with("add_instances", parameters=params) def test_remove_nodes(self): @@ -66,7 +65,7 @@ def test_remove_nodes(self): self.assertIsNotNone(status) # Check that the correct URL was called - params = {'number_nodes': 10, 'force_termination': True} + params = {"number_nodes": 10, "force_termination": True} self.cm._make_get_request.assert_called_with("remove_instances", parameters=params) def test_remove_node(self): @@ -75,7 +74,7 @@ def test_remove_node(self): self.cm.remove_node(instance_id, force=True) # Check that the correct URL was called - params = {'instance_id': "abcdef"} + params = {"instance_id": "abcdef"} self.cm._make_get_request.assert_called_with("remove_instance", parameters=params) def test_reboot_node(self): @@ -84,7 +83,7 @@ def test_reboot_node(self): self.cm.reboot_node(instance_id) # Check that the correct URL was called - params = {'instance_id': "abcdef"} + params = {"instance_id": "abcdef"} self.cm._make_get_request.assert_called_with("reboot_instance", parameters=params) def test_autoscaling_enabled_true(self): @@ -104,14 +103,14 @@ def test_enable_autoscaling(self): self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) # Check that the correct URL was called - params = {'as_min': 0, 'as_max': 19} + params = {"as_min": 0, "as_max": 19} self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "19", "as_min": "0"}} self.cm.enable_autoscaling(minimum_nodes=0, maximum_nodes=19) # Check that the correct URL was called - params = {'as_min': 0, 'as_max': 19} + params = {"as_min": 0, "as_max": 19} self.cm._make_get_request.assert_called_with("toggle_autoscaling", parameters=params) def test_disable_autoscaling(self): @@ -125,13 +124,13 @@ def test_adjust_autoscaling(self): return_json_string = {"autoscaling": {"use_autoscaling": True, "as_max": "3", "as_min": "1"}} 
self.cm._make_get_request = MagicMock(return_value=return_json_string) self.cm.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) - params = {'as_min_adj': 3, 'as_max_adj': 4} + params = {"as_min_adj": 3, "as_max_adj": 4} self.cm._make_get_request.assert_called_with("adjust_autoscaling", parameters=params) def test_get_galaxy_state_stopped(self): return_json = {"status": "'Galaxy' is not running", "srvc": "Galaxy"} self.cm._make_get_request = MagicMock(return_value=return_json) - self.assertEqual(self.cm.get_galaxy_state()['status'], "'Galaxy' is not running") - params = {'srvc': "Galaxy"} + self.assertEqual(self.cm.get_galaxy_state()["status"], "'Galaxy' is not running") + params = {"srvc": "Galaxy"} self.cm._make_get_request.assert_called_with("get_srvc_status", parameters=params) diff --git a/bioblend/_tests/TestCloudmanServices.py b/bioblend/_tests/TestCloudmanServices.py index 41ef636d0..cde1f5878 100644 --- a/bioblend/_tests/TestCloudmanServices.py +++ b/bioblend/_tests/TestCloudmanServices.py @@ -2,17 +2,30 @@ Tests the functionality of the Blend CloudMan API. These tests require working credentials to supported cloud infrastructure. """ -from bioblend.cloudman import CloudManConfig, CloudManInstance -from . import CloudmanTestBase, test_util +from bioblend.cloudman import ( + CloudManConfig, + CloudManInstance, +) +from . import ( + CloudmanTestBase, + test_util, +) @test_util.skip_unless_cloudman() class TestCloudmanServices(CloudmanTestBase.CloudmanTestBase): - @classmethod def setUpClass(cls): super().setUpClass() - cls.cfg = CloudManConfig(cls.access_key, cls.secret_key, cls.cluster_name, cls.ami_id, cls.instance_type, cls.password, cloud_metadata=cls.cloud_metadata) + cls.cfg = CloudManConfig( + cls.access_key, + cls.secret_key, + cls.cluster_name, + cls.ami_id, + cls.instance_type, + cls.password, + cloud_metadata=cls.cloud_metadata, + ) cls.cmi = CloudManInstance.launch_instance(cls.cfg) def setUp(self): @@ -51,5 +64,6 @@ def test_disable_autoscaling(self): def test_adjust_autoscaling(self): self.cmi.adjust_autoscaling(minimum_nodes=3, maximum_nodes=4) + # def test_get_galaxy_state_stopped(self): # self.assertEqual(self.cmi.get_galaxy_state(), "'Galaxy' is not running") diff --git a/bioblend/_tests/TestGalaxyConfig.py b/bioblend/_tests/TestGalaxyConfig.py index d05fde07b..d52d5c074 100644 --- a/bioblend/_tests/TestGalaxyConfig.py +++ b/bioblend/_tests/TestGalaxyConfig.py @@ -5,14 +5,14 @@ class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase): def test_get_config(self): response = self.gi.config.get_config() self.assertTrue(isinstance(response, dict)) - self.assertTrue('brand' in response.keys()) + self.assertTrue("brand" in response.keys()) def test_get_version(self): response = self.gi.config.get_version() self.assertTrue(isinstance(response, dict)) - self.assertTrue('version_major' in response.keys()) + self.assertTrue("version_major" in response.keys()) def test_whoami(self): response = self.gi.config.whoami() self.assertTrue(isinstance(response, dict)) - self.assertTrue('username' in response.keys()) + self.assertTrue("username" in response.keys()) diff --git a/bioblend/_tests/TestGalaxyDatasetCollections.py b/bioblend/_tests/TestGalaxyDatasetCollections.py index b0838ca6d..5b490d0b7 100644 --- a/bioblend/_tests/TestGalaxyDatasetCollections.py +++ b/bioblend/_tests/TestGalaxyDatasetCollections.py @@ -7,12 +7,11 @@ from bioblend.galaxy import dataset_collections from . 
import ( GalaxyTestBase, - test_util + test_util, ) class TestGalaxyDatasetCollections(GalaxyTestBase.GalaxyTestBase): - def test_create_list_in_history(self): history_id = self.gi.histories.create_history(name="TestDSListCreate")["id"] dataset1_id = self._test_dataset(history_id) @@ -26,8 +25,8 @@ def test_create_list_in_history(self): dataset_collections.HistoryDatasetElement(name="sample1", id=dataset1_id), dataset_collections.HistoryDatasetElement(name="sample2", id=dataset2_id), dataset_collections.HistoryDatasetElement(name="sample3", id=dataset3_id), - ] - ) + ], + ), ) self.assertEqual(collection_response["name"], "MyDatasetList") self.assertEqual(collection_response["collection_type"], "list") @@ -57,7 +56,7 @@ def test_create_list_of_paired_datasets_in_history(self): elements=[ dataset_collections.HistoryDatasetElement(name="forward", id=dataset1_id), dataset_collections.HistoryDatasetElement(name="reverse", id=dataset2_id), - ] + ], ), dataset_collections.CollectionElement( name="sample2", @@ -65,10 +64,10 @@ def test_create_list_of_paired_datasets_in_history(self): elements=[ dataset_collections.HistoryDatasetElement(name="forward", id=dataset3_id), dataset_collections.HistoryDatasetElement(name="reverse", id=dataset4_id), - ] + ], ), - ] - ) + ], + ), ) self.assertEqual(collection_response["name"], "MyListOfPairedDatasets") self.assertEqual(collection_response["collection_type"], "list:paired") @@ -130,7 +129,7 @@ def test_update_history_dataset_collection(self): def test_show_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionShow")["id"] dataset_collection1 = self._create_pair_in_history(history_id) - dataset_collection2 = self.gi.dataset_collections.show_dataset_collection(dataset_collection1['id']) + dataset_collection2 = self.gi.dataset_collections.show_dataset_collection(dataset_collection1["id"]) for key in ( "collection_type", "deleted", @@ -143,28 +142,30 @@ def test_show_dataset_collection(self): "visible", ): self.assertEqual(dataset_collection1[key], dataset_collection2[key]) - for element1, element2 in zip(dataset_collection1['elements'], dataset_collection2['elements']): - self.assertEqual(element1['id'], element2['id']) + for element1, element2 in zip(dataset_collection1["elements"], dataset_collection2["elements"]): + self.assertEqual(element1["id"], element2["id"]) self.assertEqual(element1.keys(), element2.keys()) - for key in element1['object'].keys(): - self.assertIn(key, element2['object'].keys()) + for key in element1["object"].keys(): + self.assertIn(key, element2["object"].keys()) - @test_util.skip_unless_galaxy('release_18.01') + @test_util.skip_unless_galaxy("release_18.01") def test_download_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionDownload")["id"] - dataset_collection_id = self._create_pair_in_history(history_id)['id'] + dataset_collection_id = self._create_pair_in_history(history_id)["id"] self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id) - tempdir = tempfile.mkdtemp(prefix='bioblend_test_dataset_collection_download_') - archive_path = os.path.join(tempdir, 'dataset_collection') - archive_type = self.gi.dataset_collections.download_dataset_collection(dataset_collection_id, file_path=archive_path)['archive_type'] - expected_contents = signature(self._test_dataset).parameters['contents'].default + '\n' - extract_dir_path = os.path.join(tempdir, 'extracted_files') + tempdir = 
tempfile.mkdtemp(prefix="bioblend_test_dataset_collection_download_") + archive_path = os.path.join(tempdir, "dataset_collection") + archive_type = self.gi.dataset_collections.download_dataset_collection( + dataset_collection_id, file_path=archive_path + )["archive_type"] + expected_contents = signature(self._test_dataset).parameters["contents"].default + "\n" + extract_dir_path = os.path.join(tempdir, "extracted_files") os.mkdir(extract_dir_path) - if archive_type == 'zip': + if archive_type == "zip": archive = ZipFile(archive_path) - elif archive_type == 'tgz': + elif archive_type == "tgz": archive = tarfile.open(archive_path) archive.extractall(extract_dir_path) @@ -177,10 +178,10 @@ def test_download_dataset_collection(self): def test_wait_for_dataset_collection(self): history_id = self.gi.histories.create_history(name="TestDatasetCollectionWait")["id"] - dataset_collection_id = self._create_pair_in_history(history_id)['id'] + dataset_collection_id = self._create_pair_in_history(history_id)["id"] dataset_collection = self.gi.dataset_collections.wait_for_dataset_collection(dataset_collection_id) - for element in dataset_collection['elements']: - self.assertEqual(element['object']['state'], 'ok') + for element in dataset_collection["elements"]: + self.assertEqual(element["object"]["state"], "ok") def _create_pair_in_history(self, history_id): dataset1_id = self._test_dataset(history_id) @@ -193,7 +194,7 @@ def _create_pair_in_history(self, history_id): elements=[ dataset_collections.HistoryDatasetElement(name="forward", id=dataset1_id), dataset_collections.HistoryDatasetElement(name="reverse", id=dataset2_id), - ] - ) + ], + ), ) return collection_response diff --git a/bioblend/_tests/TestGalaxyDatasets.py b/bioblend/_tests/TestGalaxyDatasets.py index fb309f103..98e909f63 100644 --- a/bioblend/_tests/TestGalaxyDatasets.py +++ b/bioblend/_tests/TestGalaxyDatasets.py @@ -1,18 +1,20 @@ import shutil import tempfile -from bioblend import ConnectionError, galaxy +from bioblend import ( + ConnectionError, + galaxy, +) from . 
import ( GalaxyTestBase, - test_util + test_util, ) class TestGalaxyDatasets(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() - self.history_id = self.gi.histories.create_history(name='TestDataset')['id'] + self.history_id = self.gi.histories.create_history(name="TestDataset")["id"] self.dataset_contents = "line 1\nline 2\rline 3\r\nline 4" self.dataset_id = self._test_dataset(self.history_id, contents=self.dataset_contents) self.gi.datasets.wait_for_dataset(self.dataset_id) @@ -20,10 +22,10 @@ def setUp(self): def tearDown(self): self.gi.histories.delete_history(self.history_id, purge=True) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_show_nonexistent_dataset(self): with self.assertRaises(ConnectionError): - self.gi.datasets.show_dataset('nonexistent_id') + self.gi.datasets.show_dataset("nonexistent_id") def test_show_dataset(self): self.gi.datasets.show_dataset(self.dataset_id) @@ -35,112 +37,115 @@ def test_download_dataset(self): expected_contents = ("\n".join(self.dataset_contents.splitlines()) + "\n").encode() # download_dataset() with file_path=None is already tested in TestGalaxyTools.test_paste_content() # self._wait_and_verify_dataset(self.dataset_id, expected_contents) - tempdir = tempfile.mkdtemp(prefix='bioblend_test_') + tempdir = tempfile.mkdtemp(prefix="bioblend_test_") try: downloaded_dataset = self.gi.datasets.download_dataset( - self.dataset_id, file_path=tempdir, - maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT * 2) + self.dataset_id, file_path=tempdir, maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT * 2 + ) self.assertTrue(downloaded_dataset.startswith(tempdir)) - with open(downloaded_dataset, 'rb') as f: + with open(downloaded_dataset, "rb") as f: self.assertEqual(f.read(), expected_contents) finally: shutil.rmtree(tempdir) - with tempfile.NamedTemporaryFile(prefix='bioblend_test_') as f: + with tempfile.NamedTemporaryFile(prefix="bioblend_test_") as f: download_filename = self.gi.datasets.download_dataset( - self.dataset_id, file_path=f.name, use_default_filename=False, - maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT) + self.dataset_id, + file_path=f.name, + use_default_filename=False, + maxwait=GalaxyTestBase.BIOBLEND_TEST_JOB_TIMEOUT, + ) self.assertEqual(download_filename, f.name) f.flush() self.assertEqual(f.read(), expected_contents) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets(self): datasets = self.gi.datasets.get_datasets() - dataset_ids = [dataset['id'] for dataset in datasets] + dataset_ids = [dataset["id"] for dataset in datasets] self.assertIn(self.dataset_id, dataset_ids) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_history(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id) self.assertEqual(len(datasets), 1) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_limit_offset(self): datasets = self.gi.datasets.get_datasets(limit=1) self.assertEqual(len(datasets), 1) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, offset=1) self.assertEqual(datasets, []) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_name(self): - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name='Pasted Entry') + datasets = 
self.gi.datasets.get_datasets(history_id=self.history_id, name="Pasted Entry") self.assertEqual(len(datasets), 1) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name='Wrong Name') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, name="Wrong Name") self.assertEqual(datasets, []) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_time(self): dataset = self.gi.datasets.show_dataset(self.dataset_id) - ct = dataset['create_time'] + ct = dataset["create_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min=ct) self.assertEqual(len(datasets), 1) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max=ct) self.assertEqual(len(datasets), 1) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min='2100-01-01T00:00:00') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_min="2100-01-01T00:00:00") self.assertEqual(datasets, []) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max='2000-01-01T00:00:00') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, create_time_max="2000-01-01T00:00:00") self.assertEqual(datasets, []) - ut = dataset['update_time'] + ut = dataset["update_time"] datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min=ut) self.assertEqual(len(datasets), 1) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max=ut) self.assertEqual(len(datasets), 1) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min='2100-01-01T00:00:00') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_min="2100-01-01T00:00:00") self.assertEqual(datasets, []) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max='2000-01-01T00:00:00') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, update_time_max="2000-01-01T00:00:00") self.assertEqual(datasets, []) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_extension(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension='txt') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="txt") self.assertEqual(len(datasets), 1) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension='bam') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, extension="bam") self.assertEqual(datasets, []) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_state(self): - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state='ok') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="ok") self.assertEqual(len(datasets), 1) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state='queued') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state="queued") self.assertEqual(datasets, []) with self.assertRaises(ConnectionError): - self.gi.datasets.get_datasets(history_id=self.history_id, state='nonexistent_state') - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state=['ok', 'queued']) + 
self.gi.datasets.get_datasets(history_id=self.history_id, state="nonexistent_state") + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, state=["ok", "queued"]) self.assertEqual(len(datasets), 1) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_get_datasets_visible(self): datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=True) self.assertEqual(len(datasets), 1) datasets = self.gi.datasets.get_datasets(history_id=self.history_id, visible=False) self.assertEqual(len(datasets), 0) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_ordering(self): self.dataset_id2 = self._test_dataset(self.history_id, contents=self.dataset_contents) self.gi.datasets.wait_for_dataset(self.dataset_id2) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order='create_time-dsc') - self.assertEqual(datasets[0]['id'], self.dataset_id2) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order='create_time-asc') - self.assertEqual(datasets[0]['id'], self.dataset_id) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order='hid-dsc') - self.assertEqual(datasets[0]['id'], self.dataset_id2) - datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order='hid-asc') - self.assertEqual(datasets[0]['id'], self.dataset_id) - - @test_util.skip_unless_galaxy('release_19.05') + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-dsc") + self.assertEqual(datasets[0]["id"], self.dataset_id2) + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="create_time-asc") + self.assertEqual(datasets[0]["id"], self.dataset_id) + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-dsc") + self.assertEqual(datasets[0]["id"], self.dataset_id2) + datasets = self.gi.datasets.get_datasets(history_id=self.history_id, order="hid-asc") + self.assertEqual(datasets[0]["id"], self.dataset_id) + + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_deleted(self): deleted_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, deleted=True) self.assertEqual(deleted_datasets, []) @@ -153,47 +158,47 @@ def test_get_datasets_deleted(self): purged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, purged=True) self.assertEqual(len(purged_datasets), 1) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_get_datasets_tool_id_and_tag(self): - cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id='cat1') + cat1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="cat1") self.assertEqual(cat1_datasets, []) - upload1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id='upload1') + upload1_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tool_id="upload1") self.assertEqual(len(upload1_datasets), 1) - self.gi.histories.update_dataset(self.history_id, self.dataset_id, tags=['test']) - tagged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tag='test') + self.gi.histories.update_dataset(self.history_id, self.dataset_id, tags=["test"]) + tagged_datasets = self.gi.datasets.get_datasets(history_id=self.history_id, tag="test") self.assertEqual(len(tagged_datasets), 1) def test_wait_for_dataset(self): - history_id = 
self.gi.histories.create_history(name='TestWaitForDataset')['id'] + history_id = self.gi.histories.create_history(name="TestWaitForDataset")["id"] dataset_contents = "line 1\nline 2\rline 3\r\nline 4" dataset_id = self._test_dataset(history_id, contents=dataset_contents) dataset = self.gi.datasets.wait_for_dataset(dataset_id) - self.assertEqual(dataset['state'], 'ok') + self.assertEqual(dataset["state"], "ok") self.gi.histories.delete_history(history_id, purge=True) - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") def test_dataset_permissions(self): - admin_user_id = self.gi.users.get_current_user()['id'] - user_id = self.gi.users.create_local_user('newuser3', 'newuser3@example.org', 'secret')['id'] + admin_user_id = self.gi.users.get_current_user()["id"] + user_id = self.gi.users.create_local_user("newuser3", "newuser3@example.org", "secret")["id"] user_api_key = self.gi.users.create_user_apikey(user_id) anonymous_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=None) user_gi = galaxy.GalaxyInstance(url=self.gi.base_url, key=user_api_key) - sharing_role = self.gi.roles.create_role('sharing_role', 'sharing_role', [user_id, admin_user_id])['id'] + sharing_role = self.gi.roles.create_role("sharing_role", "sharing_role", [user_id, admin_user_id])["id"] self.gi.datasets.publish_dataset(self.dataset_id, published=False) with self.assertRaises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.publish_dataset(self.dataset_id, published=True) # now dataset is public, i.e. accessible to anonymous users - self.assertEqual(anonymous_gi.datasets.show_dataset(self.dataset_id)['id'], self.dataset_id) + self.assertEqual(anonymous_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) self.gi.datasets.publish_dataset(self.dataset_id, published=False) with self.assertRaises(ConnectionError): user_gi.datasets.show_dataset(self.dataset_id) self.gi.datasets.update_permissions(self.dataset_id, access_ids=[sharing_role], manage_ids=[sharing_role]) - self.assertEqual(user_gi.datasets.show_dataset(self.dataset_id)['id'], self.dataset_id) + self.assertEqual(user_gi.datasets.show_dataset(self.dataset_id)["id"], self.dataset_id) # anonymous access now fails because sharing is only with the shared user role with self.assertRaises(ConnectionError): anonymous_gi.datasets.show_dataset(self.dataset_id) diff --git a/bioblend/_tests/TestGalaxyFolders.py b/bioblend/_tests/TestGalaxyFolders.py index 2d6b5caf5..8f9127e4e 100644 --- a/bioblend/_tests/TestGalaxyFolders.py +++ b/bioblend/_tests/TestGalaxyFolders.py @@ -1,58 +1,58 @@ from . 
import GalaxyTestBase -FOO_DATA = 'foo\nbar\n' +FOO_DATA = "foo\nbar\n" class TestGalaxyFolders(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() - self.name = 'automated test folder' - self.library = self.gi.libraries.create_library(self.name, description='automated test', synopsis='automated test synopsis') - self.folder = self.gi.folders.create_folder(self.library['root_folder_id'], self.name, - description="automatically created folder") + self.name = "automated test folder" + self.library = self.gi.libraries.create_library( + self.name, description="automated test", synopsis="automated test synopsis" + ) + self.folder = self.gi.folders.create_folder( + self.library["root_folder_id"], self.name, description="automatically created folder" + ) def tearDown(self): - self.gi.libraries.delete_library(self.library['id']) + self.gi.libraries.delete_library(self.library["id"]) def test_create_folder(self): - self.assertEqual(self.folder['name'], self.name) - self.assertEqual(self.folder['description'], 'automatically created folder') + self.assertEqual(self.folder["name"], self.name) + self.assertEqual(self.folder["description"], "automatically created folder") def test_show_folder(self): - f2 = self.gi.folders.show_folder(self.folder['id']) - self.assertEqual(f2['id'], self.folder['id']) + f2 = self.gi.folders.show_folder(self.folder["id"]) + self.assertEqual(f2["id"], self.folder["id"]) def test_show_folder_contents(self): - f2 = self.gi.folders.show_folder(self.folder['id'], contents=True) - self.assertIn('folder_contents', f2) - self.assertIn('metadata', f2) - self.assertEqual(self.name, f2['metadata']['folder_name']) + f2 = self.gi.folders.show_folder(self.folder["id"], contents=True) + self.assertIn("folder_contents", f2) + self.assertIn("metadata", f2) + self.assertEqual(self.name, f2["metadata"]["folder_name"]) def test_delete_folder(self): - self.sub_folder = self.gi.folders.create_folder(self.folder['id'], self.name) - self.gi.folders.delete_folder(self.sub_folder['id']) + self.sub_folder = self.gi.folders.create_folder(self.folder["id"], self.name) + self.gi.folders.delete_folder(self.sub_folder["id"]) def test_update_folder(self): - self.folder = self.gi.folders.update_folder(self.folder['id'], 'new-name', 'new-description') - self.assertEqual(self.folder['name'], 'new-name') - self.assertEqual(self.folder['description'], 'new-description') + self.folder = self.gi.folders.update_folder(self.folder["id"], "new-name", "new-description") + self.assertEqual(self.folder["name"], "new-name") + self.assertEqual(self.folder["description"], "new-description") def test_get_set_permissions(self): - empty_permission = {'add_library_item_role_list': [], 'modify_folder_role_list': [], 'manage_folder_role_list': []} + empty_permission = { + "add_library_item_role_list": [], + "modify_folder_role_list": [], + "manage_folder_role_list": [], + } # They should be empty to start with - self.assertEqual( - self.gi.folders.get_permissions(self.folder['id'], scope='current'), - empty_permission - ) - self.assertEqual( - self.gi.folders.get_permissions(self.folder['id'], scope='available'), - empty_permission - ) + self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="current"), empty_permission) + self.assertEqual(self.gi.folders.get_permissions(self.folder["id"], scope="available"), empty_permission) # Then we'll add a role role = self.gi.roles.get_roles()[0] - self.gi.folders.set_permissions(self.folder['id'], add_ids=[role['id']]) + 
self.gi.folders.set_permissions(self.folder["id"], add_ids=[role["id"]]) self.assertTrue( - role['id'] in - self.gi.folders.get_permissions(self.folder['id'], scope='available')['add_library_item_role_list'][0] + role["id"] + in self.gi.folders.get_permissions(self.folder["id"], scope="available")["add_library_item_role_list"][0] ) diff --git a/bioblend/_tests/TestGalaxyGroups.py b/bioblend/_tests/TestGalaxyGroups.py index 7c2441fb2..0d59a4ddc 100644 --- a/bioblend/_tests/TestGalaxyGroups.py +++ b/bioblend/_tests/TestGalaxyGroups.py @@ -7,7 +7,6 @@ class TestGalaxyGroups(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() self.name = f"test_{uuid.uuid4().hex}" @@ -18,46 +17,46 @@ def tearDown(self): pass def test_create_group(self): - self.assertEqual(self.group['name'], self.name) - self.assertIsNotNone(self.group['id']) + self.assertEqual(self.group["name"], self.name) + self.assertIsNotNone(self.group["id"]) def test_get_groups(self): groups = self.gi.groups.get_groups() for group in groups: - self.assertIsNotNone(group['id']) - self.assertIsNotNone(group['name']) + self.assertIsNotNone(group["id"]) + self.assertIsNotNone(group["name"]) def test_show_group(self): - group_data = self.gi.groups.show_group(self.group['id']) - self.assertEqual(self.group['id'], group_data['id']) - self.assertEqual(self.group['name'], group_data['name']) + group_data = self.gi.groups.show_group(self.group["id"]) + self.assertEqual(self.group["id"], group_data["id"]) + self.assertEqual(self.group["name"], group_data["name"]) def test_get_group_users(self): - group_users = self.gi.groups.get_group_users(self.group['id']) + group_users = self.gi.groups.get_group_users(self.group["id"]) self.assertEqual(group_users, []) def test_get_group_roles(self): - group_roles = self.gi.groups.get_group_roles(self.group['id']) + group_roles = self.gi.groups.get_group_roles(self.group["id"]) self.assertEqual(group_roles, []) def test_update_group(self): new_name = f"test_{uuid.uuid4().hex}" - new_users = [self.gi.users.get_current_user()['id']] - self.gi.groups.update_group(self.group['id'], new_name, user_ids=new_users) - updated_group = self.gi.groups.show_group(self.group['id']) - self.assertEqual(self.group['id'], updated_group['id']) - self.assertEqual(updated_group['name'], new_name) - updated_group_users = [_['id'] for _ in self.gi.groups.get_group_users(self.group['id'])] + new_users = [self.gi.users.get_current_user()["id"]] + self.gi.groups.update_group(self.group["id"], new_name, user_ids=new_users) + updated_group = self.gi.groups.show_group(self.group["id"]) + self.assertEqual(self.group["id"], updated_group["id"]) + self.assertEqual(updated_group["name"], new_name) + updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] self.assertEqual(set(updated_group_users), set(new_users)) - updated_group_roles = [_['id'] for _ in self.gi.groups.get_group_roles(self.group['id'])] + updated_group_roles = [_["id"] for _ in self.gi.groups.get_group_roles(self.group["id"])] self.assertEqual(set(updated_group_roles), set()) def test_add_delete_group_user(self): - new_user = self.gi.users.get_current_user()['id'] - ret = self.gi.groups.add_group_user(self.group['id'], new_user) - self.assertEqual(ret['id'], new_user) - updated_group_users = [_['id'] for _ in self.gi.groups.get_group_users(self.group['id'])] + new_user = self.gi.users.get_current_user()["id"] + ret = self.gi.groups.add_group_user(self.group["id"], new_user) + self.assertEqual(ret["id"], new_user) + 
updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] self.assertIn(new_user, updated_group_users) - self.gi.groups.delete_group_user(self.group['id'], new_user) - updated_group_users = [_['id'] for _ in self.gi.groups.get_group_users(self.group['id'])] + self.gi.groups.delete_group_user(self.group["id"], new_user) + updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])] self.assertNotIn(new_user, updated_group_users) diff --git a/bioblend/_tests/TestGalaxyHistories.py b/bioblend/_tests/TestGalaxyHistories.py index 6dfb22a09..5184ba2c0 100644 --- a/bioblend/_tests/TestGalaxyHistories.py +++ b/bioblend/_tests/TestGalaxyHistories.py @@ -6,11 +6,13 @@ import tempfile import bioblend.galaxy -from . import GalaxyTestBase, test_util +from . import ( + GalaxyTestBase, + test_util, +) class TestGalaxyHistories(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() self.default_history_name = "buildbot - automated test" @@ -19,41 +21,43 @@ def setUp(self): def test_create_history(self): history_name = "another buildbot - automated test" new_history = self.gi.histories.create_history(name=history_name) - self.assertIsNotNone(new_history['id']) - self.assertEqual(new_history['name'], history_name) - self.assertIsNotNone(new_history['url']) + self.assertIsNotNone(new_history["id"]) + self.assertEqual(new_history["name"], history_name) + self.assertIsNotNone(new_history["url"]) def test_update_history(self): - new_name = 'buildbot - automated test renamed' + new_name = "buildbot - automated test renamed" new_annotation = f"Annotation for {new_name}" - new_tags = ['tag1', 'tag2'] - updated_hist = self.gi.histories.update_history(self.history['id'], name=new_name, annotation=new_annotation, tags=new_tags) - if 'id' not in updated_hist: - updated_hist = self.gi.histories.show_history(self.history['id']) - self.assertEqual(self.history['id'], updated_hist['id']) - self.assertEqual(updated_hist['name'], new_name) - self.assertEqual(updated_hist['annotation'], new_annotation) - self.assertEqual(updated_hist['tags'], new_tags) + new_tags = ["tag1", "tag2"] + updated_hist = self.gi.histories.update_history( + self.history["id"], name=new_name, annotation=new_annotation, tags=new_tags + ) + if "id" not in updated_hist: + updated_hist = self.gi.histories.show_history(self.history["id"]) + self.assertEqual(self.history["id"], updated_hist["id"]) + self.assertEqual(updated_hist["name"], new_name) + self.assertEqual(updated_hist["annotation"], new_annotation) + self.assertEqual(updated_hist["tags"], new_tags) def test_publish_history(self): # Verify that searching for published histories does not return the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertFalse(any(h['id'] == self.history['id'] for h in published_histories)) - updated_hist = self.gi.histories.update_history(self.history['id'], published=True) - if 'id' not in updated_hist: - updated_hist = self.gi.histories.show_history(self.history['id']) - self.assertEqual(self.history['id'], updated_hist['id']) - self.assertTrue(updated_hist['published']) + self.assertFalse(any(h["id"] == self.history["id"] for h in published_histories)) + updated_hist = self.gi.histories.update_history(self.history["id"], published=True) + if "id" not in updated_hist: + updated_hist = self.gi.histories.show_history(self.history["id"]) + self.assertEqual(self.history["id"], updated_hist["id"]) + self.assertTrue(updated_hist["published"]) # Verify that 
searching for published histories now returns the test history published_histories = self.gi.histories.get_histories(published=True) - self.assertTrue(any(h['id'] == self.history['id'] for h in published_histories)) + self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) # Verify that get_published_histories as an anonymous user also returns the test history anonymous_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, key=None) published_histories = anonymous_gi.histories.get_published_histories() - self.assertTrue(any(h['id'] == self.history['id'] for h in published_histories)) - history_from_slug = anonymous_gi.histories.get_published_histories(slug=updated_hist['slug']) + self.assertTrue(any(h["id"] == self.history["id"] for h in published_histories)) + history_from_slug = anonymous_gi.histories.get_published_histories(slug=updated_hist["slug"]) self.assertTrue(len(history_from_slug) == 1) - self.assertEqual(self.history['id'], history_from_slug[0]['id']) + self.assertEqual(self.history["id"], history_from_slug[0]["id"]) def test_get_histories(self): # Make sure there's at least one value - the one we created @@ -62,7 +66,7 @@ def test_get_histories(self): # Check whether id is present, when searched by name histories = self.gi.histories.get_histories(name=self.default_history_name) - self.assertEqual(len([h for h in histories if h['id'] == self.history['id']]), 1) + self.assertEqual(len([h for h in histories if h["id"] == self.history["id"]]), 1) # TODO: check whether deleted history is returned correctly # At the moment, get_histories() returns only not-deleted histories @@ -75,10 +79,10 @@ def test_get_histories(self): # self.assertGreaterEqual(len(all_histories), len(deleted_history)) def test_show_history(self): - history_data = self.gi.histories.show_history(self.history['id']) - self.assertEqual(self.history['id'], history_data['id']) - self.assertEqual(self.history['name'], history_data['name']) - self.assertEqual('new', history_data['state']) + history_data = self.gi.histories.show_history(self.history["id"]) + self.assertEqual(self.history["id"], history_data["id"]) + self.assertEqual(self.history["name"], history_data["name"]) + self.assertEqual("new", history_data["state"]) def test_show_history_with_contents(self): history_id = self.history["id"] @@ -89,21 +93,21 @@ def test_show_history_with_contents(self): contents = self.gi.histories.show_history(history_id, contents=True) # history has 1 dataset, content length should be 1 self.assertEqual(len(contents), 1) - contents = self.gi.histories.show_history(history_id, contents=True, types=['dataset']) + contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset"]) # filtering for dataset, content length should still be 1 self.assertEqual(len(contents), 1) - contents = self.gi.histories.show_history(history_id, contents=True, types=['dataset_collection']) + contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset_collection"]) # filtering for dataset collection but there's no collection in the history self.assertEqual(len(contents), 0) - contents = self.gi.histories.show_history(history_id, contents=True, types=['dataset', 'dataset_collection']) + contents = self.gi.histories.show_history(history_id, contents=True, types=["dataset", "dataset_collection"]) self.assertEqual(len(contents), 1) def test_create_history_tag(self): - new_tag = 'tag1' - self.gi.histories.create_history_tag(self.history['id'], new_tag) - updated_hist = 
self.gi.histories.show_history(self.history['id']) - self.assertEqual(self.history['id'], updated_hist['id']) - self.assertIn(new_tag, updated_hist['tags']) + new_tag = "tag1" + self.gi.histories.create_history_tag(self.history["id"], new_tag) + updated_hist = self.gi.histories.show_history(self.history["id"]) + self.assertEqual(self.history["id"], updated_hist["id"]) + self.assertIn(new_tag, updated_hist["tags"]) def test_show_dataset(self): history_id = self.history["id"] @@ -130,7 +134,7 @@ def test_delete_dataset(self): self.gi.histories.delete_dataset(history_id, dataset1_id) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) self.assertTrue(dataset["deleted"]) - self.assertFalse(dataset['purged']) + self.assertFalse(dataset["purged"]) def test_purge_dataset(self): history_id = self.history["id"] @@ -138,13 +142,13 @@ def test_purge_dataset(self): self.gi.histories.delete_dataset(history_id, dataset1_id, purge=True) dataset = self.gi.histories.show_dataset(history_id, dataset1_id) self.assertTrue(dataset["deleted"]) - self.assertTrue(dataset['purged']) + self.assertTrue(dataset["purged"]) def test_update_dataset(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) updated_dataset = self.gi.histories.update_dataset(history_id, dataset1_id, visible=False) - if 'id' not in updated_dataset: + if "id" not in updated_dataset: updated_dataset = self.gi.histories.show_dataset(history_id, dataset1_id) self.assertFalse(updated_dataset["visible"]) @@ -154,71 +158,69 @@ def test_upload_dataset_from_library(self): # download_dataset() is already tested in TestGalaxyDatasets def test_delete_history(self): - result = self.gi.histories.delete_history(self.history['id']) - self.assertTrue(result['deleted']) + result = self.gi.histories.delete_history(self.history["id"]) + self.assertTrue(result["deleted"]) all_histories = self.gi.histories.get_histories() - self.assertTrue(not any(d['id'] == self.history['id'] for d in all_histories)) + self.assertTrue(not any(d["id"] == self.history["id"] for d in all_histories)) def test_undelete_history(self): - self.gi.histories.delete_history(self.history['id']) - self.gi.histories.undelete_history(self.history['id']) + self.gi.histories.delete_history(self.history["id"]) + self.gi.histories.undelete_history(self.history["id"]) all_histories = self.gi.histories.get_histories() - self.assertTrue(any(d['id'] == self.history['id'] for d in all_histories)) + self.assertTrue(any(d["id"] == self.history["id"] for d in all_histories)) def test_get_status(self): - state = self.gi.histories.get_status(self.history['id']) - self.assertEqual('new', state['state']) + state = self.gi.histories.get_status(self.history["id"]) + self.assertEqual("new", state["state"]) def test_get_most_recently_used_history(self): most_recently_used_history = self.gi.histories.get_most_recently_used_history() # if the user has been created via the API, it does not have # a session, therefore no history if most_recently_used_history is not None: - self.assertIsNotNone(most_recently_used_history['id']) - self.assertIsNotNone(most_recently_used_history['name']) - self.assertIsNotNone(most_recently_used_history['state']) + self.assertIsNotNone(most_recently_used_history["id"]) + self.assertIsNotNone(most_recently_used_history["name"]) + self.assertIsNotNone(most_recently_used_history["state"]) def test_download_history(self): - jeha_id = self.gi.histories.export_history( - self.history['id'], wait=True, maxwait=60) + jeha_id = 
self.gi.histories.export_history(self.history["id"], wait=True, maxwait=60) self.assertTrue(jeha_id) - tempdir = tempfile.mkdtemp(prefix='bioblend_test_') - temp_fn = os.path.join(tempdir, 'export.tar.gz') + tempdir = tempfile.mkdtemp(prefix="bioblend_test_") + temp_fn = os.path.join(tempdir, "export.tar.gz") try: - with open(temp_fn, 'wb') as fo: - self.gi.histories.download_history(self.history['id'], jeha_id, - fo) + with open(temp_fn, "wb") as fo: + self.gi.histories.download_history(self.history["id"], jeha_id, fo) self.assertTrue(tarfile.is_tarfile(temp_fn)) finally: shutil.rmtree(tempdir) def test_import_history(self): - path = test_util.get_abspath(os.path.join('data', 'Galaxy-History-Test-history-for-export.tar.gz')) + path = test_util.get_abspath(os.path.join("data", "Galaxy-History-Test-history-for-export.tar.gz")) self.gi.histories.import_history(file_path=path) def test_copy_dataset(self): history_id = self.history["id"] contents = "1\t2\t3" dataset1_id = self._test_dataset(history_id, contents=contents) - self.history_id2 = self.gi.histories.create_history('TestCopyDataset')['id'] + self.history_id2 = self.gi.histories.create_history("TestCopyDataset")["id"] copied_dataset = self.gi.histories.copy_dataset(self.history_id2, dataset1_id) expected_contents = ("\n".join(contents.splitlines()) + "\n").encode() - self._wait_and_verify_dataset(copied_dataset['id'], expected_contents) + self._wait_and_verify_dataset(copied_dataset["id"], expected_contents) self.gi.histories.delete_history(self.history_id2, purge=True) - @test_util.skip_unless_galaxy('release_20.09') + @test_util.skip_unless_galaxy("release_20.09") def test_update_dataset_datatype(self): history_id = self.history["id"] dataset1_id = self._test_dataset(history_id) - self._wait_and_verify_dataset(dataset1_id, b'1\t2\t3\n') + self._wait_and_verify_dataset(dataset1_id, b"1\t2\t3\n") original_hda = self.gi.datasets.show_dataset(dataset1_id) - assert original_hda['extension'] == 'bed' - self.gi.histories.update_dataset(history_id, dataset1_id, datatype='tabular') + assert original_hda["extension"] == "bed" + self.gi.histories.update_dataset(history_id, dataset1_id, datatype="tabular") updated_hda = self.gi.datasets.show_dataset(dataset1_id) - assert updated_hda['extension'] == 'tabular' + assert updated_hda["extension"] == "tabular" - @test_util.skip_unless_galaxy('release_19.01') + @test_util.skip_unless_galaxy("release_19.01") def test_get_extra_files(self): history_id = self.history["id"] dataset_id = self._test_dataset(history_id) @@ -226,4 +228,4 @@ def test_get_extra_files(self): self.assertEqual(extra_files, []) def tearDown(self): - self.gi.histories.delete_history(self.history['id'], purge=True) + self.gi.histories.delete_history(self.history["id"], purge=True) diff --git a/bioblend/_tests/TestGalaxyInstance.py b/bioblend/_tests/TestGalaxyInstance.py index 7b8414191..2e8c4c217 100644 --- a/bioblend/_tests/TestGalaxyInstance.py +++ b/bioblend/_tests/TestGalaxyInstance.py @@ -12,7 +12,6 @@ class TestGalaxyInstance(unittest.TestCase): - def setUp(self): # "connect" to a fake Galaxy instance self.gi = GalaxyInstance("http://localhost:56789", key="whatever") @@ -44,9 +43,9 @@ def test_missing_scheme_fake_url(self): @test_util.skip_unless_galaxy() def test_missing_scheme_real_url(self): - galaxy_url = os.environ['BIOBLEND_GALAXY_URL'] + galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] # Strip the scheme from galaxy_url - scheme_sep = '://' + scheme_sep = "://" if scheme_sep in galaxy_url: galaxy_url = 
galaxy_url.partition(scheme_sep)[2] GalaxyInstance(url=galaxy_url) diff --git a/bioblend/_tests/TestGalaxyInvocations.py b/bioblend/_tests/TestGalaxyInvocations.py index a7d95cd57..bce86c7a2 100644 --- a/bioblend/_tests/TestGalaxyInvocations.py +++ b/bioblend/_tests/TestGalaxyInvocations.py @@ -2,21 +2,24 @@ import os import time -from . import GalaxyTestBase, test_util +from . import ( + GalaxyTestBase, + test_util, +) class TestGalaxyInvocations(GalaxyTestBase.GalaxyTestBase): def setUp(self): super().setUp() - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) - self.workflow_id = self.gi.workflows.import_workflow_from_local_path(path)['id'] + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) + self.workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] self.history_id = self.gi.histories.create_history(name="TestGalaxyInvocations")["id"] self.dataset_id = self._test_dataset(self.history_id) def tearDown(self): self.gi.histories.delete_history(self.history_id, purge=True) - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") def test_cancel_invocation(self): invocation = self._invoke_workflow() @@ -26,48 +29,42 @@ def test_cancel_invocation(self): self.assertEqual(invocations[0]["id"], invocation_id) self.gi.invocations.cancel_invocation(invocation_id) invocation = self.gi.invocations.show_invocation(invocation_id) - self.assertEqual(invocation['state'], 'cancelled') + self.assertEqual(invocation["state"], "cancelled") - @test_util.skip_unless_galaxy('release_20.01') + @test_util.skip_unless_galaxy("release_20.01") def test_get_invocations(self): invoc1 = self._invoke_workflow() # Run the first workflow on another history - dataset = {'src': 'hda', 'id': self.dataset_id} - hist2_id = self.gi.histories.create_history('hist2')['id'] + dataset = {"src": "hda", "id": self.dataset_id} + hist2_id = self.gi.histories.create_history("hist2")["id"] invoc2 = self.gi.workflows.invoke_workflow( - self.workflow_id, - history_id=hist2_id, - inputs={'Input 1': dataset, 'Input 2': dataset}, - inputs_by='name' + self.workflow_id, history_id=hist2_id, inputs={"Input 1": dataset, "Input 2": dataset}, inputs_by="name" ) # Run another workflow on the 2nd history - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) - workflow2_id = self.gi.workflows.import_workflow_from_local_path(path)['id'] + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) + workflow2_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] invoc3 = self.gi.workflows.invoke_workflow( - workflow2_id, - history_id=hist2_id, - inputs={'Input 1': dataset, 'Input 2': dataset}, - inputs_by='name' + workflow2_id, history_id=hist2_id, inputs={"Input 1": dataset, "Input 2": dataset}, inputs_by="name" ) for invoc in (invoc1, invoc2, invoc3): - self.gi.invocations.wait_for_invocation(invoc['id']) + self.gi.invocations.wait_for_invocation(invoc["id"]) # Test filtering by workflow ID for wf_id, expected_invoc_num in {self.workflow_id: 2, workflow2_id: 1}.items(): invocs = self.gi.invocations.get_invocations(workflow_id=wf_id) self.assertEqual(len(invocs), expected_invoc_num) for invoc in invocs: - self.assertEqual(invoc['workflow_id'], wf_id) + self.assertEqual(invoc["workflow_id"], wf_id) # Test filtering by history ID for hist_id, expected_invoc_num in {self.history_id: 1, hist2_id: 2}.items(): invocs = self.gi.invocations.get_invocations(history_id=hist_id) self.assertEqual(len(invocs), 
expected_invoc_num) for invoc in invocs: - self.assertEqual(invoc['history_id'], hist_id) + self.assertEqual(invoc["history_id"], hist_id) # Test limiting limit_invocs = self.gi.invocations.get_invocations(limit=2) @@ -75,41 +72,41 @@ def test_get_invocations(self): self.gi.histories.delete_history(hist2_id, purge=True) - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") def test_get_invocation_report(self): invocation = self._invoke_workflow() - invocation_id = invocation['id'] - workflow_id = invocation['workflow_id'] + invocation_id = invocation["id"] + workflow_id = invocation["workflow_id"] report = self.gi.invocations.get_invocation_report(invocation_id) - assert report['workflows'] == {workflow_id: {'name': 'paste_columns'}} + assert report["workflows"] == {workflow_id: {"name": "paste_columns"}} with contextlib.suppress(Exception): # This can fail if dependencies as weasyprint are not installed on the Galaxy server - self.gi.invocations.get_invocation_report_pdf(invocation_id, 'report.pdf') + self.gi.invocations.get_invocation_report_pdf(invocation_id, "report.pdf") - @test_util.skip_unless_galaxy('release_20.09') + @test_util.skip_unless_galaxy("release_20.09") def test_get_invocation_biocompute_object(self): invocation = self._invoke_workflow() - self.gi.invocations.wait_for_invocation(invocation['id']) - biocompute_object = self.gi.invocations.get_invocation_biocompute_object(invocation['id']) - self.assertEqual(len(biocompute_object['description_domain']['pipeline_steps']), 1) + self.gi.invocations.wait_for_invocation(invocation["id"]) + biocompute_object = self.gi.invocations.get_invocation_biocompute_object(invocation["id"]) + self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") def test_get_invocation_jobs_summary(self): invocation = self._invoke_workflow() - self.gi.invocations.wait_for_invocation(invocation['id']) - jobs_summary = self.gi.invocations.get_invocation_summary(invocation['id']) - self.assertEqual(jobs_summary['populated_state'], 'ok') - step_jobs_summary = self.gi.invocations.get_invocation_step_jobs_summary(invocation['id']) + self.gi.invocations.wait_for_invocation(invocation["id"]) + jobs_summary = self.gi.invocations.get_invocation_summary(invocation["id"]) + self.assertEqual(jobs_summary["populated_state"], "ok") + step_jobs_summary = self.gi.invocations.get_invocation_step_jobs_summary(invocation["id"]) self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]['populated_state'], 'ok') + self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") @test_util.skip_unless_tool("cat1") @test_util.skip_unless_tool("cat") def test_workflow_scheduling(self): - path = test_util.get_abspath(os.path.join('data', 'test_workflow_pause.ga')) + path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga")) workflow = self.gi.workflows.import_workflow_from_local_path(path) invocation = self.gi.workflows.invoke_workflow( @@ -126,30 +123,29 @@ def invocation_steps_by_order_index(): for _ in range(20): if 2 in invocation_steps_by_order_index(): break - time.sleep(.5) + time.sleep(0.5) steps = invocation_steps_by_order_index() pause_step = steps[2] - self.assertIsNone( - self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) + 
self.assertIsNone(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) self.gi.invocations.run_invocation_step_action(invocation_id, pause_step["id"], action=True) self.assertTrue(self.gi.invocations.show_invocation_step(invocation_id, pause_step["id"])["action"]) - self.gi.invocations.wait_for_invocation(invocation['id']) + self.gi.invocations.wait_for_invocation(invocation["id"]) - @test_util.skip_unless_galaxy('release_21.01') + @test_util.skip_unless_galaxy("release_21.01") def test_rerun_invocation(self): invocation = self._invoke_workflow() - self.gi.invocations.wait_for_invocation(invocation['id']) - rerun_invocation = self.gi.invocations.rerun_invocation(invocation['id'], import_inputs_to_history=True) - self.gi.invocations.wait_for_invocation(rerun_invocation['id']) - history = self.gi.histories.show_history(rerun_invocation['history_id'], contents=True) + self.gi.invocations.wait_for_invocation(invocation["id"]) + rerun_invocation = self.gi.invocations.rerun_invocation(invocation["id"], import_inputs_to_history=True) + self.gi.invocations.wait_for_invocation(rerun_invocation["id"]) + history = self.gi.histories.show_history(rerun_invocation["history_id"], contents=True) self.assertEqual(len(history), 3) def _invoke_workflow(self): - dataset = {'src': 'hda', 'id': self.dataset_id} + dataset = {"src": "hda", "id": self.dataset_id} return self.gi.workflows.invoke_workflow( self.workflow_id, - inputs={'Input 1': dataset, 'Input 2': dataset}, + inputs={"Input 1": dataset, "Input 2": dataset}, history_id=self.history_id, - inputs_by='name', + inputs_by="name", ) diff --git a/bioblend/_tests/TestGalaxyJobs.py b/bioblend/_tests/TestGalaxyJobs.py index d04c29d71..3dc0c8693 100644 --- a/bioblend/_tests/TestGalaxyJobs.py +++ b/bioblend/_tests/TestGalaxyJobs.py @@ -1,5 +1,8 @@ import os -from datetime import datetime, timedelta +from datetime import ( + datetime, + timedelta, +) from operator import itemgetter from bioblend.galaxy.tools.inputs import ( @@ -8,14 +11,14 @@ ) from . 
import ( GalaxyTestBase, - test_util + test_util, ) class TestGalaxyJobs(GalaxyTestBase.GalaxyTestBase): def setUp(self): super().setUp() - self.history_id = self.gi.histories.create_history(name='TestGalaxyJobs')['id'] + self.history_id = self.gi.histories.create_history(name="TestGalaxyJobs")["id"] self.dataset_contents = "line 1\nline 2\rline 3\r\nline 4" self.dataset_id = self._test_dataset(self.history_id, contents=self.dataset_contents) @@ -24,95 +27,89 @@ def tearDown(self): @test_util.skip_unless_tool("cat1") def test_wait_for_job(self): - tool_inputs = inputs().set( - "input1", dataset(self.dataset_id) - ) - tool_output = self.gi.tools.run_tool( - history_id=self.history_id, - tool_id="cat1", - tool_inputs=tool_inputs - ) - job_id = tool_output['jobs'][0]['id'] + tool_inputs = inputs().set("input1", dataset(self.dataset_id)) + tool_output = self.gi.tools.run_tool(history_id=self.history_id, tool_id="cat1", tool_inputs=tool_inputs) + job_id = tool_output["jobs"][0]["id"] job = self.gi.jobs.wait_for_job(job_id) - self.assertEqual(job['state'], 'ok') + self.assertEqual(job["state"], "ok") @test_util.skip_unless_tool("random_lines1") def test_get_jobs(self): self._run_tool() self._run_tool() - jobs = self.gi.jobs.get_jobs(tool_id='random_lines1', history_id=self.history_id) + jobs = self.gi.jobs.get_jobs(tool_id="random_lines1", history_id=self.history_id) self.assertEqual(len(jobs), 2) - jobs = self.gi.jobs.get_jobs(history_id=self.history_id, state='failed') + jobs = self.gi.jobs.get_jobs(history_id=self.history_id, state="failed") self.assertEqual(len(jobs), 0) yesterday = datetime.today() - timedelta(days=1) - jobs = self.gi.jobs.get_jobs(date_range_max=yesterday.strftime('%Y-%m-%d'), history_id=self.history_id) + jobs = self.gi.jobs.get_jobs(date_range_max=yesterday.strftime("%Y-%m-%d"), history_id=self.history_id) self.assertEqual(len(jobs), 0) tomorrow = datetime.today() + timedelta(days=1) - jobs = self.gi.jobs.get_jobs(date_range_min=tomorrow.strftime('%Y-%m-%d')) + jobs = self.gi.jobs.get_jobs(date_range_min=tomorrow.strftime("%Y-%m-%d")) self.assertEqual(len(jobs), 0) - jobs = self.gi.jobs.get_jobs(date_range_min=datetime.today().strftime('%Y-%m-%d'), history_id=self.history_id) + jobs = self.gi.jobs.get_jobs(date_range_min=datetime.today().strftime("%Y-%m-%d"), history_id=self.history_id) self.assertEqual(len(jobs), 3) - @test_util.skip_unless_galaxy('release_21.05') + @test_util.skip_unless_galaxy("release_21.05") def test_get_jobs_with_filtering(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) - workflow_id = self.gi.workflows.import_workflow_from_local_path(path)['id'] - dataset = {'src': 'hda', 'id': self.dataset_id} + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) + workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] + dataset = {"src": "hda", "id": self.dataset_id} invocation1 = self.gi.workflows.invoke_workflow( workflow_id, - inputs={'Input 1': dataset, 'Input 2': dataset}, + inputs={"Input 1": dataset, "Input 2": dataset}, history_id=self.history_id, - inputs_by='name', + inputs_by="name", ) invocation2 = self.gi.workflows.invoke_workflow( workflow_id, - inputs={'Input 1': dataset, 'Input 2': dataset}, + inputs={"Input 1": dataset, "Input 2": dataset}, history_id=self.history_id, - inputs_by='name', + inputs_by="name", ) - self.gi.invocations.wait_for_invocation(invocation1['id']) - self.gi.invocations.wait_for_invocation(invocation2['id']) + 
self.gi.invocations.wait_for_invocation(invocation1["id"]) + self.gi.invocations.wait_for_invocation(invocation2["id"]) - all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by='create_time') + all_jobs = self.gi.jobs.get_jobs(history_id=self.history_id, order_by="create_time") self.assertEqual(len(all_jobs), 3) - job1_id = all_jobs[1]['id'] - jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by='create_time') + job1_id = all_jobs[1]["id"] + jobs = self.gi.jobs.get_jobs(history_id=self.history_id, limit=1, offset=1, order_by="create_time") self.assertEqual(len(jobs), 1) - self.assertEqual(jobs[0]['id'], job1_id) - jobs = self.gi.jobs.get_jobs(invocation_id=invocation1['id']) + self.assertEqual(jobs[0]["id"], job1_id) + jobs = self.gi.jobs.get_jobs(invocation_id=invocation1["id"]) self.assertEqual(len(jobs), 1) - job_id_inv = jobs[0]['id'] + job_id_inv = jobs[0]["id"] jobs = self.gi.jobs.get_jobs(workflow_id=workflow_id) self.assertEqual(len(jobs), 2) - self.assertIn(job_id_inv, [job['id'] for job in jobs]) + self.assertIn(job_id_inv, [job["id"] for job in jobs]) - @test_util.skip_unless_galaxy('release_21.01') + @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("random_lines1") def test_run_and_rerun_random_lines(self): - original_output = self._run_tool(input_format='21.01') - original_job_id = original_output['jobs'][0]['id'] + original_output = self._run_tool(input_format="21.01") + original_job_id = original_output["jobs"][0]["id"] rerun_output = self.gi.jobs.rerun_job(original_job_id) - original_output_content = self.gi.datasets.download_dataset(original_output['outputs'][0]['id']) - rerun_output_content = self.gi.datasets.download_dataset(rerun_output['outputs'][0]['id']) + original_output_content = self.gi.datasets.download_dataset(original_output["outputs"][0]["id"]) + rerun_output_content = self.gi.datasets.download_dataset(rerun_output["outputs"][0]["id"]) self.assertEqual(rerun_output_content, original_output_content) - @test_util.skip_unless_galaxy('release_21.01') + @test_util.skip_unless_galaxy("release_21.01") @test_util.skip_unless_tool("Show beginning1") def test_rerun_and_remap(self): - path = test_util.get_abspath(os.path.join('data', 'select_first.ga')) + path = test_util.get_abspath(os.path.join("data", "select_first.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) wf_inputs = { - "0": {'src': 'hda', 'id': self.dataset_id}, + "0": {"src": "hda", "id": self.dataset_id}, "1": "-1", } - invocation_id = self.gi.workflows.invoke_workflow(wf['id'], inputs=wf_inputs, history_id=self.history_id)['id'] + invocation_id = self.gi.workflows.invoke_workflow(wf["id"], inputs=wf_inputs, history_id=self.history_id)["id"] invocation = self.gi.invocations.wait_for_invocation(invocation_id) - job_steps = [step for step in invocation['steps'] if step['job_id']] - job_steps.sort(key=itemgetter('order_index')) + job_steps = [step for step in invocation["steps"] if step["job_id"]] + job_steps.sort(key=itemgetter("order_index")) try: - self.gi.jobs.wait_for_job(job_steps[0]['job_id']) + self.gi.jobs.wait_for_job(job_steps[0]["job_id"]) except Exception: pass # indicates the job failed as expected else: @@ -120,131 +117,122 @@ def test_rerun_and_remap(self): history_contents = self.gi.histories.show_history(self.history_id, contents=True) self.assertEqual(len(history_contents), 3) - self.assertEqual(history_contents[1]['state'], 'error') - self.assertEqual(history_contents[2]['state'], 'paused') + 
self.assertEqual(history_contents[1]["state"], "error") + self.assertEqual(history_contents[2]["state"], "paused") # resume the paused step job - resumed_job = self.gi.jobs.resume_job(job_steps[-1]['job_id']) - self.assertEqual(resumed_job[0]['name'], 'out_file1') + resumed_job = self.gi.jobs.resume_job(job_steps[-1]["job_id"]) + self.assertEqual(resumed_job[0]["name"], "out_file1") # the following does not pass stably - the job goes back to paused too quickly # history_contents_resumed = self.gi.histories.show_history(self.history_id, contents=True) # self.assertNotEqual(history_contents_resumed[2]['state'], 'paused') # now rerun and remap with correct input param - failed_job_id = self.gi.datasets.show_dataset(history_contents[1]['id'])['creating_job'] - tool_inputs_update = { - 'lineNum': '1' - } + failed_job_id = self.gi.datasets.show_dataset(history_contents[1]["id"])["creating_job"] + tool_inputs_update = {"lineNum": "1"} rerun_job = self.gi.jobs.rerun_job(failed_job_id, remap=True, tool_inputs_update=tool_inputs_update) - new_job_id = rerun_job['jobs'][0]['id'] + new_job_id = rerun_job["jobs"][0]["id"] # Wait for the last dataset in the history to be unpaused and complete last_dataset = self.gi.histories.show_history(self.history_id, contents=True)[-1] - last_job_id = self.gi.datasets.show_dataset(last_dataset['id'])['creating_job'] + last_job_id = self.gi.datasets.show_dataset(last_dataset["id"])["creating_job"] self.gi.jobs.wait_for_job(new_job_id) self.gi.jobs.resume_job(last_job_id) # last_job can get stuck on paused - resume it in case self.gi.jobs.wait_for_job(last_job_id) - self.assertEqual(last_dataset['hid'], 3) - self.assertEqual(last_dataset['id'], history_contents[2]['id']) - self._wait_and_verify_dataset(last_dataset['id'], b'line 1\tline 1\n') + self.assertEqual(last_dataset["hid"], 3) + self.assertEqual(last_dataset["id"], history_contents[2]["id"]) + self._wait_and_verify_dataset(last_dataset["id"], b"line 1\tline 1\n") - @test_util.skip_unless_galaxy('release_19.05') + @test_util.skip_unless_galaxy("release_19.05") @test_util.skip_unless_tool("random_lines1") def test_get_common_problems(self): - job_id = self._run_tool()['jobs'][0]['id'] + job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_common_problems(job_id) - self.assertEqual(response, {'has_duplicate_inputs': False, 'has_empty_inputs': True}) + self.assertEqual(response, {"has_duplicate_inputs": False, "has_empty_inputs": True}) @test_util.skip_unless_tool("random_lines1") def test_get_inputs(self): - job_id = self._run_tool()['jobs'][0]['id'] + job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_inputs(job_id) - self.assertEqual(response, [{'name': 'input', 'dataset': {'src': 'hda', 'id': self.dataset_id}}]) + self.assertEqual(response, [{"name": "input", "dataset": {"src": "hda", "id": self.dataset_id}}]) @test_util.skip_unless_tool("random_lines1") def test_get_outputs(self): output = self._run_tool() - job_id, output_id = output['jobs'][0]['id'], output['outputs'][0]['id'] + job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] response = self.gi.jobs.get_outputs(job_id) - self.assertEqual(response, [{'name': 'out_file1', 'dataset': {'src': 'hda', 'id': output_id}}]) + self.assertEqual(response, [{"name": "out_file1", "dataset": {"src": "hda", "id": output_id}}]) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") @test_util.skip_unless_tool("random_lines1") def test_get_destination_params(self): - 
job_id = self._run_tool()['jobs'][0]['id'] + job_id = self._run_tool()["jobs"][0]["id"] response = self.gi.jobs.get_destination_params(job_id) - self.assertIn('Runner', response) - self.assertIn('Runner Job ID', response) - self.assertIn('Handler', response) + self.assertIn("Runner", response) + self.assertIn("Runner Job ID", response) + self.assertIn("Handler", response) - @test_util.skip_unless_galaxy('release_18.01') + @test_util.skip_unless_galaxy("release_18.01") @test_util.skip_unless_tool("random_lines1") def test_search_jobs(self): - job_id = self._run_tool()['jobs'][0]['id'] + job_id = self._run_tool()["jobs"][0]["id"] inputs = { - 'num_lines': '1', - 'input': { - 'src': 'hda', - 'id': self.dataset_id - }, - 'seed_source|seed_source_selector': 'set_seed', - 'seed_source|seed': 'asdf' + "num_lines": "1", + "input": {"src": "hda", "id": self.dataset_id}, + "seed_source|seed_source_selector": "set_seed", + "seed_source|seed": "asdf", } - response = self.gi.jobs.search_jobs('random_lines1', inputs) - self.assertIn(job_id, [job['id'] for job in response]) + response = self.gi.jobs.search_jobs("random_lines1", inputs) + self.assertIn(job_id, [job["id"] for job in response]) - @test_util.skip_unless_galaxy('release_20.01') + @test_util.skip_unless_galaxy("release_20.01") @test_util.skip_unless_tool("random_lines1") def test_report_error(self): output = self._run_tool() - job_id, output_id = output['jobs'][0]['id'], output['outputs'][0]['id'] - response = self.gi.jobs.report_error(job_id, output_id, 'Test error') + job_id, output_id = output["jobs"][0]["id"], output["outputs"][0]["id"] + response = self.gi.jobs.report_error(job_id, output_id, "Test error") # expected response when the Galaxy server does not have mail configured - self.assertEqual(response, {'messages': [['An error occurred sending the report by email: Mail is not configured for this Galaxy instance', 'danger']]}) + self.assertEqual( + response, + { + "messages": [ + [ + "An error occurred sending the report by email: Mail is not configured for this Galaxy instance", + "danger", + ] + ] + }, + ) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_show_job_lock(self): status = self.gi.jobs.show_job_lock() self.assertFalse(status) - @test_util.skip_unless_galaxy('release_20.05') + @test_util.skip_unless_galaxy("release_20.05") def test_update_job_lock(self): status = self.gi.jobs.update_job_lock(active=True) self.assertTrue(status) status = self.gi.jobs.update_job_lock(active=False) self.assertFalse(status) - @test_util.skip_unless_galaxy('release_18.01') + @test_util.skip_unless_galaxy("release_18.01") def test_cancel_job(self): - job_id = self._run_tool()['jobs'][0]['id'] - job_state = self.gi.jobs.show_job(job_id)['state'] - self.assertTrue(job_state, 'deleted') + job_id = self._run_tool()["jobs"][0]["id"] + job_state = self.gi.jobs.show_job(job_id)["state"] + self.assertTrue(job_state, "deleted") - def _run_tool(self, input_format: str = 'legacy') -> dict: + def _run_tool(self, input_format: str = "legacy") -> dict: tool_inputs = { - 'num_lines': '1', - 'input': { - 'src': 'hda', - 'id': self.dataset_id - }, + "num_lines": "1", + "input": {"src": "hda", "id": self.dataset_id}, } - if input_format == '21.01': - tool_inputs.update({ - 'seed_source': { - 'seed_source_selector': 'set_seed', - 'seed': 'asdf' - } - }) + if input_format == "21.01": + tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}}) else: # legacy format - 
tool_inputs.update({ - 'seed_source|seed_source_selector': 'set_seed', - 'seed_source|seed': 'asdf' - }) + tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"}) return self.gi.tools.run_tool( - history_id=self.history_id, - tool_id='random_lines1', - tool_inputs=tool_inputs, - input_format=input_format + history_id=self.history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format ) diff --git a/bioblend/_tests/TestGalaxyLibraries.py b/bioblend/_tests/TestGalaxyLibraries.py index 8b0332f44..1be7ef1a9 100644 --- a/bioblend/_tests/TestGalaxyLibraries.py +++ b/bioblend/_tests/TestGalaxyLibraries.py @@ -2,137 +2,157 @@ import shutil import tempfile -from . import GalaxyTestBase, test_util +from . import ( + GalaxyTestBase, + test_util, +) -FOO_DATA = 'foo\nbar\n' +FOO_DATA = "foo\nbar\n" class TestGalaxyLibraries(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() - self.name = 'automated test library' - self.library = self.gi.libraries.create_library(self.name, description='automated test', synopsis='automated test synopsis') + self.name = "automated test library" + self.library = self.gi.libraries.create_library( + self.name, description="automated test", synopsis="automated test synopsis" + ) def tearDown(self): - self.gi.libraries.delete_library(self.library['id']) + self.gi.libraries.delete_library(self.library["id"]) def test_create_library(self): - self.assertEqual(self.library['name'], self.name) - self.assertIsNotNone(self.library['id']) + self.assertEqual(self.library["name"], self.name) + self.assertIsNotNone(self.library["id"]) def test_get_libraries(self): libraries_with_name = self.gi.libraries.get_libraries(name=self.name) - self.assertEqual(len([l for l in libraries_with_name if l['id'] == self.library['id']]), 1) + self.assertEqual(len([l for l in libraries_with_name if l["id"] == self.library["id"]]), 1) - deleted_name = 'deleted test library' - deleted_library_id = self.gi.libraries.create_library(deleted_name, description='a deleted library', synopsis='automated test synopsis')['id'] + deleted_name = "deleted test library" + deleted_library_id = self.gi.libraries.create_library( + deleted_name, description="a deleted library", synopsis="automated test synopsis" + )["id"] self.gi.libraries.delete_library(deleted_library_id) deleted_libraries_with_name = self.gi.libraries.get_libraries(name=deleted_name, deleted=True) - self.assertEqual(len([l for l in deleted_libraries_with_name if l['id'] == deleted_library_id]), 1) + self.assertEqual(len([l for l in deleted_libraries_with_name if l["id"] == deleted_library_id]), 1) all_non_deleted_libraries = self.gi.libraries.get_libraries(deleted=False) - self.assertEqual(len([l for l in all_non_deleted_libraries if l['id'] == self.library['id']]), 1) - self.assertEqual([l for l in all_non_deleted_libraries if l['id'] == deleted_library_id], []) + self.assertEqual(len([l for l in all_non_deleted_libraries if l["id"] == self.library["id"]]), 1) + self.assertEqual([l for l in all_non_deleted_libraries if l["id"] == deleted_library_id], []) all_deleted_libraries = self.gi.libraries.get_libraries(deleted=True) - self.assertEqual([l for l in all_deleted_libraries if l['id'] == self.library['id']], []) - self.assertEqual(len([l for l in all_deleted_libraries if l['id'] == deleted_library_id]), 1) + self.assertEqual([l for l in all_deleted_libraries if l["id"] == self.library["id"]], []) + self.assertEqual(len([l for l in all_deleted_libraries if l["id"] == 
deleted_library_id]), 1) all_libraries = self.gi.libraries.get_libraries(deleted=None) - self.assertEqual(len([l for l in all_libraries if l['id'] == self.library['id']]), 1) - self.assertEqual(len([l for l in all_libraries if l['id'] == deleted_library_id]), 1) + self.assertEqual(len([l for l in all_libraries if l["id"] == self.library["id"]]), 1) + self.assertEqual(len([l for l in all_libraries if l["id"] == deleted_library_id]), 1) def test_show_library(self): - library_data = self.gi.libraries.show_library(self.library['id']) - self.assertEqual(self.library['id'], library_data['id']) - self.assertEqual(self.library['name'], library_data['name']) + library_data = self.gi.libraries.show_library(self.library["id"]) + self.assertEqual(self.library["id"], library_data["id"]) + self.assertEqual(self.library["name"], library_data["name"]) def test_upload_file_from_url(self): - self.gi.libraries.upload_file_from_url(self.library['id'], 'https://zenodo.org/record/582600/files/wildtype.fna?download=1') + self.gi.libraries.upload_file_from_url( + self.library["id"], "https://zenodo.org/record/582600/files/wildtype.fna?download=1" + ) def test_upload_file_contents(self): - self.gi.libraries.upload_file_contents(self.library['id'], FOO_DATA) + self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA) def test_upload_file_from_local_path(self): - with tempfile.NamedTemporaryFile(mode='w', prefix='bioblend_test_') as f: + with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f: f.write(FOO_DATA) f.flush() - self.gi.libraries.upload_file_from_local_path(self.library['id'], f.name) + self.gi.libraries.upload_file_from_local_path(self.library["id"], f.name) def test_upload_file_from_server(self): pass def test_upload_from_galaxy_filesystem(self): bnames = [f"f{i}.txt" for i in range(2)] - tempdir = tempfile.mkdtemp(prefix='bioblend_test_') + tempdir = tempfile.mkdtemp(prefix="bioblend_test_") try: fnames = [os.path.join(tempdir, _) for _ in bnames] for fn in fnames: - with open(fn, 'w') as f: + with open(fn, "w") as f: f.write(FOO_DATA) - filesystem_paths = '\n'.join(fnames) - ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library['id'], filesystem_paths) + filesystem_paths = "\n".join(fnames) + ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library["id"], filesystem_paths) for dataset_dict in ret: - dataset = self.gi.libraries.wait_for_dataset(self.library['id'], dataset_dict['id']) - self.assertEqual(dataset['state'], 'ok') - ret = self.gi.libraries.upload_from_galaxy_filesystem(self.library['id'], filesystem_paths, link_data_only='link_to_files') + dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) + self.assertEqual(dataset["state"], "ok") + ret = self.gi.libraries.upload_from_galaxy_filesystem( + self.library["id"], filesystem_paths, link_data_only="link_to_files" + ) for dataset_dict in ret: - dataset = self.gi.libraries.wait_for_dataset(self.library['id'], dataset_dict['id']) - self.assertEqual(dataset['state'], 'ok') + dataset = self.gi.libraries.wait_for_dataset(self.library["id"], dataset_dict["id"]) + self.assertEqual(dataset["state"], "ok") finally: shutil.rmtree(tempdir) def test_copy_from_dataset(self): history = self.gi.histories.create_history() - dataset_id = self._test_dataset(history['id']) - self.gi.libraries.copy_from_dataset(self.library['id'], dataset_id, message='Copied from dataset') + dataset_id = self._test_dataset(history["id"]) + self.gi.libraries.copy_from_dataset(self.library["id"], 
dataset_id, message="Copied from dataset") def test_update_dataset(self): library_id = self.library["id"] dataset1 = self.gi.libraries.upload_file_contents(library_id, FOO_DATA) - updated_dataset = self.gi.libraries.update_library_dataset(dataset1[0]['id'], name='Modified name', misc_info='Modified the name succesfully') - self.assertEqual(updated_dataset["name"], 'Modified name') - self.assertEqual(updated_dataset["misc_info"], 'Modified the name succesfully') + updated_dataset = self.gi.libraries.update_library_dataset( + dataset1[0]["id"], name="Modified name", misc_info="Modified the name succesfully" + ) + self.assertEqual(updated_dataset["name"], "Modified name") + self.assertEqual(updated_dataset["misc_info"], "Modified the name succesfully") def test_library_permissions(self): current_user = self.gi.users.get_current_user() - user_id_list_new = [current_user['id']] - self.gi.libraries.set_library_permissions(self.library['id'], access_in=user_id_list_new, modify_in=user_id_list_new, add_in=user_id_list_new, manage_in=user_id_list_new) - ret = self.gi.libraries.get_library_permissions(self.library['id']) - self.assertEqual({_[1] for _ in ret['access_library_role_list']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret['modify_library_role_list']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret['add_library_item_role_list']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret['manage_library_role_list']}, set(user_id_list_new)) + user_id_list_new = [current_user["id"]] + self.gi.libraries.set_library_permissions( + self.library["id"], + access_in=user_id_list_new, + modify_in=user_id_list_new, + add_in=user_id_list_new, + manage_in=user_id_list_new, + ) + ret = self.gi.libraries.get_library_permissions(self.library["id"]) + self.assertEqual({_[1] for _ in ret["access_library_role_list"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret["modify_library_role_list"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret["add_library_item_role_list"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret["manage_library_role_list"]}, set(user_id_list_new)) def test_dataset_permissions(self): current_user = self.gi.users.get_current_user() - user_id_list_new = [current_user['id']] + user_id_list_new = [current_user["id"]] library_id = self.library["id"] dataset1 = self.gi.libraries.upload_file_contents(library_id, FOO_DATA) - ret = self.gi.libraries.set_dataset_permissions(dataset1[0]['id'], access_in=user_id_list_new, modify_in=user_id_list_new, manage_in=user_id_list_new) - self.assertEqual({_[1] for _ in ret['access_dataset_roles']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret['modify_item_roles']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret['manage_dataset_roles']}, set(user_id_list_new)) + ret = self.gi.libraries.set_dataset_permissions( + dataset1[0]["id"], access_in=user_id_list_new, modify_in=user_id_list_new, manage_in=user_id_list_new + ) + self.assertEqual({_[1] for _ in ret["access_dataset_roles"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret["modify_item_roles"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret["manage_dataset_roles"]}, set(user_id_list_new)) # test get_dataset_permissions - ret_get = self.gi.libraries.get_dataset_permissions(dataset1[0]['id']) - self.assertEqual({_[1] for _ in ret_get['access_dataset_roles']}, set(user_id_list_new)) - self.assertEqual({_[1] for _ in ret_get['modify_item_roles']}, set(user_id_list_new)) - 
self.assertEqual({_[1] for _ in ret_get['manage_dataset_roles']}, set(user_id_list_new)) + ret_get = self.gi.libraries.get_dataset_permissions(dataset1[0]["id"]) + self.assertEqual({_[1] for _ in ret_get["access_dataset_roles"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret_get["modify_item_roles"]}, set(user_id_list_new)) + self.assertEqual({_[1] for _ in ret_get["manage_dataset_roles"]}, set(user_id_list_new)) - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") def test_upload_file_contents_with_tags(self): - datasets = self.gi.libraries.upload_file_contents(self.library['id'], FOO_DATA, tags=["name:foobar", "barfoo"]) - dataset_show = self.gi.libraries.show_dataset(self.library['id'], datasets[0]['id']) - self.assertEqual(dataset_show['tags'], 'name:foobar, barfoo') + datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA, tags=["name:foobar", "barfoo"]) + dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) + self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") - @test_util.skip_unless_galaxy('release_19.09') + @test_util.skip_unless_galaxy("release_19.09") def test_update_dataset_tags(self): - datasets = self.gi.libraries.upload_file_contents(self.library['id'], FOO_DATA) - dataset_show = self.gi.libraries.show_dataset(self.library['id'], datasets[0]['id']) - self.assertEqual(dataset_show['tags'], "") + datasets = self.gi.libraries.upload_file_contents(self.library["id"], FOO_DATA) + dataset_show = self.gi.libraries.show_dataset(self.library["id"], datasets[0]["id"]) + self.assertEqual(dataset_show["tags"], "") - updated_dataset = self.gi.libraries.update_library_dataset(datasets[0]['id'], tags=["name:foobar", "barfoo"]) - dataset_show = self.gi.libraries.show_dataset(self.library['id'], updated_dataset['id']) + updated_dataset = self.gi.libraries.update_library_dataset(datasets[0]["id"], tags=["name:foobar", "barfoo"]) + dataset_show = self.gi.libraries.show_dataset(self.library["id"], updated_dataset["id"]) - self.assertEqual(dataset_show['tags'], 'name:foobar, barfoo') + self.assertEqual(dataset_show["tags"], "name:foobar, barfoo") diff --git a/bioblend/_tests/TestGalaxyObjects.py b/bioblend/_tests/TestGalaxyObjects.py index 739903d27..b931449a8 100644 --- a/bioblend/_tests/TestGalaxyObjects.py +++ b/bioblend/_tests/TestGalaxyObjects.py @@ -20,102 +20,95 @@ ) from . 
import test_util - -bioblend.set_stream_logger('test', level='INFO') +bioblend.set_stream_logger("test", level="INFO") socket.setdefaulttimeout(10.0) -SAMPLE_FN = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) -SAMPLE_WF_COLL_FN = test_util.get_abspath(os.path.join('data', 'paste_columns_collections.ga')) -SAMPLE_WF_PARAMETER_INPUT_FN = test_util.get_abspath(os.path.join('data', 'workflow_with_parameter_input.ga')) -FOO_DATA = 'foo\nbar\n' -FOO_DATA_2 = 'foo2\nbar2\n' +SAMPLE_FN = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) +SAMPLE_WF_COLL_FN = test_util.get_abspath(os.path.join("data", "paste_columns_collections.ga")) +SAMPLE_WF_PARAMETER_INPUT_FN = test_util.get_abspath(os.path.join("data", "workflow_with_parameter_input.ga")) +FOO_DATA = "foo\nbar\n" +FOO_DATA_2 = "foo2\nbar2\n" SAMPLE_WF_DICT = { - 'deleted': False, - 'id': '9005c5112febe774', - 'inputs': { - '571': {'label': 'Input Dataset', 'value': ''}, - '572': {'label': 'Input Dataset', 'value': ''}, + "deleted": False, + "id": "9005c5112febe774", + "inputs": { + "571": {"label": "Input Dataset", "value": ""}, + "572": {"label": "Input Dataset", "value": ""}, }, - 'model_class': 'StoredWorkflow', - 'name': 'paste_columns', - 'owner': 'user_foo', - 'published': False, - 'steps': { - '571': { - 'id': 571, - 'input_steps': {}, - 'tool_id': None, - 'tool_inputs': {'name': 'Input Dataset'}, - 'tool_version': None, - 'type': 'data_input', + "model_class": "StoredWorkflow", + "name": "paste_columns", + "owner": "user_foo", + "published": False, + "steps": { + "571": { + "id": 571, + "input_steps": {}, + "tool_id": None, + "tool_inputs": {"name": "Input Dataset"}, + "tool_version": None, + "type": "data_input", }, - '572': { - 'id': 572, - 'input_steps': {}, - 'tool_id': None, - 'tool_inputs': {'name': 'Input Dataset'}, - 'tool_version': None, - 'type': 'data_input', + "572": { + "id": 572, + "input_steps": {}, + "tool_id": None, + "tool_inputs": {"name": "Input Dataset"}, + "tool_version": None, + "type": "data_input", }, - '573': { - 'id': 573, - 'input_steps': { - 'input1': {'source_step': 571, 'step_output': 'output'}, - 'input2': {'source_step': 572, 'step_output': 'output'}, + "573": { + "id": 573, + "input_steps": { + "input1": {"source_step": 571, "step_output": "output"}, + "input2": {"source_step": 572, "step_output": "output"}, }, - 'tool_id': 'Paste1', - 'tool_inputs': { - 'delimiter': '"T"', - 'input1': 'null', - 'input2': 'null', + "tool_id": "Paste1", + "tool_inputs": { + "delimiter": '"T"', + "input1": "null", + "input2": "null", }, - 'tool_version': '1.0.0', - 'type': 'tool', - } + "tool_version": "1.0.0", + "type": "tool", + }, }, - 'tags': [], - 'url': '/api/workflows/9005c5112febe774', + "tags": [], + "url": "/api/workflows/9005c5112febe774", } SAMPLE_INV_DICT = { - 'history_id': '2f94e8ae9edff68a', - 'id': 'df7a1f0c02a5b08e', - 'inputs': { - '0': { - 'id': 'a7db2fac67043c7e', - 'src': 'hda', - 'uuid': '7932ffe0-2340-4952-8857-dbaa50f1f46a' - } - }, - 'model_class': 'WorkflowInvocation', - 'state': 'ready', - 'steps': [ + "history_id": "2f94e8ae9edff68a", + "id": "df7a1f0c02a5b08e", + "inputs": {"0": {"id": "a7db2fac67043c7e", "src": "hda", "uuid": "7932ffe0-2340-4952-8857-dbaa50f1f46a"}}, + "model_class": "WorkflowInvocation", + "state": "ready", + "steps": [ { - 'action': None, - 'id': 'd413a19dec13d11e', - 'job_id': None, - 'model_class': 'WorkflowInvocationStep', - 'order_index': 0, - 'state': None, - 'update_time': '2015-10-31T22:00:26', - 'workflow_step_id': 
'cbbbf59e8f08c98c', - 'workflow_step_label': None, - 'workflow_step_uuid': 'b81250fd-3278-4e6a-b269-56a1f01ef485' + "action": None, + "id": "d413a19dec13d11e", + "job_id": None, + "model_class": "WorkflowInvocationStep", + "order_index": 0, + "state": None, + "update_time": "2015-10-31T22:00:26", + "workflow_step_id": "cbbbf59e8f08c98c", + "workflow_step_label": None, + "workflow_step_uuid": "b81250fd-3278-4e6a-b269-56a1f01ef485", }, { - 'action': None, - 'id': '2f94e8ae9edff68a', - 'job_id': 'e89067bb68bee7a0', - 'model_class': 'WorkflowInvocationStep', - 'order_index': 1, - 'state': 'new', - 'update_time': '2015-10-31T22:00:26', - 'workflow_step_id': '964b37715ec9bd22', - 'workflow_step_label': None, - 'workflow_step_uuid': 'e62440b8-e911-408b-b124-e05435d3125e' - } + "action": None, + "id": "2f94e8ae9edff68a", + "job_id": "e89067bb68bee7a0", + "model_class": "WorkflowInvocationStep", + "order_index": 1, + "state": "new", + "update_time": "2015-10-31T22:00:26", + "workflow_step_id": "964b37715ec9bd22", + "workflow_step_label": None, + "workflow_step_uuid": "e62440b8-e911-408b-b124-e05435d3125e", + }, ], - 'update_time': '2015-10-31T22:00:26', - 'uuid': 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c', - 'workflow_id': '03501d7626bd192f' + "update_time": "2015-10-31T22:00:26", + "uuid": "c8aa2b1c-801a-11e5-a9e5-8ca98228593c", + "workflow_id": "03501d7626bd192f", } @@ -131,11 +124,11 @@ def is_reachable(url): def upload_from_fs(lib, bnames, **kwargs): - tempdir = tempfile.mkdtemp(prefix='bioblend_test_') + tempdir = tempfile.mkdtemp(prefix="bioblend_test_") try: fnames = [os.path.join(tempdir, _) for _ in bnames] for fn in fnames: - with open(fn, 'w') as f: + with open(fn, "w") as f: f.write(FOO_DATA) dss = lib.upload_from_galaxy_fs(fnames, **kwargs) finally: @@ -144,16 +137,15 @@ def upload_from_fs(lib, bnames, **kwargs): class MockWrapper(wrappers.Wrapper): - BASE_ATTRS = ('a', 'b') + BASE_ATTRS = ("a", "b") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) class TestWrapper(unittest.TestCase): - def setUp(self): - self.d = {'a': 1, 'b': [2, 3], 'c': {'x': 4}} + self.d = {"a": 1, "b": [2, 3], "c": {"x": 4}} with self.assertRaises(TypeError): wrappers.Wrapper(self.d) self.w = MockWrapper(self.d) @@ -165,8 +157,8 @@ def test_initialize(self): self.w.b[0] = 222 self.assertEqual(self.w.a, 222) self.assertEqual(self.w.b[0], 222) - self.assertEqual(self.d['a'], 1) - self.assertEqual(self.d['b'][0], 2) + self.assertEqual(self.d["a"], 1) + self.assertEqual(self.d["b"][0], 2) with self.assertRaises(AttributeError): self.w.foo with self.assertRaises(AttributeError): @@ -188,7 +180,7 @@ def test_clone(self): self.assertEqual(self.w.b[0], 2) def test_kwargs(self): - parent = MockWrapper({'a': 10}) + parent = MockWrapper({"a": 10}) w = MockWrapper(self.d, parent=parent) self.assertIs(w.parent, parent) with self.assertRaises(AttributeError): @@ -196,23 +188,21 @@ def test_kwargs(self): class TestWorkflow(unittest.TestCase): - def setUp(self): self.wf = wrappers.Workflow(SAMPLE_WF_DICT) def test_initialize(self): - self.assertEqual(self.wf.id, '9005c5112febe774') - self.assertEqual(self.wf.name, 'paste_columns') + self.assertEqual(self.wf.id, "9005c5112febe774") + self.assertEqual(self.wf.name, "paste_columns") self.assertEqual(self.wf.deleted, False) - self.assertEqual(self.wf.owner, 'user_foo') + self.assertEqual(self.wf.owner, "user_foo") self.assertEqual(self.wf.published, False) self.assertEqual(self.wf.tags, []) - self.assertEqual( - self.wf.input_labels_to_ids, {'Input Dataset': 
{'571', '572'}}) - self.assertEqual(self.wf.tool_labels_to_ids, {'Paste1': {'573'}}) - self.assertEqual(self.wf.data_input_ids, {'571', '572'}) - self.assertEqual(self.wf.source_ids, {'571', '572'}) - self.assertEqual(self.wf.sink_ids, {'573'}) + self.assertEqual(self.wf.input_labels_to_ids, {"Input Dataset": {"571", "572"}}) + self.assertEqual(self.wf.tool_labels_to_ids, {"Paste1": {"573"}}) + self.assertEqual(self.wf.data_input_ids, {"571", "572"}) + self.assertEqual(self.wf.source_ids, {"571", "572"}) + self.assertEqual(self.wf.sink_ids, {"573"}) def test_dag(self): inv_dag = {} @@ -231,100 +221,105 @@ def test_dag(self): self.assertLess(ids.index(h), ids.index(t)) def test_steps(self): - steps = SAMPLE_WF_DICT['steps'] + steps = SAMPLE_WF_DICT["steps"] for sid, s in self.wf.steps.items(): self.assertIsInstance(s, wrappers.Step) self.assertEqual(s.id, sid) self.assertIn(sid, steps) self.assertIs(s.parent, self.wf) - self.assertEqual(self.wf.data_input_ids, {'571', '572'}) - self.assertEqual(self.wf.tool_ids, {'573'}) + self.assertEqual(self.wf.data_input_ids, {"571", "572"}) + self.assertEqual(self.wf.tool_ids, {"573"}) def test_taint(self): self.assertFalse(self.wf.is_modified) - self.wf.steps['571'].tool_id = 'foo' + self.wf.steps["571"].tool_id = "foo" self.assertTrue(self.wf.is_modified) def test_input_map(self): - hda = wrappers.HistoryDatasetAssociation({'id': 'hda_id'}, container='mock_history') - ldda = wrappers.LibraryDatasetDatasetAssociation({'id': 'ldda_id'}, container='mock_library') - input_map = self.wf._convert_input_map({'0': hda, '1': ldda, '2': {'id': 'hda2_id', 'src': 'hda'}}) - self.assertEqual(input_map, {'0': {'id': 'hda_id', 'src': 'hda'}, '1': {'id': 'ldda_id', 'src': 'ldda'}, '2': {'id': 'hda2_id', 'src': 'hda'}}) + hda = wrappers.HistoryDatasetAssociation({"id": "hda_id"}, container="mock_history") + ldda = wrappers.LibraryDatasetDatasetAssociation({"id": "ldda_id"}, container="mock_library") + input_map = self.wf._convert_input_map({"0": hda, "1": ldda, "2": {"id": "hda2_id", "src": "hda"}}) + self.assertEqual( + input_map, + { + "0": {"id": "hda_id", "src": "hda"}, + "1": {"id": "ldda_id", "src": "ldda"}, + "2": {"id": "hda2_id", "src": "hda"}, + }, + ) @test_util.skip_unless_galaxy() class GalaxyObjectsTestBase(unittest.TestCase): - def setUp(self): - galaxy_key = os.environ['BIOBLEND_GALAXY_API_KEY'] - galaxy_url = os.environ['BIOBLEND_GALAXY_URL'] + galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"] + galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] self.gi = galaxy_instance.GalaxyInstance(galaxy_url, galaxy_key) -@test_util.skip_unless_galaxy('release_19.09') +@test_util.skip_unless_galaxy("release_19.09") class TestInvocation(GalaxyObjectsTestBase): - @classmethod def setUpClass(cls): super().setUp(cls) cls.inv = wrappers.Invocation(SAMPLE_INV_DICT) with open(SAMPLE_FN) as f: cls.workflow = cls.gi.workflows.import_new(f.read()) - path_pause = test_util.get_abspath(os.path.join('data', 'test_workflow_pause.ga')) + path_pause = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga")) with open(path_pause) as f: cls.workflow_pause = cls.gi.workflows.import_new(f.read()) cls.history = cls.gi.histories.create(name="TestInvocation") - cls.dataset = cls.history.paste_content('1\t2\t3') + cls.dataset = cls.history.paste_content("1\t2\t3") @classmethod def tearDownClass(cls): cls.history.delete(purge=True) def test_initialize(self): - self.assertEqual(self.inv.workflow_id, '03501d7626bd192f') - self.assertEqual(self.inv.history_id, '2f94e8ae9edff68a') 
- self.assertEqual(self.inv.id, 'df7a1f0c02a5b08e') - self.assertEqual(self.inv.state, 'ready') - self.assertEqual(self.inv.update_time, '2015-10-31T22:00:26') - self.assertEqual(self.inv.uuid, 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c') + self.assertEqual(self.inv.workflow_id, "03501d7626bd192f") + self.assertEqual(self.inv.history_id, "2f94e8ae9edff68a") + self.assertEqual(self.inv.id, "df7a1f0c02a5b08e") + self.assertEqual(self.inv.state, "ready") + self.assertEqual(self.inv.update_time, "2015-10-31T22:00:26") + self.assertEqual(self.inv.uuid, "c8aa2b1c-801a-11e5-a9e5-8ca98228593c") def test_initialize_steps(self): - for step, step_dict in zip(self.inv.steps, SAMPLE_INV_DICT['steps']): + for step, step_dict in zip(self.inv.steps, SAMPLE_INV_DICT["steps"]): self.assertIsInstance(step, wrappers.InvocationStep) self.assertIs(step.parent, self.inv) - self.assertEqual(step.id, step_dict['id']) - self.assertEqual(step.job_id, step_dict['job_id']) - self.assertEqual(step.order_index, step_dict['order_index']) - self.assertEqual(step.state, step_dict['state']) - self.assertEqual(step.update_time, step_dict['update_time']) - self.assertEqual(step.workflow_step_id, step_dict['workflow_step_id']) - self.assertEqual(step.workflow_step_label, step_dict['workflow_step_label']) - self.assertEqual(step.workflow_step_uuid, step_dict['workflow_step_uuid']) + self.assertEqual(step.id, step_dict["id"]) + self.assertEqual(step.job_id, step_dict["job_id"]) + self.assertEqual(step.order_index, step_dict["order_index"]) + self.assertEqual(step.state, step_dict["state"]) + self.assertEqual(step.update_time, step_dict["update_time"]) + self.assertEqual(step.workflow_step_id, step_dict["workflow_step_id"]) + self.assertEqual(step.workflow_step_label, step_dict["workflow_step_label"]) + self.assertEqual(step.workflow_step_uuid, step_dict["workflow_step_uuid"]) def test_initialize_inputs(self): for i, input in enumerate(self.inv.inputs): - self.assertEqual(input, {**SAMPLE_INV_DICT['inputs'][str(i)], 'label': str(i)}) + self.assertEqual(input, {**SAMPLE_INV_DICT["inputs"][str(i)], "label": str(i)}) def test_sorted_step_ids(self): - self.assertListEqual(self.inv.sorted_step_ids(), ['d413a19dec13d11e', '2f94e8ae9edff68a']) + self.assertListEqual(self.inv.sorted_step_ids(), ["d413a19dec13d11e", "2f94e8ae9edff68a"]) def test_step_states(self): - self.assertSetEqual(self.inv.step_states(), {None, 'new'}) + self.assertSetEqual(self.inv.step_states(), {None, "new"}) def test_number_of_steps(self): self.assertEqual(self.inv.number_of_steps(), 2) def test_sorted_steps_by(self): self.assertEqual(len(self.inv.sorted_steps_by()), 2) - steps = self.inv.sorted_steps_by(step_ids={'2f94e8ae9edff68a'}) + steps = self.inv.sorted_steps_by(step_ids={"2f94e8ae9edff68a"}) self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].id, '2f94e8ae9edff68a') - self.assertListEqual(self.inv.sorted_steps_by(step_ids={'unmatched_id'}), []) - steps = self.inv.sorted_steps_by(states={'new'}) + self.assertEqual(steps[0].id, "2f94e8ae9edff68a") + self.assertListEqual(self.inv.sorted_steps_by(step_ids={"unmatched_id"}), []) + steps = self.inv.sorted_steps_by(states={"new"}) self.assertEqual(len(steps), 1) - self.assertEqual(steps[0].state, 'new') - self.assertListEqual(self.inv.sorted_steps_by(states={'unmatched_state'}), []) - steps = self.inv.sorted_steps_by(indices={0}, states={None, 'new'}) + self.assertEqual(steps[0].state, "new") + self.assertListEqual(self.inv.sorted_steps_by(states={"unmatched_state"}), []) + steps = 
self.inv.sorted_steps_by(indices={0}, states={None, "new"}) self.assertEqual(len(steps), 1) self.assertEqual(steps[0].order_index, 0) self.assertListEqual(self.inv.sorted_steps_by(indices={2}), []) @@ -332,20 +327,20 @@ def test_sorted_steps_by(self): def test_cancel(self): inv = self._obj_invoke_workflow() inv.cancel() - self.assertEqual(inv.state, 'cancelled') + self.assertEqual(inv.state, "cancelled") def test_wait(self): inv = self._obj_invoke_workflow() inv.wait() - self.assertEqual(inv.state, 'scheduled') + self.assertEqual(inv.state, "scheduled") def test_refresh(self): inv = self._obj_invoke_workflow() - inv.state = 'placeholder' + inv.state = "placeholder" # use wait_for_invocation() directly, because inv.wait() will update inv automatically self.gi.gi.invocations.wait_for_invocation(inv.id) inv.refresh() - self.assertEqual(inv.state, 'scheduled') + self.assertEqual(inv.state, "scheduled") def test_run_step_actions(self): inv = self.workflow_pause.invoke( @@ -366,49 +361,48 @@ def test_summary(self): inv = self._obj_invoke_workflow() inv.wait() summary = inv.summary() - self.assertEqual(summary['populated_state'], 'ok') + self.assertEqual(summary["populated_state"], "ok") def test_step_jobs_summary(self): inv = self._obj_invoke_workflow() inv.wait() step_jobs_summary = inv.step_jobs_summary() self.assertEqual(len(step_jobs_summary), 1) - self.assertEqual(step_jobs_summary[0]['populated_state'], 'ok') + self.assertEqual(step_jobs_summary[0]["populated_state"], "ok") def test_report(self): inv = self._obj_invoke_workflow() report = inv.report() - assert report['workflows'] == {self.workflow.id: {'name': 'paste_columns'}} + assert report["workflows"] == {self.workflow.id: {"name": "paste_columns"}} - @test_util.skip_unless_galaxy('release_20.09') + @test_util.skip_unless_galaxy("release_20.09") def test_biocompute_object(self): inv = self._obj_invoke_workflow() inv.wait() biocompute_object = inv.biocompute_object() - self.assertEqual(len(biocompute_object['description_domain']['pipeline_steps']), 1) + self.assertEqual(len(biocompute_object["description_domain"]["pipeline_steps"]), 1) def _obj_invoke_workflow(self): return self.workflow.invoke( - inputs={'Input 1': self.dataset, 'Input 2': self.dataset}, + inputs={"Input 1": self.dataset, "Input 2": self.dataset}, history=self.history, - inputs_by='name', + inputs_by="name", ) -@test_util.skip_unless_galaxy('release_19.09') +@test_util.skip_unless_galaxy("release_19.09") class TestObjInvocationClient(GalaxyObjectsTestBase): - @classmethod def setUpClass(cls): super().setUp(cls) with open(SAMPLE_FN) as f: cls.workflow = cls.gi.workflows.import_new(f.read()) cls.history = cls.gi.histories.create(name="TestGalaxyObjInvocationClient") - dataset = cls.history.paste_content('1\t2\t3') + dataset = cls.history.paste_content("1\t2\t3") cls.inv = cls.workflow.invoke( - inputs={'Input 1': dataset, 'Input 2': dataset}, + inputs={"Input 1": dataset, "Input 2": dataset}, history=cls.history, - inputs_by='name', + inputs_by="name", ) cls.inv.wait() @@ -421,7 +415,7 @@ def test_get(self): self.assertEqual(inv.id, self.inv.id) self.assertEqual(inv.workflow_id, self.workflow.id) self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, 'scheduled') + self.assertEqual(inv.state, "scheduled") self.assertEqual(inv.update_time, self.inv.update_time) self.assertEqual(inv.uuid, self.inv.uuid) @@ -432,7 +426,7 @@ def test_get_previews(self): self.assertEqual(inv_preview.id, self.inv.id) self.assertEqual(inv_preview.workflow_id, 
self.workflow.id) self.assertEqual(inv_preview.history_id, self.history.id) - self.assertEqual(inv_preview.state, 'scheduled') + self.assertEqual(inv_preview.state, "scheduled") self.assertEqual(inv_preview.update_time, self.inv.update_time) self.assertEqual(inv_preview.uuid, self.inv.uuid) @@ -442,7 +436,7 @@ def test_list(self): self.assertEqual(inv.id, self.inv.id) self.assertEqual(inv.workflow_id, self.workflow.id) self.assertEqual(inv.history_id, self.history.id) - self.assertEqual(inv.state, 'scheduled') + self.assertEqual(inv.state, "scheduled") self.assertEqual(inv.update_time, self.inv.update_time) self.assertEqual(inv.uuid, self.inv.uuid) self.assertGreater(len(self.inv.steps), 0) @@ -452,12 +446,10 @@ def test_list(self): class TestGalaxyInstance(GalaxyObjectsTestBase): - def test_library(self): name = f"test_{uuid.uuid4().hex}" - description, synopsis = 'D', 'S' - lib = self.gi.libraries.create( - name, description=description, synopsis=synopsis) + description, synopsis = "D", "S" + lib = self.gi.libraries.create(name, description=description, synopsis=synopsis) self.assertEqual(lib.name, name) self.assertEqual(lib.description, description) self.assertEqual(lib.synopsis, synopsis) @@ -478,7 +470,7 @@ def test_workflow_collections_from_str(self): wf = self.gi.workflows.import_new(f.read()) self._check_and_del_workflow(wf) - @test_util.skip_unless_galaxy('release_19.01') + @test_util.skip_unless_galaxy("release_19.01") def test_workflow_parameter_input(self): with open(SAMPLE_WF_PARAMETER_INPUT_FN) as f: self.gi.workflows.import_new(f.read()) @@ -497,11 +489,11 @@ def test_workflow_missing_tools(self): with open(SAMPLE_FN) as f: wf_dump = json.load(f) wf_info = self.gi.gi.workflows.import_workflow_dict(wf_dump) - wf_dict = self.gi.gi.workflows.show_workflow(wf_info['id']) - for id_, step in wf_dict['steps'].items(): - if step['type'] == 'tool': - for k in 'tool_inputs', 'tool_version': - wf_dict['steps'][id_][k] = None + wf_dict = self.gi.gi.workflows.show_workflow(wf_info["id"]) + for id_, step in wf_dict["steps"].items(): + if step["type"] == "tool": + for k in "tool_inputs", "tool_version": + wf_dict["steps"][id_][k] = None wf = wrappers.Workflow(wf_dict, gi=self.gi) self.assertFalse(wf.is_runnable) with self.assertRaises(RuntimeError): @@ -518,18 +510,18 @@ def test_workflow_export(self): def _check_and_del_workflow(self, wf, check_is_public=False): # Galaxy appends additional text to imported workflow names - self.assertTrue(wf.name.startswith('paste_columns')) + self.assertTrue(wf.name.startswith("paste_columns")) self.assertEqual(type(wf.owner), str) self.assertEqual(len(wf.steps), 3) for step_id, step in wf.steps.items(): self.assertIsInstance(step, wrappers.Step) self.assertEqual(step_id, step.id) self.assertIsInstance(step.tool_inputs, dict) - if step.type == 'tool': + if step.type == "tool": self.assertIsNotNone(step.tool_id) self.assertIsNotNone(step.tool_version) self.assertIsInstance(step.input_steps, dict) - elif step.type in ('data_collection_input', 'data_input'): + elif step.type in ("data_collection_input", "data_input"): self.assertIsNone(step.tool_id) self.assertIsNone(step.tool_version) self.assertEqual(step.input_steps, {}) @@ -543,40 +535,38 @@ def _check_and_del_workflow(self, wf, check_is_public=False): # * we can't publish a wf from the API # * we can't directly get another user's wf def test_workflow_from_shared(self): - all_prevs = { - _.id: _ for _ in self.gi.workflows.get_previews(published=True) - } - pub_only_ids = set(all_prevs).difference( - 
_.id for _ in self.gi.workflows.get_previews()) + all_prevs = {_.id: _ for _ in self.gi.workflows.get_previews(published=True)} + pub_only_ids = set(all_prevs).difference(_.id for _ in self.gi.workflows.get_previews()) if pub_only_ids: wf_id = pub_only_ids.pop() imported = self.gi.workflows.import_shared(wf_id) self.assertIsInstance(imported, wrappers.Workflow) imported.delete() else: - self.skipTest('no published workflows, manually publish a workflow to run this test') + self.skipTest("no published workflows, manually publish a workflow to run this test") def test_get_libraries(self): - self._test_multi_get('libraries') + self._test_multi_get("libraries") def test_get_histories(self): - self._test_multi_get('histories') + self._test_multi_get("histories") def test_get_workflows(self): - self._test_multi_get('workflows') + self._test_multi_get("workflows") def _normalized_functions(self, obj_type): - if obj_type == 'libraries': + if obj_type == "libraries": create = self.gi.libraries.create del_kwargs = {} - elif obj_type == 'histories': + elif obj_type == "histories": create = self.gi.histories.create - del_kwargs = {'purge': True} - elif obj_type == 'workflows': + del_kwargs = {"purge": True} + elif obj_type == "workflows": + def create(name): with open(SAMPLE_FN) as f: d = json.load(f) - d['name'] = name + d["name"] = name return self.gi.workflows.import_new(d) del_kwargs = {} @@ -584,8 +574,7 @@ def create(name): def _test_multi_get(self, obj_type): obj_gi_client = getattr(self.gi, obj_type) - create, del_kwargs = self._normalized_functions( - obj_type) + create, del_kwargs = self._normalized_functions(obj_type) def ids(seq): return {_.id for _ in seq} @@ -595,7 +584,7 @@ def ids(seq): try: objs = [create(_) for _ in names] self.assertLessEqual(ids(objs), ids(obj_gi_client.list())) - if obj_type != 'workflows': + if obj_type != "workflows": filtered = obj_gi_client.list(name=names[0]) self.assertEqual(len(filtered), 1) self.assertEqual(filtered[0].id, objs[0].id) @@ -613,16 +602,16 @@ def ids(seq): o.delete(**del_kwargs) def test_delete_libraries_by_name(self): - self._test_delete_by_name('libraries') - self._test_delete_by_ambiguous_name('libraries') + self._test_delete_by_name("libraries") + self._test_delete_by_ambiguous_name("libraries") def test_delete_histories_by_name(self): - self._test_delete_by_name('histories') - self._test_delete_by_ambiguous_name('histories') + self._test_delete_by_name("histories") + self._test_delete_by_ambiguous_name("histories") def test_delete_workflows_by_name(self): - self._test_delete_by_name('workflows') - self._test_delete_by_ambiguous_name('workflows') + self._test_delete_by_name("workflows") + self._test_delete_by_ambiguous_name("workflows") def _test_delete_by_name(self, obj_type): obj_gi_client = getattr(self.gi, obj_type) @@ -655,7 +644,7 @@ def _test_delete_by_ambiguous_name(self, obj_type): class TestLibrary(GalaxyObjectsTestBase): # just something that can be expected to be always up - DS_URL = 'https://tools.ietf.org/rfc/rfc1866.txt' + DS_URL = "https://tools.ietf.org/rfc/rfc1866.txt" def setUp(self): super().setUp() @@ -703,7 +692,7 @@ def test_dataset_from_url(self): self.skipTest(f"{self.DS_URL} not reachable") def test_dataset_from_local(self): - with tempfile.NamedTemporaryFile(mode='w', prefix='bioblend_test_') as f: + with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f: f.write(FOO_DATA) f.flush() ds = self.lib.upload_from_local(f.name) @@ -713,8 +702,7 @@ def test_datasets_from_fs(self): bnames = 
[f"f{i}.txt" for i in range(2)] dss, fnames = upload_from_fs(self.lib, bnames) self._check_datasets(dss) - dss, fnames = upload_from_fs( - self.lib, bnames, link_data_only='link_to_files') + dss, fnames = upload_from_fs(self.lib, bnames, link_data_only="link_to_files") for ds, fn in zip(dss, fnames): self.assertEqual(ds.file_name, fn) @@ -745,7 +733,6 @@ def test_get_datasets(self): class TestLDContents(GalaxyObjectsTestBase): - def setUp(self): super().setUp() self.lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}") @@ -781,7 +768,7 @@ def test_dataset_delete(self): def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_misc_info = f"Annotation for {new_name}" - new_genome_build = 'hg19' + new_genome_build = "hg19" updated_ldda = self.ds.update(name=new_name, misc_info=new_misc_info, genome_build=new_genome_build) self.assertEqual(self.ds.id, updated_ldda.id) self.assertEqual(self.ds.name, new_name) @@ -790,7 +777,6 @@ def test_dataset_update(self): class TestHistory(GalaxyObjectsTestBase): - def setUp(self): super().setUp() self.hist = self.gi.histories.create(f"test_{uuid.uuid4().hex}") @@ -824,7 +810,7 @@ def test_import_dataset(self): self._check_dataset(hda) def test_upload_file(self): - with tempfile.NamedTemporaryFile(mode='w', prefix='bioblend_test_') as f: + with tempfile.NamedTemporaryFile(mode="w", prefix="bioblend_test_") as f: f.write(FOO_DATA) f.flush() hda = self.hist.upload_file(f.name) @@ -855,10 +841,10 @@ def test_get_datasets(self): def test_export_and_download(self): jeha_id = self.hist.export(wait=True, maxwait=60) self.assertTrue(jeha_id) - tempdir = tempfile.mkdtemp(prefix='bioblend_test_') - temp_fn = os.path.join(tempdir, 'export.tar.gz') + tempdir = tempfile.mkdtemp(prefix="bioblend_test_") + temp_fn = os.path.join(tempdir, "export.tar.gz") try: - with open(temp_fn, 'wb') as fo: + with open(temp_fn, "wb") as fo: self.hist.download(jeha_id, fo) self.assertTrue(tarfile.is_tarfile(temp_fn)) finally: @@ -867,7 +853,7 @@ def test_export_and_download(self): def test_update(self): new_name = f"test_{uuid.uuid4().hex}" new_annotation = f"Annotation for {new_name}" - new_tags = ['tag1', 'tag2'] + new_tags = ["tag1", "tag2"] updated_hist = self.hist.update(name=new_name, annotation=new_annotation, tags=new_tags) self.assertEqual(self.hist.id, updated_hist.id) self.assertEqual(self.hist.name, new_name) @@ -881,11 +867,11 @@ def test_create_dataset_collection(self): self._create_collection_description() hdca = self.hist.create_dataset_collection(self.collection_description) self.assertIsInstance(hdca, wrappers.HistoryDatasetCollectionAssociation) - self.assertEqual(hdca.collection_type, 'list') + self.assertEqual(hdca.collection_type, "list") self.assertIs(hdca.container, self.hist) self.assertEqual(len(hdca.elements), 2) - self.assertEqual(self.dataset1.id, hdca.elements[0]['object']['id']) - self.assertEqual(self.dataset2.id, hdca.elements[1]['object']['id']) + self.assertEqual(self.dataset1.id, hdca.elements[0]["object"]["id"]) + self.assertEqual(self.dataset2.id, hdca.elements[1]["object"]["id"]) def test_delete_dataset_collection(self): self._create_collection_description() @@ -901,12 +887,11 @@ def _create_collection_description(self): elements=[ dataset_collections.HistoryDatasetElement(name="sample1", id=self.dataset1.id), dataset_collections.HistoryDatasetElement(name="sample2", id=self.dataset2.id), - ] + ], ) class TestHDAContents(GalaxyObjectsTestBase): - def setUp(self): super().setUp() self.hist = 
self.gi.histories.create(f"test_{uuid.uuid4().hex}") @@ -935,7 +920,7 @@ def test_dataset_get_contents(self): def test_dataset_update(self): new_name = f"test_{uuid.uuid4().hex}" new_annotation = f"Annotation for {new_name}" - new_genome_build = 'hg19' + new_genome_build = "hg19" updated_hda = self.ds.update(name=new_name, annotation=new_annotation, genome_build=new_genome_build) self.assertEqual(self.ds.id, updated_hda.id) self.assertEqual(self.ds.name, new_name) @@ -953,15 +938,14 @@ def test_dataset_purge(self): self.assertTrue(self.ds.purged) -@test_util.skip_unless_galaxy('release_19.09') +@test_util.skip_unless_galaxy("release_19.09") class TestRunWorkflow(GalaxyObjectsTestBase): - def setUp(self): super().setUp() self.lib = self.gi.libraries.create(f"test_{uuid.uuid4().hex}") with open(SAMPLE_FN) as f: self.wf = self.gi.workflows.import_new(f.read()) - self.contents = ['one\ntwo\n', '1\n2\n'] + self.contents = ["one\ntwo\n", "1\n2\n"] self.inputs = [self.lib.upload_data(_) for _ in self.contents] def tearDown(self): @@ -975,19 +959,18 @@ def _test(self, existing_hist=False, params=False): else: hist = hist_name if params: - params = {'Paste1': {'delimiter': 'U'}} - sep = '_' # 'U' maps to '_' in the paste tool + params = {"Paste1": {"delimiter": "U"}} + sep = "_" # 'U' maps to '_' in the paste tool else: params = None - sep = '\t' # default - input_map = {'Input 1': self.inputs[0], 'Input 2': self.inputs[1]} + sep = "\t" # default + input_map = {"Input 1": self.inputs[0], "Input 2": self.inputs[1]} sys.stderr.write(os.linesep) - inv = self.wf.invoke( - inputs=input_map, params=params, history=hist, inputs_by='name') + inv = self.wf.invoke(inputs=input_map, params=params, history=hist, inputs_by="name") out_hist = self.gi.histories.get(inv.history_id) inv.wait() last_step = inv.sorted_steps_by()[-1] - out_ds = last_step.get_outputs()['out_file1'] + out_ds = last_step.get_outputs()["out_file1"] self.assertEqual(out_ds.container.id, out_hist.id) res = out_ds.get_contents() exp_rows = zip(*(_.splitlines() for _ in self.contents)) @@ -1007,9 +990,8 @@ def test_params(self): self._test(params=True) -@test_util.skip_unless_galaxy('release_19.09') +@test_util.skip_unless_galaxy("release_19.09") class TestRunDatasetCollectionWorkflow(GalaxyObjectsTestBase): - def setUp(self): super().setUp() with open(SAMPLE_WF_COLL_FN) as f: @@ -1028,25 +1010,23 @@ def test_run_workflow_with_dataset_collection(self): elements=[ dataset_collections.HistoryDatasetElement(name="sample1", id=dataset1.id), dataset_collections.HistoryDatasetElement(name="sample2", id=dataset2.id), - ] + ], ) dataset_collection = self.hist.create_dataset_collection(collection_description) self.assertEqual(len(self.hist.content_infos), 3) - input_map = {"0": dataset_collection, - "1": dataset1} + input_map = {"0": dataset_collection, "1": dataset1} inv = self.wf.invoke(input_map, history=self.hist) inv.wait() self.hist.refresh() self.assertEqual(len(self.hist.content_infos), 6) last_step = inv.sorted_steps_by()[-1] - out_hdca = last_step.get_output_collections()['out_file1'] - self.assertEqual(out_hdca.collection_type, 'list') + out_hdca = last_step.get_output_collections()["out_file1"] + self.assertEqual(out_hdca.collection_type, "list") self.assertEqual(len(out_hdca.elements), 2) self.assertEqual(out_hdca.container.id, self.hist.id) class TestJob(GalaxyObjectsTestBase): - def test_get(self): job_prevs = self.gi.jobs.get_previews() if len(job_prevs) > 0: @@ -1062,20 +1042,25 @@ def test_get(self): def suite(): loader = 
unittest.TestLoader() s = unittest.TestSuite() - s.addTests([loader.loadTestsFromTestCase(c) for c in ( - TestWrapper, - TestWorkflow, - TestGalaxyInstance, - TestLibrary, - TestLDContents, - TestHistory, - TestHDAContents, - TestRunWorkflow, - )]) + s.addTests( + [ + loader.loadTestsFromTestCase(c) + for c in ( + TestWrapper, + TestWorkflow, + TestGalaxyInstance, + TestLibrary, + TestLDContents, + TestHistory, + TestHDAContents, + TestRunWorkflow, + ) + ] + ) return s -if __name__ == '__main__': +if __name__ == "__main__": tests = suite() RUNNER = unittest.TextTestRunner(verbosity=2) RUNNER.run(tests) diff --git a/bioblend/_tests/TestGalaxyQuotas.py b/bioblend/_tests/TestGalaxyQuotas.py index 1bdc1d9af..4ab8b703e 100644 --- a/bioblend/_tests/TestGalaxyQuotas.py +++ b/bioblend/_tests/TestGalaxyQuotas.py @@ -4,52 +4,49 @@ class TestGalaxyQuotas(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() # Quota names must be unique, and they're impossible to delete # without accessing the database. self.quota_name = f"BioBlend-Test-Quota-{uuid.uuid4().hex}" - self.quota = self.gi.quotas.create_quota( - self.quota_name, 'testing', '100 GB', '=', - default='registered' - ) + self.quota = self.gi.quotas.create_quota(self.quota_name, "testing", "100 GB", "=", default="registered") def tearDown(self): - self.gi.quotas.update_quota(self.quota['id'], default='registered') - self.gi.quotas.update_quota(self.quota['id'], default='no') - self.gi.quotas.delete_quota(self.quota['id']) + self.gi.quotas.update_quota(self.quota["id"], default="registered") + self.gi.quotas.update_quota(self.quota["id"], default="no") + self.gi.quotas.delete_quota(self.quota["id"]) def test_create_quota(self): - quota = self.gi.quotas.show_quota(self.quota['id']) - self.assertEqual(quota['name'], self.quota_name) - self.assertEqual(quota['bytes'], 107374182400) - self.assertEqual(quota['operation'], '=') - self.assertEqual(quota['description'], 'testing') + quota = self.gi.quotas.show_quota(self.quota["id"]) + self.assertEqual(quota["name"], self.quota_name) + self.assertEqual(quota["bytes"], 107374182400) + self.assertEqual(quota["operation"], "=") + self.assertEqual(quota["description"], "testing") def test_get_quotas(self): quotas = self.gi.quotas.get_quotas() - self.assertIn(self.quota['id'], [quota['id'] for quota in quotas]) + self.assertIn(self.quota["id"], [quota["id"] for quota in quotas]) def test_update_quota(self): response = self.gi.quotas.update_quota( - self.quota['id'], name=self.quota_name + '-new', - description='asdf', default='registered', operation='-', - amount='.01 TB' + self.quota["id"], + name=self.quota_name + "-new", + description="asdf", + default="registered", + operation="-", + amount=".01 TB", ) self.assertIn(f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""", response) - quota = self.gi.quotas.show_quota(self.quota['id']) - self.assertEqual(quota['name'], self.quota_name + '-new') - self.assertEqual(quota['bytes'], 10995116277) - self.assertEqual(quota['operation'], '-') - self.assertEqual(quota['description'], 'asdf') + quota = self.gi.quotas.show_quota(self.quota["id"]) + self.assertEqual(quota["name"], self.quota_name + "-new") + self.assertEqual(quota["bytes"], 10995116277) + self.assertEqual(quota["operation"], "-") + self.assertEqual(quota["description"], "asdf") def test_delete_undelete_quota(self): - self.gi.quotas.update_quota( - self.quota['id'], default='no' - ) - response = self.gi.quotas.delete_quota(self.quota['id']) - self.assertEqual(response, 
'Deleted 1 quotas: ' + self.quota_name) - response = self.gi.quotas.undelete_quota(self.quota['id']) - self.assertEqual(response, 'Undeleted 1 quotas: ' + self.quota_name) + self.gi.quotas.update_quota(self.quota["id"], default="no") + response = self.gi.quotas.delete_quota(self.quota["id"]) + self.assertEqual(response, "Deleted 1 quotas: " + self.quota_name) + response = self.gi.quotas.undelete_quota(self.quota["id"]) + self.assertEqual(response, "Undeleted 1 quotas: " + self.quota_name) diff --git a/bioblend/_tests/TestGalaxyRoles.py b/bioblend/_tests/TestGalaxyRoles.py index 2842e51e5..260ac50b6 100644 --- a/bioblend/_tests/TestGalaxyRoles.py +++ b/bioblend/_tests/TestGalaxyRoles.py @@ -8,11 +8,10 @@ class TestGalaxyRoles(GalaxyTestBase.GalaxyTestBase): - def setUp(self): super().setUp() self.name = f"test_{uuid.uuid4().hex}" - self.description = 'automated test role' + self.description = "automated test role" self.role = self.gi.roles.create_role(self.name, self.description) def tearDown(self): @@ -22,10 +21,10 @@ def tearDown(self): def test_get_roles(self): roles = self.gi.roles.get_roles() for role in roles: - self.assertIsNotNone(role['id']) - self.assertIsNotNone(role['name']) + self.assertIsNotNone(role["id"]) + self.assertIsNotNone(role["name"]) def test_create_role(self): - self.assertEqual(self.role['name'], self.name) - self.assertEqual(self.role['description'], self.description) - self.assertIsNotNone(self.role['id']) + self.assertEqual(self.role["name"], self.name) + self.assertEqual(self.role["description"], self.description) + self.assertIsNotNone(self.role["id"]) diff --git a/bioblend/_tests/TestGalaxyToolData.py b/bioblend/_tests/TestGalaxyToolData.py index a01e68e05..d4d83b595 100644 --- a/bioblend/_tests/TestGalaxyToolData.py +++ b/bioblend/_tests/TestGalaxyToolData.py @@ -6,15 +6,14 @@ class TestGalaxyToolData(GalaxyTestBase.GalaxyTestBase): - def test_get_data_tables(self): tables = self.gi.tool_data.get_data_tables() for table in tables: - self.assertIsNotNone(table['name']) + self.assertIsNotNone(table["name"]) def test_show_data_table(self): tables = self.gi.tool_data.get_data_tables() - table = self.gi.tool_data.show_data_table(tables[0]['name']) - self.assertIsNotNone(table['columns']) - self.assertIsNotNone(table['fields']) - self.assertIsNotNone(table['name']) + table = self.gi.tool_data.show_data_table(tables[0]["name"]) + self.assertIsNotNone(table["columns"]) + self.assertIsNotNone(table["fields"]) + self.assertIsNotNone(table["name"]) diff --git a/bioblend/_tests/TestGalaxyToolDependencies.py b/bioblend/_tests/TestGalaxyToolDependencies.py index ffd040aec..0a8b7e3a3 100644 --- a/bioblend/_tests/TestGalaxyToolDependencies.py +++ b/bioblend/_tests/TestGalaxyToolDependencies.py @@ -1,26 +1,30 @@ """ Test functions in bioblend.galaxy.tool_dependencies """ -from . import GalaxyTestBase, test_util +from . 
import ( + GalaxyTestBase, + test_util, +) class TestGalaxyToolDependencies(GalaxyTestBase.GalaxyTestBase): - - @test_util.skip_unless_galaxy('release_20.01') + @test_util.skip_unless_galaxy("release_20.01") def test_summarize_toolbox(self): toolbox_summary = self.gi.tool_dependencies.summarize_toolbox() self.assertTrue(isinstance(toolbox_summary, list)) self.assertGreater(len(toolbox_summary), 0) - toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by='tools') + toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by="tools") self.assertTrue(isinstance(toolbox_summary_by_tool, list)) self.assertGreater(len(toolbox_summary_by_tool), 0) self.assertTrue(isinstance(toolbox_summary_by_tool[0], dict)) - self.assertTrue('tool_ids' in toolbox_summary_by_tool[0]) - self.assertTrue(isinstance(toolbox_summary_by_tool[0]['tool_ids'], list)) - tool_id = toolbox_summary_by_tool[0]['tool_ids'][0] + self.assertTrue("tool_ids" in toolbox_summary_by_tool[0]) + self.assertTrue(isinstance(toolbox_summary_by_tool[0]["tool_ids"], list)) + tool_id = toolbox_summary_by_tool[0]["tool_ids"][0] - toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox(index_by='tools', tool_ids=[tool_id]) + toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox( + index_by="tools", tool_ids=[tool_id] + ) self.assertTrue(isinstance(toolbox_summary_select_tool_ids, list)) self.assertEqual(len(toolbox_summary_select_tool_ids), 1) - self.assertEqual(toolbox_summary_select_tool_ids[0]['tool_ids'][0], tool_id) + self.assertEqual(toolbox_summary_select_tool_ids[0]["tool_ids"][0], tool_id) diff --git a/bioblend/_tests/TestGalaxyToolInputs.py b/bioblend/_tests/TestGalaxyToolInputs.py index 80a65141e..92848d25e 100644 --- a/bioblend/_tests/TestGalaxyToolInputs.py +++ b/bioblend/_tests/TestGalaxyToolInputs.py @@ -8,17 +8,13 @@ def test_conditional(): # Build up example inputs for random_lines1 - as_dict = inputs().set( - "num_lines", 5 - ).set( - "input", dataset("encoded1") - ).set( - "seed_source", conditional().set( - "seed_source_selector", "set_seed" - ).set( - "seed", "asdf" - ) - ).to_dict() + as_dict = ( + inputs() + .set("num_lines", 5) + .set("input", dataset("encoded1")) + .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf")) + .to_dict() + ) assert as_dict["num_lines"] == 5 assert as_dict["input"]["src"] == "hda" assert as_dict["input"]["id"] == "encoded1" @@ -28,15 +24,17 @@ def test_conditional(): def test_repeat(): # Build up inputs for cat1 - as_dict = inputs().set( - "input1", dataset("encoded1") - ).set( - "queries", repeat().instance( - inputs().set_dataset_param("input2", "encoded2") - ).instance( - inputs().set_dataset_param("input2", "encoded3") + as_dict = ( + inputs() + .set("input1", dataset("encoded1")) + .set( + "queries", + repeat() + .instance(inputs().set_dataset_param("input2", "encoded2")) + .instance(inputs().set_dataset_param("input2", "encoded3")), ) - ).to_dict() + .to_dict() + ) assert as_dict["input1"]["src"] == "hda" assert as_dict["input1"]["id"] == "encoded1" assert as_dict["queries_0|input2"]["src"] == "hda" diff --git a/bioblend/_tests/TestGalaxyTools.py b/bioblend/_tests/TestGalaxyTools.py index f3a5b6561..6be47be0f 100644 --- a/bioblend/_tests/TestGalaxyTools.py +++ b/bioblend/_tests/TestGalaxyTools.py @@ -8,11 +8,13 @@ inputs, repeat, ) -from . import GalaxyTestBase, test_util +from . 
import ( + GalaxyTestBase, + test_util, +) class TestGalaxyTools(GalaxyTestBase.GalaxyTestBase): - def test_get_tools(self): # Test requires target Galaxy is configured with at least one tool. tools = self.gi.tools.get_tools() @@ -38,17 +40,17 @@ def _assert_is_tool_rep(self, data): def test_paste_content(self): history = self.gi.histories.create_history(name="test_paste_data history") - paste_text = 'line 1\nline 2\rline 3\r\nline 4' + paste_text = "line 1\nline 2\rline 3\r\nline 4" tool_output = self.gi.tools.paste_content(paste_text, history["id"]) self.assertEqual(len(tool_output["outputs"]), 1) # All lines in the resulting dataset should end with "\n" expected_contents = ("\n".join(paste_text.splitlines()) + "\n").encode() - self._wait_and_verify_dataset(tool_output['outputs'][0]['id'], expected_contents) + self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) # Same with space_to_tab=True tool_output = self.gi.tools.paste_content(paste_text, history["id"], space_to_tab=True) self.assertEqual(len(tool_output["outputs"]), 1) expected_contents = ("\n".join("\t".join(_.split()) for _ in paste_text.splitlines()) + "\n").encode() - self._wait_and_verify_dataset(tool_output['outputs'][0]['id'], expected_contents) + self._wait_and_verify_dataset(tool_output["outputs"][0]["id"], expected_contents) def test_upload_file(self): history = self.gi.histories.create_history(name="test_upload_file history") @@ -86,22 +88,13 @@ def test_run_random_lines(self): with open(test_util.get_abspath(os.path.join("data", "1.bed"))) as f: contents = f.read() dataset_id = self._test_dataset(history_id, contents=contents) - tool_inputs = inputs().set( - "num_lines", "1" - ).set( - "input", dataset(dataset_id) - ).set( - "seed_source", conditional().set( - "seed_source_selector", "set_seed" - ).set( - "seed", "asdf" - ) - ) - tool_output = self.gi.tools.run_tool( - history_id=history_id, - tool_id="random_lines1", - tool_inputs=tool_inputs + tool_inputs = ( + inputs() + .set("num_lines", "1") + .set("input", dataset(dataset_id)) + .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf")) ) + tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs) self.assertEqual(len(tool_output["outputs"]), 1) # TODO: Wait for results and verify has 1 line and is # chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 + @@ -112,56 +105,64 @@ def test_run_cat1(self): dataset1_id = self._test_dataset(history_id, contents="1 2 3") dataset2_id = self._test_dataset(history_id, contents="4 5 6") dataset3_id = self._test_dataset(history_id, contents="7 8 9") - tool_inputs = inputs().set( - "input1", dataset(dataset1_id) - ).set( - "queries", repeat().instance( - inputs().set("input2", dataset(dataset2_id)) - ).instance( - inputs().set("input2", dataset(dataset3_id)) + tool_inputs = ( + inputs() + .set("input1", dataset(dataset1_id)) + .set( + "queries", + repeat() + .instance(inputs().set("input2", dataset(dataset2_id))) + .instance(inputs().set("input2", dataset(dataset3_id))), ) ) - tool_output = self.gi.tools.run_tool( - history_id=history_id, - tool_id="cat1", - tool_inputs=tool_inputs - ) + tool_output = self.gi.tools.run_tool(history_id=history_id, tool_id="cat1", tool_inputs=tool_inputs) self.assertEqual(len(tool_output["outputs"]), 1) # TODO: Wait for results and verify it has 3 lines - 1 2 3, 4 5 6, # and 7 8 9. 
- @test_util.skip_unless_galaxy('release_19.05') - @test_util.skip_unless_tool('CONVERTER_fasta_to_bowtie_color_index') + @test_util.skip_unless_galaxy("release_19.05") + @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_dependency_install(self): - installed_dependencies = self.gi.tools.install_dependencies('CONVERTER_fasta_to_bowtie_color_index') - self.assertTrue(any(True for d in installed_dependencies if d.get('name') == 'bowtie' and d.get('dependency_type') == 'conda'), f"installed_dependencies is {installed_dependencies}") - status = self.gi.tools.uninstall_dependencies('CONVERTER_fasta_to_bowtie_color_index') - self.assertEqual(status[0]['model_class'], 'NullDependency', status) + installed_dependencies = self.gi.tools.install_dependencies("CONVERTER_fasta_to_bowtie_color_index") + self.assertTrue( + any( + True + for d in installed_dependencies + if d.get("name") == "bowtie" and d.get("dependency_type") == "conda" + ), + f"installed_dependencies is {installed_dependencies}", + ) + status = self.gi.tools.uninstall_dependencies("CONVERTER_fasta_to_bowtie_color_index") + self.assertEqual(status[0]["model_class"], "NullDependency", status) - @test_util.skip_unless_tool('CONVERTER_fasta_to_bowtie_color_index') + @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_tool_requirements(self): - tool_requirements = self.gi.tools.requirements('CONVERTER_fasta_to_bowtie_color_index') + tool_requirements = self.gi.tools.requirements("CONVERTER_fasta_to_bowtie_color_index") self.assertTrue( - any(True for tr in tool_requirements if {'dependency_type', 'version'} <= set(tr.keys()) and tr.get('name') == 'bowtie'), + any( + True + for tr in tool_requirements + if {"dependency_type", "version"} <= set(tr.keys()) and tr.get("name") == "bowtie" + ), f"tool_requirements is {tool_requirements}", ) - @test_util.skip_unless_tool('CONVERTER_fasta_to_bowtie_color_index') + @test_util.skip_unless_tool("CONVERTER_fasta_to_bowtie_color_index") def test_reload(self): - response = self.gi.tools.reload('CONVERTER_fasta_to_bowtie_color_index') + response = self.gi.tools.reload("CONVERTER_fasta_to_bowtie_color_index") self.assertIsInstance(response, dict) - self.assertIn('message', response) - self.assertIn('id', response['message']) + self.assertIn("message", response) + self.assertIn("id", response["message"]) - @test_util.skip_unless_tool('sra_source') + @test_util.skip_unless_tool("sra_source") def test_get_citations(self): - citations = self.gi.tools.get_citations('sra_source') + citations = self.gi.tools.get_citations("sra_source") self.assertEqual(len(citations), 2) def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey="?"): self.assertEqual(len(tool_output["outputs"]), 1) - output = tool_output['outputs'][0] - self.assertEqual(output['name'], file_name) + output = tool_output["outputs"][0] + self.assertEqual(output["name"], file_name) expected_contents = open(fn, "rb").read() self._wait_and_verify_dataset(output["id"], expected_contents) self.assertEqual(output["genome_build"], expected_dbkey) @@ -170,4 +171,4 @@ def _wait_for_and_verify_upload(self, tool_output, file_name, fn, expected_dbkey def test_get_tool_model(self): history_id = self.gi.histories.create_history(name="test_run_random_lines history")["id"] tool_model = self.gi.tools.build(tool_id="random_lines1", history_id=history_id) - self.assertEqual(len(tool_model['inputs']), 3) + self.assertEqual(len(tool_model["inputs"]), 3) diff --git 
a/bioblend/_tests/TestGalaxyUsers.py b/bioblend/_tests/TestGalaxyUsers.py index 2dfabaada..c20b8349f 100644 --- a/bioblend/_tests/TestGalaxyUsers.py +++ b/bioblend/_tests/TestGalaxyUsers.py @@ -7,84 +7,84 @@ class TestGalaxyUsers(GalaxyTestBase.GalaxyTestBase): - def test_get_users(self): users = self.gi.users.get_users() for user in users: - self.assertIsNotNone(user['id']) - self.assertIsNotNone(user['email']) + self.assertIsNotNone(user["id"]) + self.assertIsNotNone(user["email"]) def test_show_user(self): current_user = self.gi.users.get_current_user() - user = self.gi.users.show_user(current_user['id']) - self.assertEqual(user['id'], current_user['id']) - self.assertEqual(user['username'], current_user['username']) - self.assertEqual(user['email'], current_user['email']) + user = self.gi.users.show_user(current_user["id"]) + self.assertEqual(user["id"], current_user["id"]) + self.assertEqual(user["username"], current_user["username"]) + self.assertEqual(user["email"], current_user["email"]) # The 2 following tests randomly fail -# self.assertEqual(user['nice_total_disk_usage'], current_user['nice_total_disk_usage']) -# self.assertEqual(user['total_disk_usage'], current_user['total_disk_usage']) + + # self.assertEqual(user['nice_total_disk_usage'], current_user['nice_total_disk_usage']) + # self.assertEqual(user['total_disk_usage'], current_user['total_disk_usage']) def test_create_remote_user(self): # WARNING: only admins can create users! # WARNING: Users cannot be purged through the Galaxy API, so execute # this test only on a disposable Galaxy instance! - if not self.gi.config.get_config()['use_remote_user']: - self.skipTest('This Galaxy instance is not configured to use remote users') - new_user_email = 'newuser@example.org' + if not self.gi.config.get_config()["use_remote_user"]: + self.skipTest("This Galaxy instance is not configured to use remote users") + new_user_email = "newuser@example.org" user = self.gi.users.create_remote_user(new_user_email) - self.assertEqual(user['email'], new_user_email) - if self.gi.config.get_config()['allow_user_deletion']: - deleted_user = self.gi.users.delete_user(user['id']) - self.assertEqual(deleted_user['email'], new_user_email) - self.assertTrue(deleted_user['deleted']) + self.assertEqual(user["email"], new_user_email) + if self.gi.config.get_config()["allow_user_deletion"]: + deleted_user = self.gi.users.delete_user(user["id"]) + self.assertEqual(deleted_user["email"], new_user_email) + self.assertTrue(deleted_user["deleted"]) def test_create_local_user(self): # WARNING: only admins can create users! # WARNING: Users cannot be purged through the Galaxy API, so execute # this test only on a disposable Galaxy instance! 
- if self.gi.config.get_config()['use_remote_user']: - self.skipTest('This Galaxy instance is not configured to use local users') - new_user_email = 'newuser@example.org' - username = 'newuser' - password = 'secret' + if self.gi.config.get_config()["use_remote_user"]: + self.skipTest("This Galaxy instance is not configured to use local users") + new_user_email = "newuser@example.org" + username = "newuser" + password = "secret" user = self.gi.users.create_local_user(username, new_user_email, password) - self.assertEqual(user['username'], username) - self.assertEqual(user['email'], new_user_email) + self.assertEqual(user["username"], username) + self.assertEqual(user["email"], new_user_email) # test a BioBlend GalaxyInstance can be created using username+password user_gi = bioblend.galaxy.GalaxyInstance(url=self.gi.base_url, email=new_user_email, password=password) - self.assertEqual(user_gi.users.get_current_user()['email'], new_user_email) + self.assertEqual(user_gi.users.get_current_user()["email"], new_user_email) # test deletion - if self.gi.config.get_config()['allow_user_deletion']: - deleted_user = self.gi.users.delete_user(user['id']) - self.assertEqual(deleted_user['email'], new_user_email) - self.assertTrue(deleted_user['deleted']) + if self.gi.config.get_config()["allow_user_deletion"]: + deleted_user = self.gi.users.delete_user(user["id"]) + self.assertEqual(deleted_user["email"], new_user_email) + self.assertTrue(deleted_user["deleted"]) def test_get_current_user(self): user = self.gi.users.get_current_user() - self.assertIsNotNone(user['id']) - self.assertIsNotNone(user['username']) - self.assertIsNotNone(user['email']) - self.assertIsNotNone(user['nice_total_disk_usage']) - self.assertIsNotNone(user['total_disk_usage']) + self.assertIsNotNone(user["id"]) + self.assertIsNotNone(user["username"]) + self.assertIsNotNone(user["email"]) + self.assertIsNotNone(user["nice_total_disk_usage"]) + self.assertIsNotNone(user["total_disk_usage"]) def test_update_user(self): # WARNING: only admins can create users! # WARNING: Users cannot be purged through the Galaxy API, so execute # this test only on a disposable Galaxy instance! 
- if self.gi.config.get_config()['use_remote_user']: - self.skipTest('This Galaxy instance is not configured to use local users') - new_user_email = 'newuser2@example.org' - user = self.gi.users.create_local_user('newuser2', new_user_email, 'secret') - self.assertEqual(user['username'], 'newuser2') - self.assertEqual(user['email'], new_user_email) + if self.gi.config.get_config()["use_remote_user"]: + self.skipTest("This Galaxy instance is not configured to use local users") + new_user_email = "newuser2@example.org" + user = self.gi.users.create_local_user("newuser2", new_user_email, "secret") + self.assertEqual(user["username"], "newuser2") + self.assertEqual(user["email"], new_user_email) - updated_user_email = 'updateduser@example.org' - updated_username = 'updateduser' - user_id = user['id'] + updated_user_email = "updateduser@example.org" + updated_username = "updateduser" + user_id = user["id"] self.gi.users.update_user(user_id, username=updated_username, email=updated_user_email) user = self.gi.users.show_user(user_id) - self.assertEqual(user['username'], updated_username) - self.assertEqual(user['email'], updated_user_email) + self.assertEqual(user["username"], updated_username) + self.assertEqual(user["email"], updated_user_email) - if self.gi.config.get_config()['allow_user_deletion']: - self.gi.users.delete_user(user['id']) + if self.gi.config.get_config()["allow_user_deletion"]: + self.gi.users.delete_user(user["id"]) diff --git a/bioblend/_tests/TestGalaxyWorkflows.py b/bioblend/_tests/TestGalaxyWorkflows.py index 4f73e8d88..f5a57ff91 100644 --- a/bioblend/_tests/TestGalaxyWorkflows.py +++ b/bioblend/_tests/TestGalaxyWorkflows.py @@ -5,15 +5,17 @@ import time from bioblend import ConnectionError -from . import GalaxyTestBase, test_util +from . 
import ( + GalaxyTestBase, + test_util, +) class TestGalaxyWorkflows(GalaxyTestBase.GalaxyTestBase): - @test_util.skip_unless_tool("cat1") @test_util.skip_unless_tool("cat") def test_workflow_scheduling(self): - path = test_util.get_abspath(os.path.join('data', 'test_workflow_pause.ga')) + path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga")) workflow = self.gi.workflows.import_workflow_from_local_path(path) workflow_id = workflow["id"] history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"] @@ -23,14 +25,14 @@ def test_workflow_scheduling(self): # Try invalid invocation (no input) with self.assertRaises(ConnectionError): - self.gi.workflows.invoke_workflow(workflow['id']) + self.gi.workflows.invoke_workflow(workflow["id"]) dataset1_id = self._test_dataset(history_id) invocation = self.gi.workflows.invoke_workflow( workflow["id"], inputs={"0": {"src": "hda", "id": dataset1_id}}, ) - self.assertEqual(invocation['state'], 'new') + self.assertEqual(invocation["state"], "new") invocation_id = invocation["id"] invocations = self.gi.workflows.get_invocations(workflow_id) self.assertEqual(len(invocations), 1) @@ -43,15 +45,16 @@ def invocation_steps_by_order_index(): for _ in range(20): if 2 in invocation_steps_by_order_index(): break - time.sleep(.5) + time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation['state'], "ready") + self.assertEqual(invocation["state"], "ready") steps = invocation_steps_by_order_index() pause_step = steps[2] self.assertIsNone( - self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"]) + self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"] + ) self.gi.workflows.run_invocation_step_action(workflow_id, invocation_id, pause_step["id"], action=True) self.assertTrue(self.gi.workflows.show_invocation_step(workflow_id, invocation_id, pause_step["id"])["action"]) for _ in range(20): @@ -59,34 +62,32 @@ def invocation_steps_by_order_index(): if invocation["state"] == "scheduled": break - time.sleep(.5) + time.sleep(0.5) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) self.assertEqual(invocation["state"], "scheduled") - @test_util.skip_unless_galaxy('release_19.01') + @test_util.skip_unless_galaxy("release_19.01") def test_invoke_workflow_parameters_normalized(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns_subworkflow.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns_subworkflow.ga")) workflow_id = self.gi.workflows.import_workflow_from_local_path(path)["id"] history_id = self.gi.histories.create_history(name="TestWorkflowInvokeParametersNormalized")["id"] dataset_id = self._test_dataset(history_id) with self.assertRaises(ConnectionError): self.gi.workflows.invoke_workflow( - workflow_id, - inputs={'0': {'src': 'hda', 'id': dataset_id}}, - params={'1': {'1|2': 'comma'}} + workflow_id, inputs={"0": {"src": "hda", "id": dataset_id}}, params={"1": {"1|2": "comma"}} ) self.gi.workflows.invoke_workflow( workflow_id, - inputs={'0': {'src': 'hda', 'id': dataset_id}}, - params={'1': {'1|2': 'comma'}}, - parameters_normalized=True + inputs={"0": {"src": "hda", "id": dataset_id}}, + params={"1": {"1|2": "comma"}}, + parameters_normalized=True, ) @test_util.skip_unless_tool("cat1") @test_util.skip_unless_tool("cat") def test_cancelling_workflow_scheduling(self): - path = test_util.get_abspath(os.path.join('data', 
'test_workflow_pause.ga')) + path = test_util.get_abspath(os.path.join("data", "test_workflow_pause.ga")) workflow = self.gi.workflows.import_workflow_from_local_path(path) workflow_id = workflow["id"] history_id = self.gi.histories.create_history(name="TestWorkflowState")["id"] @@ -105,27 +106,27 @@ def test_cancelling_workflow_scheduling(self): self.assertEqual(invocations[0]["id"], invocation_id) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertIn(invocation['state'], ['new', 'ready']) + self.assertIn(invocation["state"], ["new", "ready"]) self.gi.workflows.cancel_invocation(workflow_id, invocation_id) invocation = self.gi.workflows.show_invocation(workflow_id, invocation_id) - self.assertEqual(invocation['state'], 'cancelled') + self.assertEqual(invocation["state"], "cancelled") def test_import_export_workflow_from_local_path(self): with self.assertRaises(TypeError): self.gi.workflows.import_workflow_from_local_path(None) - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path) self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf['name'], 'paste_columns') - self.assertTrue(imported_wf['url'].startswith('/api/workflows/')) - self.assertFalse(imported_wf['deleted']) - self.assertFalse(imported_wf['published']) + self.assertEqual(imported_wf["name"], "paste_columns") + self.assertTrue(imported_wf["url"].startswith("/api/workflows/")) + self.assertFalse(imported_wf["deleted"]) + self.assertFalse(imported_wf["published"]) with self.assertRaises(TypeError): self.gi.workflows.export_workflow_to_local_path(None, None, None) - export_dir = tempfile.mkdtemp(prefix='bioblend_test_') + export_dir = tempfile.mkdtemp(prefix="bioblend_test_") try: - self.gi.workflows.export_workflow_to_local_path(imported_wf['id'], export_dir) + self.gi.workflows.export_workflow_to_local_path(imported_wf["id"], export_dir) dir_contents = os.listdir(export_dir) self.assertEqual(len(dir_contents), 1) export_path = os.path.join(export_dir, dir_contents[0]) @@ -136,152 +137,154 @@ def test_import_export_workflow_from_local_path(self): self.assertIsInstance(exported_wf_dict, dict) def test_import_publish_workflow_from_local_path(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) imported_wf = self.gi.workflows.import_workflow_from_local_path(path, publish=True) self.assertIsInstance(imported_wf, dict) - self.assertFalse(imported_wf['deleted']) - self.assertTrue(imported_wf['published']) + self.assertFalse(imported_wf["deleted"]) + self.assertTrue(imported_wf["published"]) def test_import_export_workflow_dict(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) with open(path) as f: wf_dict = json.load(f) imported_wf = self.gi.workflows.import_workflow_dict(wf_dict) self.assertIsInstance(imported_wf, dict) - self.assertEqual(imported_wf['name'], 'paste_columns') - self.assertTrue(imported_wf['url'].startswith('/api/workflows/')) - self.assertFalse(imported_wf['deleted']) - self.assertFalse(imported_wf['published']) - exported_wf_dict = self.gi.workflows.export_workflow_dict(imported_wf['id']) + self.assertEqual(imported_wf["name"], "paste_columns") + 
self.assertTrue(imported_wf["url"].startswith("/api/workflows/"))
+        self.assertFalse(imported_wf["deleted"])
+        self.assertFalse(imported_wf["published"])
+        exported_wf_dict = self.gi.workflows.export_workflow_dict(imported_wf["id"])
         self.assertIsInstance(exported_wf_dict, dict)
 
     def test_import_publish_workflow_dict(self):
-        path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga'))
+        path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
         with open(path) as f:
             wf_dict = json.load(f)
         imported_wf = self.gi.workflows.import_workflow_dict(wf_dict, publish=True)
         self.assertIsInstance(imported_wf, dict)
-        self.assertFalse(imported_wf['deleted'])
-        self.assertTrue(imported_wf['published'])
+        self.assertFalse(imported_wf["deleted"])
+        self.assertTrue(imported_wf["published"])
 
     def test_get_workflows(self):
-        path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga'))
+        path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
         workflow = self.gi.workflows.import_workflow_from_local_path(path)
         all_wfs = self.gi.workflows.get_workflows()
         self.assertGreater(len(all_wfs), 0)
-        wfs_with_name = self.gi.workflows.get_workflows(name=workflow['name'])
-        wf_list = [w for w in wfs_with_name if w['id'] == workflow['id']]
+        wfs_with_name = self.gi.workflows.get_workflows(name=workflow["name"])
+        wf_list = [w for w in wfs_with_name if w["id"] == workflow["id"]]
         self.assertEqual(len(wf_list), 1)
         wf_data = wf_list[0]
-        if 'create_time' in workflow:  # Galaxy >= 20.01
-            self.assertEqual(wf_data['create_time'], workflow['create_time'])
+        if "create_time" in workflow:  # Galaxy >= 20.01
+            self.assertEqual(wf_data["create_time"], workflow["create_time"])
         else:  # Galaxy < 20.01
-            self.assertEqual(wf_data['url'], workflow['url'])
+            self.assertEqual(wf_data["url"], workflow["url"])
 
     def test_show_workflow(self):
-        path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga'))
+        path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
         wf = self.gi.workflows.import_workflow_from_local_path(path)
-        wf_data = self.gi.workflows.show_workflow(wf['id'])
-        self.assertEqual(wf_data['id'], wf['id'])
-        self.assertEqual(wf_data['name'], wf['name'])
-        self.assertEqual(wf_data['url'], wf['url'])
-        self.assertEqual(len(wf_data['steps']), 3)
-        self.assertIsNotNone(wf_data['inputs'])
-
-    @test_util.skip_unless_galaxy('release_18.05')
+        wf_data = self.gi.workflows.show_workflow(wf["id"])
+        self.assertEqual(wf_data["id"], wf["id"])
+        self.assertEqual(wf_data["name"], wf["name"])
+        self.assertEqual(wf_data["url"], wf["url"])
+        self.assertEqual(len(wf_data["steps"]), 3)
+        self.assertIsNotNone(wf_data["inputs"])
+
+    @test_util.skip_unless_galaxy("release_18.05")
     def test_update_workflow_name(self):
-        path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga'))
+        path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
         wf = self.gi.workflows.import_workflow_from_local_path(path)
-        new_name = 'new name'
-        updated_wf = self.gi.workflows.update_workflow(wf['id'], name=new_name)
-        self.assertEqual(updated_wf['name'], new_name)
+        new_name = "new name"
+        updated_wf = self.gi.workflows.update_workflow(wf["id"], name=new_name)
+        self.assertEqual(updated_wf["name"], new_name)
 
-    @test_util.skip_unless_galaxy('release_21.01')
+    @test_util.skip_unless_galaxy("release_21.01")
     def test_update_workflow_published(self):
-        path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga'))
+        path = test_util.get_abspath(os.path.join("data", "paste_columns.ga"))
         wf 
= self.gi.workflows.import_workflow_from_local_path(path) - self.assertFalse(wf['published']) - updated_wf = self.gi.workflows.update_workflow(wf['id'], published=True) - self.assertTrue(updated_wf['published']) - updated_wf = self.gi.workflows.update_workflow(wf['id'], published=False) - self.assertFalse(updated_wf['published']) - - @test_util.skip_unless_galaxy('release_19.09') # due to Galaxy bug fixed in https://github.com/galaxyproject/galaxy/pull/9014 + self.assertFalse(wf["published"]) + updated_wf = self.gi.workflows.update_workflow(wf["id"], published=True) + self.assertTrue(updated_wf["published"]) + updated_wf = self.gi.workflows.update_workflow(wf["id"], published=False) + self.assertFalse(updated_wf["published"]) + + @test_util.skip_unless_galaxy( + "release_19.09" + ) # due to Galaxy bug fixed in https://github.com/galaxyproject/galaxy/pull/9014 def test_show_workflow_versions(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - wf_data = self.gi.workflows.show_workflow(wf['id']) - self.assertEqual(wf_data['version'], 0) - new_name = 'new name' - self.gi.workflows.update_workflow(wf['id'], name=new_name) - updated_wf = self.gi.workflows.show_workflow(wf['id']) - self.assertEqual(updated_wf['name'], new_name) - self.assertEqual(updated_wf['version'], 1) - updated_wf = self.gi.workflows.show_workflow(wf['id'], version=0) - self.assertEqual(updated_wf['name'], 'paste_columns') - self.assertEqual(updated_wf['version'], 0) - updated_wf = self.gi.workflows.show_workflow(wf['id'], version=1) - self.assertEqual(updated_wf['name'], new_name) - self.assertEqual(updated_wf['version'], 1) - - @test_util.skip_unless_galaxy('release_19.09') + wf_data = self.gi.workflows.show_workflow(wf["id"]) + self.assertEqual(wf_data["version"], 0) + new_name = "new name" + self.gi.workflows.update_workflow(wf["id"], name=new_name) + updated_wf = self.gi.workflows.show_workflow(wf["id"]) + self.assertEqual(updated_wf["name"], new_name) + self.assertEqual(updated_wf["version"], 1) + updated_wf = self.gi.workflows.show_workflow(wf["id"], version=0) + self.assertEqual(updated_wf["name"], "paste_columns") + self.assertEqual(updated_wf["version"], 0) + updated_wf = self.gi.workflows.show_workflow(wf["id"], version=1) + self.assertEqual(updated_wf["name"], new_name) + self.assertEqual(updated_wf["version"], 1) + + @test_util.skip_unless_galaxy("release_19.09") def test_extract_workflow_from_history(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - history_id = self.gi.histories.create_history(name="test_wf_invocation")['id'] + history_id = self.gi.histories.create_history(name="test_wf_invocation")["id"] dataset1_id = self._test_dataset(history_id) - dataset = {'src': 'hda', 'id': dataset1_id} + dataset = {"src": "hda", "id": dataset1_id} invocation_id = self.gi.workflows.invoke_workflow( - wf['id'], - inputs={'Input 1': dataset, 'Input 2': dataset}, + wf["id"], + inputs={"Input 1": dataset, "Input 2": dataset}, history_id=history_id, - inputs_by='name', - )['id'] + inputs_by="name", + )["id"] invocation = self.gi.invocations.wait_for_invocation(invocation_id) - wf1 = self.gi.workflows.show_workflow(invocation['workflow_id']) - datasets = 
self.gi.histories.show_history(invocation['history_id'], contents=True) - dataset_hids = [dataset['hid'] for dataset in datasets] - job_ids = [step['job_id'] for step in invocation['steps'] if step['job_id']] + wf1 = self.gi.workflows.show_workflow(invocation["workflow_id"]) + datasets = self.gi.histories.show_history(invocation["history_id"], contents=True) + dataset_hids = [dataset["hid"] for dataset in datasets] + job_ids = [step["job_id"] for step in invocation["steps"] if step["job_id"]] for job_id in job_ids: self.gi.jobs.wait_for_job(job_id) - new_workflow_name = 'My new workflow!' + new_workflow_name = "My new workflow!" wf2 = self.gi.workflows.extract_workflow_from_history( - history_id=invocation['history_id'], + history_id=invocation["history_id"], workflow_name=new_workflow_name, job_ids=job_ids, dataset_hids=dataset_hids, ) - wf2 = self.gi.workflows.show_workflow(wf2['id']) - self.assertEqual(wf2['name'], new_workflow_name) - self.assertEqual(len(wf1['steps']), len(wf2['steps'])) - for i in range(len(wf1['steps'])): - self.assertEqual(wf1['steps'][str(i)]['type'], wf2['steps'][str(i)]['type']) - self.assertEqual(wf1['steps'][str(i)]['tool_id'], wf2['steps'][str(i)]['tool_id']) - - @test_util.skip_unless_galaxy('release_18.09') + wf2 = self.gi.workflows.show_workflow(wf2["id"]) + self.assertEqual(wf2["name"], new_workflow_name) + self.assertEqual(len(wf1["steps"]), len(wf2["steps"])) + for i in range(len(wf1["steps"])): + self.assertEqual(wf1["steps"][str(i)]["type"], wf2["steps"][str(i)]["type"]) + self.assertEqual(wf1["steps"][str(i)]["tool_id"], wf2["steps"][str(i)]["tool_id"]) + + @test_util.skip_unless_galaxy("release_18.09") def test_show_versions(self): - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - versions = self.gi.workflows.show_versions(wf['id']) + versions = self.gi.workflows.show_versions(wf["id"]) self.assertEqual(len(versions), 1) version = versions[0] - self.assertEqual(version['version'], 0) - self.assertTrue('update_time' in version) - self.assertTrue('steps' in version) + self.assertEqual(version["version"], 0) + self.assertTrue("update_time" in version) + self.assertTrue("steps" in version) - @test_util.skip_unless_galaxy('release_21.01') + @test_util.skip_unless_galaxy("release_21.01") def test_refactor_workflow(self): actions = [ {"action_type": "add_input", "type": "data", "label": "foo"}, {"action_type": "update_step_label", "label": "bar", "step": {"label": "foo"}}, ] - path = test_util.get_abspath(os.path.join('data', 'paste_columns.ga')) + path = test_util.get_abspath(os.path.join("data", "paste_columns.ga")) wf = self.gi.workflows.import_workflow_from_local_path(path) - response = self.gi.workflows.refactor_workflow(wf['id'], actions, dry_run=True) - self.assertEqual(len(response['action_executions']), len(actions)) - self.assertEqual(response['dry_run'], True) - updated_steps = response['workflow']['steps'] + response = self.gi.workflows.refactor_workflow(wf["id"], actions, dry_run=True) + self.assertEqual(len(response["action_executions"]), len(actions)) + self.assertEqual(response["dry_run"], True) + updated_steps = response["workflow"]["steps"] self.assertEqual(len(updated_steps), 4) - self.assertEqual({step['label'] for step in updated_steps.values()}, {'bar', None, 'Input 1', 'Input 2'}) + self.assertEqual({step["label"] for step in updated_steps.values()}, {"bar", None, "Input 1", 
"Input 2"}) diff --git a/bioblend/_tests/test_util.py b/bioblend/_tests/test_util.py index 1b28aa739..af0bef8d0 100644 --- a/bioblend/_tests/test_util.py +++ b/bioblend/_tests/test_util.py @@ -14,41 +14,44 @@ def skip_unless_cloudman(): - """ Decorate tests with this to skip the test if CloudMan is not + """Decorate tests with this to skip the test if CloudMan is not configured. """ - if 'BIOBLEND_AMI_ID' not in os.environ: + if "BIOBLEND_AMI_ID" not in os.environ: return unittest.skip(NO_CLOUDMAN_MESSAGE) else: return lambda f: f def skip_unless_galaxy(min_release=None): - """ Decorate tests with this to skip the test if Galaxy is not + """Decorate tests with this to skip the test if Galaxy is not configured. """ if min_release is not None: - galaxy_release = os.environ.get('GALAXY_VERSION', None) - if galaxy_release is not None and galaxy_release != 'dev': - if not galaxy_release.startswith('release_'): + galaxy_release = os.environ.get("GALAXY_VERSION", None) + if galaxy_release is not None and galaxy_release != "dev": + if not galaxy_release.startswith("release_"): raise ValueError("The value of GALAXY_VERSION environment variable should start with 'release_'") - if not min_release.startswith('release_'): + if not min_release.startswith("release_"): raise Exception("min_release should start with 'release_'") if galaxy_release[8:] < min_release[8:]: return unittest.skip(OLD_GALAXY_RELEASE % (galaxy_release, min_release)) - if 'BIOBLEND_GALAXY_URL' not in os.environ: + if "BIOBLEND_GALAXY_URL" not in os.environ: return unittest.skip(NO_GALAXY_MESSAGE) - if 'BIOBLEND_GALAXY_API_KEY' not in os.environ and 'BIOBLEND_GALAXY_MASTER_API_KEY' in os.environ: - galaxy_url = os.environ['BIOBLEND_GALAXY_URL'] - galaxy_master_api_key = os.environ['BIOBLEND_GALAXY_MASTER_API_KEY'] + if "BIOBLEND_GALAXY_API_KEY" not in os.environ and "BIOBLEND_GALAXY_MASTER_API_KEY" in os.environ: + galaxy_url = os.environ["BIOBLEND_GALAXY_URL"] + galaxy_master_api_key = os.environ["BIOBLEND_GALAXY_MASTER_API_KEY"] gi = bioblend.galaxy.GalaxyInstance(galaxy_url, galaxy_master_api_key) - if 'BIOBLEND_GALAXY_USER_EMAIL' in os.environ: - galaxy_user_email = os.environ['BIOBLEND_GALAXY_USER_EMAIL'] + if "BIOBLEND_GALAXY_USER_EMAIL" in os.environ: + galaxy_user_email = os.environ["BIOBLEND_GALAXY_USER_EMAIL"] else: - galaxy_user_email = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(5)) + "@localhost.localdomain" + galaxy_user_email = ( + "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(5)) + + "@localhost.localdomain" + ) galaxy_user_id = None for user in gi.users.get_users(): @@ -67,7 +70,7 @@ def skip_unless_galaxy(min_release=None): new_user = gi.users.create_remote_user(galaxy_user_email) else: galaxy_user = galaxy_user_email.split("@", 1)[0] - galaxy_password = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)) + galaxy_password = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(20)) # Create a new user and get a new API key for her new_user = gi.users.create_local_user(galaxy_user, galaxy_user_email, galaxy_password) @@ -76,23 +79,22 @@ def skip_unless_galaxy(min_release=None): api_key = gi.users.create_user_apikey(galaxy_user_id) os.environ["BIOBLEND_GALAXY_API_KEY"] = api_key - if 'BIOBLEND_GALAXY_API_KEY' not in os.environ: + if "BIOBLEND_GALAXY_API_KEY" not in os.environ: return unittest.skip(NO_GALAXY_MESSAGE) return lambda f: f def skip_unless_tool(tool_id): - """ Decorate a Galaxy test method 
as requiring a specific tool, + """Decorate a Galaxy test method as requiring a specific tool, skip the test case if the tool is unavailable. """ def method_wrapper(method): - def wrapped_method(has_gi, *args, **kwargs): tools = has_gi.gi.tools.get_tools() # In panels by default, so flatten out sections... - tool_ids = [_['id'] for _ in tools] + tool_ids = [_["id"] for _ in tools] if tool_id not in tool_ids: raise unittest.SkipTest(MISSING_TOOL_MESSAGE % tool_id) diff --git a/bioblend/cloudman/__init__.py b/bioblend/cloudman/__init__.py index d9704e04e..61bea98d3 100644 --- a/bioblend/cloudman/__init__.py +++ b/bioblend/cloudman/__init__.py @@ -37,17 +37,21 @@ class GenericVMInstance. All methods to which this decorator is applied :param vm_ready_check_interval: The number of seconds to pause between consecutive calls when polling the VM's ready status. """ + @functools.wraps(func) def wrapper(*args, **kwargs): obj = args[0] - timeout = kwargs.pop('vm_ready_timeout', 300) - interval = kwargs.pop('vm_ready_check_interval', 10) + timeout = kwargs.pop("vm_ready_timeout", 300) + interval = kwargs.pop("vm_ready_check_interval", 10) try: obj.wait_until_instance_ready(timeout, interval) except AttributeError: - raise VMLaunchException("Decorated object does not define a wait_until_instance_ready method." - "Make sure that the object is of type GenericVMInstance.") + raise VMLaunchException( + "Decorated object does not define a wait_until_instance_ready method." + "Make sure that the object is of type GenericVMInstance." + ) return func(*args, **kwargs) + return wrapper @@ -60,25 +64,26 @@ def __str__(self): class CloudManConfig: - - def __init__(self, - access_key=None, - secret_key=None, - cluster_name=None, - image_id=None, - instance_type='m1.medium', - password=None, - cloud_metadata=None, - cluster_type=None, - galaxy_data_option='', - initial_storage_size=10, - key_name='cloudman_key_pair', - security_groups=None, - placement='', - kernel_id=None, - ramdisk_id=None, - block_until_ready=False, - **kwargs): + def __init__( + self, + access_key=None, + secret_key=None, + cluster_name=None, + image_id=None, + instance_type="m1.medium", + password=None, + cloud_metadata=None, + cluster_type=None, + galaxy_data_option="", + initial_storage_size=10, + key_name="cloudman_key_pair", + security_groups=None, + placement="", + kernel_id=None, + ramdisk_id=None, + block_until_ready=False, + **kwargs, + ): """ Initializes a CloudMan launch configuration object. @@ -161,12 +166,20 @@ def __init__(self, method. 
""" if security_groups is None: - security_groups = ['CloudMan'] + security_groups = ["CloudMan"] self.set_connection_parameters(access_key, secret_key, cloud_metadata) self.set_pre_launch_parameters( - cluster_name, image_id, instance_type, - password, kernel_id, ramdisk_id, key_name, security_groups, - placement, block_until_ready) + cluster_name, + image_id, + instance_type, + password, + kernel_id, + ramdisk_id, + key_name, + security_groups, + placement, + block_until_ready, + ) self.set_post_launch_parameters(cluster_type, galaxy_data_option, initial_storage_size) self.set_extra_parameters(**kwargs) @@ -176,11 +189,20 @@ def set_connection_parameters(self, access_key, secret_key, cloud_metadata=None) self.cloud_metadata = cloud_metadata def set_pre_launch_parameters( - self, cluster_name, image_id, instance_type, password, - kernel_id=None, ramdisk_id=None, key_name='cloudman_key_pair', - security_groups=None, placement='', block_until_ready=False): + self, + cluster_name, + image_id, + instance_type, + password, + kernel_id=None, + ramdisk_id=None, + key_name="cloudman_key_pair", + security_groups=None, + placement="", + block_until_ready=False, + ): if security_groups is None: - security_groups = ['CloudMan'] + security_groups = ["CloudMan"] self.cluster_name = cluster_name self.image_id = image_id self.instance_type = instance_type @@ -192,7 +214,7 @@ def set_pre_launch_parameters( self.placement = placement self.block_until_ready = block_until_ready - def set_post_launch_parameters(self, cluster_type=None, galaxy_data_option='', initial_storage_size=10): + def set_post_launch_parameters(self, cluster_type=None, galaxy_data_option="", initial_storage_size=10): self.cluster_type = cluster_type self.galaxy_data_option = galaxy_data_option self.initial_storage_size = initial_storage_size @@ -209,10 +231,10 @@ def default(self, obj): @staticmethod def CustomTypeDecoder(dct): - if '__CloudManConfig__' in dct: - return CloudManConfig(**dct['__CloudManConfig__']) - elif '__Bunch__' in dct: - return Bunch(**dct['__Bunch__']) + if "__CloudManConfig__" in dct: + return CloudManConfig(**dct["__CloudManConfig__"]) + elif "__Bunch__" in dct: + return Bunch(**dct["__Bunch__"]) else: return dct @@ -236,9 +258,9 @@ def validate(self): return "Instance type must not be null" elif self.password is None: return "Password must not be null" - elif self.cluster_type not in [None, 'Test', 'Data', 'Galaxy', 'Shared_cluster']: + elif self.cluster_type not in [None, "Test", "Data", "Galaxy", "Shared_cluster"]: return f"Unrecognized cluster type ({self.cluster_type})" - elif self.galaxy_data_option not in [None, '', 'custom-size', 'transient']: + elif self.galaxy_data_option not in [None, "", "custom-size", "transient"]: return f"Unrecognized galaxy data option ({self.galaxy_data_option})" elif self.key_name is None: return "Key-pair name must not be null" @@ -247,7 +269,6 @@ def validate(self): class GenericVMInstance: - def __init__(self, launcher, launch_result): """ Create an instance of the CloudMan API class, which is to be used when @@ -274,7 +295,7 @@ def instance_id(self): Returns the ID of this instance (e.g., ``i-87ey32dd``) if launch was successful or ``None`` otherwise. """ - return None if self.launch_result is None else self.launch_result['instance_id'] + return None if self.launch_result is None else self.launch_result["instance_id"] @property def key_pair_name(self): @@ -282,7 +303,7 @@ def key_pair_name(self): Returns the name of the key pair used by this instance. 
If instance was not launched properly, returns ``None``. """ - return None if self.launch_result is None else self.launch_result['kp_name'] + return None if self.launch_result is None else self.launch_result["kp_name"] @property def key_pair_material(self): @@ -291,7 +312,7 @@ def key_pair_material(self): if the instance was properly launched and key pair generated; ``None`` otherwise. """ - return None if self.launch_result is None else self.launch_result['kp_material'] + return None if self.launch_result is None else self.launch_result["kp_material"] def get_machine_status(self): """ @@ -309,10 +330,12 @@ def get_machine_status(self): # elif self.host_name: else: - state = {'instance_state': "", - 'public_ip': "", - 'placement': "", - 'error': "No reference to the instance object"} + state = { + "instance_state": "", + "public_ip": "", + "placement": "", + "error": "No reference to the instance object", + } return state def _init_instance(self, host_name): @@ -332,10 +355,10 @@ def wait_until_instance_ready(self, vm_ready_timeout=300, vm_ready_check_interva for time_left in range(vm_ready_timeout, 0, -vm_ready_check_interval): status = self.get_machine_status() - if status['public_ip'] != '' and status['error'] == '': - self._init_instance(status['public_ip']) + if status["public_ip"] != "" and status["error"] == "": + self._init_instance(status["public_ip"]) return - elif status['error'] != '': + elif status["error"] != "": msg = f"Error launching an instance: {status['error']}" bioblend.log.error(msg) raise VMLaunchException(msg) @@ -345,13 +368,10 @@ def wait_until_instance_ready(self, vm_ready_timeout=300, vm_ready_check_interva ) time.sleep(vm_ready_check_interval) - raise VMLaunchException( - f"Waited too long for instance to become ready. Instance Id: {self.instance_id}" - ) + raise VMLaunchException(f"Waited too long for instance to become ready. Instance Id: {self.instance_id}") class CloudManInstance(GenericVMInstance): - def __init__(self, url, password, **kwargs): """ Create an instance of the CloudMan API class, which is to be used when @@ -362,11 +382,11 @@ def __init__(self, url, password, **kwargs): as defined in the user data sent to CloudMan on instance creation. 
""" self.initialized = False - if kwargs.get('launch_result', None) is not None: # Used internally by the launch_instance method - super().__init__(kwargs['launcher'], kwargs['launch_result']) + if kwargs.get("launch_result", None) is not None: # Used internally by the launch_instance method + super().__init__(kwargs["launcher"], kwargs["launch_result"]) else: super().__init__(None, None) - self.config = kwargs.pop('cloudman_config', CloudManConfig()) + self.config = kwargs.pop("cloudman_config", CloudManConfig()) self.password = password or self.config.password self.use_ssl = kwargs.get("use_ssl", self.config.kwargs.get("use_ssl", False)) self.verify = kwargs.get("verify", self.config.kwargs.get("verify", False)) @@ -389,7 +409,11 @@ def _update_host_name(self, host_name): def _init_instance(self, hostname): super()._init_instance(hostname) if self.config.cluster_type: - self.initialize(self.config.cluster_type, galaxy_data_option=self.config.galaxy_data_option, initial_storage_size=self.config.initial_storage_size) + self.initialize( + self.config.cluster_type, + galaxy_data_option=self.config.galaxy_data_option, + initial_storage_size=self.config.initial_storage_size, + ) def _set_url(self, url): """ @@ -397,7 +421,7 @@ def _set_url(self, url): """ if url: # Make sure the URL scheme is defined (otherwise requests will not work) - if not url.lower().startswith('http'): + if not url.lower().startswith("http"): # Check to see whether https scheme is required if self.use_ssl: url = "https://" + url @@ -422,7 +446,7 @@ def cloudman_url(self): Returns the URL for accessing this instance of CloudMan. """ if self.url: - return self.url + '/cloud' + return self.url + "/cloud" return None @staticmethod @@ -436,18 +460,23 @@ def launch_instance(cfg, **kwargs): """ validation_result = cfg.validate() if validation_result is not None: - raise VMLaunchException( - f"Invalid CloudMan configuration provided: {validation_result}" - ) + raise VMLaunchException(f"Invalid CloudMan configuration provided: {validation_result}") launcher = CloudManLauncher(cfg.access_key, cfg.secret_key, cfg.cloud_metadata) result = launcher.launch( - cfg.cluster_name, cfg.image_id, cfg.instance_type, cfg.password, - cfg.kernel_id, cfg.ramdisk_id, cfg.key_name, cfg.security_groups, - cfg.placement, **cfg.kwargs) - if result['error'] is not None: + cfg.cluster_name, + cfg.image_id, + cfg.instance_type, + cfg.password, + cfg.kernel_id, + cfg.ramdisk_id, + cfg.key_name, + cfg.security_groups, + cfg.placement, + **cfg.kwargs, + ) + if result["error"] is not None: raise VMLaunchException(f"Error launching cloudman instance: {result['error']}") - instance = CloudManInstance(None, None, launcher=launcher, - launch_result=result, cloudman_config=cfg) + instance = CloudManInstance(None, None, launcher=launcher, launch_result=result, cloudman_config=cfg) if cfg.block_until_ready and cfg.cluster_type: instance.get_status() # this will indirect result in initialize being invoked return instance @@ -462,16 +491,16 @@ def update(self): """ ms = self.get_machine_status() # Check if the machine is running and update IP and state - self.vm_status = ms.get('instance_state', None) - self.vm_error = ms.get('error', None) - public_ip = ms.get('public_ip', None) + self.vm_status = ms.get("instance_state", None) + self.vm_error = ms.get("error", None) + public_ip = ms.get("public_ip", None) # Update url if we don't have it or is different than what we have if not self.url and (public_ip and self.url != public_ip): self._set_url(public_ip) # See if 
the cluster has been initialized - if self.vm_status == 'running' or self.url: + if self.vm_status == "running" or self.url: ct = self.get_cluster_type() - if ct.get('cluster_type', None): + if ct.get("cluster_type", None): self.initialized = True if self.vm_error: bioblend.log.error(self.vm_error) @@ -484,12 +513,12 @@ def get_cloudman_version(self): """ try: r = self._make_get_request("cloudman_version") - return r['version'] + return r["version"] except Exception: return 1 @block_until_vm_ready - def initialize(self, cluster_type, galaxy_data_option='', initial_storage_size=None, shared_bucket=None): + def initialize(self, cluster_type, galaxy_data_option="", initial_storage_size=None, shared_bucket=None): """ Initialize CloudMan platform. This needs to be done before the cluster can be used. @@ -502,19 +531,21 @@ def initialize(self, cluster_type, galaxy_data_option='', initial_storage_size=N r = self._make_get_request( "initialize_cluster", parameters={ - 'startup_opt': cluster_type, - 'g_pss': initial_storage_size, - 'shared_bucket': shared_bucket - }) + "startup_opt": cluster_type, + "g_pss": initial_storage_size, + "shared_bucket": shared_bucket, + }, + ) else: r = self._make_get_request( "initialize_cluster", parameters={ - 'startup_opt': cluster_type, - 'galaxy_data_option': galaxy_data_option, - 'pss': initial_storage_size, - 'shared_bucket': shared_bucket - }) + "startup_opt": cluster_type, + "galaxy_data_option": galaxy_data_option, + "pss": initial_storage_size, + "shared_bucket": shared_bucket, + }, + ) self.initialized = True return r @@ -526,7 +557,7 @@ def get_cluster_type(self): for example: ``{'cluster_type': 'Test'}``. """ cluster_type = self._make_get_request("cluster_type") - if cluster_type['cluster_type']: + if cluster_type["cluster_type"]: self.initialized = True return cluster_type @@ -543,7 +574,7 @@ def get_nodes(self): Get a list of nodes currently running in this CloudMan cluster. """ instance_feed_json = self._make_get_request("instance_feed_json") - return instance_feed_json['instances'] + return instance_feed_json["instances"] @block_until_vm_ready def get_cluster_size(self): @@ -567,7 +598,7 @@ def get_master_ip(self): Returns the public IP of the master node in this CloudMan cluster """ status_json = self.get_static_state() - return status_json['master_ip'] + return status_json["master_ip"] @block_until_vm_ready def get_master_id(self): @@ -575,10 +606,10 @@ def get_master_id(self): Returns the instance ID of the master node in this CloudMan cluster """ status_json = self.get_static_state() - return status_json['master_id'] + return status_json["master_id"] @block_until_vm_ready - def add_nodes(self, num_nodes, instance_type='', spot_price=''): + def add_nodes(self, num_nodes, instance_type="", spot_price=""): """ Add a number of worker nodes to the cluster, optionally specifying the type for new instances. If ``instance_type`` is not specified, @@ -590,9 +621,7 @@ def add_nodes(self, num_nodes, instance_type='', spot_price=''): price for Spot instances, thus turning this request for more instances into a Spot request. 
""" - payload = {'number_nodes': num_nodes, - 'instance_type': instance_type, - 'spot_price': spot_price} + payload = {"number_nodes": num_nodes, "instance_type": instance_type, "spot_price": spot_price} return self._make_get_request("add_instances", parameters=payload) @block_until_vm_ready @@ -604,7 +633,7 @@ def remove_nodes(self, num_nodes, force=False): The ``force`` parameter (defaulting to False), is a boolean indicating whether the nodes should be forcibly removed rather than gracefully removed. """ - payload = {'number_nodes': num_nodes, 'force_termination': force} + payload = {"number_nodes": num_nodes, "force_termination": force} result = self._make_get_request("remove_instances", parameters=payload) return result @@ -619,7 +648,7 @@ def remove_node(self, instance_id, force=False): than gracefully removed. """ - payload = {'instance_id': instance_id} + payload = {"instance_id": instance_id} return self._make_get_request("remove_instance", parameters=payload) @block_until_vm_ready @@ -630,7 +659,7 @@ def reboot_node(self, instance_id): The ``instance_id`` parameter defines the ID, as a string, of a worker node to reboot. """ - payload = {'instance_id': instance_id} + payload = {"instance_id": instance_id} return self._make_get_request("reboot_instance", parameters=payload) @block_until_vm_ready @@ -638,7 +667,7 @@ def autoscaling_enabled(self): """ Returns a boolean indicating whether autoscaling is enabled. """ - return bool(self.get_status()['autoscaling']['use_autoscaling']) + return bool(self.get_status()["autoscaling"]["use_autoscaling"]) @block_until_vm_ready def enable_autoscaling(self, minimum_nodes=0, maximum_nodes=19): @@ -650,7 +679,7 @@ def enable_autoscaling(self, minimum_nodes=0, maximum_nodes=19): (default is 0) and ``maximum_nodes`` (default is 19) parameters. """ if not self.autoscaling_enabled(): - payload = {'as_min': minimum_nodes, 'as_max': maximum_nodes} + payload = {"as_min": minimum_nodes, "as_max": maximum_nodes} self._make_get_request("toggle_autoscaling", parameters=payload) @block_until_vm_ready @@ -672,7 +701,7 @@ def adjust_autoscaling(self, minimum_nodes=None, maximum_nodes=None): not provided then its configuration value does not change. """ if self.autoscaling_enabled(): - payload = {'as_min_adj': minimum_nodes, 'as_max_adj': maximum_nodes} + payload = {"as_min_adj": minimum_nodes, "as_max_adj": maximum_nodes} self._make_get_request("adjust_autoscaling", parameters=payload) @block_until_vm_ready @@ -682,7 +711,7 @@ def is_master_execution_host(self): """ status = self._make_get_request("get_all_services_status") - return bool(status['master_is_exec_host']) + return bool(status["master_is_exec_host"]) @block_until_vm_ready def set_master_as_execution_host(self, enable): @@ -698,9 +727,9 @@ def get_galaxy_state(self): """ Get the current status of Galaxy running on the cluster. """ - payload = {'srvc': 'Galaxy'} + payload = {"srvc": "Galaxy"} status = self._make_get_request("get_srvc_status", parameters=payload) - return {'status': status['status']} + return {"status": status["status"]} @block_until_vm_ready def terminate(self, terminate_master_instance=True, delete_cluster=False): @@ -713,10 +742,8 @@ def terminate(self, terminate_master_instance=True, delete_cluster=False): Deleting a cluster is irreversible - all of the data will be permanently deleted. 
""" - payload = {'terminate_master_instance': terminate_master_instance, - 'delete_cluster': delete_cluster} - result = self._make_get_request("kill_all", parameters=payload, - timeout=15) + payload = {"terminate_master_instance": terminate_master_instance, "delete_cluster": delete_cluster} + result = self._make_get_request("kill_all", parameters=payload, timeout=15) return result def _make_get_request(self, url, parameters=None, timeout=None): @@ -729,7 +756,7 @@ def _make_get_request(self, url, parameters=None, timeout=None): """ if parameters is None: parameters = {} - req_url = '/'.join((self.cloudman_url, 'root', url)) + req_url = "/".join((self.cloudman_url, "root", url)) r = requests.get( req_url, params=parameters, diff --git a/bioblend/cloudman/launch.py b/bioblend/cloudman/launch.py index 4a1f33810..dd8649ccf 100644 --- a/bioblend/cloudman/launch.py +++ b/bioblend/cloudman/launch.py @@ -13,8 +13,15 @@ import boto import yaml from boto.ec2.regioninfo import RegionInfo -from boto.exception import EC2ResponseError, S3ResponseError -from boto.s3.connection import OrdinaryCallingFormat, S3Connection, SubdomainCallingFormat +from boto.exception import ( + EC2ResponseError, + S3ResponseError, +) +from boto.s3.connection import ( + OrdinaryCallingFormat, + S3Connection, + SubdomainCallingFormat, +) import bioblend from bioblend.util import Bunch @@ -24,7 +31,7 @@ # bioblend.logging.getLogger('boto').setLevel(bioblend.logging.CRITICAL) # Uncomment the following line if logging at the prompt is desired # bioblend.set_stream_logger(__name__) -def instance_types(cloud_name='generic'): +def instance_types(cloud_name="generic"): """ Return a list of dictionaries containing details about the available instance types for the given `cloud_name`. @@ -39,37 +46,24 @@ def instance_types(cloud_name='generic'): contain the following keys: `name`, `model`, and `description`. 
""" instance_list = [] - if cloud_name.lower() == 'aws': - instance_list.append({"model": "c3.large", - "name": "Compute optimized Large", - "description": "2 vCPU/4GB RAM"}) - instance_list.append({"model": "c3.2xlarge", - "name": "Compute optimized 2xLarge", - "description": "8 vCPU/15GB RAM"}) - instance_list.append({"model": "c3.8xlarge", - "name": "Compute optimized 8xLarge", - "description": "32 vCPU/60GB RAM"}) - elif cloud_name.lower() in ['nectar', 'generic']: - instance_list.append({"model": "m1.small", - "name": "Small", - "description": "1 vCPU / 4GB RAM"}) - instance_list.append({"model": "m1.medium", - "name": "Medium", - "description": "2 vCPU / 8GB RAM"}) - instance_list.append({"model": "m1.large", - "name": "Large", - "description": "4 vCPU / 16GB RAM"}) - instance_list.append({"model": "m1.xlarge", - "name": "Extra Large", - "description": "8 vCPU / 32GB RAM"}) - instance_list.append({"model": "m1.xxlarge", - "name": "Extra-extra Large", - "description": "16 vCPU / 64GB RAM"}) + if cloud_name.lower() == "aws": + instance_list.append({"model": "c3.large", "name": "Compute optimized Large", "description": "2 vCPU/4GB RAM"}) + instance_list.append( + {"model": "c3.2xlarge", "name": "Compute optimized 2xLarge", "description": "8 vCPU/15GB RAM"} + ) + instance_list.append( + {"model": "c3.8xlarge", "name": "Compute optimized 8xLarge", "description": "32 vCPU/60GB RAM"} + ) + elif cloud_name.lower() in ["nectar", "generic"]: + instance_list.append({"model": "m1.small", "name": "Small", "description": "1 vCPU / 4GB RAM"}) + instance_list.append({"model": "m1.medium", "name": "Medium", "description": "2 vCPU / 8GB RAM"}) + instance_list.append({"model": "m1.large", "name": "Large", "description": "4 vCPU / 16GB RAM"}) + instance_list.append({"model": "m1.xlarge", "name": "Extra Large", "description": "8 vCPU / 32GB RAM"}) + instance_list.append({"model": "m1.xxlarge", "name": "Extra-extra Large", "description": "16 vCPU / 64GB RAM"}) return instance_list class CloudManLauncher: - def __init__(self, access_key, secret_key, cloud=None): """ Define the environment in which this instance of CloudMan will be launched. 
@@ -87,44 +81,46 @@ def __init__(self, access_key, secret_key, cloud=None): self.secret_key = secret_key if cloud is None: # Default to an EC2-compatible object - self.cloud = Bunch(id='1', # for compatibility w/ DB representation - name="Amazon", - cloud_type="ec2", - bucket_default="cloudman", - region_name="us-east-1", - region_endpoint="ec2.amazonaws.com", - ec2_port="", - ec2_conn_path="/", - cidr_range="", - is_secure=True, - s3_host="s3.amazonaws.com", - s3_port="", - s3_conn_path='/') + self.cloud = Bunch( + id="1", # for compatibility w/ DB representation + name="Amazon", + cloud_type="ec2", + bucket_default="cloudman", + region_name="us-east-1", + region_endpoint="ec2.amazonaws.com", + ec2_port="", + ec2_conn_path="/", + cidr_range="", + is_secure=True, + s3_host="s3.amazonaws.com", + s3_port="", + s3_conn_path="/", + ) else: self.cloud = cloud - self.ec2_conn = self.connect_ec2( - self.access_key, - self.secret_key, - self.cloud) - self.vpc_conn = self.connect_vpc( - self.access_key, - self.secret_key, - self.cloud) + self.ec2_conn = self.connect_ec2(self.access_key, self.secret_key, self.cloud) + self.vpc_conn = self.connect_vpc(self.access_key, self.secret_key, self.cloud) # Define exceptions that we want to catch and retry - self.http_exceptions = ( - HTTPException, - socket.error, - socket.gaierror, - BadStatusLine - ) + self.http_exceptions = (HTTPException, socket.error, socket.gaierror, BadStatusLine) def __repr__(self): return f"Cloud: {self.cloud.name}; acct ID: {self.access_key}" - def launch(self, cluster_name, image_id, instance_type, password, - kernel_id=None, ramdisk_id=None, key_name='cloudman_key_pair', - security_groups=None, placement='', subnet_id=None, - ebs_optimized=False, **kwargs): + def launch( + self, + cluster_name, + image_id, + instance_type, + password, + kernel_id=None, + ramdisk_id=None, + key_name="cloudman_key_pair", + security_groups=None, + placement="", + subnet_id=None, + ebs_optimized=False, + **kwargs, + ): """ Check all the prerequisites (key pair and security groups) for launching a CloudMan instance, compose the user data based on the @@ -146,14 +142,16 @@ def launch(self, cluster_name, image_id, instance_type, password, ``error`` containing an error message if there was one. 
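For illustration, a minimal launch() sketch; the credentials, AMI ID, cluster name and password below are placeholders, not values taken from this patch:

    from bioblend.cloudman.launch import CloudManLauncher

    launcher = CloudManLauncher("<access_key>", "<secret_key>")  # defaults to the EC2-compatible cloud
    result = launcher.launch(
        cluster_name="my-cluster",        # placeholder
        image_id="ami-xxxxxxxx",          # placeholder AMI
        instance_type="c3.large",
        password="<cluster_password>",
    )
    if result["error"]:
        print("Launch failed:", result["error"])
    else:
        print("Started instance", result["instance_id"])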
""" if security_groups is None: - security_groups = ['CloudMan'] - ret = {'sg_names': [], - 'sg_ids': [], - 'kp_name': '', - 'kp_material': '', - 'rs': None, - 'instance_id': '', - 'error': None} + security_groups = ["CloudMan"] + ret = { + "sg_names": [], + "sg_ids": [], + "kp_name": "", + "kp_material": "", + "rs": None, + "instance_id": "", + "error": None, + } # First satisfy the prerequisites for sg in security_groups: # Get VPC ID in case we're launching into a VPC @@ -165,83 +163,84 @@ def launch(self, cluster_name, image_id, instance_type, password, except (EC2ResponseError, IndexError): bioblend.log.exception("Trouble fetching subnet %s", subnet_id) cmsg = self.create_cm_security_group(sg, vpc_id=vpc_id) - ret['error'] = cmsg['error'] - if ret['error']: + ret["error"] = cmsg["error"] + if ret["error"]: return ret - if cmsg['name']: - ret['sg_names'].append(cmsg['name']) - ret['sg_ids'].append(cmsg['sg_id']) + if cmsg["name"]: + ret["sg_names"].append(cmsg["name"]) + ret["sg_ids"].append(cmsg["sg_id"]) if subnet_id: # Must setup a network interface if launching into VPC security_groups = None interface = boto.ec2.networkinterface.NetworkInterfaceSpecification( - subnet_id=subnet_id, groups=[cmsg['sg_id']], - associate_public_ip_address=True) - network_interfaces = (boto.ec2.networkinterface. - NetworkInterfaceCollection(interface)) + subnet_id=subnet_id, groups=[cmsg["sg_id"]], associate_public_ip_address=True + ) + network_interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface) else: network_interfaces = None kp_info = self.create_key_pair(key_name) - ret['kp_name'] = kp_info['name'] - ret['kp_material'] = kp_info['material'] - ret['error'] = kp_info['error'] - if ret['error']: + ret["kp_name"] = kp_info["name"] + ret["kp_material"] = kp_info["material"] + ret["error"] = kp_info["error"] + if ret["error"]: return ret # If not provided, try to find a placement # TODO: Should placement always be checked? To make sure it's correct # for existing clusters. 
if not placement: - placement = self._find_placement( - cluster_name).get('placement', None) + placement = self._find_placement(cluster_name).get("placement", None) # Compose user data for launching an instance, ensuring we have the # required fields - kwargs['access_key'] = self.access_key - kwargs['secret_key'] = self.secret_key - kwargs['cluster_name'] = cluster_name - kwargs['password'] = password - kwargs['cloud_name'] = self.cloud.name + kwargs["access_key"] = self.access_key + kwargs["secret_key"] = self.secret_key + kwargs["cluster_name"] = cluster_name + kwargs["password"] = password + kwargs["cloud_name"] = self.cloud.name ud = self._compose_user_data(kwargs) # Now launch an instance try: rs = None - rs = self.ec2_conn.run_instances(image_id=image_id, - instance_type=instance_type, - key_name=key_name, - security_groups=security_groups, - # The following two arguments are - # provided in the network_interface - # instead of arguments: - # security_group_ids=security_group_ids, - # subnet_id=subnet_id, - network_interfaces=network_interfaces, - user_data=ud, - kernel_id=kernel_id, - ramdisk_id=ramdisk_id, - placement=placement, - ebs_optimized=ebs_optimized) - ret['rs'] = rs + rs = self.ec2_conn.run_instances( + image_id=image_id, + instance_type=instance_type, + key_name=key_name, + security_groups=security_groups, + # The following two arguments are + # provided in the network_interface + # instead of arguments: + # security_group_ids=security_group_ids, + # subnet_id=subnet_id, + network_interfaces=network_interfaces, + user_data=ud, + kernel_id=kernel_id, + ramdisk_id=ramdisk_id, + placement=placement, + ebs_optimized=ebs_optimized, + ) + ret["rs"] = rs except EC2ResponseError as e: err_msg = f"Problem launching an instance: {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - ret['error'] = err_msg + ret["error"] = err_msg return ret else: if rs: try: bioblend.log.info("Launched an instance with ID %s", rs.instances[0].id) - ret['instance_id'] = rs.instances[0].id - ret['instance_ip'] = rs.instances[0].ip_address + ret["instance_id"] = rs.instances[0].id + ret["instance_ip"] = rs.instances[0].ip_address except EC2ResponseError as e: err_msg = f"Problem with the launched instance object: {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - ret['error'] = err_msg + ret["error"] = err_msg else: - ret['error'] = ("No response after launching an instance. Check " - "your account permissions and try again.") + ret["error"] = ( + "No response after launching an instance. Check " "your account permissions and try again." + ) return ret - def create_cm_security_group(self, sg_name='CloudMan', vpc_id=None): + def create_cm_security_group(self, sg_name="CloudMan", vpc_id=None): """ Create a security group with all authorizations required to run CloudMan. @@ -264,28 +263,27 @@ def create_cm_security_group(self, sg_name='CloudMan', vpc_id=None): .. 
versionchanged:: 0.6.1 The return value changed from a string to a dict """ - ports = (('20', '21'), # FTP - ('22', '22'), # SSH - ('80', '80'), # Web UI - ('443', '443'), # SSL Web UI - ('8800', '8800'), # NodeJS Proxy for Galaxy IPython IE - ('9600', '9700'), # HTCondor - ('30000', '30100')) # FTP transfer - progress = {'name': None, - 'sg_id': None, - 'error': None, - 'ports': ports} + ports = ( + ("20", "21"), # FTP + ("22", "22"), # SSH + ("80", "80"), # Web UI + ("443", "443"), # SSL Web UI + ("8800", "8800"), # NodeJS Proxy for Galaxy IPython IE + ("9600", "9700"), # HTCondor + ("30000", "30100"), + ) # FTP transfer + progress = {"name": None, "sg_id": None, "error": None, "ports": ports} cmsg = None filters = None if vpc_id: - filters = {'vpc-id': vpc_id} + filters = {"vpc-id": vpc_id} # Check if this security group already exists try: sgs = self.ec2_conn.get_all_security_groups(filters=filters) except EC2ResponseError as e: err_msg = f"Problem getting security groups. This could indicate a problem with your account credentials or permissions: {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - progress['error'] = err_msg + progress["error"] = err_msg return progress for sg in sgs: if sg.name == sg_name: @@ -296,49 +294,40 @@ def create_cm_security_group(self, sg_name='CloudMan', vpc_id=None): if cmsg is None: bioblend.log.debug("Creating Security Group %s", sg_name) try: - cmsg = self.ec2_conn.create_security_group(sg_name, 'A security ' - 'group for CloudMan', - vpc_id=vpc_id) + cmsg = self.ec2_conn.create_security_group(sg_name, "A security " "group for CloudMan", vpc_id=vpc_id) except EC2ResponseError as e: err_msg = f"Problem creating security group '{sg_name}': {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - progress['error'] = err_msg + progress["error"] = err_msg if cmsg: - progress['name'] = cmsg.name - progress['sg_id'] = cmsg.id + progress["name"] = cmsg.name + progress["sg_id"] = cmsg.id # Add appropriate authorization rules # If these rules already exist, nothing will be changed in the SG for port in ports: try: - if not self.rule_exists( - cmsg.rules, from_port=port[0], to_port=port[1]): - cmsg.authorize( - ip_protocol='tcp', - from_port=port[0], - to_port=port[1], - cidr_ip='0.0.0.0/0') + if not self.rule_exists(cmsg.rules, from_port=port[0], to_port=port[1]): + cmsg.authorize(ip_protocol="tcp", from_port=port[0], to_port=port[1], cidr_ip="0.0.0.0/0") else: bioblend.log.debug("Rule (%s:%s) already exists in the SG", port[0], port[1]) except EC2ResponseError as e: - err_msg = f"A problem adding security group authorizations: {e} (code {e.error_code}; status {e.status})" + err_msg = ( + f"A problem adding security group authorizations: {e} (code {e.error_code}; status {e.status})" + ) bioblend.log.exception(err_msg) - progress['error'] = err_msg + progress["error"] = err_msg # Add ICMP (i.e., ping) rule required by HTCondor try: - if not self.rule_exists( - cmsg.rules, from_port='-1', to_port='-1', ip_protocol='icmp'): - cmsg.authorize( - ip_protocol='icmp', - from_port=-1, - to_port=-1, - cidr_ip='0.0.0.0/0') + if not self.rule_exists(cmsg.rules, from_port="-1", to_port="-1", ip_protocol="icmp"): + cmsg.authorize(ip_protocol="icmp", from_port=-1, to_port=-1, cidr_ip="0.0.0.0/0") else: - bioblend.log.debug( - f"ICMP rule already exists in {sg_name} SG.") + bioblend.log.debug(f"ICMP rule already exists in {sg_name} SG.") except EC2ResponseError as e: - err_msg = f"A problem with security ICMP rule 
authorization: {e} (code {e.error_code}; status {e.status})" + err_msg = ( + f"A problem with security ICMP rule authorization: {e} (code {e.error_code}; status {e.status})" + ) bioblend.log.exception(err_msg) - progress['err_msg'] = err_msg + progress["err_msg"] = err_msg # Add rule that allows communication between instances in the same # SG # A flag to indicate if group rule already exists @@ -347,40 +336,37 @@ def create_cm_security_group(self, sg_name='CloudMan', vpc_id=None): for grant in rule.grants: if grant.name == cmsg.name: g_rule_exists = True - bioblend.log.debug( - "Group rule already exists in the SG.") + bioblend.log.debug("Group rule already exists in the SG.") if g_rule_exists: break if not g_rule_exists: try: - cmsg.authorize( - src_group=cmsg, - ip_protocol='tcp', - from_port=0, - to_port=65535) + cmsg.authorize(src_group=cmsg, ip_protocol="tcp", from_port=0, to_port=65535) except EC2ResponseError as e: err_msg = f"A problem with security group group authorization: {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - progress['err_msg'] = err_msg + progress["err_msg"] = err_msg bioblend.log.info("Done configuring '%s' security group", cmsg.name) else: - bioblend.log.warning( - f"Did not create security group '{sg_name}'") + bioblend.log.warning(f"Did not create security group '{sg_name}'") return progress - def rule_exists( - self, rules, from_port, to_port, ip_protocol='tcp', cidr_ip='0.0.0.0/0'): + def rule_exists(self, rules, from_port, to_port, ip_protocol="tcp", cidr_ip="0.0.0.0/0"): """ A convenience method to check if an authorization rule in a security group already exists. """ for rule in rules: - if rule.ip_protocol == ip_protocol and rule.from_port == from_port and \ - rule.to_port == to_port and cidr_ip in [ip.cidr_ip for ip in rule.grants]: + if ( + rule.ip_protocol == ip_protocol + and rule.from_port == from_port + and rule.to_port == to_port + and cidr_ip in [ip.cidr_ip for ip in rule.grants] + ): return True return False - def create_key_pair(self, key_name='cloudman_key_pair'): + def create_key_pair(self, key_name="cloudman_key_pair"): """ If a key pair with the provided ``key_name`` does not exist, create it. @@ -398,9 +384,7 @@ def create_key_pair(self, key_name='cloudman_key_pair'): .. versionchanged:: 0.6.1 The return value changed from a tuple to a dict """ - progress = {'name': None, - 'material': None, - 'error': None} + progress = {"name": None, "material": None, "error": None} kp = None # Check if a key pair under the given name already exists. If it does not, # create it, else return. 
@@ -409,23 +393,23 @@ def create_key_pair(self, key_name='cloudman_key_pair'): except EC2ResponseError as e: err_msg = f"Problem getting key pairs: {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - progress['error'] = err_msg + progress["error"] = err_msg return progress for akp in kps: if akp.name == key_name: bioblend.log.info("Key pair '%s' already exists; reusing it.", key_name) - progress['name'] = akp.name + progress["name"] = akp.name return progress try: kp = self.ec2_conn.create_key_pair(key_name) except EC2ResponseError as e: err_msg = f"Problem creating key pair '{key_name}': {e} (code {e.error_code}; status {e.status})" bioblend.log.exception(err_msg) - progress['error'] = err_msg + progress["error"] = err_msg return progress bioblend.log.info("Created key pair '%s'", kp.name) - progress['name'] = kp.name - progress['material'] = kp.material + progress["name"] = kp.name + progress["material"] = kp.material return progress def assign_floating_ip(self, ec2_conn, instance): @@ -435,8 +419,7 @@ def assign_floating_ip(self, ec2_conn, instance): except EC2ResponseError: bioblend.log.exception("Exception allocating a new floating IP address") bioblend.log.info("Associating floating IP %s to instance %s", address.public_ip, instance.id) - ec2_conn.associate_address(instance_id=instance.id, - public_ip=address.public_ip) + ec2_conn.associate_address(instance_id=instance.id, public_ip=address.public_ip) def get_status(self, instance_id): """ @@ -462,24 +445,21 @@ class may be used to launch multiple instances but only the most recent """ ec2_conn = self.ec2_conn rs = None - state = {'instance_state': "", - 'public_ip': "", - 'placement': "", - 'error': ""} + state = {"instance_state": "", "public_ip": "", "placement": "", "error": ""} # Make sure we have an instance ID if instance_id is None: err = "Missing instance ID, cannot check the state." bioblend.log.error(err) - state['error'] = err + state["error"] = err return state try: rs = ec2_conn.get_all_instances([instance_id]) if rs is not None: inst_state = rs[0].instances[0].update() public_ip = rs[0].instances[0].ip_address - state['public_ip'] = public_ip - if inst_state == 'running': + state["public_ip"] = public_ip + if inst_state == "running": # if there's a private ip, but no public ip # attempt auto allocation of floating IP if rs[0].instances[0].private_ip_address and not public_ip: @@ -492,16 +472,16 @@ class may be used to launch multiple instances but only the most recent # check using http or https. cm_url = f"http://{public_ip}/cloud" if self._checkURL(cm_url) is True: - state['instance_state'] = inst_state - state['placement'] = rs[0].instances[0].placement + state["instance_state"] = inst_state + state["placement"] = rs[0].instances[0].placement else: - state['instance_state'] = 'booting' + state["instance_state"] = "booting" else: - state['instance_state'] = inst_state + state["instance_state"] = inst_state except Exception as e: err = f"Problem updating instance '{instance_id}' state: {e}" bioblend.log.error(err) - state['error'] = err + state["error"] = err return state def get_clusters_pd(self, include_placement=True): @@ -528,21 +508,21 @@ def get_clusters_pd(self, include_placement=True): The return value changed from a list to a dictionary. 
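For illustration, a short sketch of listing saved clusters with get_clusters_pd(); the credentials are placeholders:

    from bioblend.cloudman.launch import CloudManLauncher

    launcher = CloudManLauncher("<access_key>", "<secret_key>")
    response = launcher.get_clusters_pd()
    if response["error"]:
        print("Could not list clusters:", response["error"])
    for cluster in response["clusters"]:
        # Each cluster dict has "cluster_name", "persistent_data" and "bucket_name" keys.
        print(cluster["cluster_name"], "->", cluster["bucket_name"])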
""" clusters = [] - response = {'clusters': clusters, 'error': None} + response = {"clusters": clusters, "error": None} s3_conn = self.connect_s3(self.access_key, self.secret_key, self.cloud) try: buckets = s3_conn.get_all_buckets() except S3ResponseError as e: - response['error'] = f"S3ResponseError getting buckets: {e}" + response["error"] = f"S3ResponseError getting buckets: {e}" except self.http_exceptions as ex: - response['error'] = f"Exception getting buckets: {ex}" - if response['error']: - bioblend.log.exception(response['error']) + response["error"] = f"Exception getting buckets: {ex}" + if response["error"]: + bioblend.log.exception(response["error"]) return response - for bucket in [b for b in buckets if b.name.startswith('cm-')]: + for bucket in [b for b in buckets if b.name.startswith("cm-")]: try: # TODO: first lookup if persistent_data.yaml key exists - pd = bucket.get_key('persistent_data.yaml') + pd = bucket.get_key("persistent_data.yaml") except S3ResponseError: # This can fail for a number of reasons for non-us and/or # CNAME'd buckets but it is not a terminal error @@ -552,21 +532,19 @@ def get_clusters_pd(self, include_placement=True): # We are dealing with a CloudMan bucket pd_contents = pd.get_contents_as_string() pd = yaml.safe_load(pd_contents) - if 'cluster_name' in pd: - cluster_name = pd['cluster_name'] + if "cluster_name" in pd: + cluster_name = pd["cluster_name"] else: for key in bucket.list(): - if key.name.endswith('.clusterName'): - cluster_name = key.name.split('.clusterName')[0] - cluster = {'cluster_name': cluster_name, - 'persistent_data': pd, - 'bucket_name': bucket.name} + if key.name.endswith(".clusterName"): + cluster_name = key.name.split(".clusterName")[0] + cluster = {"cluster_name": cluster_name, "persistent_data": pd, "bucket_name": bucket.name} # Look for cluster's placement too if include_placement: placement = self._find_placement(cluster_name, cluster) - cluster['placement'] = placement + cluster["placement"] = placement clusters.append(cluster) - response['clusters'] = clusters + response["clusters"] = clusters return response def get_cluster_pd(self, cluster_name): @@ -578,9 +556,9 @@ def get_cluster_pd(self, cluster_name): .. 
versionadded:: 0.3 """ cluster = {} - clusters = self.get_clusters_pd().get('clusters', []) + clusters = self.get_clusters_pd().get("clusters", []) for c in clusters: - if c['cluster_name'] == cluster_name: + if c["cluster_name"] == cluster_name: cluster = c break return cluster @@ -595,14 +573,16 @@ def connect_ec2(self, a_key, s_key, cloud=None): if cloud is None: cloud = self.cloud ci = self._get_cloud_info(cloud) - r = RegionInfo(name=ci['region_name'], endpoint=ci['region_endpoint']) - ec2_conn = boto.connect_ec2(aws_access_key_id=a_key, - aws_secret_access_key=s_key, - is_secure=ci['is_secure'], - region=r, - port=ci['ec2_port'], - path=ci['ec2_conn_path'], - validate_certs=False) + r = RegionInfo(name=ci["region_name"], endpoint=ci["region_endpoint"]) + ec2_conn = boto.connect_ec2( + aws_access_key_id=a_key, + aws_secret_access_key=s_key, + is_secure=ci["is_secure"], + region=r, + port=ci["ec2_port"], + path=ci["ec2_conn_path"], + validate_certs=False, + ) return ec2_conn def connect_s3(self, a_key, s_key, cloud=None): @@ -615,14 +595,19 @@ def connect_s3(self, a_key, s_key, cloud=None): if cloud is None: cloud = self.cloud ci = self._get_cloud_info(cloud) - if ci['cloud_type'] == 'amazon': + if ci["cloud_type"] == "amazon": calling_format = SubdomainCallingFormat() else: calling_format = OrdinaryCallingFormat() s3_conn = S3Connection( - aws_access_key_id=a_key, aws_secret_access_key=s_key, - is_secure=ci['is_secure'], port=ci['s3_port'], host=ci['s3_host'], - path=ci['s3_conn_path'], calling_format=calling_format) + aws_access_key_id=a_key, + aws_secret_access_key=s_key, + is_secure=ci["is_secure"], + port=ci["s3_port"], + host=ci["s3_host"], + path=ci["s3_conn_path"], + calling_format=calling_format, + ) return s3_conn def connect_vpc(self, a_key, s_key, cloud=None): @@ -634,15 +619,16 @@ def connect_vpc(self, a_key, s_key, cloud=None): if cloud is None: cloud = self.cloud ci = self._get_cloud_info(cloud) - r = RegionInfo(name=ci['region_name'], endpoint=ci['region_endpoint']) + r = RegionInfo(name=ci["region_name"], endpoint=ci["region_endpoint"]) vpc_conn = boto.connect_vpc( aws_access_key_id=a_key, aws_secret_access_key=s_key, - is_secure=ci['is_secure'], + is_secure=ci["is_secure"], region=r, - port=ci['ec2_port'], - path=ci['ec2_conn_path'], - validate_certs=False) + port=ci["ec2_port"], + path=ci["ec2_conn_path"], + validate_certs=False, + ) return vpc_conn def _compose_user_data(self, user_provided_data): @@ -656,30 +642,35 @@ def _compose_user_data(self, user_provided_data): form_data = {} # Do not include the following fields in the user data but do include # any 'advanced startup fields' that might be added in the future - excluded_fields = ['sg_name', 'image_id', 'instance_id', 'kp_name', - 'cloud', 'cloud_type', 'public_dns', 'cidr_range', - 'kp_material', 'placement', 'flavor_id'] + excluded_fields = [ + "sg_name", + "image_id", + "instance_id", + "kp_name", + "cloud", + "cloud_type", + "public_dns", + "cidr_range", + "kp_material", + "placement", + "flavor_id", + ] for key, value in user_provided_data.items(): if key not in excluded_fields: form_data[key] = value # If the following user data keys are empty, do not include them in the # request user data - udkeys = [ - 'post_start_script_url', - 'worker_post_start_script_url', - 'bucket_default', - 'share_string'] + udkeys = ["post_start_script_url", "worker_post_start_script_url", "bucket_default", "share_string"] for udkey in udkeys: - if udkey in form_data and form_data[udkey] == '': + if udkey in form_data and 
form_data[udkey] == "": del form_data[udkey] # If bucket_default was not provided, add a default value to the user data # (missing value does not play nicely with CloudMan's ec2autorun.py) - if not form_data.get( - 'bucket_default', None) and self.cloud.bucket_default: - form_data['bucket_default'] = self.cloud.bucket_default + if not form_data.get("bucket_default", None) and self.cloud.bucket_default: + form_data["bucket_default"] = self.cloud.bucket_default # Reuse the ``password`` for the ``freenxpass`` user data option - if 'freenxpass' not in form_data and 'password' in form_data: - form_data['freenxpass'] = form_data['password'] + if "freenxpass" not in form_data and "password" in form_data: + form_data["freenxpass"] = form_data["password"] # Convert form_data into the YAML format ud = yaml.dump(form_data, default_flow_style=False, allow_unicode=False) # Also include connection info about the selected cloud @@ -691,18 +682,18 @@ def _get_cloud_info(self, cloud, as_str=False): Get connection information about a given cloud """ ci = {} - ci['cloud_type'] = cloud.cloud_type - ci['region_name'] = cloud.region_name - ci['region_endpoint'] = cloud.region_endpoint - ci['is_secure'] = cloud.is_secure - ci['ec2_port'] = cloud.ec2_port if cloud.ec2_port != '' else None - ci['ec2_conn_path'] = cloud.ec2_conn_path + ci["cloud_type"] = cloud.cloud_type + ci["region_name"] = cloud.region_name + ci["region_endpoint"] = cloud.region_endpoint + ci["is_secure"] = cloud.is_secure + ci["ec2_port"] = cloud.ec2_port if cloud.ec2_port != "" else None + ci["ec2_conn_path"] = cloud.ec2_conn_path # Include cidr_range only if not empty - if cloud.cidr_range != '': - ci['cidr_range'] = cloud.cidr_range - ci['s3_host'] = cloud.s3_host - ci['s3_port'] = cloud.s3_port if cloud.s3_port != '' else None - ci['s3_conn_path'] = cloud.s3_conn_path + if cloud.cidr_range != "": + ci["cidr_range"] = cloud.cidr_range + ci["s3_host"] = cloud.s3_host + ci["s3_port"] = cloud.s3_port if cloud.s3_port != "" else None + ci["s3_conn_path"] = cloud.s3_conn_path if as_str: ci = yaml.dump(ci, default_flow_style=False, allow_unicode=False) return ci @@ -740,42 +731,38 @@ def _find_placement(self, cluster_name, cluster=None): The return value changed from a list to a dictionary. """ placement = None - response = {'placement': placement, 'error': None} + response = {"placement": placement, "error": None} cluster = cluster or self.get_cluster_pd(cluster_name) - if cluster and 'persistent_data' in cluster: - pd = cluster['persistent_data'] + if cluster and "persistent_data" in cluster: + pd = cluster["persistent_data"] try: - if 'placement' in pd: - response['placement'] = pd['placement'] - elif 'data_filesystems' in pd: + if "placement" in pd: + response["placement"] = pd["placement"] + elif "data_filesystems" in pd: # We have v1 format persistent data so get the volume first and # then the placement zone - vol_id = pd['data_filesystems']['galaxyData'][0]['vol_id'] - response['placement'] = self._get_volume_placement(vol_id) - elif 'filesystems' in pd: + vol_id = pd["data_filesystems"]["galaxyData"][0]["vol_id"] + response["placement"] = self._get_volume_placement(vol_id) + elif "filesystems" in pd: # V2 format. 
- for fs in [fs for fs in pd['filesystems'] if fs.get( - 'kind', None) == 'volume' and 'ids' in fs]: + for fs in [fs for fs in pd["filesystems"] if fs.get("kind", None) == "volume" and "ids" in fs]: # All volumes must be in the same zone - vol_id = fs['ids'][0] - response['placement'] = self._get_volume_placement( - vol_id) + vol_id = fs["ids"][0] + response["placement"] = self._get_volume_placement(vol_id) # No need to continue to iterate through # filesystems, if we found one with a volume. break except Exception as exc: - response['error'] = \ - f"Exception while finding placement for cluster '{cluster_name}'. This can indicate malformed instance data. Or that this method is broken: {exc}" - bioblend.log.error(response['error']) - response['placement'] = None + response[ + "error" + ] = f"Exception while finding placement for cluster '{cluster_name}'. This can indicate malformed instance data. Or that this method is broken: {exc}" + bioblend.log.error(response["error"]) + response["placement"] = None else: - bioblend.log.debug( - f"Insufficient info about cluster {cluster_name} to get placement." - ) + bioblend.log.debug(f"Insufficient info about cluster {cluster_name} to get placement.") return response - def find_placements( - self, ec2_conn, instance_type, cloud_type, cluster_name=None): + def find_placements(self, ec2_conn, instance_type, cloud_type, cluster_name=None): """ Find a list of placement zones that support the specified instance type. @@ -804,34 +791,32 @@ def find_placements( """ # First look for a specific zone a given cluster is bound to zones = [] - response = {'zones': zones, 'error': None} + response = {"zones": zones, "error": None} if cluster_name: placement = self._find_placement(cluster_name) - if placement.get('error'): - response['error'] = placement['error'] + if placement.get("error"): + response["error"] = placement["error"] return response - response['zones'] = placement.get('placement', []) + response["zones"] = placement.get("placement", []) # If placement is not found, look for a list of available zones - if not response['zones']: + if not response["zones"]: in_the_past = datetime.datetime.now() - datetime.timedelta(hours=1) back_compatible_zone = "us-east-1e" - for zone in [ - z for z in ec2_conn.get_all_zones() if z.state == 'available']: + for zone in [z for z in ec2_conn.get_all_zones() if z.state == "available"]: # Non EC2 clouds may not support get_spot_price_history - if instance_type is None or cloud_type != 'ec2': + if instance_type is None or cloud_type != "ec2": zones.append(zone.name) - elif ec2_conn.get_spot_price_history(instance_type=instance_type, - end_time=in_the_past.isoformat(), - availability_zone=zone.name): + elif ec2_conn.get_spot_price_history( + instance_type=instance_type, end_time=in_the_past.isoformat(), availability_zone=zone.name + ): zones.append(zone.name) # Higher-lettered zones seem to have more availability currently zones.sort(reverse=True) if back_compatible_zone in zones: - zones = [back_compatible_zone] + \ - [z for z in zones if z != back_compatible_zone] + zones = [back_compatible_zone] + [z for z in zones if z != back_compatible_zone] if len(zones) == 0: - response['error'] = f"Did not find availabilty zone for {instance_type}" - bioblend.log.error(response['error']) + response["error"] = f"Did not find availabilty zone for {instance_type}" + bioblend.log.error(response["error"]) zones.append(back_compatible_zone) return response @@ -843,7 +828,7 @@ def _checkURL(self, url): try: p = urlparse(url) h = 
HTTPConnection(p[1]) - h.putrequest('HEAD', p[2]) + h.putrequest("HEAD", p[2]) h.endheaders() r = h.getresponse() # CloudMan UI is pwd protected so include 401 diff --git a/bioblend/config.py b/bioblend/config.py index bb5feb15b..0e992bcb9 100644 --- a/bioblend/config.py +++ b/bioblend/config.py @@ -1,9 +1,9 @@ import configparser import os -BioBlendConfigPath = '/etc/bioblend.cfg' +BioBlendConfigPath = "/etc/bioblend.cfg" BioBlendConfigLocations = [BioBlendConfigPath] -UserConfigPath = os.path.join(os.path.expanduser('~'), '.bioblend') +UserConfigPath = os.path.join(os.path.expanduser("~"), ".bioblend") BioBlendConfigLocations.append(UserConfigPath) @@ -16,8 +16,9 @@ class Config(configparser.ConfigParser): * System wide: ``/etc/bioblend.cfg`` * Individual user: ``~/.bioblend`` (which works on both Windows and Unix) """ + def __init__(self, path=None, fp=None, do_load=True): - super().__init__({'working_dir': '/mnt/pyami', 'debug': '0'}) + super().__init__({"working_dir": "/mnt/pyami", "debug": "0"}) if do_load: if path: self.load_from_path(path) @@ -30,8 +31,7 @@ def get_value(self, section, name, default=None): return self.get(section, name, default) def get(self, section, name, default=None): - """ - """ + """ """ try: val = super().get(section, name) except Exception: @@ -55,7 +55,7 @@ def getfloat(self, section, name, default=0.0): def getbool(self, section, name, default=False): if self.has_option(section, name): val = self.get(section, name) - if val.lower() == 'true': + if val.lower() == "true": val = True else: val = False diff --git a/bioblend/galaxy/client.py b/bioblend/galaxy/client.py index fc0b1d1f6..c40bf7dfc 100644 --- a/bioblend/galaxy/client.py +++ b/bioblend/galaxy/client.py @@ -11,6 +11,7 @@ import requests import bioblend + # The following import must be preserved for compatibility because # ConnectionError class was originally defined here from bioblend import ConnectionError # noqa: I202 @@ -94,13 +95,13 @@ def _make_url(self, module_id: Optional[str] = None, deleted: bool = False, cont :param contents: If ``True``, include 'contents' in the URL, after the module ID: ``/api/libraries//contents`` """ - c_url = '/'.join((self.gi.url, self.module)) + c_url = "/".join((self.gi.url, self.module)) if deleted: - c_url = c_url + '/deleted' + c_url = c_url + "/deleted" if module_id: - c_url = '/'.join((c_url, module_id)) + c_url = "/".join((c_url, module_id)) if contents: - c_url = c_url + '/contents' + c_url = c_url + "/contents" return c_url def _get( @@ -129,9 +130,8 @@ def _get( url = self._make_url(module_id=id, deleted=deleted, contents=contents) attempts_left = self.max_get_retries() retry_delay = self.get_retry_delay() - bioblend.log.debug("GET - attempts left: %s; retry delay: %s", - attempts_left, retry_delay) - msg = '' + bioblend.log.debug("GET - attempts left: %s; retry delay: %s", attempts_left, retry_delay) + msg = "" while attempts_left > 0: attempts_left -= 1 try: @@ -164,8 +164,7 @@ def _get( bioblend.log.warning(msg) time.sleep(retry_delay) - def _post(self, payload=None, id=None, deleted=False, contents=None, url=None, - files_attached=False): + def _post(self, payload=None, id=None, deleted=False, contents=None, url=None, files_attached=False): """ Do a generic POST request, composing the url from the contents of the arguments. 
Alternatively, an explicit ``url`` can be provided to use @@ -183,8 +182,7 @@ def _post(self, payload=None, id=None, deleted=False, contents=None, url=None, """ if not url: url = self._make_url(module_id=id, deleted=deleted, contents=contents) - return self.gi.make_post_request(url, payload=payload, - files_attached=files_attached) + return self.gi.make_post_request(url, payload=payload, files_attached=files_attached) def _put(self, payload=None, id=None, url=None, params=None): """ diff --git a/bioblend/galaxy/config/__init__.py b/bioblend/galaxy/config/__init__.py index 92dde44c0..3adb1c39c 100644 --- a/bioblend/galaxy/config/__init__.py +++ b/bioblend/galaxy/config/__init__.py @@ -6,7 +6,7 @@ class ConfigClient(Client): - module = 'configuration' + module = "configuration" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -46,7 +46,7 @@ def get_version(self): {'extra': {}, 'version_major': '17.01'} """ - url = self.gi.url + '/version' + url = self.gi.url + "/version" return self._get(url=url) def whoami(self): diff --git a/bioblend/galaxy/dataset_collections/__init__.py b/bioblend/galaxy/dataset_collections/__init__.py index 5909ae2e3..9213d481d 100644 --- a/bioblend/galaxy/dataset_collections/__init__.py +++ b/bioblend/galaxy/dataset_collections/__init__.py @@ -12,7 +12,6 @@ class HasElements: - def __init__(self, name, type="list", elements=None): self.name = name self.type = type @@ -27,28 +26,21 @@ def add(self, element): class CollectionDescription(HasElements): - def to_dict(self): - return dict( - name=self.name, - collection_type=self.type, - element_identifiers=[e.to_dict() for e in self.elements] - ) + return dict(name=self.name, collection_type=self.type, element_identifiers=[e.to_dict() for e in self.elements]) class CollectionElement(HasElements): - def to_dict(self): return dict( src="new_collection", name=self.name, collection_type=self.type, - element_identifiers=[e.to_dict() for e in self.elements] + element_identifiers=[e.to_dict() for e in self.elements], ) class SimpleElement: - def __init__(self, value): self.value = value @@ -57,43 +49,45 @@ def to_dict(self): class HistoryDatasetElement(SimpleElement): - def __init__(self, name, id): - super().__init__(dict( - name=name, - src="hda", - id=id, - )) + super().__init__( + dict( + name=name, + src="hda", + id=id, + ) + ) class HistoryDatasetCollectionElement(SimpleElement): - def __init__(self, name, id): - super().__init__(dict( - name=name, - src="hdca", - id=id, - )) + super().__init__( + dict( + name=name, + src="hdca", + id=id, + ) + ) class LibraryDatasetElement(SimpleElement): - def __init__(self, name, id): - super().__init__(dict( - name=name, - src="ldda", - id=id, - )) + super().__init__( + dict( + name=name, + src="ldda", + id=id, + ) + ) class DatasetCollectionClient(Client): - module = 'dataset_collections' + module = "dataset_collections" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) - def show_dataset_collection(self, dataset_collection_id: str, - instance_type: str = 'history') -> dict: + def show_dataset_collection(self, dataset_collection_id: str, instance_type: str = "history") -> dict: """ Get details of a given dataset collection of the current user @@ -107,7 +101,7 @@ def show_dataset_collection(self, dataset_collection_id: str, :return: element view of the dataset collection """ params = { - 'instance_type': instance_type, + "instance_type": instance_type, } url = self._make_url(module_id=dataset_collection_id) return 
self._get(id=dataset_collection_id, url=url, params=params) @@ -130,25 +124,27 @@ def download_dataset_collection(self, dataset_collection_id: str, file_path: str For earlier versions of Galaxy this method downloads a ``tgz`` archive. This method works only on Galaxy 18.01 or later. """ - url = self._make_url(module_id=dataset_collection_id) + '/download' + url = self._make_url(module_id=dataset_collection_id) + "/download" r = self.gi.make_get_request(url, stream=True) r.raise_for_status() - archive_type = 'zip' if self.gi.config.get_version()['version_major'] >= '21.01' else 'tgz' + archive_type = "zip" if self.gi.config.get_version()["version_major"] >= "21.01" else "tgz" - with open(file_path, 'wb') as fp: + with open(file_path, "wb") as fp: for chunk in r.iter_content(chunk_size=CHUNK_SIZE): if chunk: fp.write(chunk) - return { - 'file_path': file_path, - 'archive_type': archive_type - } + return {"file_path": file_path, "archive_type": archive_type} - def wait_for_dataset_collection(self, dataset_collection_id: str, maxwait: float = 12000, - interval: float = 3, proportion_complete: float = 1.0, - check: bool = True) -> dict: + def wait_for_dataset_collection( + self, + dataset_collection_id: str, + maxwait: float = 12000, + interval: float = 3, + proportion_complete: float = 1.0, + check: bool = True, + ) -> dict: """ Wait until all or a specified proportion of elements of a dataset collection are in a terminal state. @@ -189,20 +185,26 @@ def wait_for_dataset_collection(self, dataset_collection_id: str, maxwait: float time_left = maxwait while True: dataset_collection = self.gi.dataset_collections.show_dataset_collection(dataset_collection_id) - states = [elem['object']['state'] for elem in dataset_collection['elements']] + states = [elem["object"]["state"] for elem in dataset_collection["elements"]] terminal_states = [state for state in states if state in TERMINAL_STATES] - if set(terminal_states) not in [{'ok'}, set()]: - raise Exception(f"Dataset collection {dataset_collection_id} contains elements in the " - f"following non-ok terminal states: {', '.join(set(terminal_states) - {'ok'})}") + if set(terminal_states) not in [{"ok"}, set()]: + raise Exception( + f"Dataset collection {dataset_collection_id} contains elements in the " + f"following non-ok terminal states: {', '.join(set(terminal_states) - {'ok'})}" + ) proportion = len(terminal_states) / len(states) if proportion >= proportion_complete: return dataset_collection if time_left > 0: - log.info(f"The dataset collection {dataset_collection_id} has {len(terminal_states)} out of {len(states)} datasets in a terminal state. Will wait {time_left} more s") + log.info( + f"The dataset collection {dataset_collection_id} has {len(terminal_states)} out of {len(states)} datasets in a terminal state. 
Will wait {time_left} more s" + ) time.sleep(min(time_left, interval)) time_left -= interval else: - raise DatasetCollectionTimeoutException(f"Less than {proportion_complete * 100}% of datasets in the dataset collection is in a terminal state after {maxwait} s") + raise DatasetCollectionTimeoutException( + f"Less than {proportion_complete * 100}% of datasets in the dataset collection is in a terminal state after {maxwait} s" + ) class DatasetCollectionTimeoutException(TimeoutException): diff --git a/bioblend/galaxy/datasets/__init__.py b/bioblend/galaxy/datasets/__init__.py index 82480fbfb..68d467607 100644 --- a/bioblend/galaxy/datasets/__init__.py +++ b/bioblend/galaxy/datasets/__init__.py @@ -21,17 +21,17 @@ log = logging.getLogger(__name__) -TERMINAL_STATES = {'ok', 'empty', 'error', 'discarded', 'failed_metadata'} +TERMINAL_STATES = {"ok", "empty", "error", "discarded", "failed_metadata"} # Non-terminal states are: 'new', 'upload', 'queued', 'running', 'paused', 'setting_metadata' class DatasetClient(Client): - module = 'datasets' + module = "datasets" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) - def show_dataset(self, dataset_id, deleted=False, hda_ldda='hda'): + def show_dataset(self, dataset_id, deleted=False, hda_ldda="hda"): """ Get details about a given dataset. This can be a history or a library dataset. @@ -53,35 +53,37 @@ def show_dataset(self, dataset_id, deleted=False, hda_ldda='hda'): ) return self._get(id=dataset_id, deleted=deleted, params=params) - def _initiate_download(self, dataset_id: str, stream_content: bool, - require_ok_state: bool = True, maxwait: float = 12000): + def _initiate_download( + self, dataset_id: str, stream_content: bool, require_ok_state: bool = True, maxwait: float = 12000 + ): dataset = self.wait_for_dataset(dataset_id, maxwait=maxwait, check=False) - if not dataset['state'] == 'ok': + if not dataset["state"] == "ok": message = f"Dataset state is not 'ok'. Dataset id: {dataset_id}, current state: {dataset['state']}" if require_ok_state: raise DatasetStateException(message) else: warnings.warn(message, DatasetStateWarning) - file_ext = dataset.get('file_ext') + file_ext = dataset.get("file_ext") # Resort to 'data' when Galaxy returns an empty or temporary extension - if not file_ext or file_ext == 'auto' or file_ext == '_sniff_': - file_ext = 'data' + if not file_ext or file_ext == "auto" or file_ext == "_sniff_": + file_ext = "data" # The preferred download URL is # '/api/histories//contents//display?to_ext=' # since the old URL: # '/dataset//display?to_ext=' # does not work when using REMOTE_USER with access disabled to # everything but /api without auth - download_url = dataset['download_url'] + '?to_ext=' + file_ext + download_url = dataset["download_url"] + "?to_ext=" + file_ext url = urljoin(self.gi.base_url, download_url) r = self.gi.make_get_request(url, stream=stream_content) r.raise_for_status() return dataset, file_ext, r - def download_dataset(self, dataset_id, file_path=None, use_default_filename=True, - require_ok_state=True, maxwait=12000): + def download_dataset( + self, dataset_id, file_path=None, use_default_filename=True, require_ok_state=True, maxwait=12000 + ): """ Download a dataset to file or in memory. If the dataset state is not 'ok', a ``DatasetStateException`` will be thrown, unless ``require_ok_state=False``. @@ -117,25 +119,26 @@ def download_dataset(self, dataset_id, file_path=None, use_default_filename=True content. Otherwise returns the local path of the downloaded file. 
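For illustration, a minimal download_dataset() sketch showing the file vs. in-memory behaviour; the Galaxy URL, API key and dataset ID are placeholders:

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="<api_key>")
    # Save to a directory, reusing the server-side filename:
    local_path = gi.datasets.download_dataset("<dataset_id>", file_path="/tmp", use_default_filename=True)
    # Or fetch the raw dataset content in memory (no file_path given):
    content = gi.datasets.download_dataset("<dataset_id>")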
""" dataset, file_ext, r = self._initiate_download( - dataset_id, - stream_content=file_path is not None, - require_ok_state=require_ok_state, - maxwait=maxwait + dataset_id, stream_content=file_path is not None, require_ok_state=require_ok_state, maxwait=maxwait ) if file_path is None: - if 'content-length' in r.headers and len(r.content) != int(r.headers['content-length']): - log.warning("Transferred content size does not match content-length header (%s != %s)", len(r.content), r.headers['content-length']) + if "content-length" in r.headers and len(r.content) != int(r.headers["content-length"]): + log.warning( + "Transferred content size does not match content-length header (%s != %s)", + len(r.content), + r.headers["content-length"], + ) return r.content else: if use_default_filename: # Build a useable filename - filename = dataset['name'] + '.' + file_ext + filename = dataset["name"] + "." + file_ext # Now try to get a better filename from the response headers # We expect tokens 'filename' '=' to be followed by the quoted filename - if 'content-disposition' in r.headers: - tokens = list(shlex.shlex(r.headers['content-disposition'], posix=True)) + if "content-disposition" in r.headers: + tokens = list(shlex.shlex(r.headers["content-disposition"], posix=True)) try: - header_filepath = tokens[tokens.index('filename') + 2] + header_filepath = tokens[tokens.index("filename") + 2] filename = os.path.basename(header_filepath) except (ValueError, IndexError): pass @@ -143,7 +146,7 @@ def download_dataset(self, dataset_id, file_path=None, use_default_filename=True else: file_local_path = file_path - with open(file_local_path, 'wb') as fp: + with open(file_local_path, "wb") as fp: for chunk in r.iter_content(chunk_size=bioblend.CHUNK_SIZE): if chunk: fp.write(chunk) @@ -168,7 +171,7 @@ def get_datasets( create_time_max: str = None, update_time_min: str = None, update_time_max: str = None, - order: str = 'create_time-dsc', + order: str = "create_time-dsc", ) -> List[dict]: """ Get the latest datasets, or select another subset by specifying optional @@ -242,67 +245,67 @@ def get_datasets( :param: A list of datasets """ params: Dict[str, Any] = { - 'limit': limit, - 'offset': offset, - 'order': order, + "limit": limit, + "offset": offset, + "order": order, } if history_id: - params['history_id'] = history_id + params["history_id"] = history_id q: List[str] = [] qv: List[Any] = [] if name: - q.append('name') + q.append("name") qv.append(name) if state: op, val = self._param_to_filter(state) - q.append(f'state-{op}') + q.append(f"state-{op}") qv.append(val) if extension: op, val = self._param_to_filter(extension) - q.append(f'extension-{op}') + q.append(f"extension-{op}") qv.append(val) if visible is not None: - q.append('visible') + q.append("visible") qv.append(str(visible)) if deleted is not None: - q.append('deleted') + q.append("deleted") qv.append(str(deleted)) if purged is not None: - q.append('purged') + q.append("purged") qv.append(str(purged)) if tool_id is not None: - q.append('tool_id') + q.append("tool_id") qv.append(str(tool_id)) if tag is not None: - q.append('tag') + q.append("tag") qv.append(str(tag)) if create_time_min: - q.append('create_time-ge') + q.append("create_time-ge") qv.append(create_time_min) if create_time_max: - q.append('create_time-le') + q.append("create_time-le") qv.append(create_time_max) if update_time_min: - q.append('update_time-ge') + q.append("update_time-ge") qv.append(update_time_min) if update_time_max: - q.append('update_time-le') + 
q.append("update_time-le") qv.append(update_time_max) - params['q'] = q - params['qv'] = qv + params["q"] = q + params["qv"] = qv return self._get(params=params) def _param_to_filter(self, param): if type(param) is str: - return 'eq', param + return "eq", param if type(param) is list: if len(param) == 1: - return 'eq', param.pop() - return 'in', ','.join(param) + return "eq", param.pop() + return "in", ",".join(param) raise Exception("Filter param is not of type ``str`` or ``list``") def publish_dataset(self, dataset_id: str, published: bool = False): @@ -322,14 +325,17 @@ def publish_dataset(self, dataset_id: str, published: bool = False): .. note:: This method works only on Galaxy 19.05 or later. """ - payload: Dict[str, Any] = { - 'action': 'remove_restrictions' if published else 'make_private' - } - url = self._make_url(dataset_id) + '/permissions' + payload: Dict[str, Any] = {"action": "remove_restrictions" if published else "make_private"} + url = self._make_url(dataset_id) + "/permissions" self.gi.datasets._put(url=url, payload=payload) - def update_permissions(self, dataset_id: str, access_ids: Optional[list] = None, - manage_ids: Optional[list] = None, modify_ids: Optional[list] = None): + def update_permissions( + self, + dataset_id: str, + access_ids: Optional[list] = None, + manage_ids: Optional[list] = None, + modify_ids: Optional[list] = None, + ): """ Set access, manage or modify permissions for a dataset to a list of roles. @@ -351,16 +357,14 @@ def update_permissions(self, dataset_id: str, access_ids: Optional[list] = None, .. note:: This method works only on Galaxy 19.05 or later. """ - payload: Dict[str, Any] = { - 'action': 'set_permissions' - } + payload: Dict[str, Any] = {"action": "set_permissions"} if access_ids: - payload['access'] = access_ids + payload["access"] = access_ids if manage_ids: - payload['manage'] = manage_ids + payload["manage"] = manage_ids if modify_ids: - payload['modify'] = modify_ids - url = self._make_url(dataset_id) + '/permissions' + payload["modify"] = modify_ids + url = self._make_url(dataset_id) + "/permissions" self.gi.datasets._put(url=url, payload=payload) def wait_for_dataset(self, dataset_id, maxwait=12000, interval=3, check=True): @@ -390,9 +394,9 @@ def wait_for_dataset(self, dataset_id, maxwait=12000, interval=3, check=True): time_left = maxwait while True: dataset = self.show_dataset(dataset_id) - state = dataset['state'] + state = dataset["state"] if state in TERMINAL_STATES: - if check and state != 'ok': + if check and state != "ok": raise Exception(f"Dataset {dataset_id} is in terminal state {state}") return dataset if time_left > 0: @@ -400,7 +404,9 @@ def wait_for_dataset(self, dataset_id, maxwait=12000, interval=3, check=True): time.sleep(min(time_left, interval)) time_left -= interval else: - raise DatasetTimeoutException(f"Dataset {dataset_id} is still in non-terminal state {state} after {maxwait} s") + raise DatasetTimeoutException( + f"Dataset {dataset_id} is still in non-terminal state {state} after {maxwait} s" + ) class DatasetStateException(Exception): diff --git a/bioblend/galaxy/datatypes/__init__.py b/bioblend/galaxy/datatypes/__init__.py index 3231d5083..d2fe3e408 100644 --- a/bioblend/galaxy/datatypes/__init__.py +++ b/bioblend/galaxy/datatypes/__init__.py @@ -5,7 +5,7 @@ class DatatypesClient(Client): - module = 'datatypes' + module = "datatypes" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -38,10 +38,10 @@ def get_datatypes(self, extension_only=False, upload_only=False): params = 
{} if extension_only: - params['extension_only'] = True + params["extension_only"] = True if upload_only: - params['upload_only'] = True + params["upload_only"] = True return self._get(params=params) @@ -63,5 +63,5 @@ def get_sniffers(self): 'galaxy.datatypes.sequence:Lav', 'galaxy.datatypes.sequence:csFasta'] """ - url = self._make_url() + '/sniffers' + url = self._make_url() + "/sniffers" return self._get(url=url) diff --git a/bioblend/galaxy/folders/__init__.py b/bioblend/galaxy/folders/__init__.py index b86120e36..ecde88bd7 100644 --- a/bioblend/galaxy/folders/__init__.py +++ b/bioblend/galaxy/folders/__init__.py @@ -5,7 +5,7 @@ class FoldersClient(Client): - module = 'folders' + module = "folders" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -26,9 +26,9 @@ def create_folder(self, parent_folder_id, name, description=None): :rtype: dict :return: details of the updated folder """ - payload = {'name': name} + payload = {"name": name} if description: - payload['description'] = description + payload["description"] = description return self._post(payload=payload, id=parent_folder_id) def show_folder(self, folder_id, contents=False): @@ -63,7 +63,7 @@ def delete_folder(self, folder_id, undelete=False): :return: detailed folder information :rtype: dict """ - payload = {'undelete': undelete} + payload = {"undelete": undelete} return self._delete(payload=payload, id=folder_id) def update_folder(self, folder_id, name, description=None): @@ -82,9 +82,9 @@ def update_folder(self, folder_id, name, description=None): :rtype: dict :return: details of the updated folder """ - payload = {'name': name} + payload = {"name": name} if description: - payload['description'] = description + payload["description"] = description return self._put(payload=payload, id=folder_id) def get_permissions(self, folder_id, scope): @@ -100,10 +100,10 @@ def get_permissions(self, folder_id, scope): :rtype: dict :return: dictionary including details of the folder """ - url = self._make_url(folder_id) + '/permissions' + url = self._make_url(folder_id) + "/permissions" return self._get(url=url) - def set_permissions(self, folder_id, action='set_permissions', add_ids=None, manage_ids=None, modify_ids=None): + def set_permissions(self, folder_id, action="set_permissions", add_ids=None, manage_ids=None, modify_ids=None): """ Set the permissions of a folder. 
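For illustration, a minimal sketch of granting folder permissions with set_permissions(); the Galaxy URL, API key and IDs are placeholders:

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="<api_key>")
    gi.folders.set_permissions(
        "<folder_id>",
        action="set_permissions",
        add_ids=["<role_id>"],      # roles that may add items to the folder
        manage_ids=["<role_id>"],   # roles that may manage the folder
        modify_ids=["<role_id>"],   # roles that may modify the folder
    )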
@@ -125,12 +125,12 @@ def set_permissions(self, folder_id, action='set_permissions', add_ids=None, man :rtype: dict :return: dictionary including details of the folder """ - url = self._make_url(folder_id) + '/permissions' - payload = {'action': action} + url = self._make_url(folder_id) + "/permissions" + payload = {"action": action} if add_ids: - payload['add_ids[]'] = add_ids + payload["add_ids[]"] = add_ids if manage_ids: - payload['manage_ids[]'] = manage_ids + payload["manage_ids[]"] = manage_ids if modify_ids: - payload['modify_ids[]'] = modify_ids + payload["modify_ids[]"] = modify_ids return self._post(url=url, payload=payload) diff --git a/bioblend/galaxy/forms/__init__.py b/bioblend/galaxy/forms/__init__.py index 7a679767d..630f3aba8 100644 --- a/bioblend/galaxy/forms/__init__.py +++ b/bioblend/galaxy/forms/__init__.py @@ -5,7 +5,7 @@ class FormsClient(Client): - module = 'forms' + module = "forms" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) diff --git a/bioblend/galaxy/ftpfiles/__init__.py b/bioblend/galaxy/ftpfiles/__init__.py index 9b2948119..6c32edb83 100644 --- a/bioblend/galaxy/ftpfiles/__init__.py +++ b/bioblend/galaxy/ftpfiles/__init__.py @@ -5,7 +5,7 @@ class FTPFilesClient(Client): - module = 'ftp_files' + module = "ftp_files" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) diff --git a/bioblend/galaxy/genomes/__init__.py b/bioblend/galaxy/genomes/__init__.py index 8eb5c0085..0e2484642 100644 --- a/bioblend/galaxy/genomes/__init__.py +++ b/bioblend/galaxy/genomes/__init__.py @@ -5,7 +5,7 @@ class GenomeClient(Client): - module = 'genomes' + module = "genomes" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -44,18 +44,25 @@ def show_genome(self, id, num=None, chrom=None, low=None, high=None): """ params = {} if num: - params['num'] = num + params["num"] = num if chrom: - params['chrom'] = chrom + params["chrom"] = chrom if low: - params['low'] = low + params["low"] = low if high: - params['high'] = high + params["high"] = high return self._get(id, params) - def install_genome(self, func='download', source=None, dbkey=None, - ncbi_name=None, ensembl_dbkey=None, url_dbkey=None, - indexers=None): + def install_genome( + self, + func="download", + source=None, + dbkey=None, + ncbi_name=None, + ensembl_dbkey=None, + url_dbkey=None, + indexers=None, + ): """ Download and/or index a genome. 
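For illustration, a minimal install_genome() sketch; the source, dbkey and indexer values are assumptions chosen for the example, and an admin API key is assumed:

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("https://galaxy.example.org", key="<admin_api_key>")
    # Download a genome build and index it (values below are illustrative only).
    gi.genomes.install_genome(func="download", source="ucsc", dbkey="hg19", indexers=["bowtie2"])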
@@ -88,17 +95,17 @@ def install_genome(self, func='download', source=None, dbkey=None, """ payload = {} if source: - payload['source'] = source + payload["source"] = source if func: - payload['func'] = func + payload["func"] = func if dbkey: - payload['dbkey'] = dbkey + payload["dbkey"] = dbkey if ncbi_name: - payload['ncbi_name'] = ncbi_name + payload["ncbi_name"] = ncbi_name if ensembl_dbkey: - payload['ensembl_dbkey'] = ensembl_dbkey + payload["ensembl_dbkey"] = ensembl_dbkey if url_dbkey: - payload['url_dbkey'] = url_dbkey + payload["url_dbkey"] = url_dbkey if indexers: - payload['indexers'] = indexers + payload["indexers"] = indexers return self._post(payload) diff --git a/bioblend/galaxy/groups/__init__.py b/bioblend/galaxy/groups/__init__.py index 189eebc36..18dab1c96 100644 --- a/bioblend/galaxy/groups/__init__.py +++ b/bioblend/galaxy/groups/__init__.py @@ -5,7 +5,7 @@ class GroupsClient(Client): - module = 'groups' + module = "groups" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -75,11 +75,7 @@ def create_group(self, group_name, user_ids=None, role_ids=None): user_ids = [] if role_ids is None: role_ids = [] - payload = { - 'name': group_name, - 'user_ids': user_ids, - 'role_ids': role_ids - } + payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids} return self._post(payload) def update_group(self, group_id, group_name=None, user_ids=None, role_ids=None): @@ -108,11 +104,7 @@ def update_group(self, group_id, group_name=None, user_ids=None, role_ids=None): user_ids = [] if role_ids is None: role_ids = [] - payload = { - 'name': group_name, - 'user_ids': user_ids, - 'role_ids': role_ids - } + payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids} return self._put(payload=payload, id=group_id) def get_group_users(self, group_id): @@ -125,7 +117,7 @@ def get_group_users(self, group_id): :rtype: list of dicts :return: List of group users' info """ - url = self._make_url(group_id) + '/users' + url = self._make_url(group_id) + "/users" return self._get(url=url) def get_group_roles(self, group_id): @@ -138,7 +130,7 @@ def get_group_roles(self, group_id): :rtype: list of dicts :return: List of group roles' info """ - url = self._make_url(group_id) + '/roles' + url = self._make_url(group_id) + "/roles" return self._get(url=url) def add_group_user(self, group_id, user_id): @@ -154,7 +146,7 @@ def add_group_user(self, group_id, user_id): :rtype: dict :return: Added group user's info """ - url = '/'.join((self._make_url(group_id), 'users', user_id)) + url = "/".join((self._make_url(group_id), "users", user_id)) return self._put(url=url) def add_group_role(self, group_id, role_id): @@ -170,7 +162,7 @@ def add_group_role(self, group_id, role_id): :rtype: dict :return: Added group role's info """ - url = '/'.join((self._make_url(group_id), 'roles', role_id)) + url = "/".join((self._make_url(group_id), "roles", role_id)) return self._put(url=url) def delete_group_user(self, group_id, user_id): @@ -186,7 +178,7 @@ def delete_group_user(self, group_id, user_id): :rtype: dict :return: The user which was removed """ - url = '/'.join((self._make_url(group_id), 'users', user_id)) + url = "/".join((self._make_url(group_id), "users", user_id)) return self._delete(url=url) def delete_group_role(self, group_id, role_id): @@ -202,5 +194,5 @@ def delete_group_role(self, group_id, role_id): :rtype: dict :return: The role which was removed """ - url = '/'.join((self._make_url(group_id), 'roles', role_id)) + url = 
"/".join((self._make_url(group_id), "roles", role_id)) return self._delete(url=url) diff --git a/bioblend/galaxy/histories/__init__.py b/bioblend/galaxy/histories/__init__.py index 0ff1d7b82..b69ca6b24 100644 --- a/bioblend/galaxy/histories/__init__.py +++ b/bioblend/galaxy/histories/__init__.py @@ -18,7 +18,7 @@ class HistoryClient(Client): - module = 'histories' + module = "histories" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -35,7 +35,7 @@ def create_history(self, name=None): """ payload = {} if name is not None: - payload['name'] = name + payload["name"] = name return self._post(payload) def import_history(self, file_path=None, url=None): @@ -49,9 +49,9 @@ def import_history(self, file_path=None, url=None): """ if file_path: archive_file = attach_file(file_path) - payload = dict(archive_source='', archive_file=archive_file, archive_type="file") + payload = dict(archive_source="", archive_file=archive_file, archive_type="file") else: - payload = dict(archive_source=url, archive_type='url') + payload = dict(archive_source=url, archive_type="url") return self._post(payload=payload, files_attached=file_path is not None) @@ -63,20 +63,20 @@ def _get_histories(self, name=None, deleted=False, filter_user_published=None, g params = {} if deleted: - params.setdefault('q', []).append('deleted') - params.setdefault('qv', []).append(deleted) + params.setdefault("q", []).append("deleted") + params.setdefault("qv", []).append(deleted) if filter_user_published is not None: - params.setdefault('q', []).append('published') - params.setdefault('qv', []).append(filter_user_published) + params.setdefault("q", []).append("published") + params.setdefault("qv", []).append(filter_user_published) if slug is not None: - params.setdefault('q', []).append('slug') - params.setdefault('qv', []).append(slug) + params.setdefault("q", []).append("slug") + params.setdefault("qv", []).append(slug) - url = '/'.join((self._make_url(), 'published')) if get_all_published else None + url = "/".join((self._make_url(), "published")) if get_all_published else None histories = self._get(url=url, params=params) if name is not None: - histories = [_ for _ in histories if _['name'] == name] + histories = [_ for _ in histories if _["name"] == name] return histories def get_histories(self, history_id=None, name=None, deleted=False, published=None, slug=None): @@ -110,9 +110,11 @@ def get_histories(self, history_id=None, name=None, deleted=False, published=Non """ if history_id is not None: raise ValueError( - 'The history_id parameter has been removed, use the show_history() method to view details of a history for which you know the ID.', + "The history_id parameter has been removed, use the show_history() method to view details of a history for which you know the ID.", ) - return self._get_histories(name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug) + return self._get_histories( + name=name, deleted=deleted, filter_user_published=published, get_all_published=False, slug=slug + ) def get_published_histories(self, name=None, deleted=False, slug=None): """ @@ -132,7 +134,9 @@ def get_published_histories(self, name=None, deleted=False, slug=None): :rtype: list :return: List of history dicts. 
""" - return self._get_histories(name=name, deleted=deleted, filter_user_published=None, get_all_published=True, slug=slug) + return self._get_histories( + name=name, deleted=deleted, filter_user_published=None, get_all_published=True, slug=slug + ) def show_history(self, history_id, contents=False, deleted=None, visible=None, details=None, types=None): """ @@ -181,13 +185,13 @@ def show_history(self, history_id, contents=False, deleted=None, visible=None, d params = {} if contents: if details: - params['details'] = details + params["details"] = details if deleted is not None: - params['deleted'] = deleted + params["deleted"] = deleted if visible is not None: - params['visible'] = visible + params["visible"] = visible if types is not None: - params['types'] = types + params["types"] = types return self._get(id=history_id, contents=contents, params=params) def delete_dataset(self, history_id, dataset_id, purge=False): @@ -211,10 +215,10 @@ def delete_dataset(self, history_id, dataset_id, purge=False): ``allow_user_dataset_purge`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ - url = '/'.join((self._make_url(history_id, contents=True), dataset_id)) + url = "/".join((self._make_url(history_id, contents=True), dataset_id)) payload = {} if purge is True: - payload['purge'] = purge + payload["purge"] = purge self._delete(payload=payload, url=url) def delete_dataset_collection(self, history_id, dataset_collection_id): @@ -230,7 +234,7 @@ def delete_dataset_collection(self, history_id, dataset_collection_id): :rtype: None :return: None """ - url = '/'.join((self._make_url(history_id, contents=True), 'dataset_collections', dataset_collection_id)) + url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) self._delete(url=url) def show_dataset(self, history_id, dataset_id): @@ -246,7 +250,7 @@ def show_dataset(self, history_id, dataset_id): :rtype: dict :return: Information about the dataset """ - url = '/'.join((self._make_url(history_id, contents=True), dataset_id)) + url = "/".join((self._make_url(history_id, contents=True), dataset_id)) return self._get(url=url) def show_dataset_collection(self, history_id, dataset_collection_id): @@ -262,7 +266,7 @@ def show_dataset_collection(self, history_id, dataset_collection_id): :rtype: dict :return: Information about the dataset collection """ - url = '/'.join((self._make_url(history_id, contents=True), 'dataset_collections', dataset_collection_id)) + url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) return self._get(url=url) def show_matching_datasets(self, history_id, name_filter=None): @@ -282,10 +286,12 @@ def show_matching_datasets(self, history_id, name_filter=None): :return: List of dictionaries """ if isinstance(name_filter, str): - name_filter = re.compile(name_filter + '$') - return [self.show_dataset(history_id, h['id']) - for h in self.show_history(history_id, contents=True) - if name_filter is None or name_filter.match(h['name'])] + name_filter = re.compile(name_filter + "$") + return [ + self.show_dataset(history_id, h["id"]) + for h in self.show_history(history_id, contents=True) + if name_filter is None or name_filter.match(h["name"]) + ] def show_dataset_provenance(self, history_id, dataset_id, follow=False): """ @@ -322,7 +328,7 @@ def show_dataset_provenance(self, history_id, dataset_id, follow=False): 'tool_id': 'toolshed.g2.bx.psu.edu/repos/ziru-zhou/macs2/modencode_peakcalling_macs2/2.0.10.2', 
'uuid': '5c0c43f5-8d93-44bd-939d-305e82f213c6'} """ - url = '/'.join((self._make_url(history_id, contents=True), dataset_id, 'provenance')) + url = "/".join((self._make_url(history_id, contents=True), dataset_id, "provenance")) return self._get(url=url) def update_history(self, history_id, **kwds): @@ -401,7 +407,7 @@ def update_dataset(self, history_id, dataset_id, **kwds): .. versionchanged:: 0.8.0 Changed the return value from the status code (type int) to a dict. """ - url = '/'.join((self._make_url(history_id, contents=True), dataset_id)) + url = "/".join((self._make_url(history_id, contents=True), dataset_id)) return self._put(payload=kwds, url=url) def update_dataset_collection(self, history_id, dataset_collection_id, **kwds): @@ -431,7 +437,7 @@ def update_dataset_collection(self, history_id, dataset_collection_id, **kwds): .. versionchanged:: 0.8.0 Changed the return value from the status code (type int) to a dict. """ - url = '/'.join((self._make_url(history_id, contents=True), 'dataset_collections', dataset_collection_id)) + url = "/".join((self._make_url(history_id, contents=True), "dataset_collections", dataset_collection_id)) return self._put(payload=kwds, url=url) def create_history_tag(self, history_id, tag): @@ -455,7 +461,7 @@ def create_history_tag(self, history_id, tag): """ # empty payload since we are adding the new tag using the url payload = {} - url = '/'.join((self._make_url(history_id), 'tags', tag)) + url = "/".join((self._make_url(history_id), "tags", tag)) return self._post(payload, url=url) def upload_dataset_from_library(self, history_id, lib_dataset_id): @@ -474,9 +480,9 @@ def upload_dataset_from_library(self, history_id, lib_dataset_id): :return: Information about the newly created HDA """ payload = { - 'content': lib_dataset_id, - 'source': 'library', - 'from_ld_id': lib_dataset_id, # compatibility with old API + "content": lib_dataset_id, + "source": "library", + "from_ld_id": lib_dataset_id, # compatibility with old API } return self._post(payload, id=history_id, contents=True) @@ -538,7 +544,7 @@ def delete_history(self, history_id, purge=False): """ payload = {} if purge is True: - payload['purge'] = purge + payload["purge"] = purge return self._delete(payload=payload, id=history_id) def undelete_history(self, history_id): @@ -551,7 +557,7 @@ def undelete_history(self, history_id): :rtype: str :return: 'OK' if it was deleted """ - url = self._make_url(history_id, deleted=True) + '/undelete' + url = self._make_url(history_id, deleted=True) + "/undelete" return self._post(url=url) def get_status(self, history_id): @@ -569,14 +575,14 @@ def get_status(self, history_id): """ state = {} history = self.show_history(history_id) - state['state'] = history['state'] - if history.get('state_details') is not None: - state['state_details'] = history['state_details'] - total_complete = sum(history['state_details'].values()) + state["state"] = history["state"] + if history.get("state_details") is not None: + state["state_details"] = history["state_details"] + total_complete = sum(history["state_details"].values()) if total_complete > 0: - state['percent_complete'] = 100 * history['state_details']['ok'] / total_complete + state["percent_complete"] = 100 * history["state_details"]["ok"] / total_complete else: - state['percent_complete'] = 0 + state["percent_complete"] = 0 return state def get_most_recently_used_history(self): @@ -586,11 +592,12 @@ def get_most_recently_used_history(self): :rtype: dict :return: History representation """ - url = self._make_url() + 
'/most_recently_used' + url = self._make_url() + "/most_recently_used" return self._get(url=url) - def export_history(self, history_id, gzip=True, include_hidden=False, - include_deleted=False, wait=False, maxwait=None): + def export_history( + self, history_id, gzip=True, include_hidden=False, include_deleted=False, wait=False, maxwait=None + ): """ Start a job to create an export archive for the given history. @@ -628,9 +635,9 @@ def export_history(self, history_id, gzip=True, include_hidden=False, else: maxwait = 0 params = { - 'gzip': gzip, - 'include_hidden': include_hidden, - 'include_deleted': include_deleted, + "gzip": gzip, + "include_hidden": include_hidden, + "include_deleted": include_deleted, } url = f"{self._make_url(history_id)}/exports" time_left = maxwait @@ -640,20 +647,23 @@ def export_history(self, history_id, gzip=True, include_hidden=False, except ConnectionError as e: if e.status_code == 202: # export is not ready if time_left > 0: - log.info("Waiting for the export of history %s to complete. Will wait %i more s", history_id, time_left) + log.info( + "Waiting for the export of history %s to complete. Will wait %i more s", + history_id, + time_left, + ) time.sleep(1) time_left -= 1 else: - return '' + return "" else: raise else: break - jeha_id = r['download_url'].rsplit('/', 1)[-1] + jeha_id = r["download_url"].rsplit("/", 1)[-1] return jeha_id - def download_history(self, history_id, jeha_id, outf, - chunk_size=bioblend.CHUNK_SIZE): + def download_history(self, history_id, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE): """ Download a history export archive. Use :meth:`export_history` to create an export. @@ -680,7 +690,7 @@ def download_history(self, history_id, jeha_id, outf, for chunk in r.iter_content(chunk_size): outf.write(chunk) - def copy_dataset(self, history_id, dataset_id, source='hda'): + def copy_dataset(self, history_id, dataset_id, source="hda"): """ Copy a dataset to a history. @@ -698,7 +708,7 @@ def copy_dataset(self, history_id, dataset_id, source='hda'): """ return self.copy_content(history_id, dataset_id, source) - def copy_content(self, history_id, content_id, source='hda'): + def copy_content(self, history_id, content_id, source="hda"): """ Copy existing content (e.g. a dataset) to a history. @@ -719,9 +729,9 @@ def copy_content(self, history_id, content_id, source='hda'): """ payload = { - 'content': content_id, - 'source': source, - 'type': 'dataset' if source != 'hdca' else 'dataset_collection', + "content": content_id, + "source": source, + "type": "dataset" if source != "hdca" else "dataset_collection", } url = self._make_url(history_id, contents=True) @@ -765,5 +775,5 @@ def get_extra_files(self, history_id: str, dataset_id: str) -> List[dict]: .. note:: This method works only on Galaxy 19.01 or later. 
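A hedged sketch of the export/download pair reformatted in this file; history_id is a placeholder and the output filename is arbitrary.

    # Start the export and wait up to 10 minutes for the archive to be built.
    jeha_id = gi.histories.export_history(history_id, wait=True, maxwait=600)
    # Stream the finished archive to a local file.
    with open("history_export.tar.gz", "wb") as outf:
        gi.histories.download_history(history_id, jeha_id, outf)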
""" - url = '/'.join((self._make_url(history_id, contents=True), dataset_id, 'extra_files')) + url = "/".join((self._make_url(history_id, contents=True), dataset_id, "extra_files")) return self._get(url=url) diff --git a/bioblend/galaxy/invocations/__init__.py b/bioblend/galaxy/invocations/__init__.py index 62ccd6e4e..073951175 100644 --- a/bioblend/galaxy/invocations/__init__.py +++ b/bioblend/galaxy/invocations/__init__.py @@ -3,9 +3,7 @@ """ import logging import time -from typing import ( - Optional, -) +from typing import Optional from bioblend import ( CHUNK_SIZE, @@ -15,19 +13,26 @@ log = logging.getLogger(__name__) -INVOCATION_TERMINAL_STATES = {'cancelled', 'failed', 'scheduled'} +INVOCATION_TERMINAL_STATES = {"cancelled", "failed", "scheduled"} # Invocation non-terminal states are: 'new', 'ready' class InvocationClient(Client): - module = 'invocations' + module = "invocations" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) - def get_invocations(self, workflow_id=None, history_id=None, user_id=None, - include_terminal=True, limit=None, view='collection', - step_details=False): + def get_invocations( + self, + workflow_id=None, + history_id=None, + user_id=None, + include_terminal=True, + limit=None, + view="collection", + step_details=False, + ): """ Get all workflow invocations, or select a subset by specifying optional arguments for filtering (e.g. a workflow ID). @@ -69,19 +74,15 @@ def get_invocations(self, workflow_id=None, history_id=None, user_id=None, 'uuid': 'c8aa2b1c-801a-11e5-a9e5-8ca98228593c', 'workflow_id': '03501d7626bd192f'}] """ - params = { - 'include_terminal': include_terminal, - 'view': view, - 'step_details': step_details - } + params = {"include_terminal": include_terminal, "view": view, "step_details": step_details} if workflow_id: - params['workflow_id'] = workflow_id + params["workflow_id"] = workflow_id if history_id: - params['history_id'] = history_id + params["history_id"] = history_id if user_id: - params['user_id'] = user_id + params["user_id"] = user_id if limit is not None: - params['limit'] = limit + params["limit"] = limit return self._get(params=params) def show_invocation(self, invocation_id): @@ -132,11 +133,19 @@ def show_invocation(self, invocation_id): url = self._make_url(invocation_id) return self._get(url=url) - def rerun_invocation(self, invocation_id: str, inputs_update: Optional[dict] = None, - params_update: Optional[dict] = None, history_id: Optional[str] = None, - history_name: Optional[str] = None, import_inputs_to_history: bool = False, - replacement_params: Optional[dict] = None, allow_tool_state_corrections: bool = False, - inputs_by: Optional[str] = None, parameters_normalized: bool = False): + def rerun_invocation( + self, + invocation_id: str, + inputs_update: Optional[dict] = None, + params_update: Optional[dict] = None, + history_id: Optional[str] = None, + history_name: Optional[str] = None, + import_inputs_to_history: bool = False, + replacement_params: Optional[dict] = None, + allow_tool_state_corrections: bool = False, + inputs_by: Optional[str] = None, + parameters_normalized: bool = False, + ): """ Rerun a workflow invocation. For more extensive documentation of all parameters, see the ``gi.workflows.invoke_workflow()`` method. @@ -197,33 +206,33 @@ def rerun_invocation(self, invocation_id: str, inputs_update: Optional[dict] = N This method works only on Galaxy 21.01 or later. 
""" invocation_details = self.show_invocation(invocation_id) - workflow_id = invocation_details['workflow_id'] - inputs = invocation_details['inputs'] - wf_params = invocation_details['input_step_parameters'] + workflow_id = invocation_details["workflow_id"] + inputs = invocation_details["inputs"] + wf_params = invocation_details["input_step_parameters"] if inputs_update: for inp, input_value in inputs_update.items(): inputs[inp] = input_value if params_update: for param, param_value in params_update.items(): wf_params[param] = param_value - payload = {'inputs': inputs, 'params': wf_params} + payload = {"inputs": inputs, "params": wf_params} if replacement_params: - payload['replacement_params'] = replacement_params + payload["replacement_params"] = replacement_params if history_id: - payload['history'] = f'hist_id={history_id}' + payload["history"] = f"hist_id={history_id}" elif history_name: - payload['history'] = history_name + payload["history"] = history_name if not import_inputs_to_history: - payload['no_add_to_history'] = True + payload["no_add_to_history"] = True if allow_tool_state_corrections: - payload['allow_tool_state_corrections'] = allow_tool_state_corrections + payload["allow_tool_state_corrections"] = allow_tool_state_corrections if inputs_by is not None: - payload['inputs_by'] = inputs_by + payload["inputs_by"] = inputs_by if parameters_normalized: - payload['parameters_normalized'] = parameters_normalized - api_params = {'instance': True} - url = '/'.join((self.gi.url, 'workflows', workflow_id, 'invocations')) + payload["parameters_normalized"] = parameters_normalized + api_params = {"instance": True} + url = "/".join((self.gi.url, "workflows", workflow_id, "invocations")) return self.gi.make_post_request(url=url, payload=payload, params=api_params) def cancel_invocation(self, invocation_id): @@ -268,7 +277,7 @@ def show_invocation_step(self, invocation_id, step_id): return self._get(url=url) def run_invocation_step_action(self, invocation_id, step_id, action): - """ Execute an action for an active workflow invocation step. The + """Execute an action for an active workflow invocation step. The nature of this action and what is expected will vary based on the the type of workflow step (the only currently valid action is True/False for pause steps). 
@@ -307,7 +316,7 @@ def get_invocation_summary(self, invocation_id): 'id': 'a799d38679e985db', 'populated_state': 'ok'} """ - url = self._make_url(invocation_id) + '/jobs_summary' + url = self._make_url(invocation_id) + "/jobs_summary" return self._get(url=url) def get_invocation_step_jobs_summary(self, invocation_id): @@ -335,7 +344,7 @@ def get_invocation_step_jobs_summary(self, invocation_id): 'populated_state': 'ok', 'states': {'new': 1}}] """ - url = self._make_url(invocation_id) + '/step_jobs_summary' + url = self._make_url(invocation_id) + "/step_jobs_summary" return self._get(url=url) def get_invocation_report(self, invocation_id): @@ -356,7 +365,7 @@ def get_invocation_report(self, invocation_id): 'render_format': 'markdown', 'workflows': {'f2db41e1fa331b3e': {'name': 'Example workflow'}}} """ - url = self._make_url(invocation_id) + '/report' + url = self._make_url(invocation_id) + "/report" return self._get(url=url) def get_invocation_report_pdf(self, invocation_id, file_path, chunk_size=CHUNK_SIZE): @@ -369,11 +378,13 @@ def get_invocation_report_pdf(self, invocation_id, file_path, chunk_size=CHUNK_S :type file_path: str :param file_path: Path to save the report """ - url = self._make_url(invocation_id) + '/report.pdf' + url = self._make_url(invocation_id) + "/report.pdf" r = self.gi.make_get_request(url, stream=True) if r.status_code != 200: - raise Exception("Failed to get the PDF report, the necessary dependencies may not be installed on the Galaxy server.") - with open(file_path, 'wb') as outf: + raise Exception( + "Failed to get the PDF report, the necessary dependencies may not be installed on the Galaxy server." + ) + with open(file_path, "wb") as outf: for chunk in r.iter_content(chunk_size): outf.write(chunk) @@ -387,7 +398,7 @@ def get_invocation_biocompute_object(self, invocation_id): :rtype: dict :return: The BioCompute object """ - url = self._make_url(invocation_id) + '/biocompute' + url = self._make_url(invocation_id) + "/biocompute" return self._get(url=url) def wait_for_invocation(self, invocation_id, maxwait=12000, interval=3, check=True): @@ -418,9 +429,9 @@ def wait_for_invocation(self, invocation_id, maxwait=12000, interval=3, check=Tr time_left = maxwait while True: invocation = self.gi.invocations.show_invocation(invocation_id) - state = invocation['state'] + state = invocation["state"] if state in INVOCATION_TERMINAL_STATES: - if check and state != 'scheduled': + if check and state != "scheduled": raise Exception(f"Invocation {invocation_id} is in terminal state {state}") return invocation if time_left > 0: @@ -428,10 +439,12 @@ def wait_for_invocation(self, invocation_id, maxwait=12000, interval=3, check=Tr time.sleep(min(time_left, interval)) time_left -= interval else: - raise TimeoutException(f"Invocation {invocation_id} is still in non-terminal state {state} after {maxwait} s") + raise TimeoutException( + f"Invocation {invocation_id} is still in non-terminal state {state} after {maxwait} s" + ) def _invocation_step_url(self, invocation_id, step_id): - return '/'.join((self._make_url(invocation_id), "steps", step_id)) + return "/".join((self._make_url(invocation_id), "steps", step_id)) -__all__ = ('InvocationClient',) +__all__ = ("InvocationClient",) diff --git a/bioblend/galaxy/jobs/__init__.py b/bioblend/galaxy/jobs/__init__.py index da2e7aaeb..14e703b3b 100644 --- a/bioblend/galaxy/jobs/__init__.py +++ b/bioblend/galaxy/jobs/__init__.py @@ -13,20 +13,32 @@ log = logging.getLogger(__name__) -JOB_TERMINAL_STATES = {'deleted', 'error', 'ok'} 
+JOB_TERMINAL_STATES = {"deleted", "error", "ok"} # Job non-terminal states are: 'deleted_new', 'failed', 'new', 'paused', # 'queued', 'resubmitted', 'running', 'upload', 'waiting' class JobsClient(Client): - module = 'jobs' + module = "jobs" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) - def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None, - workflow_id=None, user_id=None, date_range_min=None, date_range_max=None, - limit=500, offset=0, user_details=False, order_by=None): + def get_jobs( + self, + state=None, + history_id=None, + invocation_id=None, + tool_id=None, + workflow_id=None, + user_id=None, + date_range_min=None, + date_range_max=None, + limit=500, + offset=0, + user_details=False, + order_by=None, + ): """ Get all jobs, or select a subset by specifying optional arguments for filtering (e.g. a state). @@ -100,30 +112,27 @@ def get_jobs(self, state=None, history_id=None, invocation_id=None, tool_id=None The following options work only on Galaxy 21.05 or later: ``user_id``, ``limit``, ``offset``, ``workflow_id``, ``invocation_id``. """ - params = { - 'limit': limit, - 'offset': offset - } + params = {"limit": limit, "offset": offset} if state: - params['state'] = state + params["state"] = state if history_id: - params['history_id'] = history_id + params["history_id"] = history_id if invocation_id: - params['invocation_id'] = invocation_id + params["invocation_id"] = invocation_id if tool_id: - params['tool_id'] = tool_id + params["tool_id"] = tool_id if workflow_id: - params['workflow_id'] = workflow_id + params["workflow_id"] = workflow_id if user_id: - params['user_id'] = user_id + params["user_id"] = user_id if date_range_min: - params['date_range_min'] = date_range_min + params["date_range_min"] = date_range_min if date_range_max: - params['date_range_max'] = date_range_max + params["date_range_max"] = date_range_max if user_details: - params['user_details'] = user_details + params["user_details"] = user_details if order_by: - params['order_by'] = order_by + params["order_by"] = order_by return self._get(params=params) def show_job(self, job_id, full_details=False): @@ -157,7 +166,7 @@ def show_job(self, job_id, full_details=False): """ params = {} if full_details: - params['full'] = full_details + params["full"] = full_details return self._get(id=job_id, params=params) @@ -172,7 +181,7 @@ def _build_for_rerun(self, job_id): :return: A description of the given job, with all parameters required to rerun. """ - url = '/'.join((self._make_url(job_id), 'build_for_rerun')) + url = "/".join((self._make_url(job_id), "build_for_rerun")) return self._get(url=url) def rerun_job(self, job_id, remap=False, tool_inputs_update=None, history_id=None): @@ -207,12 +216,12 @@ def rerun_job(self, job_id, remap=False, tool_inputs_update=None, history_id=Non This method works only on Galaxy 21.01 or later. 
""" job_rerun_params = self._build_for_rerun(job_id) - job_inputs = job_rerun_params['state_inputs'] + job_inputs = job_rerun_params["state_inputs"] if remap: - if not job_rerun_params['job_remap']: - raise ValueError('remap was set to True, but this job is not remappable.') - job_inputs['rerun_remap_job_id'] = job_id + if not job_rerun_params["job_remap"]: + raise ValueError("remap was set to True, but this job is not remappable.") + job_inputs["rerun_remap_job_id"] = job_id def update_inputs(inputs, tool_inputs_update): # recursively update inputs with tool_inputs_update @@ -226,12 +235,12 @@ def update_inputs(inputs, tool_inputs_update): if tool_inputs_update: update_inputs(job_inputs, tool_inputs_update) - url = '/'.join((self.gi.url, 'tools')) + url = "/".join((self.gi.url, "tools")) payload = { - "history_id": history_id if history_id else job_rerun_params['history_id'], - "tool_id": job_rerun_params['id'], + "history_id": history_id if history_id else job_rerun_params["history_id"], + "tool_id": job_rerun_params["id"], "inputs": job_inputs, - "input_format": '21.01' + "input_format": "21.01", } return self._post(url=url, payload=payload) @@ -249,7 +258,7 @@ def get_state(self, job_id: str) -> str: .. versionadded:: 0.5.3 """ - return self.show_job(job_id).get('state', '') + return self.show_job(job_id).get("state", "") def search_jobs(self, tool_id: str, inputs: dict, state: Optional[str] = None) -> List[dict]: """ @@ -280,12 +289,12 @@ def search_jobs(self, tool_id: str, inputs: dict, state: Optional[str] = None) - This method works only on Galaxy 18.01 or later. """ job_info = { - 'tool_id': tool_id, - 'inputs': inputs, + "tool_id": tool_id, + "inputs": inputs, } if state: - job_info['state'] = state - url = self._make_url() + '/search' + job_info["state"] = state + url = self._make_url() + "/search" return self._post(url=url, payload=job_info) def get_metrics(self, job_id: str) -> List[dict]: @@ -305,7 +314,7 @@ def get_metrics(self, job_id: str) -> List[dict]: ``expose_potentially_sensitive_job_metrics`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ - url = self._make_url(module_id=job_id) + '/metrics' + url = self._make_url(module_id=job_id) + "/metrics" return self._get(url=url) def cancel_job(self, job_id: str): @@ -351,7 +360,7 @@ def report_error(self, job_id: str, dataset_id: str, message: str, email: str = if email is not None: payload["email"] = email - url = self._make_url(module_id=job_id) + '/error' + url = self._make_url(module_id=job_id) + "/error" return self._post(url=url, payload=payload) def get_common_problems(self, job_id: str) -> dict: @@ -368,7 +377,7 @@ def get_common_problems(self, job_id: str) -> dict: .. note:: This method works only on Galaxy 19.05 or later. 
""" - url = self._make_url(module_id=job_id) + '/common_problems' + url = self._make_url(module_id=job_id) + "/common_problems" return self._get(url=url) def get_inputs(self, job_id: str) -> List[dict]: @@ -381,7 +390,7 @@ def get_inputs(self, job_id: str) -> List[dict]: :rtype: list of dicts :return: Inputs for the given job """ - url = self._make_url(module_id=job_id) + '/inputs' + url = self._make_url(module_id=job_id) + "/inputs" return self._get(url=url) def get_outputs(self, job_id: str) -> List[dict]: @@ -394,7 +403,7 @@ def get_outputs(self, job_id: str) -> List[dict]: :rtype: list of dicts :return: Outputs of the given job """ - url = self._make_url(module_id=job_id) + '/outputs' + url = self._make_url(module_id=job_id) + "/outputs" return self._get(url=url) def resume_job(self, job_id: str) -> dict: @@ -410,7 +419,7 @@ def resume_job(self, job_id: str) -> dict: .. note:: This method works only on Galaxy 18.09 or later. """ - url = self._make_url(module_id=job_id) + '/resume' + url = self._make_url(module_id=job_id) + "/resume" return self._put(url=url) def get_destination_params(self, job_id: str) -> dict: @@ -428,7 +437,7 @@ def get_destination_params(self, job_id: str) -> dict: This method works only on Galaxy 20.05 or later and if the user is a Galaxy admin. """ - url = self._make_url(module_id=job_id) + '/destination_params' + url = self._make_url(module_id=job_id) + "/destination_params" return self._get(url=url) def show_job_lock(self) -> bool: @@ -443,9 +452,9 @@ def show_job_lock(self) -> bool: This method works only on Galaxy 20.05 or later and if the user is a Galaxy admin. """ - url = self.gi.url + '/job_lock' + url = self.gi.url + "/job_lock" response = self._get(url=url) - return response['active'] + return response["active"] def update_job_lock(self, active=False) -> bool: """ @@ -461,11 +470,11 @@ def update_job_lock(self, active=False) -> bool: Galaxy admin. """ payload = { - 'active': active, + "active": active, } - url = self.gi.url + '/job_lock' + url = self.gi.url + "/job_lock" response = self._put(url=url, payload=payload) - return response['active'] + return response["active"] def wait_for_job(self, job_id, maxwait=12000, interval=3, check=True): """ @@ -494,9 +503,9 @@ def wait_for_job(self, job_id, maxwait=12000, interval=3, check=True): time_left = maxwait while True: job = self.show_job(job_id) - state = job['state'] + state = job["state"] if state in JOB_TERMINAL_STATES: - if check and state != 'ok': + if check and state != "ok": raise Exception(f"Job {job_id} is in terminal state {state}") return job if time_left > 0: diff --git a/bioblend/galaxy/libraries/__init__.py b/bioblend/galaxy/libraries/__init__.py index 8664e9e52..03be52640 100644 --- a/bioblend/galaxy/libraries/__init__.py +++ b/bioblend/galaxy/libraries/__init__.py @@ -16,7 +16,7 @@ class LibraryClient(Client): - module = 'libraries' + module = "libraries" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -42,11 +42,11 @@ def create_library(self, name, description=None, synopsis=None): 'name': 'Library from bioblend', 'url': '/api/libraries/f740ab636b360a70'} """ - payload = {'name': name} + payload = {"name": name} if description: - payload['description'] = description + payload["description"] = description if synopsis: - payload['synopsis'] = synopsis + payload["synopsis"] = synopsis return self._post(payload) def delete_library(self, library_id): @@ -70,7 +70,7 @@ def _show_item(self, library_id, item_id): """ Get details about a given library item. 
""" - url = '/'.join((self._make_url(library_id, contents=True), item_id)) + url = "/".join((self._make_url(library_id, contents=True), item_id)) return self._get(url=url) def delete_library_dataset(self, library_id, dataset_id, purged=False): @@ -95,8 +95,8 @@ def delete_library_dataset(self, library_id, dataset_id, purged=False): {'deleted': True, 'id': '60e680a037f41974'} """ - url = '/'.join((self._make_url(library_id, contents=True), dataset_id)) - return self._delete(payload={'purged': purged}, url=url) + url = "/".join((self._make_url(library_id, contents=True), dataset_id)) + return self._delete(payload={"purged": purged}, url=url) def update_library_dataset(self, dataset_id, **kwds): """ @@ -124,7 +124,7 @@ def update_library_dataset(self, dataset_id, **kwds): :rtype: dict :return: details of the updated dataset """ - url = '/'.join((self._make_url(), 'datasets', dataset_id)) + url = "/".join((self._make_url(), "datasets", dataset_id)) return self._patch(payload=kwds, url=url) def show_dataset(self, library_id, dataset_id): @@ -173,15 +173,23 @@ def wait_for_dataset(self, library_id, dataset_id, maxwait=12000, interval=3): time_left = maxwait while True: dataset = self.show_dataset(library_id, dataset_id) - state = dataset['state'] + state = dataset["state"] if state in TERMINAL_STATES: return dataset if time_left > 0: - log.info("Dataset %s in library %s is in non-terminal state %s. Will wait %i more s", dataset_id, library_id, state, time_left) + log.info( + "Dataset %s in library %s is in non-terminal state %s. Will wait %i more s", + dataset_id, + library_id, + state, + time_left, + ) time.sleep(min(time_left, interval)) time_left -= interval else: - raise DatasetTimeoutException(f"Waited too long for dataset {dataset_id} in library {library_id} to complete") + raise DatasetTimeoutException( + f"Waited too long for dataset {dataset_id} in library {library_id} to complete" + ) def show_folder(self, library_id, folder_id): """ @@ -207,7 +215,7 @@ def _get_root_folder_id(self, library_id): :param library_id: library id to find root of """ l = self.show_library(library_id=library_id) - return l['root_folder_id'] + return l["root_folder_id"] def create_folder(self, library_id, folder_name, description=None, base_folder_id=None): """ @@ -234,11 +242,11 @@ def create_folder(self, library_id, folder_name, description=None, base_folder_i base_folder_id = self._get_root_folder_id(library_id) # Compose the payload payload = {} - payload['name'] = folder_name - payload['folder_id'] = base_folder_id - payload['create_type'] = 'folder' + payload["name"] = folder_name + payload["folder_id"] = base_folder_id + payload["create_type"] = "folder" if description is not None: - payload['description'] = description + payload["description"] = description return self._post(payload, id=library_id, contents=True) def get_folders(self, library_id, folder_id=None, name=None): @@ -266,19 +274,19 @@ def get_folders(self, library_id, folder_id=None, name=None): """ if folder_id is not None: warnings.warn( - 'The folder_id parameter is deprecated, use the show_folder() method to view details of a folder for which you know the ID.', - category=FutureWarning + "The folder_id parameter is deprecated, use the show_folder() method to view details of a folder for which you know the ID.", + category=FutureWarning, ) if folder_id is not None and name is not None: - raise ValueError('Provide only one argument between name or folder_id, but not both') + raise ValueError("Provide only one argument between name or 
folder_id, but not both") library_contents = self.show_library(library_id=library_id, contents=True) if folder_id is not None: - folder = next((_ for _ in library_contents if _['type'] == 'folder' and _['id'] == folder_id), None) + folder = next((_ for _ in library_contents if _["type"] == "folder" and _["id"] == folder_id), None) folders = [folder] if folder is not None else [] elif name is not None: - folders = [_ for _ in library_contents if _['type'] == 'folder' and _['name'] == name] + folders = [_ for _ in library_contents if _["type"] == "folder" and _["name"] == name] else: - folders = [_ for _ in library_contents if _['type'] == 'folder'] + folders = [_ for _ in library_contents if _["type"] == "folder"] return folders def get_libraries(self, library_id=None, name=None, deleted=False): @@ -306,17 +314,17 @@ def get_libraries(self, library_id=None, name=None, deleted=False): """ if library_id is not None: warnings.warn( - 'The library_id parameter is deprecated, use the show_library() method to view details of a library for which you know the ID.', - category=FutureWarning + "The library_id parameter is deprecated, use the show_library() method to view details of a library for which you know the ID.", + category=FutureWarning, ) if library_id is not None and name is not None: - raise ValueError('Provide only one argument between name or library_id, but not both') + raise ValueError("Provide only one argument between name or library_id, but not both") libraries = self._get(params={"deleted": deleted}) if library_id is not None: - library = next((_ for _ in libraries if _['id'] == library_id), None) + library = next((_ for _ in libraries if _["id"] == library_id), None) libraries = [library] if library is not None else [] if name is not None: - libraries = [_ for _ in libraries if _['name'] == name] + libraries = [_ for _ in libraries if _["name"] == name] return libraries def show_library(self, library_id, contents=False): @@ -341,52 +349,49 @@ def _do_upload(self, library_id, **keywords): This method should not be called directly but instead refer to the methods specific for the desired type of data upload. 
""" - folder_id = keywords.get('folder_id', None) + folder_id = keywords.get("folder_id", None) if folder_id is None: folder_id = self._get_root_folder_id(library_id) files_attached = False # Compose the payload dict payload = {} - payload['folder_id'] = folder_id - payload['file_type'] = keywords.get('file_type', 'auto') - payload['dbkey'] = keywords.get('dbkey', '?') - payload['create_type'] = 'file' + payload["folder_id"] = folder_id + payload["file_type"] = keywords.get("file_type", "auto") + payload["dbkey"] = keywords.get("dbkey", "?") + payload["create_type"] = "file" if keywords.get("roles", None): payload["roles"] = keywords["roles"] - if keywords.get("link_data_only", None) and keywords['link_data_only'] != 'copy_files': - payload["link_data_only"] = 'link_to_files' - payload['tag_using_filenames'] = keywords.get('tag_using_filenames', False) - if keywords.get('tags'): - payload['tags'] = keywords['tags'] - payload['preserve_dirs'] = keywords.get('preserve_dirs', False) + if keywords.get("link_data_only", None) and keywords["link_data_only"] != "copy_files": + payload["link_data_only"] = "link_to_files" + payload["tag_using_filenames"] = keywords.get("tag_using_filenames", False) + if keywords.get("tags"): + payload["tags"] = keywords["tags"] + payload["preserve_dirs"] = keywords.get("preserve_dirs", False) # upload options - if keywords.get('file_url', None) is not None: - payload['upload_option'] = 'upload_file' - payload['files_0|url_paste'] = keywords['file_url'] - elif keywords.get('pasted_content', None) is not None: - payload['upload_option'] = 'upload_file' - payload['files_0|url_paste'] = keywords['pasted_content'] - elif keywords.get('server_dir', None) is not None: - payload['upload_option'] = 'upload_directory' - payload['server_dir'] = keywords['server_dir'] - elif keywords.get('file_local_path', None) is not None: - payload['upload_option'] = 'upload_file' - payload['files_0|file_data'] = attach_file(keywords['file_local_path']) + if keywords.get("file_url", None) is not None: + payload["upload_option"] = "upload_file" + payload["files_0|url_paste"] = keywords["file_url"] + elif keywords.get("pasted_content", None) is not None: + payload["upload_option"] = "upload_file" + payload["files_0|url_paste"] = keywords["pasted_content"] + elif keywords.get("server_dir", None) is not None: + payload["upload_option"] = "upload_directory" + payload["server_dir"] = keywords["server_dir"] + elif keywords.get("file_local_path", None) is not None: + payload["upload_option"] = "upload_file" + payload["files_0|file_data"] = attach_file(keywords["file_local_path"]) files_attached = True elif keywords.get("filesystem_paths", None) is not None: payload["upload_option"] = "upload_paths" payload["filesystem_paths"] = keywords["filesystem_paths"] try: - return self._post(payload, id=library_id, contents=True, - files_attached=files_attached) + return self._post(payload, id=library_id, contents=True, files_attached=files_attached) finally: - if payload.get('files_0|file_data', None) is not None: - payload['files_0|file_data'].close() + if payload.get("files_0|file_data", None) is not None: + payload["files_0|file_data"].close() - def upload_file_from_url(self, library_id, file_url, folder_id=None, - file_type='auto', dbkey='?', - tags=None): + def upload_file_from_url(self, library_id, file_url, folder_id=None, file_type="auto", dbkey="?", tags=None): """ Upload a file to a library from a URL. 
@@ -412,14 +417,11 @@ def upload_file_from_url(self, library_id, file_url, folder_id=None, :rtype: list :return: List with a single dictionary containing information about the LDDA """ - return self._do_upload(library_id, file_url=file_url, - folder_id=folder_id, file_type=file_type, - dbkey=dbkey, - tags=tags) + return self._do_upload( + library_id, file_url=file_url, folder_id=folder_id, file_type=file_type, dbkey=dbkey, tags=tags + ) - def upload_file_contents(self, library_id, pasted_content, - folder_id=None, file_type='auto', dbkey='?', - tags=None): + def upload_file_contents(self, library_id, pasted_content, folder_id=None, file_type="auto", dbkey="?", tags=None): """ Upload pasted_content to a data library as a new file. @@ -445,14 +447,13 @@ def upload_file_contents(self, library_id, pasted_content, :rtype: list :return: List with a single dictionary containing information about the LDDA """ - return self._do_upload(library_id, pasted_content=pasted_content, - folder_id=folder_id, file_type=file_type, - dbkey=dbkey, - tags=tags) + return self._do_upload( + library_id, pasted_content=pasted_content, folder_id=folder_id, file_type=file_type, dbkey=dbkey, tags=tags + ) - def upload_file_from_local_path(self, library_id, file_local_path, - folder_id=None, file_type='auto', dbkey='?', - tags=None): + def upload_file_from_local_path( + self, library_id, file_local_path, folder_id=None, file_type="auto", dbkey="?", tags=None + ): """ Read local file contents from file_local_path and upload data to a library. @@ -479,15 +480,28 @@ def upload_file_from_local_path(self, library_id, file_local_path, :rtype: list :return: List with a single dictionary containing information about the LDDA """ - return self._do_upload(library_id, file_local_path=file_local_path, - folder_id=folder_id, file_type=file_type, - dbkey=dbkey, - tags=tags) - - def upload_file_from_server(self, library_id, server_dir, folder_id=None, - file_type='auto', dbkey='?', link_data_only=None, - roles="", preserve_dirs=False, tag_using_filenames=False, - tags=None): + return self._do_upload( + library_id, + file_local_path=file_local_path, + folder_id=folder_id, + file_type=file_type, + dbkey=dbkey, + tags=tags, + ) + + def upload_file_from_server( + self, + library_id, + server_dir, + folder_id=None, + file_type="auto", + dbkey="?", + link_data_only=None, + roles="", + preserve_dirs=False, + tag_using_filenames=False, + tags=None, + ): """ Upload all files in the specified subdirectory of the Galaxy library import directory to a library. @@ -540,17 +554,32 @@ def upload_file_from_server(self, library_id, server_dir, folder_id=None, ``library_import_dir`` option configured in the ``config/galaxy.yml`` configuration file. 
""" - return self._do_upload(library_id, server_dir=server_dir, - folder_id=folder_id, file_type=file_type, - dbkey=dbkey, link_data_only=link_data_only, - roles=roles, preserve_dirs=preserve_dirs, - tag_using_filenames=tag_using_filenames, - tags=tags) - - def upload_from_galaxy_filesystem(self, library_id, filesystem_paths, folder_id=None, - file_type="auto", dbkey="?", link_data_only=None, - roles="", preserve_dirs=False, tag_using_filenames=False, - tags=None): + return self._do_upload( + library_id, + server_dir=server_dir, + folder_id=folder_id, + file_type=file_type, + dbkey=dbkey, + link_data_only=link_data_only, + roles=roles, + preserve_dirs=preserve_dirs, + tag_using_filenames=tag_using_filenames, + tags=tags, + ) + + def upload_from_galaxy_filesystem( + self, + library_id, + filesystem_paths, + folder_id=None, + file_type="auto", + dbkey="?", + link_data_only=None, + roles="", + preserve_dirs=False, + tag_using_filenames=False, + tags=None, + ): """ Upload a set of files already present on the filesystem of the Galaxy server to a library. @@ -601,14 +630,20 @@ def upload_from_galaxy_filesystem(self, library_id, filesystem_paths, folder_id= ``allow_path_paste`` option set to ``true`` in the ``config/galaxy.yml`` configuration file. """ - return self._do_upload(library_id, filesystem_paths=filesystem_paths, - folder_id=folder_id, file_type=file_type, - dbkey=dbkey, link_data_only=link_data_only, - roles=roles, preserve_dirs=preserve_dirs, - tag_using_filenames=tag_using_filenames, - tags=tags) - - def copy_from_dataset(self, library_id, dataset_id, folder_id=None, message=''): + return self._do_upload( + library_id, + filesystem_paths=filesystem_paths, + folder_id=folder_id, + file_type=file_type, + dbkey=dbkey, + link_data_only=link_data_only, + roles=roles, + preserve_dirs=preserve_dirs, + tag_using_filenames=tag_using_filenames, + tags=tags, + ) + + def copy_from_dataset(self, library_id, dataset_id, folder_id=None, message=""): """ Copy a Galaxy dataset into a library. @@ -631,10 +666,10 @@ def copy_from_dataset(self, library_id, dataset_id, folder_id=None, message=''): if folder_id is None: folder_id = self._get_root_folder_id(library_id) payload = {} - payload['folder_id'] = folder_id - payload['create_type'] = 'file' - payload['from_hda_id'] = dataset_id - payload['ldda_message'] = message + payload["folder_id"] = folder_id + payload["create_type"] = "file" + payload["from_hda_id"] = dataset_id + payload["ldda_message"] = message return self._post(payload, id=library_id, contents=True) def get_library_permissions(self, library_id): @@ -647,7 +682,7 @@ def get_library_permissions(self, library_id): :rtype: dict :return: dictionary with all applicable permissions' values """ - url = self._make_url(library_id) + '/permissions' + url = self._make_url(library_id) + "/permissions" return self._get(url=url) def get_dataset_permissions(self, dataset_id): @@ -660,11 +695,10 @@ def get_dataset_permissions(self, dataset_id): :rtype: dict :return: dictionary with all applicable permissions' values """ - url = '/'.join((self._make_url(), 'datasets', dataset_id, 'permissions')) + url = "/".join((self._make_url(), "datasets", dataset_id, "permissions")) return self._get(url=url) - def set_library_permissions(self, library_id, access_in=None, - modify_in=None, add_in=None, manage_in=None): + def set_library_permissions(self, library_id, access_in=None, modify_in=None, add_in=None, manage_in=None): """ Set the permissions for a library. 
Note: it will override all security for this library even if you leave out a permission type. @@ -689,18 +723,17 @@ def set_library_permissions(self, library_id, access_in=None, """ payload = {} if access_in: - payload['LIBRARY_ACCESS_in'] = access_in + payload["LIBRARY_ACCESS_in"] = access_in if modify_in: - payload['LIBRARY_MODIFY_in'] = modify_in + payload["LIBRARY_MODIFY_in"] = modify_in if add_in: - payload['LIBRARY_ADD_in'] = add_in + payload["LIBRARY_ADD_in"] = add_in if manage_in: - payload['LIBRARY_MANAGE_in'] = manage_in - url = self._make_url(library_id) + '/permissions' + payload["LIBRARY_MANAGE_in"] = manage_in + url = self._make_url(library_id) + "/permissions" return self._post(payload, url=url) - def set_dataset_permissions(self, dataset_id, access_in=None, - modify_in=None, manage_in=None): + def set_dataset_permissions(self, dataset_id, access_in=None, modify_in=None, manage_in=None): """ Set the permissions for a dataset. Note: it will override all security for this dataset even if you leave out a permission type. @@ -722,12 +755,12 @@ def set_dataset_permissions(self, dataset_id, access_in=None, """ payload = {} if access_in: - payload['access_ids[]'] = access_in + payload["access_ids[]"] = access_in if modify_in: - payload['modify_ids[]'] = modify_in + payload["modify_ids[]"] = modify_in if manage_in: - payload['manage_ids[]'] = manage_in + payload["manage_ids[]"] = manage_in # we need here to define an action - payload['action'] = 'set_permissions' - url = '/'.join((self._make_url(), 'datasets', dataset_id, 'permissions')) + payload["action"] = "set_permissions" + url = "/".join((self._make_url(), "datasets", dataset_id, "permissions")) return self._post(payload, url=url) diff --git a/bioblend/galaxy/objects/client.py b/bioblend/galaxy/objects/client.py index ad07a1f7c..eec4b2291 100644 --- a/bioblend/galaxy/objects/client.py +++ b/bioblend/galaxy/objects/client.py @@ -17,7 +17,6 @@ class ObjClient(abc.ABC): - def __init__(self, obj_gi): self.obj_gi = obj_gi self.gi = self.obj_gi.gi @@ -63,9 +62,9 @@ def _select_id(self, id_=None, name=None): Return the id that corresponds to the given id or name info. 
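A hedged sketch of the library permission setter reformatted just above; library_id and role_id are placeholders, and note that the call replaces any existing permissions.

    gi.libraries.set_library_permissions(
        library_id,
        access_in=[role_id],
        modify_in=[role_id],
    )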
""" if id_ is None and name is None: - raise ValueError('Neither id nor name provided') + raise ValueError("Neither id nor name provided") if id_ is not None and name is not None: - raise ValueError('Both id and name provided') + raise ValueError("Both id and name provided") if id_ is None: id_list = [_.id for _ in self.get_previews(name=name)] if len(id_list) > 1: @@ -84,21 +83,20 @@ def _get_dict(self, meth_name, reply): try: return reply[0] except (TypeError, IndexError): - raise RuntimeError(f'{meth_name}: unexpected reply: {reply!r}') + raise RuntimeError(f"{meth_name}: unexpected reply: {reply!r}") class ObjDatasetContainerClient(ObjClient): - def _get_container(self, id_, ctype): show_fname = f"show_{ctype.__name__.lower()}" gi_client = getattr(self.gi, ctype.API_MODULE) show_f = getattr(gi_client, show_fname) res = show_f(id_) cdict = self._get_dict(show_fname, res) - cdict['id'] = id_ # overwrite unencoded id + cdict["id"] = id_ # overwrite unencoded id c_infos = show_f(id_, contents=True) if not isinstance(c_infos, Sequence): - raise RuntimeError(f'{show_fname}: unexpected reply: {c_infos!r}') + raise RuntimeError(f"{show_fname}: unexpected reply: {c_infos!r}") c_infos = [ctype.CONTENT_INFO_TYPE(_) for _ in c_infos] return ctype(cdict, content_infos=c_infos, gi=self.obj_gi) @@ -119,8 +117,8 @@ def create(self, name, description=None, synopsis=None): :return: the library just created """ res = self.gi.libraries.create_library(name, description, synopsis) - lib_info = self._get_dict('create_library', res) - return self.get(lib_info['id']) + lib_info = self._get_dict("create_library", res) + return self.get(lib_info["id"]) def get(self, id_): """ @@ -151,9 +149,9 @@ def list(self, name=None, deleted=False): # return Library objects only for not-deleted libraries since Galaxy # does not filter them out and Galaxy release_14.08 and earlier # crashes when trying to get a deleted library - return [self.get(_['id']) for _ in dicts if not _['deleted']] + return [self.get(_["id"]) for _ in dicts if not _["deleted"]] else: - return [self.get(_['id']) for _ in dicts] + return [self.get(_["id"]) for _ in dicts] def delete(self, id_=None, name=None): """ @@ -168,7 +166,7 @@ def delete(self, id_=None, name=None): id_ = self._select_id(id_=id_, name=name) res = self.gi.libraries.delete_library(id_) if not isinstance(res, Mapping): - raise RuntimeError(f'delete_library: unexpected reply: {res!r}') + raise RuntimeError(f"delete_library: unexpected reply: {res!r}") class ObjHistoryClient(ObjDatasetContainerClient): @@ -187,8 +185,8 @@ def create(self, name=None): :return: the history just created """ res = self.gi.histories.create_history(name=name) - hist_info = self._get_dict('create_history', res) - return self.get(hist_info['id']) + hist_info = self._get_dict("create_history", res) + return self.get(hist_info["id"]) def get(self, id_): """ @@ -215,7 +213,7 @@ def list(self, name=None, deleted=False): :rtype: list of :class:`~.wrappers.History` """ dicts = self.gi.histories.get_histories(name=name, deleted=deleted) - return [self.get(_['id']) for _ in dicts] + return [self.get(_["id"]) for _ in dicts] def delete(self, id_=None, name=None, purge=False): """ @@ -234,7 +232,7 @@ def delete(self, id_=None, name=None, purge=False): id_ = self._select_id(id_=id_, name=name) res = self.gi.histories.delete_history(id_, purge=purge) if not isinstance(res, Mapping): - raise RuntimeError(f'delete_history: unexpected reply: {res!r}') + raise RuntimeError(f"delete_history: unexpected reply: {res!r}") class 
ObjWorkflowClient(ObjClient): @@ -264,9 +262,9 @@ def import_new(self, src, publish=False): try: wf_dict = json.loads(src) except (TypeError, ValueError): - raise ValueError(f'src not supported: {src!r}') + raise ValueError(f"src not supported: {src!r}") wf_info = self.gi.workflows.import_workflow_dict(wf_dict, publish) - return self.get(wf_info['id']) + return self.get(wf_info["id"]) def import_shared(self, id_): """ @@ -279,7 +277,7 @@ def import_shared(self, id_): :return: the workflow just imported """ wf_info = self.gi.workflows.import_shared_workflow(id_) - return self.get(wf_info['id']) + return self.get(wf_info["id"]) def get(self, id_): """ @@ -289,7 +287,7 @@ def get(self, id_): :return: the workflow corresponding to ``id_`` """ res = self.gi.workflows.show_workflow(id_) - wf_dict = self._get_dict('show_workflow', res) + wf_dict = self._get_dict("show_workflow", res) return wrappers.Workflow(wf_dict, gi=self.obj_gi) # the 'deleted' option is not available for workflows @@ -310,7 +308,7 @@ def list(self, name=None, published=False): :rtype: list of :class:`~.wrappers.Workflow` """ dicts = self.gi.workflows.get_workflows(name=name, published=published) - return [self.get(_['id']) for _ in dicts] + return [self.get(_["id"]) for _ in dicts] def delete(self, id_=None, name=None): """ @@ -332,6 +330,7 @@ class ObjInvocationClient(ObjClient): """ Interacts with Galaxy Invocations. """ + def get(self, id_) -> wrappers.Invocation: """ Get an invocation by ID. @@ -352,13 +351,7 @@ def get_previews(self) -> List[wrappers.InvocationPreview]: inv_list = self.gi.invocations.get_invocations() return [wrappers.InvocationPreview(inv_dict, self.obj_gi) for inv_dict in inv_list] - def list( - self, - workflow=None, - history=None, - include_terminal=True, - limit=None - ) -> List[wrappers.Invocation]: + def list(self, workflow=None, history=None, include_terminal=True, limit=None) -> List[wrappers.Invocation]: """ Get full listing of workflow invocations, or select a subset by specifying optional arguments for filtering (e.g. a workflow). @@ -386,8 +379,8 @@ def list( history_id=history.id if history else None, include_terminal=include_terminal, limit=limit, - view='element', - step_details=True + view="element", + step_details=True, ) return [wrappers.Invocation(inv_dict, self.obj_gi) for inv_dict in inv_dict_list] @@ -410,9 +403,8 @@ def get(self, id_, io_details=False, link_details=False): :rtype: :class:`~.wrappers.Tool` :return: the tool corresponding to ``id_`` """ - res = self.gi.tools.show_tool(id_, io_details=io_details, - link_details=link_details) - tool_dict = self._get_dict('show_tool', res) + res = self.gi.tools.show_tool(id_, io_details=io_details, link_details=link_details) + tool_dict = self._get_dict("show_tool", res) return wrappers.Tool(tool_dict, gi=self.obj_gi) def get_previews(self, name=None, trackster=None): @@ -474,7 +466,7 @@ def get(self, id_, full_details=False): :return: the job corresponding to ``id_`` """ res = self.gi.jobs.show_job(id_, full_details) - job_dict = self._get_dict('show_job', res) + job_dict = self._get_dict("show_job", res) return wrappers.Job(job_dict, gi=self.obj_gi) def get_previews(self): @@ -488,7 +480,7 @@ def list(self): :rtype: list of :class:`~.wrappers.Job` """ dicts = self.gi.jobs.get_jobs() - return [self.get(_['id']) for _ in dicts] + return [self.get(_["id"]) for _ in dicts] class ObjDatasetClient(ObjClient): @@ -496,7 +488,7 @@ class ObjDatasetClient(ObjClient): Interacts with Galaxy datasets. 
""" - def get(self, id_: str, hda_ldda: str = 'hda'): + def get(self, id_: str, hda_ldda: str = "hda"): """ Retrieve the dataset corresponding to the given id. @@ -508,12 +500,12 @@ def get(self, id_: str, hda_ldda: str = 'hda'): :return: the history or library dataset corresponding to ``id_`` """ res = self.gi.datasets.show_dataset(id_, hda_ldda=hda_ldda) - ds_dict = self._get_dict('show_dataset', res) - if hda_ldda == 'hda': - hist = self.obj_gi.histories.get(ds_dict['history_id']) + ds_dict = self._get_dict("show_dataset", res) + if hda_ldda == "hda": + hist = self.obj_gi.histories.get(ds_dict["history_id"]) return wrappers.HistoryDatasetAssociation(ds_dict, hist, gi=self.obj_gi) - elif hda_ldda == 'ldda': - lib = self.obj_gi.libraries.get(ds_dict['parent_library_id']) + elif hda_ldda == "ldda": + lib = self.obj_gi.libraries.get(ds_dict["parent_library_id"]) return wrappers.LibraryDatasetDatasetAssociation(ds_dict, lib, gi=self.obj_gi) else: raise ValueError(f"Unsupported value for hda_ldda: {hda_ldda}") @@ -538,8 +530,8 @@ def get(self, id_: str): :return: the history dataset collection corresponding to ``id_`` """ res = self.gi.dataset_collections.show_dataset_collection(id_) - ds_dict = self._get_dict('show_dataset_collection', res) - hist = self.obj_gi.histories.get(ds_dict['history_id']) + ds_dict = self._get_dict("show_dataset_collection", res) + hist = self.obj_gi.histories.get(ds_dict["history_id"]) return wrappers.HistoryDatasetCollectionAssociation(ds_dict, hist, gi=self.obj_gi) def get_previews(self) -> list: diff --git a/bioblend/galaxy/objects/galaxy_instance.py b/bioblend/galaxy/objects/galaxy_instance.py index ff65109d0..b30972dcb 100644 --- a/bioblend/galaxy/objects/galaxy_instance.py +++ b/bioblend/galaxy/objects/galaxy_instance.py @@ -14,9 +14,9 @@ def _get_error_info(hda): msg = hda.id try: msg += f" ({hda.name}): " - msg += hda.wrapped['misc_info'] + msg += hda.wrapped["misc_info"] except Exception: # avoid 'error while generating an error report' - msg += ': error' + msg += ": error" return msg @@ -42,6 +42,7 @@ class GalaxyInstance: gi = GalaxyInstance('http://127.0.0.1:8080', 'foo') histories = gi.histories.list() """ + def __init__(self, url, api_key=None, email=None, password=None, verify=True): self.gi = bioblend.galaxy.GalaxyInstance(url, api_key, email, password, verify) self.log = bioblend.log @@ -76,11 +77,12 @@ def _wait_datasets(self, datasets, polling_interval, break_on_error=True): however, each input dataset is refreshed (possibly multiple times) during the execution. 
""" + def poll(ds_list): pending = [] for ds in ds_list: ds.refresh() - if break_on_error and ds.state == 'error': + if break_on_error and ds.state == "error": raise RuntimeError(_get_error_info(ds)) if not ds.state: self.log.warning("Dataset %s has an empty state", ds.id) @@ -89,7 +91,7 @@ def poll(ds_list): pending.append(ds) return pending - self.log.info('Waiting for datasets') + self.log.info("Waiting for datasets") while datasets: datasets = poll(datasets) time.sleep(polling_interval) diff --git a/bioblend/galaxy/objects/wrappers.py b/bioblend/galaxy/objects/wrappers.py index c6f6f8938..05cc8e16a 100644 --- a/bioblend/galaxy/objects/wrappers.py +++ b/bioblend/galaxy/objects/wrappers.py @@ -15,28 +15,27 @@ import bioblend from bioblend.util import abstractclass - __all__ = ( - 'Wrapper', - 'Step', - 'Workflow', - 'LibraryContentInfo', - 'HistoryContentInfo', - 'DatasetContainer', - 'History', - 'Library', - 'Folder', - 'Dataset', - 'HistoryDatasetAssociation', - 'DatasetCollection', - 'HistoryDatasetCollectionAssociation', - 'LibraryDatasetDatasetAssociation', - 'LibraryDataset', - 'Tool', - 'Job', - 'LibraryPreview', - 'HistoryPreview', - 'WorkflowPreview', + "Wrapper", + "Step", + "Workflow", + "LibraryContentInfo", + "HistoryContentInfo", + "DatasetContainer", + "History", + "Library", + "Folder", + "Dataset", + "HistoryDatasetAssociation", + "DatasetCollection", + "HistoryDatasetCollectionAssociation", + "LibraryDatasetDatasetAssociation", + "LibraryDataset", + "Tool", + "Job", + "LibraryPreview", + "HistoryPreview", + "WorkflowPreview", ) @@ -54,7 +53,8 @@ class Wrapper: Note that the wrapped dictionary is accessible via the ``wrapped`` attribute. """ - BASE_ATTRS: Tuple[str, ...] = ('id', ) + + BASE_ATTRS: Tuple[str, ...] = ("id",) def __init__(self, wrapped, parent=None, gi=None): """ @@ -68,18 +68,18 @@ def __init__(self, wrapped, parent=None, gi=None): :param gi: the GalaxyInstance through which we can access this wrapper """ if not isinstance(wrapped, Mapping): - raise TypeError('wrapped object must be a mapping type') + raise TypeError("wrapped object must be a mapping type") # loads(dumps(x)) is a bit faster than deepcopy and allows type checks try: dumped = json.dumps(wrapped) except (TypeError, ValueError): - raise ValueError('wrapped object must be JSON-serializable') - object.__setattr__(self, 'wrapped', json.loads(dumped)) + raise ValueError("wrapped object must be JSON-serializable") + object.__setattr__(self, "wrapped", json.loads(dumped)) for k in self.BASE_ATTRS: object.__setattr__(self, k, self.wrapped.get(k)) - object.__setattr__(self, '_cached_parent', parent) - object.__setattr__(self, 'is_modified', False) - object.__setattr__(self, 'gi', gi) + object.__setattr__(self, "_cached_parent", parent) + object.__setattr__(self, "is_modified", False) + object.__setattr__(self, "gi", gi) @property def parent(self): @@ -99,7 +99,7 @@ def unmap(self): """ Disconnect this wrapper from Galaxy. """ - object.__setattr__(self, 'id', None) + object.__setattr__(self, "id", None) def clone(self): """ @@ -111,7 +111,7 @@ def touch(self): """ Mark this wrapper as having been modified since its creation. """ - object.__setattr__(self, 'is_modified', True) + object.__setattr__(self, "is_modified", True) if self.parent: self.parent.touch() @@ -150,22 +150,23 @@ class Step(Wrapper): ``parameter_input``), a computational tool (type ``tool``), a subworkflow (type ``subworkflow``) or a pause (type ``pause``). 
""" + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'input_steps', - 'name', - 'tool_id', - 'tool_inputs', - 'tool_version', - 'type', + "input_steps", + "name", + "tool_id", + "tool_inputs", + "tool_version", + "type", ) def __init__(self, step_dict, parent): super().__init__(step_dict, parent=parent, gi=parent.gi) try: - stype = step_dict['type'] + stype = step_dict["type"] except KeyError: - raise ValueError('not a step dict') - if stype not in {'data_collection_input', 'data_input', 'parameter_input', 'pause', 'subworkflow', 'tool'}: + raise ValueError("not a step dict") + if stype not in {"data_collection_input", "data_input", "parameter_input", "pause", "subworkflow", "tool"}: raise ValueError(f"Unknown step type: {stype!r}") @@ -173,15 +174,16 @@ class InvocationStep(Wrapper): """ Invocation step. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'action', - 'job_id', - 'order_index', - 'state', - 'update_time', - 'workflow_step_id', - 'workflow_step_label', - 'workflow_step_uuid', + "action", + "job_id", + "order_index", + "state", + "update_time", + "workflow_step_id", + "workflow_step_label", + "workflow_step_uuid", ) def refresh(self): @@ -201,11 +203,11 @@ def get_outputs(self): :rtype: dict of `HistoryDatasetAssociation` :return: dictionary mapping output names to history datasets """ - if not hasattr(self, 'outputs'): + if not hasattr(self, "outputs"): self.refresh() outputs = {} - for name, out_dict in self.wrapped['outputs'].items(): - outputs[name] = self.gi.datasets.get(out_dict['id']) + for name, out_dict in self.wrapped["outputs"].items(): + outputs[name] = self.gi.datasets.get(out_dict["id"]) return outputs def get_output_collections(self): @@ -215,11 +217,11 @@ def get_output_collections(self): :rtype: dict of `HistoryDatasetCollectionAssociation` :return: dictionary mapping output names to history dataset collections """ - if not hasattr(self, 'output_collections'): + if not hasattr(self, "output_collections"): self.refresh() output_collections = {} - for name, out_coll_dict in self.wrapped['output_collections'].items(): - output_collections[name] = self.gi.dataset_collections.get(out_coll_dict['id']) + for name, out_coll_dict in self.wrapped["output_collections"].items(): + output_collections[name] = self.gi.dataset_collections.get(out_coll_dict["id"]) return output_collections @@ -230,15 +232,16 @@ class Workflow(Wrapper): A workflow defines a sequence of steps that produce one or more results from an input dataset. 
""" + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'deleted', - 'inputs', - 'latest_workflow_uuid', - 'name', - 'owner', - 'published', - 'steps', - 'tags', + "deleted", + "inputs", + "latest_workflow_uuid", + "name", + "owner", + "published", + "steps", + "tags", ) POLLING_INTERVAL = 10 # for output state monitoring @@ -252,29 +255,30 @@ def __init__(self, wf_dict, gi=None): tool_labels_to_ids = {} for k, v in self.steps.items(): # convert step ids to str for consistency with outer keys - v['id'] = str(v['id']) - for i in v['input_steps'].values(): - i['source_step'] = str(i['source_step']) + v["id"] = str(v["id"]) + for i in v["input_steps"].values(): + i["source_step"] = str(i["source_step"]) step = Step(v, self) self.steps[k] = step - if step.type == 'tool': + if step.type == "tool": if not step.tool_inputs or step.tool_id not in tools_list_by_id: missing_ids.append(k) tool_labels_to_ids.setdefault(step.tool_id, set()).add(step.id) input_labels_to_ids = {} for id_, d in self.inputs.items(): - input_labels_to_ids.setdefault(d['label'], set()).add(id_) - object.__setattr__(self, 'input_labels_to_ids', input_labels_to_ids) - object.__setattr__(self, 'tool_labels_to_ids', tool_labels_to_ids) + input_labels_to_ids.setdefault(d["label"], set()).add(id_) + object.__setattr__(self, "input_labels_to_ids", input_labels_to_ids) + object.__setattr__(self, "tool_labels_to_ids", tool_labels_to_ids) dag, inv_dag = self._get_dag() heads, tails = set(dag), set(inv_dag) - object.__setattr__(self, 'dag', dag) - object.__setattr__(self, 'inv_dag', inv_dag) - object.__setattr__(self, 'source_ids', heads - tails) - assert set(self.inputs) == self.data_collection_input_ids | self.data_input_ids | self.parameter_input_ids, \ - f"inputs is {self.inputs!r}, while data_collection_input_ids is {self.data_collection_input_ids!r}, data_input_ids is {self.data_input_ids!r} and parameter_input_ids is {self.parameter_input_ids!r}" - object.__setattr__(self, 'sink_ids', tails - heads) - object.__setattr__(self, 'missing_ids', missing_ids) + object.__setattr__(self, "dag", dag) + object.__setattr__(self, "inv_dag", inv_dag) + object.__setattr__(self, "source_ids", heads - tails) + assert ( + set(self.inputs) == self.data_collection_input_ids | self.data_input_ids | self.parameter_input_ids + ), f"inputs is {self.inputs!r}, while data_collection_input_ids is {self.data_collection_input_ids!r}, data_input_ids is {self.data_input_ids!r} and parameter_input_ids is {self.parameter_input_ids!r}" + object.__setattr__(self, "sink_ids", tails - heads) + object.__setattr__(self, "missing_ids", missing_ids) def _get_dag(self): """ @@ -296,7 +300,7 @@ def _get_dag(self): dag, inv_dag = {}, {} for s in self.steps.values(): for i in s.input_steps.values(): - head, tail = i['source_step'], s.id + head, tail = i["source_step"], s.id dag.setdefault(head, set()).add(tail) inv_dag.setdefault(tail, set()).add(head) return dag, inv_dag @@ -323,28 +327,28 @@ def data_input_ids(self): """ Return the ids of data input steps for this workflow. """ - return {id_ for id_, s in self.steps.items() if s.type == 'data_input'} + return {id_ for id_, s in self.steps.items() if s.type == "data_input"} @property def data_collection_input_ids(self): """ Return the ids of data collection input steps for this workflow. 
""" - return {id_ for id_, s in self.steps.items() if s.type == 'data_collection_input'} + return {id_ for id_, s in self.steps.items() if s.type == "data_collection_input"} @property def parameter_input_ids(self): """ Return the ids of parameter input steps for this workflow. """ - return {id_ for id_, s in self.steps.items() if s.type == 'parameter_input'} + return {id_ for id_, s in self.steps.items() if s.type == "parameter_input"} @property def tool_ids(self): """ Return the ids of tool steps for this workflow. """ - return {id_ for id_, s in self.steps.items() if s.type == 'tool'} + return {id_ for id_, s in self.steps.items() if s.type == "tool"} @property def input_labels(self): @@ -376,8 +380,16 @@ def _convert_input_map(input_map: dict) -> dict: """ ret = {} for key, value in input_map.items(): - if isinstance(value, (HistoryDatasetAssociation, HistoryDatasetCollectionAssociation, LibraryDatasetDatasetAssociation, LibraryDataset)): - ret[key] = {'id': value.id, 'src': value.SRC} # type: ignore + if isinstance( + value, + ( + HistoryDatasetAssociation, + HistoryDatasetCollectionAssociation, + LibraryDatasetDatasetAssociation, + LibraryDataset, + ), + ): + ret[key] = {"id": value.id, "src": value.SRC} # type: ignore else: ret[key] = value return ret @@ -410,10 +422,17 @@ def delete(self): self.gi.workflows.delete(id_=self.id) self.unmap() - def invoke(self, inputs=None, params=None, history=None, - import_inputs_to_history=None, replacement_params=None, - allow_tool_state_corrections=True, inputs_by=None, - parameters_normalized=False): + def invoke( + self, + inputs=None, + params=None, + history=None, + import_inputs_to_history=None, + replacement_params=None, + allow_tool_state_corrections=True, + inputs_by=None, + parameters_normalized=False, + ): """ Invoke the workflow. This will cause a workflow to be scheduled and return an object describing the workflow invocation. @@ -545,9 +564,9 @@ def invoke(self, inputs=None, params=None, history=None, also stable. """ if not self.is_mapped: - raise RuntimeError('workflow is not mapped to a Galaxy object') + raise RuntimeError("workflow is not mapped to a Galaxy object") if not self.is_runnable: - missing_tools_str = ', '.join(f"{self.steps[step_id].tool_id}[{step_id}]" for step_id in self.missing_ids) + missing_tools_str = ", ".join(f"{self.steps[step_id].tool_id}[{step_id}]" for step_id in self.missing_ids) raise RuntimeError(f"workflow has missing tools: {missing_tools_str}") inv_dict = self.gi.gi.workflows.invoke_workflow( @@ -560,9 +579,9 @@ def invoke(self, inputs=None, params=None, history=None, replacement_params=replacement_params, allow_tool_state_corrections=allow_tool_state_corrections, inputs_by=inputs_by, - parameters_normalized=parameters_normalized + parameters_normalized=parameters_normalized, ) - return self.gi.invocations.get(inv_dict['id']) + return self.gi.invocations.get(inv_dict["id"]) class Invocation(Wrapper): @@ -570,20 +589,21 @@ class Invocation(Wrapper): Invocation of a workflow. This causes the steps of a workflow to be executed in sequential order. 
""" + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'history_id', - 'inputs', - 'state', - 'steps', - 'update_time', - 'uuid', - 'workflow_id', + "history_id", + "inputs", + "state", + "steps", + "update_time", + "uuid", + "workflow_id", ) def __init__(self, inv_dict, gi=None): super().__init__(inv_dict, gi=gi) self.steps = [InvocationStep(step, parent=self, gi=gi) for step in self.steps] - self.inputs = [{**v, 'label': k} for k, v in self.inputs.items()] + self.inputs = [{**v, "label": k} for k, v in self.inputs.items()] def sorted_step_ids(self): """ @@ -672,8 +692,13 @@ def run_step_actions(self, steps, actions): On success, this method updates the Invocation object's internal step variables. """ if not len(steps) == len(actions): - raise RuntimeError(f'Different number of ``steps`` ({len(steps)}) and ``actions`` ({len(actions)}) in ``{self}.run_step_actions()``') - step_dict_list = [self.gi.gi.invocations.run_invocation_step_action(self.id, step.id, action) for step, action in zip(steps, actions)] + raise RuntimeError( + f"Different number of ``steps`` ({len(steps)}) and ``actions`` ({len(actions)}) in ``{self}.run_step_actions()``" + ) + step_dict_list = [ + self.gi.gi.invocations.run_invocation_step_action(self.id, step.id, action) + for step, action in zip(steps, actions) + ] for step, step_dict in zip(steps, step_dict_list): step.__init__(step_dict, parent=self) @@ -749,21 +774,22 @@ class Dataset(Wrapper, metaclass=abc.ABCMeta): """ Abstract base class for Galaxy datasets. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'data_type', - 'file_ext', - 'file_name', - 'file_size', - 'genome_build', - 'misc_info', - 'name', - 'state', + "data_type", + "file_ext", + "file_name", + "file_size", + "genome_build", + "misc_info", + "name", + "state", ) POLLING_INTERVAL = 1 # for state monitoring def __init__(self, ds_dict, container, gi=None): super().__init__(ds_dict, gi=gi) - object.__setattr__(self, 'container', container) + object.__setattr__(self, "container", container) @property @abc.abstractmethod @@ -780,13 +806,13 @@ def get_stream(self, chunk_size=bioblend.CHUNK_SIZE): :type chunk_size: int :param chunk_size: read this amount of bytes at a time """ - kwargs = {'stream': True} + kwargs = {"stream": True} if isinstance(self, LibraryDataset): - kwargs['params'] = {'ld_ids%5B%5D': self.id} + kwargs["params"] = {"ld_ids%5B%5D": self.id} r = self.gi.gi.make_get_request(self._stream_url, **kwargs) if isinstance(self, LibraryDataset) and r.status_code == 500: # compatibility with older Galaxy releases - kwargs['params'] = {'ldda_ids%5B%5D': self.id} + kwargs["params"] = {"ldda_ids%5B%5D": self.id} r = self.gi.gi.make_get_request(self._stream_url, **kwargs) r.raise_for_status() return r.iter_content(chunk_size) # FIXME: client can't close r @@ -800,7 +826,7 @@ def peek(self, chunk_size=bioblend.CHUNK_SIZE): try: return next(self.get_stream(chunk_size=chunk_size)) except StopIteration: - return b'' + return b"" def download(self, file_object, chunk_size=bioblend.CHUNK_SIZE): """ @@ -820,7 +846,7 @@ def get_contents(self, chunk_size=bioblend.CHUNK_SIZE): See :meth:`.get_stream` for param info. """ - return b''.join(self.get_stream(chunk_size=chunk_size)) + return b"".join(self.get_stream(chunk_size=chunk_size)) def refresh(self): """ @@ -850,16 +876,16 @@ def wait(self, polling_interval=POLLING_INTERVAL, break_on_error=True): note that this method does not return anything; however, this dataset is refreshed (possibly multiple times) during the execution. 
""" - self.gi._wait_datasets([self], polling_interval=polling_interval, - break_on_error=break_on_error) + self.gi._wait_datasets([self], polling_interval=polling_interval, break_on_error=break_on_error) class HistoryDatasetAssociation(Dataset): """ Maps to a Galaxy ``HistoryDatasetAssociation``. """ - BASE_ATTRS = Dataset.BASE_ATTRS + ('annotation', 'deleted', 'purged', 'tags', 'visible') - SRC = 'hda' + + BASE_ATTRS = Dataset.BASE_ATTRS + ("annotation", "deleted", "purged", "tags", "visible") + SRC = "hda" @property def _stream_url(self): @@ -926,16 +952,17 @@ class DatasetCollection(Wrapper, metaclass=abc.ABCMeta): """ Abstract base class for Galaxy dataset collections. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'collection_type', - 'deleted', - 'name', - 'state', + "collection_type", + "deleted", + "name", + "state", ) def __init__(self, dsc_dict, container, gi=None): super().__init__(dsc_dict, gi=gi) - object.__setattr__(self, 'container', container) + object.__setattr__(self, "container", container) def refresh(self): """ @@ -957,8 +984,9 @@ class HistoryDatasetCollectionAssociation(DatasetCollection): """ Maps to a Galaxy ``HistoryDatasetCollectionAssociation``. """ - BASE_ATTRS = DatasetCollection.BASE_ATTRS + ('tags', 'visible', 'elements') - SRC = 'hdca' + + BASE_ATTRS = DatasetCollection.BASE_ATTRS + ("tags", "visible", "elements") + SRC = "hdca" def delete(self): """ @@ -985,15 +1013,17 @@ class LibraryDatasetDatasetAssociation(LibRelatedDataset): """ Maps to a Galaxy ``LibraryDatasetDatasetAssociation``. """ - BASE_ATTRS = LibRelatedDataset.BASE_ATTRS + ('deleted',) - SRC = 'ldda' + + BASE_ATTRS = LibRelatedDataset.BASE_ATTRS + ("deleted",) + SRC = "ldda" class LibraryDataset(LibRelatedDataset): """ Maps to a Galaxy ``LibraryDataset``. """ - SRC = 'ld' + + SRC = "ld" def delete(self, purged=False): """ @@ -1002,8 +1032,7 @@ def delete(self, purged=False): :type purged: bool :param purged: if ``True``, also purge (permanently delete) the dataset """ - self.gi.gi.libraries.delete_library_dataset( - self.container.id, self.id, purged=purged) + self.gi.gi.libraries.delete_library_dataset(self.container.id, self.id, purged=purged) self.container.refresh() self.refresh() @@ -1030,9 +1059,10 @@ class ContentInfo(Wrapper): Instances of this class wrap dictionaries obtained by getting ``/api/{histories,libraries}//contents`` from Galaxy. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'name', - 'type', + "name", + "type", ) @@ -1048,16 +1078,18 @@ class HistoryContentInfo(ContentInfo): Instances of this class wrap dictionaries obtained by getting ``/api/histories//contents`` from Galaxy. """ - BASE_ATTRS = ContentInfo.BASE_ATTRS + ('deleted', 'state', 'visible') + + BASE_ATTRS = ContentInfo.BASE_ATTRS + ("deleted", "state", "visible") class DatasetContainer(Wrapper, metaclass=abc.ABCMeta): """ Abstract base class for dataset containers (histories and libraries). 
""" + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'deleted', - 'name', + "deleted", + "name", ) def __init__(self, c_dict, content_infos=None, gi=None): @@ -1068,8 +1100,8 @@ def __init__(self, c_dict, content_infos=None, gi=None): super().__init__(c_dict, gi=gi) if content_infos is None: content_infos = [] - object.__setattr__(self, 'content_infos', content_infos) - object.__setattr__(self, 'obj_gi_client', getattr(self.gi, self.API_MODULE)) + object.__setattr__(self, "content_infos", content_infos) + object.__setattr__(self, "obj_gi_client", getattr(self.gi, self.API_MODULE)) @property @abc.abstractmethod @@ -1081,7 +1113,7 @@ def dataset_ids(self): """ Return the ids of the contained datasets. """ - return [_.id for _ in self.content_infos if _.type == 'file'] + return [_.id for _ in self.content_infos if _.type == "file"] def preview(self): getf = self.obj_gi_client.get_previews @@ -1102,8 +1134,7 @@ def refresh(self): :return: self """ fresh = self.obj_gi_client.get(self.id) - self.__init__( - fresh.wrapped, content_infos=fresh.content_infos, gi=self.gi) + self.__init__(fresh.wrapped, content_infos=fresh.content_infos, gi=self.gi) return self def get_dataset(self, ds_id): @@ -1151,11 +1182,19 @@ class History(DatasetContainer): """ Maps to a Galaxy history. """ - BASE_ATTRS = DatasetContainer.BASE_ATTRS + ('annotation', 'published', 'state', 'state_ids', 'state_details', 'tags') + + BASE_ATTRS = DatasetContainer.BASE_ATTRS + ( + "annotation", + "published", + "state", + "state_ids", + "state_details", + "tags", + ) DS_TYPE = HistoryDatasetAssociation DSC_TYPE = HistoryDatasetCollectionAssociation CONTENT_INFO_TYPE = HistoryContentInfo - API_MODULE = 'histories' + API_MODULE = "histories" def update(self, **kwds): """ @@ -1215,16 +1254,14 @@ def import_dataset(self, lds): :return: the imported history dataset """ if not self.is_mapped: - raise RuntimeError('history is not mapped to a Galaxy object') + raise RuntimeError("history is not mapped to a Galaxy object") if not isinstance(lds, LibraryDataset): - raise TypeError('lds is not a LibraryDataset') + raise TypeError("lds is not a LibraryDataset") res = self.gi.gi.histories.upload_dataset_from_library(self.id, lds.id) if not isinstance(res, Mapping): - raise RuntimeError( - f"upload_dataset_from_library: unexpected reply: {res!r}" - ) + raise RuntimeError(f"upload_dataset_from_library: unexpected reply: {res!r}") self.refresh() - return self.get_dataset(res['id']) + return self.get_dataset(res["id"]) def upload_file(self, path, **kwargs): """ @@ -1241,7 +1278,7 @@ def upload_file(self, path, **kwargs): """ out_dict = self.gi.gi.tools.upload_file(path, self.id, **kwargs) self.refresh() - return self.get_dataset(out_dict['outputs'][0]['id']) + return self.get_dataset(out_dict["outputs"][0]["id"]) upload_dataset = upload_file @@ -1261,7 +1298,7 @@ def upload_from_ftp(self, path, **kwargs): """ out_dict = self.gi.gi.tools.upload_from_ftp(path, self.id, **kwargs) self.refresh() - return self.get_dataset(out_dict['outputs'][0]['id']) + return self.get_dataset(out_dict["outputs"][0]["id"]) def paste_content(self, content, **kwargs): """ @@ -1278,18 +1315,22 @@ def paste_content(self, content, **kwargs): """ out_dict = self.gi.gi.tools.paste_content(content, self.id, **kwargs) self.refresh() - return self.get_dataset(out_dict['outputs'][0]['id']) + return self.get_dataset(out_dict["outputs"][0]["id"]) - def export(self, gzip=True, include_hidden=False, include_deleted=False, - wait=False, maxwait=None): + def export(self, gzip=True, 
include_hidden=False, include_deleted=False, wait=False, maxwait=None): """ Start a job to create an export archive for this history. See :meth:`~bioblend.galaxy.histories.HistoryClient.export_history` for parameter and return value info. """ return self.gi.gi.histories.export_history( - self.id, gzip=gzip, include_hidden=include_hidden, - include_deleted=include_deleted, wait=wait, maxwait=maxwait) + self.id, + gzip=gzip, + include_hidden=include_hidden, + include_deleted=include_deleted, + wait=wait, + maxwait=maxwait, + ) def download(self, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE): """ @@ -1298,8 +1339,7 @@ def download(self, jeha_id, outf, chunk_size=bioblend.CHUNK_SIZE): :meth:`~bioblend.galaxy.histories.HistoryClient.download_history` for parameter and return value info. """ - return self.gi.gi.histories.download_history( - self.id, jeha_id, outf, chunk_size=chunk_size) + return self.gi.gi.histories.download_history(self.id, jeha_id, outf, chunk_size=chunk_size) def create_dataset_collection(self, collection_description): """ @@ -1313,7 +1353,7 @@ def create_dataset_collection(self, collection_description): """ dataset_collection = self.gi.gi.histories.create_dataset_collection(self.id, collection_description) self.refresh() - return self.get_dataset_collection(dataset_collection['id']) + return self.get_dataset_collection(dataset_collection["id"]) def get_dataset_collection(self, dsc_id): """ @@ -1333,17 +1373,18 @@ class Library(DatasetContainer): """ Maps to a Galaxy library. """ - BASE_ATTRS = DatasetContainer.BASE_ATTRS + ('description', 'synopsis') + + BASE_ATTRS = DatasetContainer.BASE_ATTRS + ("description", "synopsis") DS_TYPE = LibraryDataset CONTENT_INFO_TYPE = LibraryContentInfo - API_MODULE = 'libraries' + API_MODULE = "libraries" @property def folder_ids(self): """ Return the ids of the contained folders. """ - return [_.id for _ in self.content_infos if _.type == 'folder'] + return [_.id for _ in self.content_infos if _.type == "folder"] def delete(self): """ @@ -1358,7 +1399,7 @@ def _pre_upload(self, folder): Return the id of the given folder, after sanity checking. """ if not self.is_mapped: - raise RuntimeError('library is not mapped to a Galaxy object') + raise RuntimeError("library is not mapped to a Galaxy object") return None if folder is None else folder.id def upload_data(self, data, folder=None, **kwargs): @@ -1377,10 +1418,9 @@ def upload_data(self, data, folder=None, **kwargs): Optional keyword arguments: ``file_type``, ``dbkey``. """ fid = self._pre_upload(folder) - res = self.gi.gi.libraries.upload_file_contents( - self.id, data, folder_id=fid, **kwargs) + res = self.gi.gi.libraries.upload_file_contents(self.id, data, folder_id=fid, **kwargs) self.refresh() - return self.get_dataset(res[0]['id']) + return self.get_dataset(res[0]["id"]) def upload_from_url(self, url, folder=None, **kwargs): """ @@ -1392,10 +1432,9 @@ def upload_from_url(self, url, folder=None, **kwargs): See :meth:`.upload_data` for info on other params. """ fid = self._pre_upload(folder) - res = self.gi.gi.libraries.upload_file_from_url( - self.id, url, folder_id=fid, **kwargs) + res = self.gi.gi.libraries.upload_file_from_url(self.id, url, folder_id=fid, **kwargs) self.refresh() - return self.get_dataset(res[0]['id']) + return self.get_dataset(res[0]["id"]) def upload_from_local(self, path, folder=None, **kwargs): """ @@ -1407,10 +1446,9 @@ def upload_from_local(self, path, folder=None, **kwargs): See :meth:`.upload_data` for info on other params. 
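# Hedged sketch of archiving a history with the export/download pair above,
# assuming `hist` is a History wrapper and that export() called with wait=True
# returns the export id (jeha_id), as HistoryClient.export_history does.
jeha_id = hist.export(gzip=True, wait=True)
with open("history-archive.tar.gz", "wb") as outf:
    hist.download(jeha_id, outf)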
""" fid = self._pre_upload(folder) - res = self.gi.gi.libraries.upload_file_from_local_path( - self.id, path, folder_id=fid, **kwargs) + res = self.gi.gi.libraries.upload_file_from_local_path(self.id, path, folder_id=fid, **kwargs) self.refresh() - return self.get_dataset(res[0]['id']) + return self.get_dataset(res[0]["id"]) def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwargs): """ @@ -1437,23 +1475,19 @@ def upload_from_galaxy_fs(self, paths, folder=None, link_data_only=None, **kwarg fid = self._pre_upload(folder) if isinstance(paths, str): paths = (paths,) - paths = '\n'.join(paths) + paths = "\n".join(paths) res = self.gi.gi.libraries.upload_from_galaxy_filesystem( - self.id, paths, folder_id=fid, link_data_only=link_data_only, - **kwargs) + self.id, paths, folder_id=fid, link_data_only=link_data_only, **kwargs + ) if res is None: - raise RuntimeError('upload_from_galaxy_filesystem: no reply') + raise RuntimeError("upload_from_galaxy_filesystem: no reply") if not isinstance(res, Sequence): - raise RuntimeError( - f"upload_from_galaxy_filesystem: unexpected reply: {res!r}" - ) - new_datasets = [ - self.get_dataset(ds_info['id']) for ds_info in res - ] + raise RuntimeError(f"upload_from_galaxy_filesystem: unexpected reply: {res!r}") + new_datasets = [self.get_dataset(ds_info["id"]) for ds_info in res] self.refresh() return new_datasets - def copy_from_dataset(self, hda, folder=None, message=''): + def copy_from_dataset(self, hda, folder=None, message=""): """ Copy a history dataset into this library. @@ -1463,10 +1497,9 @@ def copy_from_dataset(self, hda, folder=None, message=''): See :meth:`.upload_data` for info on other params. """ fid = self._pre_upload(folder) - res = self.gi.gi.libraries.copy_from_dataset( - self.id, hda.id, folder_id=fid, message=message) + res = self.gi.gi.libraries.copy_from_dataset(self.id, hda.id, folder_id=fid, message=message) self.refresh() - return self.get_dataset(res['library_dataset_id']) + return self.get_dataset(res["library_dataset_id"]) def create_folder(self, name, description=None, base_folder=None): """ @@ -1486,10 +1519,9 @@ def create_folder(self, name, description=None, base_folder=None): :return: the folder just created """ bfid = None if base_folder is None else base_folder.id - res = self.gi.gi.libraries.create_folder( - self.id, name, description=description, base_folder_id=bfid) + res = self.gi.gi.libraries.create_folder(self.id, name, description=description, base_folder_id=bfid) self.refresh() - return self.get_folder(res[0]['id']) + return self.get_folder(res[0]["id"]) def get_folder(self, f_id): """ @@ -1516,16 +1548,17 @@ class Folder(Wrapper): """ Maps to a folder in a Galaxy library. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'deleted', - 'description', - 'item_count', - 'name', + "deleted", + "description", + "item_count", + "name", ) def __init__(self, f_dict, container, gi=None): super().__init__(f_dict, gi=gi) - object.__setattr__(self, 'container', container) + object.__setattr__(self, "container", container) @property def parent(self): @@ -1537,16 +1570,14 @@ def parent(self): :return: the parent of this folder """ if self._cached_parent is None: - object.__setattr__(self, - '_cached_parent', - self._get_parent()) + object.__setattr__(self, "_cached_parent", self._get_parent()) return self._cached_parent def _get_parent(self): """ Return the parent folder of this folder. 
""" - parent_id = self.wrapped['parent_id'] + parent_id = self.wrapped["parent_id"] if parent_id is None: return None return self.container.get_folder(parent_id) @@ -1566,14 +1597,14 @@ class Tool(Wrapper): """ Maps to a Galaxy tool. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'name', - 'version', + "name", + "version", ) POLLING_INTERVAL = 10 # for output state monitoring - def run(self, inputs, history, wait=False, - polling_interval=POLLING_INTERVAL): + def run(self, inputs, history, wait=False, polling_interval=POLLING_INTERVAL): """ Execute this tool in the given history with inputs from dict ``inputs``. @@ -1605,9 +1636,9 @@ def run(self, inputs, history, wait=False, """ for k, v in inputs.items(): if isinstance(v, Dataset): - inputs[k] = {'src': v.SRC, 'id': v.id} + inputs[k] = {"src": v.SRC, "id": v.id} out_dict = self.gi.gi.tools.run_tool(history.id, self.id, inputs) - outputs = [history.get_dataset(_['id']) for _ in out_dict['outputs']] + outputs = [history.get_dataset(_["id"]) for _ in out_dict["outputs"]] if wait: self.gi._wait_datasets(outputs, polling_interval=polling_interval) return outputs @@ -1617,7 +1648,8 @@ class Job(Wrapper): """ Maps to a Galaxy job. """ - BASE_ATTRS = Wrapper.BASE_ATTRS + ('state',) + + BASE_ATTRS = Wrapper.BASE_ATTRS + ("state",) @abstractclass @@ -1625,9 +1657,10 @@ class DatasetContainerPreview(Wrapper): """ Abstract base class for dataset container (history and library) 'previews'. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'deleted', - 'name', + "deleted", + "name", ) @@ -1647,11 +1680,12 @@ class HistoryPreview(DatasetContainerPreview): Instances of this class wrap dictionaries obtained by getting ``/api/histories`` from Galaxy. """ + BASE_ATTRS = DatasetContainerPreview.BASE_ATTRS + ( - 'annotation', - 'published', - 'purged', - 'tags', + "annotation", + "published", + "purged", + "tags", ) @@ -1662,15 +1696,16 @@ class WorkflowPreview(Wrapper): Instances of this class wrap dictionaries obtained by getting ``/api/workflows`` from Galaxy. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'deleted', - 'latest_workflow_uuid', - 'name', - 'number_of_steps', - 'owner', - 'published', - 'show_in_tool_panel', - 'tags', + "deleted", + "latest_workflow_uuid", + "name", + "number_of_steps", + "owner", + "published", + "show_in_tool_panel", + "tags", ) @@ -1681,13 +1716,14 @@ class InvocationPreview(Wrapper): Instances of this class wrap dictionaries obtained by getting ``/api/invocations`` from Galaxy. """ + BASE_ATTRS = Wrapper.BASE_ATTRS + ( - 'history_id', - 'id', - 'state', - 'update_time', - 'uuid', - 'workflow_id', + "history_id", + "id", + "state", + "update_time", + "uuid", + "workflow_id", ) @@ -1698,4 +1734,5 @@ class JobPreview(Wrapper): Instances of this class wrap dictionaries obtained by getting ``/api/jobs`` from Galaxy. 
""" - BASE_ATTRS = Wrapper.BASE_ATTRS + ('state',) + + BASE_ATTRS = Wrapper.BASE_ATTRS + ("state",) diff --git a/bioblend/galaxy/quotas/__init__.py b/bioblend/galaxy/quotas/__init__.py index 8e9abab31..bfb10df2f 100644 --- a/bioblend/galaxy/quotas/__init__.py +++ b/bioblend/galaxy/quotas/__init__.py @@ -5,7 +5,7 @@ class QuotaClient(Client): - module = 'quotas' + module = "quotas" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -59,8 +59,7 @@ def show_quota(self, quota_id, deleted=False): """ return self._get(id=quota_id, deleted=deleted) - def create_quota(self, name, description, amount, operation, - default='no', in_users=None, in_groups=None): + def create_quota(self, name, description, amount, operation, default="no", in_users=None, in_groups=None): """ Create a new quota @@ -98,22 +97,31 @@ def create_quota(self, name, description, amount, operation, 'name': 'Testing'} """ payload = { - 'name': name, - 'description': description, - 'amount': amount, - 'operation': operation, - 'default': default + "name": name, + "description": description, + "amount": amount, + "operation": operation, + "default": default, } if in_users: - payload['in_users'] = in_users + payload["in_users"] = in_users if in_groups: - payload['in_groups'] = in_groups + payload["in_groups"] = in_groups return self._post(payload) - def update_quota(self, quota_id, name=None, description=None, amount=None, operation=None, - default='no', in_users=None, in_groups=None): + def update_quota( + self, + quota_id, + name=None, + description=None, + amount=None, + operation=None, + default="no", + in_users=None, + in_groups=None, + ): """ Update an existing quota @@ -154,26 +162,24 @@ def update_quota(self, quota_id, name=None, description=None, amount=None, opera "Quota 'Testing-A' has been renamed to 'Testing-B'; Quota 'Testing-e' is now '-100.0 GB'; Quota 'Testing-B' is now the default for unregistered users" """ - payload = { - 'default': default - } + payload = {"default": default} if name: - payload['name'] = name + payload["name"] = name if description: - payload['description'] = description + payload["description"] = description if amount: - payload['amount'] = amount + payload["amount"] = amount if operation: - payload['operation'] = operation + payload["operation"] = operation if in_users: - payload['in_users'] = in_users + payload["in_users"] = in_users if in_groups: - payload['in_groups'] = in_groups + payload["in_groups"] = in_groups return self._put(id=quota_id, payload=payload) @@ -207,5 +213,5 @@ def undelete_quota(self, quota_id): "Undeleted 1 quotas: Testing-B" """ - url = self._make_url(quota_id, deleted=True) + '/undelete' - return self._post(url=url, payload={'id': quota_id}) + url = self._make_url(quota_id, deleted=True) + "/undelete" + return self._post(url=url, payload={"id": quota_id}) diff --git a/bioblend/galaxy/roles/__init__.py b/bioblend/galaxy/roles/__init__.py index 662db82ba..083821e45 100644 --- a/bioblend/galaxy/roles/__init__.py +++ b/bioblend/galaxy/roles/__init__.py @@ -5,7 +5,7 @@ class RolesClient(Client): - module = 'roles' + module = "roles" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -83,12 +83,7 @@ def create_role(self, role_name, description, user_ids=None, group_ids=None): user_ids = [] if group_ids is None: group_ids = [] - payload = { - 'name': role_name, - 'description': description, - 'user_ids': user_ids, - 'group_ids': group_ids - } + payload = {"name": role_name, "description": description, "user_ids": user_ids, 
"group_ids": group_ids} ret = self._post(payload) if isinstance(ret, list): # Galaxy release_20.09 and earlier returned a 1-element list diff --git a/bioblend/galaxy/tool_data/__init__.py b/bioblend/galaxy/tool_data/__init__.py index f694d75f8..fca777f71 100644 --- a/bioblend/galaxy/tool_data/__init__.py +++ b/bioblend/galaxy/tool_data/__init__.py @@ -5,7 +5,7 @@ class ToolDataClient(Client): - module = 'tool_data' + module = "tool_data" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -64,7 +64,7 @@ def reload_data_table(self, data_table_id): 'model_class': 'TabularToolDataTable', 'name': 'all_fasta'} """ - url = self._make_url(data_table_id) + '/reload' + url = self._make_url(data_table_id) + "/reload" return self._get(url=url) def delete_data_table(self, data_table_id, values): @@ -81,5 +81,5 @@ def delete_data_table(self, data_table_id, values): :rtype: dict :return: Remaining contents of the given data table """ - payload = {'values': values} + payload = {"values": values} return self._delete(payload=payload, id=data_table_id) diff --git a/bioblend/galaxy/tool_dependencies/__init__.py b/bioblend/galaxy/tool_dependencies/__init__.py index 773db2f3d..74e4fefbb 100644 --- a/bioblend/galaxy/tool_dependencies/__init__.py +++ b/bioblend/galaxy/tool_dependencies/__init__.py @@ -5,12 +5,20 @@ class ToolDependenciesClient(Client): - module = 'dependency_resolvers' + module = "dependency_resolvers" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) - def summarize_toolbox(self, index=None, tool_ids=None, resolver_type=None, include_containers=False, container_type=None, index_by='requirements'): + def summarize_toolbox( + self, + index=None, + tool_ids=None, + resolver_type=None, + include_containers=False, + container_type=None, + index_by="requirements", + ): """ Summarize requirements across toolbox (for Tool Management grid). @@ -66,19 +74,19 @@ def summarize_toolbox(self, index=None, tool_ids=None, resolver_type=None, inclu Galaxy admin. It relies on an experimental API particularly tied to the GUI and therefore is subject to breaking changes. """ - assert index_by in ['tools', 'requirements'], "index_by must be one of 'tools' or 'requirements'." + assert index_by in ["tools", "requirements"], "index_by must be one of 'tools' or 'requirements'." 
params = { - 'include_containers': str(include_containers), - 'index_by': index_by, + "include_containers": str(include_containers), + "index_by": index_by, } if index: - params['index'] = str(index) + params["index"] = str(index) if tool_ids: - params['tool_ids'] = ','.join(tool_ids) + params["tool_ids"] = ",".join(tool_ids) if resolver_type: - params['resolver_type'] = resolver_type + params["resolver_type"] = resolver_type if container_type: - params['container_type'] = container_type + params["container_type"] = container_type - url = '/'.join((self._make_url(), 'toolbox')) + url = "/".join((self._make_url(), "toolbox")) return self._get(url=url, params=params) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index a38aa00e1..c64b36456 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -10,7 +10,7 @@ class ToolClient(Client): - module = 'tools' + module = "tools" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -41,17 +41,17 @@ def get_tools(self, tool_id=None, name=None, trackster=None): """ if tool_id is not None: warnings.warn( - 'The tool_id parameter is deprecated, use the show_tool() method to view details of a tool for which you know the ID.', - category=FutureWarning + "The tool_id parameter is deprecated, use the show_tool() method to view details of a tool for which you know the ID.", + category=FutureWarning, ) if tool_id is not None and name is not None: - raise ValueError('Provide only one argument between name or tool_id, but not both') + raise ValueError("Provide only one argument between name or tool_id, but not both") tools = self._raw_get_tool(in_panel=False, trackster=trackster) if tool_id is not None: - tool = next((_ for _ in tools if _['id'] == tool_id), None) + tool = next((_ for _ in tools if _["id"] == tool_id), None) tools = [tool] if tool is not None else [] elif name is not None: - tools = [_ for _ in tools if _['name'] == name] + tools = [_ for _ in tools if _["name"] == name] return tools def get_tool_panel(self): @@ -68,8 +68,8 @@ def get_tool_panel(self): def _raw_get_tool(self, in_panel=None, trackster=None): params = {} - params['in_panel'] = in_panel - params['trackster'] = trackster + params["in_panel"] = in_panel + params["trackster"] = trackster return self._get(params=params) def requirements(self, tool_id): @@ -104,7 +104,7 @@ def requirements(self, tool_id): .. note:: This method works only if the user is a Galaxy admin. """ - url = self._make_url(tool_id) + '/requirements' + url = self._make_url(tool_id) + "/requirements" return self._get(url=url) def reload(self, tool_id: str) -> dict: @@ -128,7 +128,7 @@ def reload(self, tool_id: str) -> dict: .. note:: This method works only if the user is a Galaxy admin. """ - url = self._make_url(tool_id) + '/reload' + url = self._make_url(tool_id) + "/reload" return self._put(url=url) def get_citations(self, tool_id: str) -> List[dict]: @@ -141,7 +141,7 @@ def get_citations(self, tool_id: str) -> List[dict]: :rtype: list of dicts :param: list containing the citations """ - url = self._make_url(tool_id) + '/citations' + url = self._make_url(tool_id) + "/citations" return self._get(url=url) def install_dependencies(self, tool_id): @@ -158,7 +158,7 @@ def install_dependencies(self, tool_id): .. note:: This method works only if the user is a Galaxy admin. 
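# Hedged, admin-only example of the ToolClient dependency endpoints reformatted
# above; the URL, API key and tool id are placeholders.
from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance("https://galaxy.example.org", key="admin-api-key")
installed = gi.tools.install_dependencies("some_tool_id")
print(installed)
gi.tools.uninstall_dependencies("some_tool_id")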
""" - url = self._make_url(tool_id) + '/install_dependencies' + url = self._make_url(tool_id) + "/install_dependencies" return self._post(url=url) def uninstall_dependencies(self, tool_id: str) -> dict: @@ -175,7 +175,7 @@ def uninstall_dependencies(self, tool_id: str) -> dict: .. note:: This method works only if the user is a Galaxy admin. """ - url = self._make_url(tool_id) + '/dependencies' + url = self._make_url(tool_id) + "/dependencies" return self._delete(url=url) def show_tool(self, tool_id, io_details=False, link_details=False): @@ -195,8 +195,8 @@ def show_tool(self, tool_id, io_details=False, link_details=False): :return: Information about the tool's interface """ params = {} - params['io_details'] = io_details - params['link_details'] = link_details + params["io_details"] = io_details + params["link_details"] = link_details return self._get(id=tool_id, params=params) def build(self, tool_id, inputs=None, tool_version=None, history_id=None): @@ -300,19 +300,19 @@ def build(self, tool_id, inputs=None, tool_version=None, history_id=None): params = {} if inputs: - params['inputs'] = inputs + params["inputs"] = inputs if tool_version: - params['tool_version'] = tool_version + params["tool_version"] = tool_version if history_id: - params['history_id'] = history_id + params["history_id"] = history_id - url = '/'.join((self.gi.url, 'tools', tool_id, 'build')) + url = "/".join((self.gi.url, "tools", tool_id, "build")) return self._post(payload=params, url=url) - def run_tool(self, history_id, tool_id, tool_inputs, input_format='legacy'): + def run_tool(self, history_id, tool_id, tool_inputs, input_format="legacy"): """ Runs tool specified by ``tool_id`` in history indicated by ``history_id`` with inputs from ``dict`` ``tool_inputs``. @@ -450,7 +450,7 @@ def upload_from_ftp(self, path, history_id, **keywords): :return: Information about the created upload job """ payload = self._upload_payload(history_id, **keywords) - payload['files_0|ftp_files'] = path + payload["files_0|ftp_files"] = path return self._post(payload) def paste_content(self, content, history_id, **kwds): @@ -481,14 +481,14 @@ def _upload_payload(self, history_id, **keywords): payload["history_id"] = history_id payload["tool_id"] = keywords.get("tool_id", "upload1") tool_input = {} - tool_input["file_type"] = keywords.get('file_type', 'auto') + tool_input["file_type"] = keywords.get("file_type", "auto") tool_input["dbkey"] = keywords.get("dbkey", "?") - if not keywords.get('to_posix_lines', True): - tool_input['files_0|to_posix_lines'] = False - elif keywords.get('space_to_tab', False): - tool_input['files_0|space_to_tab'] = 'Yes' - if 'file_name' in keywords: - tool_input["files_0|NAME"] = keywords['file_name'] + if not keywords.get("to_posix_lines", True): + tool_input["files_0|to_posix_lines"] = False + elif keywords.get("space_to_tab", False): + tool_input["files_0|space_to_tab"] = "Yes" + if "file_name" in keywords: + tool_input["files_0|NAME"] = keywords["file_name"] tool_input["files_0|type"] = "upload_dataset" payload["inputs"] = tool_input return payload diff --git a/bioblend/galaxy/tools/inputs.py b/bioblend/galaxy/tools/inputs.py index 6a11577fc..1ff1b728c 100644 --- a/bioblend/galaxy/tools/inputs.py +++ b/bioblend/galaxy/tools/inputs.py @@ -1,6 +1,5 @@ class InputsBuilder: - """ - """ + """ """ def __init__(self): self._input_dict = {} @@ -33,7 +32,6 @@ def flat_iter(self, prefix=None): class RepeatBuilder: - def __init__(self): self._instances = [] @@ -48,13 +46,11 @@ def flat_iter(self, prefix=None): class 
Param: - def __init__(self, value): self.value = value class DatasetParam(Param): - def __init__(self, value, src="hda"): if not isinstance(value, dict): value = dict(src=src, id=value) diff --git a/bioblend/galaxy/toolshed/__init__.py b/bioblend/galaxy/toolshed/__init__.py index 4450609b8..85ecee65b 100644 --- a/bioblend/galaxy/toolshed/__init__.py +++ b/bioblend/galaxy/toolshed/__init__.py @@ -5,7 +5,7 @@ class ToolShedClient(Client): - module = 'tool_shed_repositories' + module = "tool_shed_repositories" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -59,13 +59,18 @@ def show_repository(self, toolShed_id): """ return self._get(id=toolShed_id) - def install_repository_revision(self, tool_shed_url, name, owner, - changeset_revision, - install_tool_dependencies=False, - install_repository_dependencies=False, - install_resolver_dependencies=False, - tool_panel_section_id=None, - new_tool_panel_section_label=None): + def install_repository_revision( + self, + tool_shed_url, + name, + owner, + changeset_revision, + install_tool_dependencies=False, + install_repository_dependencies=False, + install_resolver_dependencies=False, + tool_panel_section_id=None, + new_tool_panel_section_label=None, + ): """ Install a specified repository revision from a specified Tool Shed into this Galaxy instance. This example demonstrates installation of a repository @@ -128,23 +133,22 @@ def install_repository_revision(self, tool_shed_url, name, owner, installed into. """ payload = {} - payload['tool_shed_url'] = tool_shed_url - payload['name'] = name - payload['owner'] = owner - payload['changeset_revision'] = changeset_revision - payload['install_tool_dependencies'] = install_tool_dependencies - payload['install_repository_dependencies'] = install_repository_dependencies - payload['install_resolver_dependencies'] = install_resolver_dependencies + payload["tool_shed_url"] = tool_shed_url + payload["name"] = name + payload["owner"] = owner + payload["changeset_revision"] = changeset_revision + payload["install_tool_dependencies"] = install_tool_dependencies + payload["install_repository_dependencies"] = install_repository_dependencies + payload["install_resolver_dependencies"] = install_resolver_dependencies if tool_panel_section_id: - payload['tool_panel_section_id'] = tool_panel_section_id + payload["tool_panel_section_id"] = tool_panel_section_id elif new_tool_panel_section_label: - payload['new_tool_panel_section_label'] = new_tool_panel_section_label + payload["new_tool_panel_section_label"] = new_tool_panel_section_label - url = self._make_url() + '/new/install_repository_revision' + url = self._make_url() + "/new/install_repository_revision" return self._post(url=url, payload=payload) - def uninstall_repository_revision(self, name, owner, changeset_revision, - tool_shed_url, remove_from_disk=True): + def uninstall_repository_revision(self, name, owner, changeset_revision, tool_shed_url, remove_from_disk=True): """ Uninstalls a specified repository revision from this Galaxy instance. 
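# Hedged sketch of installing a Tool Shed repository with the client above,
# assuming `gi` is a bioblend.galaxy.GalaxyInstance with admin rights;
# repository name, owner and changeset revision are placeholders.
gi.toolshed.install_repository_revision(
    tool_shed_url="https://toolshed.g2.bx.psu.edu",
    name="some_repo",
    owner="some_owner",
    changeset_revision="0123456789ab",
    install_tool_dependencies=True,
    new_tool_panel_section_label="New tools",
)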
@@ -169,10 +173,10 @@ def uninstall_repository_revision(self, name, owner, changeset_revision, :return: If successful, a dictionary with a message noting the removal """ payload = { - 'tool_shed_url': tool_shed_url, - 'name': name, - 'owner': owner, - 'changeset_revision': changeset_revision, - 'remove_from_disk': remove_from_disk + "tool_shed_url": tool_shed_url, + "name": name, + "owner": owner, + "changeset_revision": changeset_revision, + "remove_from_disk": remove_from_disk, } return self._delete(params=payload) diff --git a/bioblend/galaxy/users/__init__.py b/bioblend/galaxy/users/__init__.py index 36e27e395..323e311ae 100644 --- a/bioblend/galaxy/users/__init__.py +++ b/bioblend/galaxy/users/__init__.py @@ -7,7 +7,7 @@ class UserClient(Client): - module = 'users' + module = "users" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -50,11 +50,11 @@ def get_users(self, deleted=False, f_email=None, f_name=None, f_any=None): """ params = {} if f_email: - params['f_email'] = f_email + params["f_email"] = f_email if f_name: - params['f_name'] = f_name + params["f_name"] = f_name if f_any: - params['f_any'] = f_any + params["f_any"] = f_any return self._get(deleted=deleted, params=params) def show_user(self, user_id, deleted=False): @@ -91,7 +91,7 @@ def create_remote_user(self, user_email): :return: a dictionary containing information about the created user """ payload = {} - payload['remote_user_email'] = user_email + payload["remote_user_email"] = user_email return self._post(payload) def create_local_user(self, username, user_email, password): @@ -117,9 +117,9 @@ def create_local_user(self, username, user_email, password): :return: a dictionary containing information about the created user """ payload = {} - payload['username'] = username - payload['email'] = user_email - payload['password'] = password + payload["username"] = username + payload["email"] = user_email + payload["password"] = password return self._post(payload) def get_current_user(self): @@ -130,7 +130,7 @@ def get_current_user(self): :rtype: dict :return: a dictionary containing information about the current user """ - url = self._make_url() + '/current' + url = self._make_url() + "/current" return self._get(url=url) def create_user_apikey(self, user_id): @@ -143,9 +143,9 @@ def create_user_apikey(self, user_id): :rtype: str :return: the API key for the user """ - url = self._make_url(user_id) + '/api_key' + url = self._make_url(user_id) + "/api_key" payload = {} - payload['user_id'] = user_id + payload["user_id"] = user_id return self._post(payload, url=url) def delete_user(self, user_id, purge=False): @@ -168,7 +168,7 @@ def delete_user(self, user_id, purge=False): """ params = {} if purge is True: - params['purge'] = purge + params["purge"] = purge return self._delete(id=user_id, params=params) def get_user_apikey(self, user_id): @@ -181,8 +181,8 @@ def get_user_apikey(self, user_id): :rtype: str :return: the API key for the user """ - url = self._make_url(user_id) + '/api_key/inputs' - return self._get(url=url)['inputs'][0]['value'] + url = self._make_url(user_id) + "/api_key/inputs" + return self._get(url=url)["inputs"][0]["value"] def update_user(self, user_id, **kwds): """ @@ -201,5 +201,5 @@ def update_user(self, user_id, **kwds): :rtype: dict :return: details of the updated user """ - url = self._make_url(user_id) + '/information/inputs' + url = self._make_url(user_id) + "/information/inputs" return self._put(url=url, payload=kwds, id=user_id) diff --git 
a/bioblend/galaxy/visual/__init__.py b/bioblend/galaxy/visual/__init__.py index 99c02741d..63635c8c0 100644 --- a/bioblend/galaxy/visual/__init__.py +++ b/bioblend/galaxy/visual/__init__.py @@ -5,7 +5,7 @@ class VisualClient(Client): - module = 'visualizations' + module = "visualizations" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) diff --git a/bioblend/galaxy/workflows/__init__.py b/bioblend/galaxy/workflows/__init__.py index b43e419de..9e3172ab7 100644 --- a/bioblend/galaxy/workflows/__init__.py +++ b/bioblend/galaxy/workflows/__init__.py @@ -14,7 +14,7 @@ class WorkflowClient(Client): - module = 'workflows' + module = "workflows" def __init__(self, galaxy_instance): super().__init__(galaxy_instance) @@ -49,20 +49,20 @@ def get_workflows(self, workflow_id=None, name=None, published=False): """ if workflow_id is not None: warnings.warn( - 'The workflow_id parameter is deprecated, use the show_workflow() method to view details of a workflow for which you know the ID.', - category=FutureWarning + "The workflow_id parameter is deprecated, use the show_workflow() method to view details of a workflow for which you know the ID.", + category=FutureWarning, ) if workflow_id is not None and name is not None: - raise ValueError('Provide only one argument between name or workflow_id, but not both') + raise ValueError("Provide only one argument between name or workflow_id, but not both") params = {} if published: - params['show_published'] = True + params["show_published"] = True workflows = self._get(params=params) if workflow_id is not None: - workflow = next((_ for _ in workflows if _['id'] == workflow_id), None) + workflow = next((_ for _ in workflows if _["id"] == workflow_id), None) workflows = [workflow] if workflow is not None else [] elif name is not None: - workflows = [_ for _ in workflows if _['name'] == name] + workflows = [_ for _ in workflows if _["name"] == name] return workflows def show_workflow(self, workflow_id, version=None): @@ -86,7 +86,7 @@ def show_workflow(self, workflow_id, version=None): """ params = {} if version is not None: - params['version'] = version + params["version"] = version return self._get(id=workflow_id, params=params) @@ -105,8 +105,8 @@ def get_workflow_inputs(self, workflow_id, label): :return: list of workflow inputs matching the label query """ wf = self._get(id=workflow_id) - inputs = wf['inputs'] - return [id for id in inputs if inputs[id]['label'] == label] + inputs = wf["inputs"] + return [id for id in inputs if inputs[id]["label"] == label] def import_workflow_dict(self, workflow_dict, publish=False): """ @@ -135,7 +135,7 @@ def import_workflow_dict(self, workflow_dict, publish=False): 'model_class': 'StoredWorkflow', 'id': '94bac0a90086bdcf'} """ - payload = {'workflow': workflow_dict, 'publish': publish} + payload = {"workflow": workflow_dict, "publish": publish} url = self._make_url() + "/upload" return self._post(url=url, payload=payload) @@ -191,7 +191,7 @@ def import_shared_workflow(self, workflow_id): 'tags': [], 'url': '/api/workflows/ee0e2b4b696d9092'} """ - payload = {'shared_workflow_id': workflow_id} + payload = {"shared_workflow_id": workflow_id} url = self._make_url() return self._post(url=url, payload=payload) @@ -210,9 +210,9 @@ def export_workflow_dict(self, workflow_id, version=None): """ params = {} if version is not None: - params['version'] = version + params["version"] = version - url = '/'.join((self._make_url(), 'download', workflow_id)) + url = "/".join((self._make_url(), "download", workflow_id)) 
return self._get(url=url, params=params) def export_workflow_to_local_path(self, workflow_id, file_local_path, use_default_filename=True): @@ -241,7 +241,7 @@ def export_workflow_to_local_path(self, workflow_id, file_local_path, use_defaul filename = f"Galaxy-Workflow-{workflow_dict['name']}.ga" file_local_path = os.path.join(file_local_path, filename) - with open(file_local_path, 'w') as fp: + with open(file_local_path, "w") as fp: json.dump(workflow_dict, fp) def update_workflow(self, workflow_id, **kwds): @@ -274,11 +274,19 @@ def update_workflow(self, workflow_id, **kwds): """ return self._put(payload=kwds, id=workflow_id) - def invoke_workflow(self, workflow_id: str, inputs: Optional[dict] = None, - params: Optional[dict] = None, history_id: Optional[str] = None, - history_name: Optional[str] = None, import_inputs_to_history: bool = False, - replacement_params: Optional[dict] = None, allow_tool_state_corrections: bool = False, - inputs_by: Optional[str] = None, parameters_normalized: bool = False) -> dict: + def invoke_workflow( + self, + workflow_id: str, + inputs: Optional[dict] = None, + params: Optional[dict] = None, + history_id: Optional[str] = None, + history_name: Optional[str] = None, + import_inputs_to_history: bool = False, + replacement_params: Optional[dict] = None, + allow_tool_state_corrections: bool = False, + inputs_by: Optional[str] = None, + parameters_normalized: bool = False, + ) -> dict: """ Invoke the workflow identified by ``workflow_id``. This will cause a workflow to be scheduled and return an object describing @@ -453,26 +461,26 @@ def invoke_workflow(self, workflow_id: str, inputs: Optional[dict] = None, """ payload: Dict[str, Any] = {} if inputs: - payload['inputs'] = inputs + payload["inputs"] = inputs if params: - payload['parameters'] = params + payload["parameters"] = params if replacement_params: - payload['replacement_params'] = replacement_params + payload["replacement_params"] = replacement_params if history_id: - payload['history'] = f'hist_id={history_id}' + payload["history"] = f"hist_id={history_id}" elif history_name: - payload['history'] = history_name + payload["history"] = history_name if not import_inputs_to_history: - payload['no_add_to_history'] = True + payload["no_add_to_history"] = True if allow_tool_state_corrections: - payload['allow_tool_state_corrections'] = allow_tool_state_corrections + payload["allow_tool_state_corrections"] = allow_tool_state_corrections if inputs_by is not None: - payload['inputs_by'] = inputs_by + payload["inputs_by"] = inputs_by if parameters_normalized: - payload['parameters_normalized'] = parameters_normalized + payload["parameters_normalized"] = parameters_normalized url = self._invocations_url(workflow_id) return self._post(payload, url=url) @@ -600,7 +608,7 @@ def show_invocation_step(self, workflow_id, invocation_id, step_id): return self._get(url=url) def run_invocation_step_action(self, workflow_id, invocation_id, step_id, action): - """ Execute an action for an active workflow invocation step. The + """Execute an action for an active workflow invocation step. The nature of this action and what is expected will vary based on the the type of workflow step (the only currently valid action is True/False for pause steps). @@ -676,14 +684,15 @@ def refactor_workflow(self, workflow_id, actions, dry_run=False): and the refactored workflow. 
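# Hedged sketch of WorkflowClient.invoke_workflow() with the payload handling
# above, assuming `gi` is a bioblend.galaxy.GalaxyInstance; the workflow and
# dataset ids are placeholders.
inv = gi.workflows.invoke_workflow(
    "workflow_id",
    inputs={"0": {"src": "hda", "id": "dataset_id"}},
    history_name="invocation results",
    allow_tool_state_corrections=True,
)
print(inv["state"])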
""" payload = { - 'actions': actions, - 'dry_run': dry_run, + "actions": actions, + "dry_run": dry_run, } - url = '/'.join((self._make_url(workflow_id), 'refactor')) + url = "/".join((self._make_url(workflow_id), "refactor")) return self._put(payload=payload, url=url) - def extract_workflow_from_history(self, history_id, workflow_name, - job_ids=None, dataset_hids=None, dataset_collection_hids=None): + def extract_workflow_from_history( + self, history_id, workflow_name, job_ids=None, dataset_hids=None, dataset_collection_hids=None + ): """ Extract a workflow from a history. @@ -712,7 +721,7 @@ def extract_workflow_from_history(self, history_id, workflow_name, "job_ids": job_ids if job_ids else [], "dataset_ids": dataset_hids if dataset_hids else [], "dataset_collection_ids": dataset_collection_hids if dataset_collection_hids else [], - "workflow_name": workflow_name + "workflow_name": workflow_name, } return self._post(payload=payload) @@ -726,17 +735,17 @@ def show_versions(self, workflow_id): :rtype: list of dicts :return: Ordered list of version descriptions for this workflow """ - url = self._make_url(workflow_id) + '/versions' + url = self._make_url(workflow_id) + "/versions" return self._get(url=url) def _invocation_step_url(self, workflow_id, invocation_id, step_id): - return '/'.join((self._invocation_url(workflow_id, invocation_id), "steps", step_id)) + return "/".join((self._invocation_url(workflow_id, invocation_id), "steps", step_id)) def _invocation_url(self, workflow_id, invocation_id): - return '/'.join((self._invocations_url(workflow_id), invocation_id)) + return "/".join((self._invocations_url(workflow_id), invocation_id)) def _invocations_url(self, workflow_id): - return '/'.join((self._make_url(workflow_id), 'invocations')) + return "/".join((self._make_url(workflow_id), "invocations")) -__all__ = ('WorkflowClient',) +__all__ = ("WorkflowClient",) diff --git a/bioblend/galaxyclient.py b/bioblend/galaxyclient.py index 7e04f833d..85f35c9f6 100644 --- a/bioblend/galaxyclient.py +++ b/bioblend/galaxyclient.py @@ -9,9 +9,7 @@ import contextlib import json import logging -from urllib.parse import ( - urljoin, -) +from urllib.parse import urljoin import requests from requests_toolbelt import MultipartEncoder @@ -23,7 +21,6 @@ class GalaxyClient: - def __init__(self, url, key=None, email=None, password=None, verify=True, timeout=None): """ :param verify: Whether to verify the server's TLS certificate @@ -34,10 +31,10 @@ def __init__(self, url, key=None, email=None, password=None, verify=True, timeou self.verify = verify self.timeout = timeout # Make sure the URL scheme is defined (otherwise requests will not work) - if not url.lower().startswith('http'): + if not url.lower().startswith("http"): found_scheme = None # Try to guess the scheme, starting from the more secure - for scheme in ('https://', 'http://'): + for scheme in ("https://", "http://"): log.warning(f"Missing scheme in url, trying with {scheme}") with contextlib.suppress(requests.RequestException): r = requests.get( @@ -53,7 +50,7 @@ def __init__(self, url, key=None, email=None, password=None, verify=True, timeou url = found_scheme + url # All of Galaxy's and ToolShed's API's are rooted at /api so make that the url self.base_url = url - self.url = urljoin(url, 'api') + self.url = urljoin(url, "api") # If key has been supplied, use it; otherwise just set email and # password and grab user's key before first request. 
if key: @@ -62,9 +59,9 @@ def __init__(self, url, key=None, email=None, password=None, verify=True, timeou self._key = None self.email = email self.password = password - self.json_headers = {'Content-Type': 'application/json'} + self.json_headers = {"Content-Type": "application/json"} # json_headers needs to be set before key can be defined, otherwise authentication with email/password causes an error - self.json_headers['x-api-key'] = self.key + self.json_headers["x-api-key"] = self.key def make_get_request(self, url, **kwargs): """ @@ -83,8 +80,8 @@ def make_get_request(self, url, **kwargs): :return: the response object. """ headers = self.json_headers - kwargs.setdefault('timeout', self.timeout) - kwargs.setdefault('verify', self.verify) + kwargs.setdefault("timeout", self.timeout) + kwargs.setdefault("verify", self.verify) r = requests.get(url, headers=headers, **kwargs) return r @@ -122,7 +119,7 @@ def my_dumps(d): payload = my_dumps(payload) payload = MultipartEncoder(fields=payload) headers = self.json_headers.copy() - headers['Content-Type'] = payload.content_type + headers["Content-Type"] = payload.content_type post_params = None else: if payload is not None: diff --git a/bioblend/toolshed/__init__.py b/bioblend/toolshed/__init__.py index d19616e00..14c63cb4c 100644 --- a/bioblend/toolshed/__init__.py +++ b/bioblend/toolshed/__init__.py @@ -2,7 +2,11 @@ A base representation of an instance of Tool Shed """ from bioblend.galaxyclient import GalaxyClient -from bioblend.toolshed import categories, repositories, tools +from bioblend.toolshed import ( + categories, + repositories, + tools, +) class ToolShedInstance(GalaxyClient): diff --git a/bioblend/toolshed/categories/__init__.py b/bioblend/toolshed/categories/__init__.py index be3cc81ae..80f20b6b1 100644 --- a/bioblend/toolshed/categories/__init__.py +++ b/bioblend/toolshed/categories/__init__.py @@ -5,7 +5,7 @@ class ToolShedCategoryClient(Client): - module = 'categories' + module = "categories" def __init__(self, toolshed_instance): super().__init__(toolshed_instance) @@ -46,7 +46,7 @@ def show_category(self, category_id): """ return self._get(id=category_id) - def get_repositories(self, category_id, sort_key='name', sort_order='asc'): + def get_repositories(self, category_id, sort_key="name", sort_order="asc"): """ Returns a dictionary of information for a repository category including a list of repositories belonging to the category. 
@@ -108,9 +108,9 @@ def get_repositories(self, category_id, sort_key='name', sort_order='asc'): params = {} if sort_key: - params.update({'sort_key': sort_key}) + params.update({"sort_key": sort_key}) if sort_order: - params.update({'sort_order': sort_order}) + params.update({"sort_order": sort_order}) - url = self._make_url(category_id) + '/repositories' + url = self._make_url(category_id) + "/repositories" return self._get(url=url, params=params) diff --git a/bioblend/toolshed/repositories/__init__.py b/bioblend/toolshed/repositories/__init__.py index c55c050b4..d5f041b77 100644 --- a/bioblend/toolshed/repositories/__init__.py +++ b/bioblend/toolshed/repositories/__init__.py @@ -1,16 +1,14 @@ """ Interaction with a Tool Shed instance repositories """ -from typing import ( - Optional, -) +from typing import Optional from bioblend.galaxy.client import Client from bioblend.util import attach_file class ToolShedRepositoryClient(Client): - module = 'repositories' + module = "repositories" def __init__(self, toolshed_instance): super().__init__(toolshed_instance) @@ -145,17 +143,13 @@ def get_ordered_installable_revisions(self, name, owner): :rtype: list :return: List of changeset revision hash strings from oldest to newest """ - url = self._make_url() + '/get_ordered_installable_revisions' - params = { - 'name': name, - 'owner': owner - } + url = self._make_url() + "/get_ordered_installable_revisions" + params = {"name": name, "owner": owner} r = self._get(url=url, params=params) return r - def get_repository_revision_install_info(self, name, owner, - changeset_revision): + def get_repository_revision_install_info(self, name, owner, changeset_revision): """ Return a list of dictionaries of metadata about a certain changeset revision for a single tool. @@ -226,19 +220,21 @@ def get_repository_revision_install_info(self, name, owner, 'type': 'package', 'version': '0.1.18'}}]}] """ - url = self._make_url() + '/get_repository_revision_install_info' - params = { - 'name': name, - 'owner': owner, - 'changeset_revision': changeset_revision - } + url = self._make_url() + "/get_repository_revision_install_info" + params = {"name": name, "owner": owner, "changeset_revision": changeset_revision} return self._get(url=url, params=params) - def repository_revisions(self, downloadable=None, malicious=None, - tools_functionally_correct=None, - missing_test_components=None, do_not_test=None, - includes_tools=None, test_install_error=None, - skip_tool_test=None): + def repository_revisions( + self, + downloadable=None, + malicious=None, + tools_functionally_correct=None, + missing_test_components=None, + do_not_test=None, + includes_tools=None, + test_install_error=None, + skip_tool_test=None, + ): """ Returns a (possibly filtered) list of dictionaries that include information about all repository revisions. 
The following parameters can @@ -310,28 +306,28 @@ def repository_revisions(self, downloadable=None, malicious=None, """ # Not using '_make_url' or '_get' to create url since the module id used # to create url is not the same as needed for this method - url = self.gi.url + '/repository_revisions' + url = self.gi.url + "/repository_revisions" params = {} if downloadable: - params['downloadable'] = True + params["downloadable"] = True if malicious: - params['malicious'] = True + params["malicious"] = True if tools_functionally_correct: - params['tools_functionally_correct'] = True + params["tools_functionally_correct"] = True if missing_test_components: - params['missing_test_components'] = True + params["missing_test_components"] = True if do_not_test: - params['do_not_test'] = True + params["do_not_test"] = True if includes_tools: - params['includes_tools'] = True + params["includes_tools"] = True if test_install_error: - params['test_install_error'] = True + params["test_install_error"] = True if skip_tool_test: - params['skip_tool_test'] = True + params["skip_tool_test"] = True return self._get(url=url, params=params) def show_repository_revision(self, metadata_id): - ''' + """ Returns a dictionary that includes information about a specified repository revision. @@ -361,11 +357,11 @@ def show_repository_revision(self, metadata_id): 'tool_test_results': {'missing_test_components': []}, 'tools_functionally_correct': False, 'url': '/api/repository_revisions/504be8aaa652c154'} - ''' + """ # Not using '_make_url' or '_get' to create url since the module id used # to create url is not the same as needed for this method # since metadata_id has to be defined, easy to create the url here - url = '/'.join((self.gi.url, 'repository_revisions', metadata_id)) + url = "/".join((self.gi.url, "repository_revisions", metadata_id)) return self._get(url=url) def update_repository(self, id, tar_ball_path, commit_message=None): @@ -393,20 +389,25 @@ def update_repository(self, id, tar_ball_path, commit_message=None): .. versionadded:: 0.5.2 """ - url = self._make_url(id) + '/changeset_revision' - payload = { - 'file': attach_file(tar_ball_path) - } + url = self._make_url(id) + "/changeset_revision" + payload = {"file": attach_file(tar_ball_path)} if commit_message is not None: - payload['commit_message'] = commit_message + payload["commit_message"] = commit_message try: return self._post(payload=payload, files_attached=True, url=url) finally: - payload['file'].close() - - def create_repository(self, name, synopsis, description=None, - type='unrestricted', remote_repository_url=None, - homepage_url=None, category_ids=None): + payload["file"].close() + + def create_repository( + self, + name, + synopsis, + description=None, + type="unrestricted", + remote_repository_url=None, + homepage_url=None, + category_ids=None, + ): """ Create a new repository in a Tool Shed. 
@@ -453,29 +454,33 @@ def create_repository(self, name, synopsis, description=None, "user_id": "adb5f5c93f827949"} """ payload = { - 'name': name, - 'synopsis': synopsis, + "name": name, + "synopsis": synopsis, } if description is not None: - payload['description'] = description + payload["description"] = description if description is not None: - payload['description'] = description + payload["description"] = description if type is not None: - payload['type'] = type + payload["type"] = type if remote_repository_url is not None: - payload['remote_repository_url'] = remote_repository_url + payload["remote_repository_url"] = remote_repository_url if homepage_url is not None: - payload['homepage_url'] = homepage_url + payload["homepage_url"] = homepage_url if category_ids is not None: - payload['category_ids[]'] = category_ids + payload["category_ids[]"] = category_ids return self._post(payload) - def update_repository_metadata(self, toolShed_id: str, name: Optional[str] = None, - synopsis: Optional[str] = None, - description: Optional[str] = None, - remote_repository_url: Optional[str] = None, - homepage_url: Optional[str] = None, - category_ids: Optional[str] = None) -> dict: + def update_repository_metadata( + self, + toolShed_id: str, + name: Optional[str] = None, + synopsis: Optional[str] = None, + description: Optional[str] = None, + remote_repository_url: Optional[str] = None, + homepage_url: Optional[str] = None, + category_ids: Optional[str] = None, + ) -> dict: """ Update metadata of a Tool Shed repository. @@ -506,15 +511,15 @@ def update_repository_metadata(self, toolShed_id: str, name: Optional[str] = Non """ payload = {} if name: - payload['name'] = name + payload["name"] = name if synopsis: - payload['synopsis'] = synopsis + payload["synopsis"] = synopsis if description: - payload['description'] = description + payload["description"] = description if remote_repository_url: - payload['remote_repository_url'] = remote_repository_url + payload["remote_repository_url"] = remote_repository_url if homepage_url: - payload['homepage_url'] = homepage_url + payload["homepage_url"] = homepage_url if category_ids: - payload['category_ids'] = category_ids + payload["category_ids"] = category_ids return self._put(id=toolShed_id, payload=payload) diff --git a/bioblend/toolshed/tools/__init__.py b/bioblend/toolshed/tools/__init__.py index 3113489da..b3af4aedb 100644 --- a/bioblend/toolshed/tools/__init__.py +++ b/bioblend/toolshed/tools/__init__.py @@ -5,7 +5,7 @@ class ToolShedToolClient(Client): - module = 'tools' + module = "tools" def __init__(self, toolshed_instance): super().__init__(toolshed_instance) diff --git a/bioblend/util/__init__.py b/bioblend/util/__init__.py index 036631d7e..2a1d7f9e7 100644 --- a/bioblend/util/__init__.py +++ b/bioblend/util/__init__.py @@ -1,7 +1,7 @@ import os from typing import ( IO, - NamedTuple + NamedTuple, ) @@ -12,6 +12,7 @@ class Bunch: The end result is that this allows a dict to be to be represented the same as a database class, thus the two become interchangeable as a data source. 
""" + def __init__(self, **kwargs): self.__dict__.update(kwargs) @@ -56,6 +57,7 @@ def abstractclass(decorated_cls): Adapted from https://stackoverflow.com/a/49013561/4503125 """ + def clsnew(cls, *args, **kwargs): if cls is decorated_cls: raise TypeError(f"Can't instantiate abstract class {decorated_cls.__name__}") @@ -66,6 +68,6 @@ def clsnew(cls, *args, **kwargs): __all__ = ( - 'Bunch', - 'attach_file', + "Bunch", + "attach_file", ) diff --git a/docs/conf.py b/docs/conf.py index 70a1b897c..e9e955f81 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,7 +15,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), '..'))) +sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), ".."))) from bioblend import get_version # noqa: E402 @@ -26,28 +26,23 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.viewcode' -] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.viewcode"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'BioBlend' -copyright = '2012-2016, Enis Afgan' +project = "BioBlend" +copyright = "2012-2016, Enis Afgan" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -70,7 +65,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None @@ -87,7 +82,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -95,19 +90,19 @@ # List of autodoc directive flags that should be automatically applied to all # autodoc directives autodoc_default_options = { - 'members': True, - 'undoc-members': True, + "members": True, + "undoc-members": True, } # Include the __init__ method's doc string in addition to the class doc string # in the documentation. -autoclass_content = 'both' +autoclass_content = "both" # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -136,7 +131,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -180,7 +175,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'BioBlenddoc' +htmlhelp_basename = "BioBlenddoc" # -- Options for LaTeX output -------------------------------------------------- @@ -188,10 +183,8 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', } @@ -199,7 +192,7 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'BioBlend.tex', 'BioBlend Documentation', 'Enis Afgan', 'manual'), + ("index", "BioBlend.tex", "BioBlend Documentation", "Enis Afgan", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -227,9 +220,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'bioblend', 'BioBlend Documentation', ['Enis Afgan'], 1) -] +man_pages = [("index", "bioblend", "BioBlend Documentation", ["Enis Afgan"], 1)] # If true, show URL addresses after external links. # man_show_urls = False @@ -241,8 +232,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'BioBlend', 'BioBlend Documentation', 'Enis Afgan', 'BioBlend', - 'One line description of project.', 'Miscellaneous'), + ( + "index", + "BioBlend", + "BioBlend Documentation", + "Enis Afgan", + "BioBlend", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. 
diff --git a/docs/examples/cloudman_basic_usage_scenario.py b/docs/examples/cloudman_basic_usage_scenario.py index 643dd1721..1aa3dc1ca 100644 --- a/docs/examples/cloudman_basic_usage_scenario.py +++ b/docs/examples/cloudman_basic_usage_scenario.py @@ -36,8 +36,8 @@ # autoscaling should be enabled now is_enabled = cm.autoscaling_enabled() -min_autoscaling = cm.get_status()['autoscaling']['as_min'] -max_autoscaling = cm.get_status()['autoscaling']['as_max'] +min_autoscaling = cm.get_status()["autoscaling"]["as_min"] +max_autoscaling = cm.get_status()["autoscaling"]["as_max"] # adjust cm.adjust_autoscaling(minimum_nodes=5, maximum_nodes=10) diff --git a/docs/examples/create_user_get_api_key.py b/docs/examples/create_user_get_api_key.py index 55c5d7d26..c25b8b958 100644 --- a/docs/examples/create_user_get_api_key.py +++ b/docs/examples/create_user_get_api_key.py @@ -10,7 +10,9 @@ import bioblend.galaxy if len(sys.argv) != 6: - print("Usage: python3 create_user_get_api_key.py ") + print( + "Usage: python3 create_user_get_api_key.py " + ) sys.exit(1) galaxy_url = sys.argv[1] galaxy_api_key = sys.argv[2] @@ -20,5 +22,5 @@ # Create a new user and get a new API key for her new_user = gi.users.create_local_user(sys.argv[3], sys.argv[4], sys.argv[5]) -new_api_key = gi.users.create_user_apikey(new_user['id']) +new_api_key = gi.users.create_user_apikey(new_user["id"]) print(new_api_key) diff --git a/docs/examples/list_histories.py b/docs/examples/list_histories.py index ea48e4fc9..08b2511a7 100644 --- a/docs/examples/list_histories.py +++ b/docs/examples/list_histories.py @@ -28,5 +28,5 @@ print("\nHistories:") for hist_dict in histories: # As an example, we retrieve a piece of metadata (the size) using show_history - hist_details = gi.histories.show_history(hist_dict['id']) + hist_details = gi.histories.show_history(hist_dict["id"]) print(f"{hist_dict['name']} ({hist_details['size']}) : {hist_dict['id']}") diff --git a/docs/examples/objects/small.py b/docs/examples/objects/small.py index 7870c959f..187c1e0c3 100644 --- a/docs/examples/objects/small.py +++ b/docs/examples/objects/small.py @@ -2,9 +2,10 @@ import sys import tempfile -from bioblend.galaxy.objects import GalaxyInstance from common import get_one # noqa:I100,I201 +from bioblend.galaxy.objects import GalaxyInstance + # This is a "toy" example that should run much faster # (once the cluster's resource manager allows it to run) than the # real-world ones. The workflow, which is imported from local disk, @@ -12,10 +13,10 @@ # the columns. The input dataset is publicly available on CRS4's # Orione Galaxy server. 
-URL = 'https://orione.crs4.it' -API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY') -if API_KEY == 'YOUR_API_KEY': - sys.exit('API_KEY not set, see the README.txt file') +URL = "https://orione.crs4.it" +API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY") +if API_KEY == "YOUR_API_KEY": + sys.exit("API_KEY not set, see the README.txt file") gi = GalaxyInstance(URL, API_KEY) # import the workflow from the JSON dump @@ -25,29 +26,29 @@ # Select the "Orione SupMat" library -library_name = 'Orione SupMat' +library_name = "Orione SupMat" l = get_one(gi.libraries.list(name=library_name)) # Select the input dataset -ds_name = '/RNA-Seq - Listeria monocytogenes/Listeria_monocytogenes_EGD_e_uid61583/NC_003210.rnt' +ds_name = "/RNA-Seq - Listeria monocytogenes/Listeria_monocytogenes_EGD_e_uid61583/NC_003210.rnt" ld = get_one(l.get_datasets(name=ds_name)) -input_map = {'input_tsv': ld} +input_map = {"input_tsv": ld} # Run the workflow on a new history with the selected dataset as # input, overriding the index of the column to remove; wait until the # computation is complete. -history_name = 'get_col output' -params = {'Cut1': {'columnList': 'c2'}} -print(f'Running workflow: {wf.name} [{wf.id}]') +history_name = "get_col output" +params = {"Cut1": {"columnList": "c2"}} +print(f"Running workflow: {wf.name} [{wf.id}]") outputs, out_hist = wf.run(input_map, history_name, params=params, wait=True) -print('Job has finished') +print("Job has finished") assert out_hist.name == history_name -print(f'Output history: {out_hist.name} [{out_hist.id}]') +print(f"Output history: {out_hist.name} [{out_hist.id}]") # Save results to local disk -out_ds = get_one([_ for _ in outputs if _.name == 'Cut on data 1']) -with tempfile.NamedTemporaryFile(prefix='bioblend_', delete=False) as f: +out_ds = get_one([_ for _ in outputs if _.name == "Cut on data 1"]) +with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as f: out_ds.download(f) print(f'Output downloaded to "{f.name}"') diff --git a/docs/examples/objects/w2_bacterial_reseq.py b/docs/examples/objects/w2_bacterial_reseq.py index d5f68e491..5272a5b82 100644 --- a/docs/examples/objects/w2_bacterial_reseq.py +++ b/docs/examples/objects/w2_bacterial_reseq.py @@ -1,18 +1,19 @@ import os import sys -from bioblend.galaxy.objects import GalaxyInstance from common import get_one # noqa:I100,I201 -URL = 'https://orione.crs4.it' -API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY') -if API_KEY == 'YOUR_API_KEY': - sys.exit('API_KEY not set, see the README.txt file') +from bioblend.galaxy.objects import GalaxyInstance + +URL = "https://orione.crs4.it" +API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY") +if API_KEY == "YOUR_API_KEY": + sys.exit("API_KEY not set, see the README.txt file") gi = GalaxyInstance(URL, API_KEY) # Select "W2 - Bacterial re-sequencing | Paired-end" from published workflows -workflow_name = 'W2 - Bacterial re-sequencing | Paired-end' +workflow_name = "W2 - Bacterial re-sequencing | Paired-end" previews = gi.workflows.get_previews(name=workflow_name, published=True) p = get_one(_ for _ in previews if _.published) @@ -27,29 +28,28 @@ # Select the "Orione SupMat" library -library_name = 'Orione SupMat' +library_name = "Orione SupMat" l = get_one(gi.libraries.list(name=library_name)) # Select the datasets ds_names = [ - '/Whole genome - Escherichia coli/E coli DH10B MiSeq R1.fastq', - '/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq', - '/Whole genome - Escherichia coli/E coli DH10B - Reference', + "/Whole genome - Escherichia 
coli/E coli DH10B MiSeq R1.fastq", + "/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq", + "/Whole genome - Escherichia coli/E coli DH10B - Reference", ] input_labels = [ - 'Forward Reads', - 'Reverse Reads', - 'Reference Genome', + "Forward Reads", + "Reverse Reads", + "Reference Genome", ] -input_map = {label: h.import_dataset(get_one(l.get_datasets(name=name))) - for name, label in zip(ds_names, input_labels)} +input_map = {label: h.import_dataset(get_one(l.get_datasets(name=name))) for name, label in zip(ds_names, input_labels)} # Set custom parameters for the "check_contigs" and "sspace" tools params = { - 'check_contigs': {'genomesize': 5.0}, # affects both occurrences - 'sspace': {'insert': 300, 'error': 0.5, 'minoverlap': 35}, + "check_contigs": {"genomesize": 5.0}, # affects both occurrences + "sspace": {"insert": 300, "error": 0.5, "minoverlap": 35}, } # Run the workflow on a new history with the selected datasets as inputs @@ -57,5 +57,5 @@ outputs, out_hist = iw.run(input_map, h, params=params) assert out_hist.name == history_name -print(f'Running workflow: {iw.name} [{iw.id}]') -print(f'Output history: {out_hist.name} [{out_hist.id}]') +print(f"Running workflow: {iw.name} [{iw.id}]") +print(f"Output history: {out_hist.name} [{out_hist.id}]") diff --git a/docs/examples/objects/w3_bacterial_denovo.py b/docs/examples/objects/w3_bacterial_denovo.py index 709173115..73a3bfdc7 100644 --- a/docs/examples/objects/w3_bacterial_denovo.py +++ b/docs/examples/objects/w3_bacterial_denovo.py @@ -2,18 +2,19 @@ import os import sys -from bioblend.galaxy.objects import GalaxyInstance from common import get_one # noqa:I100,I201 -URL = 'https://orione.crs4.it' -API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY') -if API_KEY == 'YOUR_API_KEY': - sys.exit('API_KEY not set, see the README.txt file') +from bioblend.galaxy.objects import GalaxyInstance + +URL = "https://orione.crs4.it" +API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY") +if API_KEY == "YOUR_API_KEY": + sys.exit("API_KEY not set, see the README.txt file") gi = GalaxyInstance(URL, API_KEY) # Select "W3 - Bacterial de novo assembly | Paired-end" from published workflows -workflow_name = 'W3 - Bacterial de novo assembly | Paired-end' +workflow_name = "W3 - Bacterial de novo assembly | Paired-end" previews = gi.workflows.get_previews(name=workflow_name, published=True) p = get_one(_ for _ in previews if _.published) @@ -28,50 +29,47 @@ # Select the "Orione SupMat" library -library_name = 'Orione SupMat' +library_name = "Orione SupMat" l = get_one(gi.libraries.list(name=library_name)) # Select the datasets ds_names = [ - '/Whole genome - Escherichia coli/E coli DH10B MiSeq R1.fastq', - '/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq', + "/Whole genome - Escherichia coli/E coli DH10B MiSeq R1.fastq", + "/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq", ] input_labels = [ - 'Left/Forward FASTQ Reads', - 'Right/Reverse FASTQ Reads', + "Left/Forward FASTQ Reads", + "Right/Reverse FASTQ Reads", ] -input_map = {label: h.import_dataset(get_one(l.get_datasets(name=name))) - for name, label in zip(ds_names, input_labels)} +input_map = {label: h.import_dataset(get_one(l.get_datasets(name=name))) for name, label in zip(ds_names, input_labels)} # Set the "hash_length" parameter to different values for the 3 "velveth" steps -lengths = {'19', '23', '29'} -ws_ids = iw.tool_labels_to_ids['velveth'] +lengths = {"19", "23", "29"} +ws_ids = iw.tool_labels_to_ids["velveth"] assert len(ws_ids) == len(lengths) -params 
= {id_: {'hash_length': v} for id_, v in zip(ws_ids, lengths)} +params = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)} # Set the "ins_length" runtime parameter to the same value for the 3 # "velvetg" steps -tool_id = 'velvetg' +tool_id = "velvetg" ws_ids = iw.tool_labels_to_ids[tool_id] step = iw.steps[next(iter(ws_ids))] # arbitrarily pick one -params[tool_id] = {'reads': json.loads(step.tool_inputs['reads']).copy()} -params[tool_id]['reads']['ins_length'] = -1 +params[tool_id] = {"reads": json.loads(step.tool_inputs["reads"]).copy()} +params[tool_id]["reads"]["ins_length"] = -1 # Set more custom parameters -params['cisarunner'] = {'genomesize': 5000000} -params['check_contigs'] = {'genomesize': 5.0} -params[ - 'toolshed.g2.bx.psu.edu/repos/edward-kirton/abyss_toolsuite/abyss/1.0.0' -] = {'k': 41} +params["cisarunner"] = {"genomesize": 5000000} +params["check_contigs"] = {"genomesize": 5.0} +params["toolshed.g2.bx.psu.edu/repos/edward-kirton/abyss_toolsuite/abyss/1.0.0"] = {"k": 41} # Run the workflow on a new history with the selected datasets as inputs outputs, out_hist = iw.run(input_map, h, params=params) assert out_hist.name == history_name -print(f'Running workflow: {iw.name} [{iw.id}]') -print(f'Output history: {out_hist.name} [{out_hist.id}]') +print(f"Running workflow: {iw.name} [{iw.id}]") +print(f"Output history: {out_hist.name} [{out_hist.id}]") diff --git a/docs/examples/objects/w5_galaxy_api.py b/docs/examples/objects/w5_galaxy_api.py index 4df382739..110fae084 100644 --- a/docs/examples/objects/w5_galaxy_api.py +++ b/docs/examples/objects/w5_galaxy_api.py @@ -7,11 +7,11 @@ # contains the code required to run the metagenomics workflow # *without* BioBlend. -URL = os.getenv('GALAXY_URL', 'https://orione.crs4.it') -API_URL = urljoin(URL, 'api') -API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY') -if API_KEY == 'YOUR_API_KEY': - sys.exit('API_KEY not set, see the README.txt file') +URL = os.getenv("GALAXY_URL", "https://orione.crs4.it") +API_URL = urljoin(URL, "api") +API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY") +if API_KEY == "YOUR_API_KEY": + sys.exit("API_KEY not set, see the README.txt file") # Clone the galaxy git repository and replace # YOUR_GALAXY_PATH with the clone's local path in the following code, e.g.: @@ -19,51 +19,50 @@ # git clone https://github.com/galaxyproject/galaxy # GALAXY_PATH = '/tmp/galaxy' -GALAXY_PATH = 'YOUR_GALAXY_PATH' -sys.path.insert(1, os.path.join(GALAXY_PATH, 'scripts/api')) +GALAXY_PATH = "YOUR_GALAXY_PATH" +sys.path.insert(1, os.path.join(GALAXY_PATH, "scripts/api")) import common # noqa: E402,I100,I202 # Select "W5 - Metagenomics" from published workflows -workflow_name = 'W5 - Metagenomics' +workflow_name = "W5 - Metagenomics" workflows = common.get(API_KEY, f"{API_URL}/workflows?show_published=True") -w = [_ for _ in workflows if _['published'] and _['name'] == workflow_name] +w = [_ for _ in workflows if _["published"] and _["name"] == workflow_name] assert len(w) == 1 w = w[0] # Import the workflow to user space -data = {'workflow_id': w['id']} +data = {"workflow_id": w["id"]} iw = common.post(API_KEY, f"{API_URL}/workflows/import", data) iw_details = common.get(API_KEY, f"{API_URL}/workflows/{iw['id']}") # Select the "Orione SupMat" library -library_name = 'Orione SupMat' +library_name = "Orione SupMat" libraries = common.get(API_KEY, f"{API_URL}/libraries") -l = [_ for _ in libraries if _['name'] == library_name] +l = [_ for _ in libraries if _["name"] == library_name] assert len(l) == 1 l = l[0] # Select 
the "/Metagenomics/MetagenomicsDataset.fq" dataset -ds_name = '/Metagenomics/MetagenomicsDataset.fq' +ds_name = "/Metagenomics/MetagenomicsDataset.fq" contents = common.get(API_KEY, f"{API_URL}/libraries/{l['id']}/contents") -ld = [_ for _ in contents if _['type'] == 'file' and _['name'] == ds_name] +ld = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name] assert len(ld) == 1 ld = ld[0] # Select the blastn step -ws = [_ for _ in iw_details['steps'].values() - if _['tool_id'] and 'blastn' in _['tool_id']] +ws = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]] assert len(ws) == 1 ws = ws[0] -tool_id = ws['tool_id'] +tool_id = ws["tool_id"] # Get (a copy of) the parameters dict for the selected step -ws_parameters = ws['tool_inputs'].copy() +ws_parameters = ws["tool_inputs"].copy() for k, v in ws_parameters.items(): ws_parameters[k] = json.loads(v) @@ -71,15 +70,15 @@ # as input, setting the BLAST db to "16SMicrobial-20131106" history_name = f"{workflow_name} output" -ws_parameters['db_opts']['database'] = '16SMicrobial-20131106' +ws_parameters["db_opts"]["database"] = "16SMicrobial-20131106" data = { - 'workflow_id': iw['id'], - 'parameters': {tool_id: {'db_opts': ws_parameters['db_opts']}}, + "workflow_id": iw["id"], + "parameters": {tool_id: {"db_opts": ws_parameters["db_opts"]}}, } -assert len(iw_details['inputs']) == 1 -input_step_id = iw_details['inputs'].keys()[0] -data['ds_map'] = {input_step_id: {'src': 'ld', 'id': ld['id']}} -data['history'] = history_name +assert len(iw_details["inputs"]) == 1 +input_step_id = iw_details["inputs"].keys()[0] +data["ds_map"] = {input_step_id: {"src": "ld", "id": ld["id"]}} +data["history"] = history_name r_dict = common.post(API_KEY, f"{API_URL}/workflows", data) print(f"Running workflow: {iw['name']} [{iw['id']}]") diff --git a/docs/examples/objects/w5_metagenomics.py b/docs/examples/objects/w5_metagenomics.py index 8bc70284e..a8b4f9ebe 100644 --- a/docs/examples/objects/w5_metagenomics.py +++ b/docs/examples/objects/w5_metagenomics.py @@ -2,18 +2,19 @@ import os import sys -from bioblend.galaxy.objects import GalaxyInstance from common import get_one # noqa:I100,I201 -URL = 'https://orione.crs4.it' -API_KEY = os.getenv('GALAXY_API_KEY', 'YOUR_API_KEY') -if API_KEY == 'YOUR_API_KEY': - sys.exit('API_KEY not set, see the README.txt file') +from bioblend.galaxy.objects import GalaxyInstance + +URL = "https://orione.crs4.it" +API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY") +if API_KEY == "YOUR_API_KEY": + sys.exit("API_KEY not set, see the README.txt file") gi = GalaxyInstance(URL, API_KEY) # Select "W5 - Metagenomics" from published workflows -workflow_name = 'W5 - Metagenomics' +workflow_name = "W5 - Metagenomics" previews = gi.workflows.get_previews(name=workflow_name, published=True) p = get_one(_ for _ in previews if _.published) @@ -28,17 +29,17 @@ # Select the "Orione SupMat" library -library_name = 'Orione SupMat' +library_name = "Orione SupMat" l = get_one(gi.libraries.list(name=library_name)) # Select the "/Metagenomics/MetagenomicsDataset.fq" dataset -ds_name = '/Metagenomics/MetagenomicsDataset.fq' -input_map = {'Input Dataset': h.import_dataset(get_one(l.get_datasets(name=ds_name)))} +ds_name = "/Metagenomics/MetagenomicsDataset.fq" +input_map = {"Input Dataset": h.import_dataset(get_one(l.get_datasets(name=ds_name)))} # Select the blastn step -tool_id = 'toolshed.g2.bx.psu.edu/repos/devteam/ncbi_blast_plus/ncbi_blastn_wrapper/0.1.00' +tool_id = 
"toolshed.g2.bx.psu.edu/repos/devteam/ncbi_blast_plus/ncbi_blastn_wrapper/0.1.00" step_id = get_one(iw.tool_labels_to_ids[tool_id]) ws = iw.steps[step_id] @@ -49,10 +50,10 @@ # Run the workflow on a new history with the selected dataset # as input, setting the BLAST db to "16SMicrobial-20131106" -params = {tool_id: {'db_opts': json.loads(ws_parameters['db_opts'])}} -params[tool_id]['db_opts']['database'] = '16SMicrobial-20131106' +params = {tool_id: {"db_opts": json.loads(ws_parameters["db_opts"])}} +params[tool_id]["db_opts"]["database"] = "16SMicrobial-20131106" outputs, out_hist = iw.run(input_map, h, params=params) assert out_hist.name == history_name -print(f'Running workflow: {iw.name} [{iw.id}]') -print(f'Output history: {out_hist.name} [{out_hist.id}]') +print(f"Running workflow: {iw.name} [{iw.id}]") +print(f"Output history: {out_hist.name} [{out_hist.id}]") diff --git a/docs/examples/run_imported_workflow.py b/docs/examples/run_imported_workflow.py index bf8534f5d..65aa95452 100644 --- a/docs/examples/run_imported_workflow.py +++ b/docs/examples/run_imported_workflow.py @@ -18,19 +18,19 @@ # Specify workflow and data to import into Galaxy -workflow_file = 'tophat_cufflinks_pairedend_workflow.ga' +workflow_file = "tophat_cufflinks_pairedend_workflow.ga" import_file_pairs = [ - ('https://bioblend.s3.amazonaws.com/C1_R1_1.chr4.fq', 'https://bioblend.s3.amazonaws.com/C1_R1_2.chr4.fq'), - ('https://bioblend.s3.amazonaws.com/C1_R2_1.chr4.fq', 'https://bioblend.s3.amazonaws.com/C1_R2_2.chr4.fq'), - ('https://bioblend.s3.amazonaws.com/C1_R3_1.chr4.fq', 'https://bioblend.s3.amazonaws.com/C1_R3_2.chr4.fq') + ("https://bioblend.s3.amazonaws.com/C1_R1_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R1_2.chr4.fq"), + ("https://bioblend.s3.amazonaws.com/C1_R2_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R2_2.chr4.fq"), + ("https://bioblend.s3.amazonaws.com/C1_R3_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R3_2.chr4.fq"), ] # Specify names of Library and History that will be created in Galaxy # In this simple example, these will be created even if items with the same name already exist. 
-library_name = 'Imported data for API demo' -output_history_name = 'Output from API demo' +library_name = "Imported data for API demo" +output_history_name = "Output from API demo" if len(sys.argv) != 3: print("Usage: python3 run_imported_workflow.py ") @@ -45,12 +45,12 @@ print("Importing workflow") wf_import_dict = gi.workflows.import_workflow_from_local_path(workflow_file) -workflow = wf_import_dict['id'] +workflow = wf_import_dict["id"] print(f"Creating data library '{library_name}'") library_dict = gi.libraries.create_library(library_name) -library = library_dict['id'] +library = library_dict["id"] print("Importing data") @@ -59,9 +59,9 @@ dataset_ids = [] filenames = {} for (file1, file2) in import_file_pairs: - dataset1 = gi.libraries.upload_file_from_url(library, file1, file_type='fastqsanger') - dataset2 = gi.libraries.upload_file_from_url(library, file2, file_type='fastqsanger') - id1, id2 = dataset1[0]['id'], dataset2[0]['id'] + dataset1 = gi.libraries.upload_file_from_url(library, file1, file_type="fastqsanger") + dataset2 = gi.libraries.upload_file_from_url(library, file2, file_type="fastqsanger") + id1, id2 = dataset1[0]["id"], dataset2[0]["id"] filenames[id1] = file1 filenames[id2] = file2 dataset_ids.append((id1, id2)) @@ -69,15 +69,15 @@ print(f"Creating output history '{output_history_name}'") outputhist_dict = gi.histories.create_history(output_history_name) -outputhist = outputhist_dict['id'] +outputhist = outputhist_dict["id"] print(f"Will run workflow on {len(dataset_ids)} pairs of files") # Get the input step IDs from the workflow. # We use the BioBlend convenience function get_workflow_inputs to retrieve inputs by label. -input1 = gi.workflows.get_workflow_inputs(workflow, label='Input fastq readpair-1')[0] -input2 = gi.workflows.get_workflow_inputs(workflow, label='Input fastq readpair-2')[0] +input1 = gi.workflows.get_workflow_inputs(workflow, label="Input fastq readpair-1")[0] +input2 = gi.workflows.get_workflow_inputs(workflow, label="Input fastq readpair-2")[0] # For each pair of datasets we imported, run the imported workflow # For each input we need to build a datamap dict with 'src' set to 'ld', as we stored our data in a Galaxy Library @@ -85,7 +85,9 @@ for (data1, data2) in dataset_ids: print(f"Initiating workflow run on files {filenames[data1]}, {filenames[data2]}") datamap = { - input1: {'src': 'ld', 'id': data1}, - input2: {'src': 'ld', 'id': data2}, + input1: {"src": "ld", "id": data1}, + input2: {"src": "ld", "id": data2}, } - invocation = gi.workflows.invoke_workflow(workflow, inputs=datamap, history_id=outputhist, import_inputs_to_history=True) + invocation = gi.workflows.invoke_workflow( + workflow, inputs=datamap, history_id=outputhist, import_inputs_to_history=True + ) diff --git a/docs/examples/start_cloudman.py b/docs/examples/start_cloudman.py index 66e4f9277..58345f06e 100644 --- a/docs/examples/start_cloudman.py +++ b/docs/examples/start_cloudman.py @@ -15,7 +15,10 @@ import sys -from bioblend.cloudman import CloudManConfig, CloudManInstance +from bioblend.cloudman import ( + CloudManConfig, + CloudManInstance, +) from bioblend.util import Bunch @@ -29,23 +32,34 @@ def start_cloudman(name, pwd, cm_type, inst_type, ami, ak, sk): """ cloud = None # If left as None, BioBlend will default to Amazon # Define properties for the NeCTAR cloud - cloud = Bunch(id='-1', - name="NeCTAR", - cloud_type='openstack', - bucket_default='cloudman-os', - region_name='melbourne', - region_endpoint='nova.rc.nectar.org.au', - ec2_port=8773, - 
ec2_conn_path='/services/Cloud', - cidr_range='115.146.92.0/22', - is_secure=True, - s3_host='swift.rc.nectar.org.au', - s3_port=8888, - s3_conn_path='/') + cloud = Bunch( + id="-1", + name="NeCTAR", + cloud_type="openstack", + bucket_default="cloudman-os", + region_name="melbourne", + region_endpoint="nova.rc.nectar.org.au", + ec2_port=8773, + ec2_conn_path="/services/Cloud", + cidr_range="115.146.92.0/22", + is_secure=True, + s3_host="swift.rc.nectar.org.au", + s3_port=8888, + s3_conn_path="/", + ) # Create an instance of the CloudManConfig class and launch a CloudMan instance cmc = CloudManConfig( - ak, sk, name, ami, inst_type, pwd, cloud_metadata=cloud, - cloudman_type=cm_type, initial_storage_size=2, placement='melbourne-np') + ak, + sk, + name, + ami, + inst_type, + pwd, + cloud_metadata=cloud, + cloudman_type=cm_type, + initial_storage_size=2, + placement="melbourne-np", + ) print("Configured an instance; waiting to launch and boot...") cmi = CloudManInstance.launch_instance(cmc) print(f"Done! CloudMan IP is {cmi.cloudman_url}") @@ -54,9 +68,10 @@ def start_cloudman(name, pwd, cm_type, inst_type, ami, ak, sk): if __name__ == "__main__": if len(sys.argv) != 8: - print("\nUsage:\n" - "python start_cloudman.py " - " ") + print( + "\nUsage:\n" + "python start_cloudman.py " + " " + ) sys.exit(1) - cml, cm = start_cloudman(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], - sys.argv[5], sys.argv[6], sys.argv[7]) + cml, cm = start_cloudman(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5], sys.argv[6], sys.argv[7]) diff --git a/pyproject.toml b/pyproject.toml index 9787c3bdf..fde2068ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,11 @@ [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" + +[tool.black] +include = '\.pyi?$' +line-length = 120 +target-version = ['py37'] + +[tool.darker] +isort = true diff --git a/setup.cfg b/setup.cfg index e8dcd7511..e5843a03a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,9 +8,10 @@ exclude = .tox .venv build -ignore = E501,E741,SFS3 -import-order-style = smarkets -application-import-names = bioblend +# E203 is whitespace before ':'; we follow black's formatting here. See https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated +# E501 is line length, managed by black +# W503 is line breaks before binary operators, which has been reversed in PEP 8. +ignore = E203,E501,E741,SFS3,W503 [metadata] author = Enis Afgan diff --git a/tox.ini b/tox.ini index ec8845db8..5886d895a 100644 --- a/tox.ini +++ b/tox.ini @@ -12,13 +12,17 @@ passenv = [testenv:lint] commands = flake8 . + lint: black --check --diff . + lint: isort --check --diff . mypy bioblend/ deps = + black flake8 - flake8-import-order>=0.9 flake8-bugbear flake8-sfs + isort mypy types-boto types-PyYAML types-requests +skip_install = True From b1e67c3ade0459720497e9d0cdf7e47f09c160ab Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Fri, 13 May 2022 19:18:58 +0100 Subject: [PATCH 31/32] Add `.git-blame-ignore-revs` file for better blame view on GitHub See https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view To use this locally, execute this once: ``` git config blame.ignoreRevsFile .git-blame-ignore-revs ``` and all following `git blame` will skip the revisions indicated in the file. 
--- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..64b170b3b --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Format Python code with black and isort +7bcd07db8392ac790d1b0b92f4a377945197e43d From 984421d52d2d980143f756ea56338ceaf9cff8ce Mon Sep 17 00:00:00 2001 From: Simon Bray <32272674+simonbray@users.noreply.github.com> Date: Fri, 27 May 2022 11:05:28 +0200 Subject: [PATCH 32/32] Add note to run_tool() docstring pointing users to build() for constructing a template for tool_inputs --- bioblend/galaxy/tools/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/bioblend/galaxy/tools/__init__.py b/bioblend/galaxy/tools/__init__.py index c64b36456..0edc39b9b 100644 --- a/bioblend/galaxy/tools/__init__.py +++ b/bioblend/galaxy/tools/__init__.py @@ -378,8 +378,11 @@ def run_tool(self, history_id, tool_id, tool_inputs, input_format="legacy"): 'visible': True}]} The ``tool_inputs`` dict should contain input datasets and parameters - in the (largely undocumented) format used by the Galaxy API. - Some examples can be found in `Galaxy's API test suite + in the (largely undocumented) format used by the Galaxy API. If you are unsure + how to construct this dict for the tool you want to run, you can obtain a + template by executing the ``build()`` method and taking the value of + ``state_inputs`` from its output, then modifying it as you require. + You can also check the examples in `Galaxy's API test suite `_. """ payload = {}
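For reference, a minimal sketch of the workflow the new docstring note describes — obtain a parameter template with ``build()``, adjust it, then pass it to ``run_tool()``. The Galaxy URL and API key below are placeholders, and ``Cut1`` is only an example tool id; this assumes a reachable Galaxy instance for which the key is valid:

```python
from bioblend.galaxy import GalaxyInstance

# Placeholder URL and API key -- replace with real values.
gi = GalaxyInstance(url="https://galaxy.example.org", key="YOUR_API_KEY")
history = gi.histories.create_history(name="run_tool template demo")

# build() returns the tool model for this tool in the context of the given
# history; its "state_inputs" entry is a template of the tool's parameters.
tool_model = gi.tools.build("Cut1", history_id=history["id"])
tool_inputs = tool_model["state_inputs"]

# Adjust the template as needed (e.g. point dataset parameters at datasets
# already in the history), then hand it to run_tool().
result = gi.tools.run_tool(history_id=history["id"], tool_id="Cut1", tool_inputs=tool_inputs)
print(result["outputs"])
```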