diff --git a/client/src/components/History/Export/HistoryExport.test.ts b/client/src/components/History/Export/HistoryExport.test.ts index b296fbd63587..c2659cf1f32b 100644 --- a/client/src/components/History/Export/HistoryExport.test.ts +++ b/client/src/components/History/Export/HistoryExport.test.ts @@ -1,6 +1,8 @@ import { createTestingPinia } from "@pinia/testing"; import { getLocalVue } from "@tests/jest/helpers"; import { shallowMount } from "@vue/test-utils"; +import axios from "axios"; +import MockAdapter from "axios-mock-adapter"; import flushPromises from "flush-promises"; import { setActivePinia } from "pinia"; @@ -13,7 +15,6 @@ import { FILE_SOURCE_STORE_RECORD, RECENT_STS_DOWNLOAD_RECORD, } from "@/components/Common/models/testData/exportData"; -import { useHistoryStore } from "@/stores/historyStore"; import HistoryExport from "./HistoryExport.vue"; @@ -25,9 +26,20 @@ const mockFetchExportRecords = fetchHistoryExportRecords as jest.MockedFunction< mockFetchExportRecords.mockResolvedValue([]); const FAKE_HISTORY_ID = "fake-history-id"; -const FAKE_HISTORY = { +const FAKE_HISTORY_URL = `/api/histories/${FAKE_HISTORY_ID}`; +const FAKE_HISTORY: HistorySummary = { id: FAKE_HISTORY_ID, name: "fake-history-name", + annotation: "fake-history-annotation", + archived: false, + deleted: false, + purged: false, + published: false, + model_class: "History", + tags: [], + count: 0, + update_time: "2021-09-01T00:00:00.000Z", + url: FAKE_HISTORY_URL, }; const REMOTE_FILES_API_ENDPOINT = new RegExp("/api/remote_files/plugins"); @@ -48,13 +60,6 @@ const REMOTE_FILES_API_RESPONSE: FilesSourcePlugin[] = [ async function mountHistoryExport() { const pinia = createTestingPinia({ stubActions: false }); setActivePinia(pinia); - const historyStore = useHistoryStore(pinia); - - // the mocking method described in the pinia docs does not work in vue2 - // this is a work-around - jest.spyOn(historyStore, "getHistoryById").mockImplementation( - (_history_id: string) => FAKE_HISTORY as HistorySummary - ); const wrapper = shallowMount(HistoryExport as object, { propsData: { historyId: FAKE_HISTORY_ID }, @@ -66,8 +71,12 @@ async function mountHistoryExport() { } describe("HistoryExport.vue", () => { + let axiosMock: MockAdapter; + beforeEach(async () => { mockFetcher.path(REMOTE_FILES_API_ENDPOINT).method("get").mock({ data: [] }); + axiosMock = new MockAdapter(axios); + axiosMock.onGet(FAKE_HISTORY_URL).reply(200, FAKE_HISTORY); }); it("should render the history name", async () => { @@ -138,4 +147,25 @@ describe("HistoryExport.vue", () => { expect(wrapper.find("#zenodo-file-source-tab").exists()).toBe(true); }); + + it("should not display a fatal error alert if the history is found and loaded", async () => { + const wrapper = await mountHistoryExport(); + + expect(wrapper.find("#fatal-error-alert").exists()).toBe(false); + + expect(wrapper.find("#history-name").exists()).toBe(true); + expect(wrapper.find("#history-export-options").exists()).toBe(true); + expect(wrapper.find("#direct-download-tab").exists()).toBe(true); + }); + + it("should not render the UI and display a fatal error message if the history cannot be found or loaded", async () => { + axiosMock.onGet(FAKE_HISTORY_URL).reply(404); + const wrapper = await mountHistoryExport(); + + expect(wrapper.find("#fatal-error-alert").exists()).toBe(true); + + expect(wrapper.find("#history-name").exists()).toBe(false); + expect(wrapper.find("#history-export-options").exists()).toBe(false); + expect(wrapper.find("#direct-download-tab").exists()).toBe(false); + 
}); }); diff --git a/client/src/components/History/Export/HistoryExport.vue b/client/src/components/History/Export/HistoryExport.vue index 4ad55c065232..e47bd706388d 100644 --- a/client/src/components/History/Export/HistoryExport.vue +++ b/client/src/components/History/Export/HistoryExport.vue @@ -5,6 +5,7 @@ import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome"; import { BAlert, BButton, BCard, BTab, BTabs } from "bootstrap-vue"; import { computed, onMounted, ref, watch } from "vue"; +import type { AnyHistory } from "@/api"; import { exportHistoryToFileSource, fetchHistoryExportRecords, @@ -62,6 +63,8 @@ const POLLING_DELAY = 3000; const exportParams = ref(DEFAULT_EXPORT_PARAMS); const isLoadingRecords = ref(true); const exportRecords = ref([]); +const history = ref(); +const isLoadingHistory = ref(true); const historyName = computed(() => history.value?.name ?? props.historyId); const defaultFileName = computed(() => `(Galaxy History) ${historyName.value}`); @@ -84,18 +87,16 @@ const availableRecordsMessage = computed(() => const historyStore = useHistoryStore(); -const history = computed(() => { - const history = historyStore.getHistoryById(props.historyId); - return history; -}); - -const errorMessage = ref(undefined); -const actionMessage = ref(undefined); -const actionMessageVariant = ref(undefined); +const isFatalError = ref(false); +const errorMessage = ref(); +const actionMessage = ref(); +const actionMessageVariant = ref(); const zenodoSource = computed(() => getFileSourceById("zenodo")); onMounted(async () => { - updateExports(); + if (await loadHistory()) { + updateExports(); + } }); watch(isExportTaskRunning, (newValue, oldValue) => { @@ -105,6 +106,22 @@ watch(isExportTaskRunning, (newValue, oldValue) => { } }); +async function loadHistory() { + isLoadingHistory.value = true; + try { + history.value = + historyStore.getHistoryById(props.historyId, false) ?? + (await historyStore.loadHistoryById(props.historyId)); + return true; + } catch (error) { + errorMessage.value = errorMessageAsString(error); + isFatalError.value = true; + return false; + } finally { + isLoadingHistory.value = false; + } +} + async function updateExports() { isLoadingRecords.value = true; try { @@ -159,16 +176,15 @@ async function reimportFromRecord(record: ExportRecord) { `Do you really want to import a new copy of this history exported ${record.elapsedTime}?` ); if (confirmed) { - reimportHistoryFromRecord(record) - .then(() => { - actionMessageVariant.value = "info"; - actionMessage.value = - "The history is being imported in the background. Check your histories after a while to find it."; - }) - .catch((reason) => { - actionMessageVariant.value = "danger"; - actionMessage.value = reason; - }); + try { + await reimportHistoryFromRecord(record); + actionMessageVariant.value = "info"; + actionMessage.value = + "The history is being imported in the background. Check your histories after a while to find it."; + } catch (error) { + actionMessageVariant.value = "danger"; + actionMessage.value = errorMessageAsString(error); + } } } @@ -188,140 +204,150 @@ function updateExportParams(newParams: ExportParams) {

[HistoryExport.vue template hunk elided: the Vue/HTML markup on these lines was stripped during extraction and cannot be reconstructed verbatim. The surviving text, read together with the script and test changes, shows what the hunk does. It adds an "Error" state that renders a fatal error alert (the `#fatal-error-alert` element asserted in the tests) with `{{ errorMessage }}`, and re-indents the existing export UI one level inside a new guard so it only renders once the history has loaded without a fatal error. The guarded markup is otherwise unchanged in substance: the `{{ historyName }}` heading, the "How do you want to export this history?" options (`#history-export-options`), the direct-download tab (`#direct-download-tab`) offering a temporary, re-generatable download link with the warning that "History archive downloads can expire and are removed at regular intervals" and a "Generate direct download" button, the remote file source tab for exports that can be re-imported later, the RDM repository tabs including the Zenodo tab and its description (a general-purpose open repository developed under the European OpenAIRE program and operated by CERN, minting a citable DOI per submission), the action message alert, and the "Export Records" section ending with `{{ availableRecordsMessage }}`.]
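For orientation, here is a condensed sketch — not part of the diff — of how the reworked HistoryExport.test.ts at the top of this diff drives both branches of the new loadHistory() guard. FAKE_HISTORY and mountHistoryExport are the fixtures defined in that test file; the remaining setup is assumed boilerplate:

```ts
import axios from "axios";
import MockAdapter from "axios-mock-adapter";

const FAKE_HISTORY_ID = "fake-history-id";
const FAKE_HISTORY_URL = `/api/histories/${FAKE_HISTORY_ID}`;

const axiosMock = new MockAdapter(axios);

beforeEach(() => {
    // Default handler: the history fetch triggered by loadHistory() resolves,
    // so the component renders the normal export UI.
    axiosMock.onGet(FAKE_HISTORY_URL).reply(200, FAKE_HISTORY);
});

afterEach(() => {
    axiosMock.restore();
});

it("shows only the fatal error alert when the history cannot be loaded", async () => {
    // Registering the same matcher again replaces the handler from beforeEach
    // (the pattern the new 404 test above relies on), sending loadHistory()
    // into its catch branch, which sets isFatalError and errorMessage.
    axiosMock.onGet(FAKE_HISTORY_URL).reply(404);

    const wrapper = await mountHistoryExport();

    expect(wrapper.find("#fatal-error-alert").exists()).toBe(true);
    expect(wrapper.find("#history-export-options").exists()).toBe(false);
});
```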
diff --git a/client/src/store/historyStore/model/watchHistory.test.js b/client/src/store/historyStore/model/watchHistory.test.js index e0984c7fe097..6669c0200e5e 100644 --- a/client/src/store/historyStore/model/watchHistory.test.js +++ b/client/src/store/historyStore/model/watchHistory.test.js @@ -93,7 +93,7 @@ describe("watchHistory", () => { await watchHistoryOnce(); } catch (error) { console.log(error); - expect(error.response.status).toBe(500); + expect(error.message).toContain("500"); } // Need to reset axios mock here. Smells like a bug, // maybe in axios-mock-adapter, maybe on our side diff --git a/client/src/stores/historyStore.ts b/client/src/stores/historyStore.ts index e74d6a12497d..38c430ead734 100644 --- a/client/src/stores/historyStore.ts +++ b/client/src/stores/historyStore.ts @@ -24,6 +24,7 @@ import { setCurrentHistoryOnServer, updateHistoryFields, } from "@/stores/services/history.services"; +import { rethrowSimple } from "@/utils/simple-error"; import { sortByObjectProp } from "@/utils/sorting"; const PAGINATION_LIMIT = 10; @@ -72,10 +73,12 @@ export const useHistoryStore = defineStore("historyStore", () => { } }); - /** Returns history from storedHistories, will load history if not in store */ + /** Returns history from storedHistories, will load history if not in store by default. + * If shouldFetchIfMissing is false, will return null if history is not in store. + */ const getHistoryById = computed(() => { - return (historyId: string) => { - if (!storedHistories.value[historyId]) { + return (historyId: string, shouldFetchIfMissing = true) => { + if (!storedHistories.value[historyId] && shouldFetchIfMissing) { // TODO: Try to remove this as it can cause computed side effects loadHistoryById(historyId); } @@ -209,7 +212,7 @@ export const useHistoryStore = defineStore("historyStore", () => { selectHistory(history); return history; } catch (error) { - console.error(error); + rethrowSimple(error); } } @@ -257,7 +260,7 @@ export const useHistoryStore = defineStore("historyStore", () => { await handleTotalCountChange(histories.length); } } catch (error) { - console.error(error); + rethrowSimple(error); } finally { setHistoriesLoading(false); } @@ -272,7 +275,7 @@ export const useHistoryStore = defineStore("historyStore", () => { setHistory(history); return history; } catch (error) { - console.error(error); + rethrowSimple(error); } finally { isLoadingHistory.delete(historyId); } @@ -321,7 +324,7 @@ export const useHistoryStore = defineStore("historyStore", () => { setHistory(contentStats); return contentStats; } catch (error) { - console.error(error); + rethrowSimple(error); } } diff --git a/lib/galaxy/datatypes/dataproviders/dataset.py b/lib/galaxy/datatypes/dataproviders/dataset.py index 6c8075835393..bd2d46262d5b 100644 --- a/lib/galaxy/datatypes/dataproviders/dataset.py +++ b/lib/galaxy/datatypes/dataproviders/dataset.py @@ -15,6 +15,7 @@ ) from galaxy.util import sqlite +from galaxy.util.compression_utils import get_fileobj from . import ( base, column, @@ -54,7 +55,7 @@ def __init__(self, dataset, **kwargs): # this dataset file is obviously the source # TODO: this might be a good place to interface with the object_store...
mode = "rb" if dataset.datatype.is_binary else "r" - super().__init__(open(dataset.get_file_name(), mode)) + super().__init__(get_fileobj(dataset.get_file_name(), mode)) # TODO: this is a bit of a mess @classmethod diff --git a/lib/galaxy/files/sources/_rdm.py b/lib/galaxy/files/sources/_rdm.py index 8f4a69c13cb4..1848cf57cb24 100644 --- a/lib/galaxy/files/sources/_rdm.py +++ b/lib/galaxy/files/sources/_rdm.py @@ -8,7 +8,6 @@ from typing_extensions import Unpack -from galaxy.exceptions import AuthenticationRequired from galaxy.files import OptionalUserContext from galaxy.files.sources import ( BaseFilesSource, @@ -201,15 +200,11 @@ def _serialization_props(self, user_context: OptionalUserContext = None): effective_props[key] = self._evaluate_prop(val, user_context=user_context) return effective_props - def get_authorization_token(self, user_context: OptionalUserContext) -> str: + def get_authorization_token(self, user_context: OptionalUserContext) -> Optional[str]: token = None if user_context: effective_props = self._serialization_props(user_context) token = effective_props.get("token") - if not token: - raise AuthenticationRequired( - f"Please provide a personal access token in your user's preferences for '{self.label}'" - ) return token def get_public_name(self, user_context: OptionalUserContext) -> Optional[str]: diff --git a/lib/galaxy/files/sources/invenio.py b/lib/galaxy/files/sources/invenio.py index a65a18050533..146d63d0b641 100644 --- a/lib/galaxy/files/sources/invenio.py +++ b/lib/galaxy/files/sources/invenio.py @@ -275,12 +275,7 @@ def create_draft_record( }, } - headers = self._get_request_headers(user_context) - if "Authorization" not in headers: - raise Exception( - "Cannot create record without authentication token. Please set your personal access token in your Galaxy preferences." 
- ) - + headers = self._get_request_headers(user_context, auth_required=True) response = requests.post(self.records_url, json=create_record_request, headers=headers) self._ensure_response_has_expected_status_code(response, 201) record = response.json() @@ -296,7 +291,7 @@ def upload_file_to_draft_record( ): record = self._get_draft_record(record_id, user_context=user_context) upload_file_url = record["links"]["files"] - headers = self._get_request_headers(user_context) + headers = self._get_request_headers(user_context, auth_required=True) # Add file metadata entry response = requests.post(upload_file_url, json=[{"key": filename}], headers=headers) @@ -452,28 +447,38 @@ def _get_creator_from_public_name(self, public_name: Optional[str] = None) -> Cr } def _get_response( - self, user_context: OptionalUserContext, request_url: str, params: Optional[Dict[str, Any]] = None + self, + user_context: OptionalUserContext, + request_url: str, + params: Optional[Dict[str, Any]] = None, + auth_required: bool = False, ) -> dict: - headers = self._get_request_headers(user_context) + headers = self._get_request_headers(user_context, auth_required) response = requests.get(request_url, params=params, headers=headers) self._ensure_response_has_expected_status_code(response, 200) return response.json() - def _get_request_headers(self, user_context: OptionalUserContext): + def _get_request_headers(self, user_context: OptionalUserContext, auth_required: bool = False): token = self.plugin.get_authorization_token(user_context) headers = {"Authorization": f"Bearer {token}"} if token else {} + if auth_required and token is None: + self._raise_auth_required() return headers def _ensure_response_has_expected_status_code(self, response, expected_status_code: int): - if response.status_code == 403: - record_url = response.url.replace("/api", "").replace("/files", "") - raise AuthenticationRequired(f"Please make sure you have the necessary permissions to access: {record_url}") if response.status_code != expected_status_code: + if response.status_code == 403: + self._raise_auth_required() error_message = self._get_response_error_message(response) raise Exception( f"Request to {response.url} failed with status code {response.status_code}: {error_message}" ) + def _raise_auth_required(self): + raise AuthenticationRequired( + f"Please provide a personal access token in your user's preferences for '{self.plugin.label}'" + ) + def _get_response_error_message(self, response): response_json = response.json() error_message = response_json.get("message") if response.status_code == 400 else response.text diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py index f3965abb59bf..0213a797aab4 100644 --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -999,7 +999,7 @@ def __cache_total_job_count_per_destination(self): .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING)))) .group_by(model.Job.table.c.destination_id) ) - for row in result: + for row in result.mappings(): self.total_job_count_per_destination[row["destination_id"]] = row["job_count"] def get_total_job_count_per_destination(self): diff --git a/lib/galaxy/managers/hdcas.py b/lib/galaxy/managers/hdcas.py index 42d448245157..76d5ef10cdab 100644 --- a/lib/galaxy/managers/hdcas.py +++ b/lib/galaxy/managers/hdcas.py @@ -45,7 +45,7 @@ def write_dataset_collection(dataset_collection_instance, archive): raise RequestParameterInvalidException("Attempt to write dataset collection that has not been populated 
yet") names, hdas = get_hda_and_element_identifiers(dataset_collection_instance) for name, hda in zip(names, hdas): - if hda.state != hda.states.OK: + if hda.state != hda.states.OK or hda.purged or hda.dataset.purged: continue for file_path, relpath in hda.datatype.to_archive(dataset=hda, name=name): archive.write(file_path, relpath) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index dfe776d2da5c..33b405bb5916 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -6682,7 +6682,7 @@ def dataset_elements_and_identifiers(self, identifiers=None): return elements @property - def first_dataset_element(self): + def first_dataset_element(self) -> Optional["DatasetCollectionElement"]: for element in self.elements: if element.is_collection: first_element = element.child_collection.first_dataset_element @@ -10272,6 +10272,10 @@ class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime): def to_dict(self, view="element"): rval = super().to_dict(view=view) + if "importable" in rval and rval["importable"] is None: + # pages created prior to 2011 might not have importable field + # probably not worth creating a migration to fix that + rval["importable"] = False rev = [] for a in self.revisions: rev.append(a.id) diff --git a/lib/galaxy/objectstore/__init__.py b/lib/galaxy/objectstore/__init__.py index 44d11ae5c6c3..b4c89e418048 100644 --- a/lib/galaxy/objectstore/__init__.py +++ b/lib/galaxy/objectstore/__init__.py @@ -882,7 +882,12 @@ def _delete(self, obj, entire_dir=False, **kwargs): except OSError as ex: # Likely a race condition in which we delete the job working directory # and another process writes files into that directory. - log.critical(f"{self.__get_filename(obj, **kwargs)} delete error {ex}", exc_info=True) + # If the path doesn't exist anymore, another rmtree call was successful. + path = self.__get_filename(obj, **kwargs) + if path is None: + return True + else: + log.critical(f"{path} delete error {ex}", exc_info=True) return False def _get_data(self, obj, start=0, count=-1, **kwargs): diff --git a/lib/galaxy/tool_util/linters/general.py b/lib/galaxy/tool_util/linters/general.py index 0e671d418126..eb3a98fd3b82 100644 --- a/lib/galaxy/tool_util/linters/general.py +++ b/lib/galaxy/tool_util/linters/general.py @@ -231,23 +231,6 @@ def lint(cls, tool_source: "ToolSource", lint_ctx: "LintContext"): ) -class TextSpaces(Linter): - @classmethod - def lint(cls, tool_source: "ToolSource", lint_ctx: "LintContext"): - _, tool_node = _tool_xml_and_root(tool_source) - if not tool_node: - return - for node in tool_node.iter(): - if len(node) > 0: - continue - if node.text and node.text != node.text.strip(): - lint_ctx.warn( - f"XML node '{node.tag}' has text with leading or trailing spaces ('{node.text}'!='{node.text.strip()}').", - linter=cls.name(), - node=node, - ) - - class BioToolsValid(Linter): @classmethod def lint(cls, tool_source: "ToolSource", lint_ctx: "LintContext"): diff --git a/lib/galaxy/tool_util/xsd/galaxy.xsd b/lib/galaxy/tool_util/xsd/galaxy.xsd index 771e700f88c9..39d07f0cc8b2 100644 --- a/lib/galaxy/tool_util/xsd/galaxy.xsd +++ b/lib/galaxy/tool_util/xsd/galaxy.xsd @@ -7861,7 +7861,7 @@ A tool can have any number of EDAM topic references. - + @@ -7886,7 +7886,7 @@ A tool can have any number of EDAM operation references. - + @@ -7921,7 +7921,7 @@ A tool can refer multiple reference IDs. information according to a catalog. 
- + Type of reference - currently ``bio.tools``, ``bioconductor``, and ``biii`` are @@ -7944,11 +7944,20 @@ the only supported options. - + + + + A string without newline characters. + + + + + + diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index 4757018c0885..9864f0ec58d4 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -3444,18 +3444,20 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history how = incoming["which"]["which_dataset"] if how == "first": extracted_element = collection.first_dataset_element + if not extracted_element: + raise exceptions.RequestParameterInvalidException("Input collection has no dataset elements.") elif how == "by_identifier": try: extracted_element = collection[incoming["which"]["identifier"]] except KeyError as e: - raise exceptions.MessageException(e.args[0]) + raise exceptions.RequestParameterInvalidException(e.args[0]) elif how == "by_index": try: extracted_element = collection[int(incoming["which"]["index"])] except KeyError as e: - raise exceptions.MessageException(e.args[0]) + raise exceptions.RequestParameterInvalidException(e.args[0]) else: - raise exceptions.MessageException("Invalid tool parameters.") + raise exceptions.RequestParameterInvalidException("Invalid tool parameters.") extracted = extracted_element.element_object extracted_o = extracted.copy( copy_tags=extracted.tags, new_name=extracted_element.element_identifier, flush=False diff --git a/lib/tool_shed/webapp/model/migrations/alembic/versions/1b5bf427db25_add_non_nullable_column_deleted_to_api_.py b/lib/tool_shed/webapp/model/migrations/alembic/versions/1b5bf427db25_add_non_nullable_column_deleted_to_api_.py index 390402f37b53..5f493a5aa8d1 100644 --- a/lib/tool_shed/webapp/model/migrations/alembic/versions/1b5bf427db25_add_non_nullable_column_deleted_to_api_.py +++ b/lib/tool_shed/webapp/model/migrations/alembic/versions/1b5bf427db25_add_non_nullable_column_deleted_to_api_.py @@ -5,6 +5,7 @@ Create Date: 2024-05-29 21:53:53.516506 """ + import sqlalchemy as sa from alembic import op from sqlalchemy import ( diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index d59991bca0f1..cf4f82255d0c 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -46,6 +46,7 @@ class TestToolShedConfig: file_path: str id_secret: str = "thisistheshedunittestsecret" smtp_server: Optional[str] = None + hgweb_repo_prefix = "repos/" config_hg_for_dev = False def __init__(self, temp_directory): diff --git a/test/unit/tool_util/test_tool_linters.py b/test/unit/tool_util/test_tool_linters.py index 2db30308b950..7e6d3b62ec93 100644 --- a/test/unit/tool_util/test_tool_linters.py +++ b/test/unit/tool_util/test_tool_linters.py @@ -120,16 +120,6 @@ """ -GENERAL_TEXT_SPACES = """ - - - - bwa - - - -""" - GENERAL_VALID_BIOTOOLS = """ @@ -1108,19 +1098,6 @@ def test_general_valid_new_profile_fmt(lint_ctx): assert not lint_ctx.error_messages -def test_general_text_spaces(lint_ctx): - tool_source = get_xml_tool_source(GENERAL_TEXT_SPACES) - run_lint_module(lint_ctx, general, tool_source) - assert ( - "XML node 'xref' has text with leading or trailing spaces ('\n bwa\n '!='bwa')" - in lint_ctx.warn_messages - ) - assert not lint_ctx.info_messages - assert len(lint_ctx.valid_messages) == 4 - assert len(lint_ctx.warn_messages) == 1 - assert not lint_ctx.error_messages - - @skip_if_site_down("https://bio.tools/") def test_general_valid_biotools(lint_ctx): tool_source = 
get_xml_tool_source(GENERAL_VALID_BIOTOOLS) @@ -2168,7 +2145,7 @@ def test_skip_by_module(lint_ctx): def test_list_linters(): linter_names = Linter.list_listers() # make sure to add/remove a test for new/removed linters if this number changes - assert len(linter_names) == 133 + assert len(linter_names) == 132 assert "Linter" not in linter_names # make sure that linters from all modules are available for prefix in [
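A closing note on the client-store changes above: replacing console.error(error) with rethrowSimple(error) in historyStore.ts is why watchHistory.test.js now asserts on error.message instead of error.response.status — callers receive a rethrown, simplified Error instead of silently resolving. A rough sketch of that contract, with assumed shapes rather than Galaxy's literal implementation:

```ts
import axios from "axios";

import { rethrowSimple } from "@/utils/simple-error";

// Sketch of a store action after this change: failures propagate to the
// caller instead of being logged and swallowed.
async function loadHistoryById(historyId: string) {
    try {
        const { data } = await axios.get(`/api/histories/${historyId}`);
        return data;
    } catch (error) {
        // rethrowSimple() re-raises a plain Error whose message carries the
        // failure details, e.g. "Request failed with status code 500".
        rethrowSimple(error);
    }
}

// Caller side, mirroring the updated watchHistory.test.js assertion: a
// mocked 500 response now surfaces as a thrown Error mentioning "500".
(async () => {
    try {
        await loadHistoryById("history-id");
    } catch (error) {
        console.assert((error as Error).message.includes("500"));
    }
})();
```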