From cd5455773bc454deb33410fa31637d392d47b03c Mon Sep 17 00:00:00 2001
From: Nicola Soranzo
Date: Tue, 17 Sep 2024 17:56:44 +0100
Subject: [PATCH] Type annotations and fixes

- Use `galaxy.util.path.StrPath` everywhere
---
 lib/galaxy/app.py                             |  7 +-
 .../app_unittest_utils/tools_support.py       |  7 +-
 lib/galaxy/celery/tasks.py                    |  6 +-
 lib/galaxy/datatypes/data.py                  |  1 +
 lib/galaxy/datatypes/registry.py              | 85 ++++++++++---------
 lib/galaxy/datatypes/sniff.py                 | 13 ++-
 lib/galaxy/files/sources/util.py              |  7 +-
 lib/galaxy/jobs/runners/__init__.py           | 30 ++++---
 .../mulled_update_singularity_containers.py   |  5 +-
 lib/galaxy/tool_util/parser/factory.py        |  8 +-
 lib/galaxy/tool_util/parser/interface.py      |  4 +-
 lib/galaxy/tool_util/parser/xml.py            |  3 +-
 lib/galaxy/tool_util/verify/codegen.py        |  4 +-
 lib/galaxy/tools/__init__.py                  | 75 ++++++++--------
 lib/galaxy/tools/execute.py                   |  4 +-
 lib/galaxy/tools/execution_helpers.py         |  3 +-
 lib/galaxy/tools/flatten_collection.xml       |  2 +-
 lib/galaxy/tools/parameters/basic.py          | 30 +++----
 lib/galaxy/tools/remote_tool_eval.py          |  2 +-
 lib/galaxy/tours/_impl.py                     | 12 ++-
 lib/galaxy/util/hash_util.py                  |  4 +-
 lib/galaxy/util/plugin_config.py              |  7 +-
 lib/galaxy/webapps/base/api.py                | 39 +++++----
 lib/galaxy_test/base/populators.py            |  3 +-
 lib/tool_shed/webapp/app.py                   |  7 +-
 lib/tool_shed/webapp/frontend/.eslintignore   |  2 +-
 .../data/datatypes/test_check_required.py     |  6 +-
 27 files changed, 195 insertions(+), 181 deletions(-)

diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
index 2f67d6ec3b1b..fcbd1a1b7cb0 100644
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -34,6 +34,7 @@
     GalaxyTaskBeforeStartUserRateLimitPostgres,
     GalaxyTaskBeforeStartUserRateLimitStandard,
 )
+from galaxy.config import GalaxyAppConfiguration
 from galaxy.config_watchers import ConfigWatchers
 from galaxy.datatypes.registry import Registry
 from galaxy.files import (
@@ -206,7 +207,7 @@ def shutdown(self):


 class SentryClientMixin:
-    config: config.GalaxyAppConfiguration
+    config: GalaxyAppConfiguration
     application_stack: ApplicationStack

     def configure_sentry_client(self):
@@ -263,7 +264,7 @@ class MinimalGalaxyApplication(BasicSharedApp, HaltableContainer, SentryClientMi
     """Encapsulates the state of a minimal Galaxy application"""

     model: GalaxyModelMapping
-    config: config.GalaxyAppConfiguration
+    config: GalaxyAppConfiguration
     tool_cache: ToolCache
     job_config: jobs.JobConfiguration
     toolbox_search: ToolBoxSearch
@@ -287,7 +288,7 @@ def __init__(self, fsmon=False, **kwargs) -> None:
         self.name = "galaxy"
         self.is_webapp = False
         # Read config file and check for errors
-        self.config = self._register_singleton(config.GalaxyAppConfiguration, config.GalaxyAppConfiguration(**kwargs))
+        self.config = self._register_singleton(GalaxyAppConfiguration, GalaxyAppConfiguration(**kwargs))
         self.config.check()
         config_file = kwargs.get("global_conf", {}).get("__file__", None)
         if config_file:
diff --git a/lib/galaxy/app_unittest_utils/tools_support.py b/lib/galaxy/app_unittest_utils/tools_support.py
index 9af461601a83..f2f880da0704 100644
--- a/lib/galaxy/app_unittest_utils/tools_support.py
+++ b/lib/galaxy/app_unittest_utils/tools_support.py
@@ -20,6 +20,7 @@
 from galaxy.tool_util.parser import get_tool_source
 from galaxy.tools import create_tool_from_source
 from galaxy.util.bunch import Bunch
+from galaxy.util.path import StrPath

 datatypes_registry = galaxy.datatypes.registry.Registry()
 datatypes_registry.load_datatypes()
@@ -83,10 +84,10 @@ def _init_tool(
         tool_id="test_tool",
         extra_file_contents=None,
         extra_file_path=None,
-        tool_path=None,
+        tool_path: Optional[StrPath] = None,
     ):
         if tool_path is None:
-            self.tool_file = os.path.join(self.test_directory, filename)
+            self.tool_file: StrPath = os.path.join(self.test_directory, filename)
             contents_template = string.Template(tool_contents)
             tool_contents = contents_template.safe_substitute(dict(version=version, profile=profile, tool_id=tool_id))
             self.__write_tool(tool_contents)
@@ -96,7 +97,7 @@ def _init_tool(
             self.tool_file = tool_path
         return self.__setup_tool()

-    def _init_tool_for_path(self, tool_file):
+    def _init_tool_for_path(self, tool_file: StrPath):
         self.tool_file = tool_file
         return self.__setup_tool()

diff --git a/lib/galaxy/celery/tasks.py b/lib/galaxy/celery/tasks.py
index 3b2e4c6272a7..6c27f975639c 100644
--- a/lib/galaxy/celery/tasks.py
+++ b/lib/galaxy/celery/tasks.py
@@ -75,10 +75,8 @@ def setup_data_table_manager(app):


 @lru_cache
-def cached_create_tool_from_representation(app, raw_tool_source):
-    return create_tool_from_representation(
-        app=app, raw_tool_source=raw_tool_source, tool_dir="", tool_source_class="XmlToolSource"
-    )
+def cached_create_tool_from_representation(app: MinimalManagerApp, raw_tool_source: str):
+    return create_tool_from_representation(app=app, raw_tool_source=raw_tool_source, tool_source_class="XmlToolSource")


 @galaxy_task(action="recalculate a user's disk usage")
diff --git a/lib/galaxy/datatypes/data.py b/lib/galaxy/datatypes/data.py
index 05f5adbad6bf..0024adea6155 100644
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -207,6 +207,7 @@ class Data(metaclass=DataMeta):
     edam_data = "data_0006"
     edam_format = "format_1915"
     file_ext = "data"
+    is_subclass = False
     # Data is not chunkable by default.
     CHUNKABLE = False

diff --git a/lib/galaxy/datatypes/registry.py b/lib/galaxy/datatypes/registry.py
index 9e7bf7d561a7..3d73c7f9bb9f 100644
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -6,15 +6,16 @@
 import logging
 import os
 import pkgutil
-from pathlib import Path
 from string import Template
 from typing import (
+    Any,
     cast,
     Dict,
     Iterable,
     List,
     Optional,
     Tuple,
+    Type,
     TYPE_CHECKING,
     Union,
 )
@@ -24,8 +25,12 @@
 import galaxy.util
 from galaxy.datatypes.protocols import DatasetProtocol
 from galaxy.tool_util.edam_util import load_edam_tree
-from galaxy.util import RW_R__R__
+from galaxy.util import (
+    Element,
+    RW_R__R__,
+)
 from galaxy.util.bunch import Bunch
+from galaxy.util.path import StrPath
 from . import (
     binary,
     coverage,
@@ -65,7 +70,7 @@ def __init__(self, config=None):
         self.log.addHandler(logging.NullHandler())
         self.config = config
         self.edam = edam
-        self.datatypes_by_extension = {}
+        self.datatypes_by_extension: Dict[str, Data] = {}
         self.datatypes_by_suffix_inferences = {}
         self.mimetypes_by_extension = {}
         self.datatype_converters = {}
@@ -75,7 +80,7 @@ def __init__(self, config=None):
         self.converter_deps = {}
         self.available_tracks = []
         self.set_external_metadata_tool = None
-        self.sniff_order = []
+        self.sniff_order: List[Data] = []
         self.upload_file_formats = []
         # Datatype elements defined in local datatypes_conf.xml that contain display applications.
         self.display_app_containers = []
@@ -105,7 +110,7 @@ def __init__(self, config=None):
     def load_datatypes(
         self,
         root_dir=None,
-        config=None,
+        config: Optional[Union[Element, StrPath]] = None,
         override=True,
         use_converters=True,
         use_display_applications=True,
@@ -127,8 +132,8 @@ def __import_module(full_path: str, datatype_module: str):
             return module

         if root_dir and config:
-            compressed_sniffers = {}
-            if isinstance(config, (str, Path)):
+            compressed_sniffers: Dict[Type[Data], List[Data]] = {}
+            if isinstance(config, (str, os.PathLike)):
                 # Parse datatypes_conf.xml
                 tree = galaxy.util.parse_xml(config)
                 root = tree.getroot()
@@ -137,6 +142,7 @@ def __import_module(full_path: str, datatype_module: str):
             else:
                 root = config
             registration = root.find("registration")
+            assert registration is not None
             # Set default paths defined in local datatypes_conf.xml.
             if use_converters:
                 if not self.converters_path:
@@ -167,7 +173,6 @@ def __import_module(full_path: str, datatype_module: str):

             for elem in registration.findall("datatype"):
                 # Keep a status of the process steps to enable stopping the process of handling the datatype if necessary.
-                ok = True
                 extension = self.get_extension(elem)
                 dtype = elem.get("type", None)
                 type_extension = elem.get("type_extension", None)
@@ -199,7 +204,9 @@ def __import_module(full_path: str, datatype_module: str):
                 if override or extension not in self.datatypes_by_extension:
                     can_process_datatype = True
                 if can_process_datatype:
+                    datatype_class: Optional[Type[Data]] = None
                     if dtype is not None:
+                        ok = True
                         try:
                             fields = dtype.split(":")
                             datatype_module = fields[0]
@@ -208,21 +215,18 @@ def __import_module(full_path: str, datatype_module: str):
                             self.log.exception("Error parsing datatype definition for dtype %s", str(dtype))
                             ok = False
                         if ok:
-                            datatype_class = None
-                            if datatype_class is None:
-                                try:
-                                    # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
-                                    fields = datatype_module.split(".")[1:]
-                                    module = __import__(datatype_module)
-                                    for mod in fields:
-                                        module = getattr(module, mod)
-                                    datatype_class = getattr(module, datatype_class_name)
-                                    self.log.debug(
-                                        f"Retrieved datatype module {str(datatype_module)}:{datatype_class_name} from the datatype registry for extension {extension}."
-                                    )
-                                except Exception:
-                                    self.log.exception("Error importing datatype module %s", str(datatype_module))
-                                    ok = False
+                            try:
+                                # The datatype class name must be contained in one of the datatype modules in the Galaxy distribution.
+                                fields = datatype_module.split(".")[1:]
+                                module = __import__(datatype_module)
+                                for mod in fields:
+                                    module = getattr(module, mod)
+                                datatype_class = getattr(module, datatype_class_name)
+                                self.log.debug(
+                                    f"Retrieved datatype module {str(datatype_module)}:{datatype_class_name} from the datatype registry for extension {extension}."
+                                )
+                            except Exception:
+                                self.log.exception("Error importing datatype module %s", str(datatype_module))
                 elif type_extension is not None:
                     try:
                         datatype_class = self.datatypes_by_extension[type_extension].__class__
@@ -233,8 +237,7 @@ def __import_module(full_path: str, datatype_module: str):
                         self.log.exception(
                             "Error determining datatype_class for type_extension %s", str(type_extension)
                         )
-                        ok = False
-                if ok:
+                if datatype_class:
                     # A new tool shed repository that contains custom datatypes is being installed, and since installation is
                     # occurring after the datatypes registry has been initialized at server startup, its contents cannot be
                     # overridden by new introduced conflicting data types unless the value of override is True.
@@ -262,7 +265,7 @@ def __import_module(full_path: str, datatype_module: str):
                     for upload_warning_el in upload_warning_els:
                         if upload_warning_template is not None:
                             raise NotImplementedError("Multiple upload_warnings not implemented")
-                        upload_warning_template = Template(upload_warning_el.text)
+                        upload_warning_template = Template(upload_warning_el.text or "")
                     datatype_instance = datatype_class()
                     self.datatypes_by_extension[extension] = datatype_instance
                     if mimetype is None:
@@ -282,9 +285,9 @@ def __import_module(full_path: str, datatype_module: str):
                     # compressed files in the future (e.g. maybe some day faz will be a compressed fasta
                     # or something along those lines)
                     for infer_from in elem.findall("infer_from"):
-                        suffix = infer_from.get("suffix", None)
+                        suffix = infer_from.get("suffix")
                         if suffix is None:
-                            raise Exception("Failed to parse infer_from datatype element")
+                            raise ConfigurationError("Failed to parse infer_from datatype element")
                         infer_from_suffixes.append(suffix)
                         self.datatypes_by_suffix_inferences[suffix] = datatype_instance
                     for converter in elem.findall("converter"):
@@ -300,9 +303,11 @@ def __import_module(full_path: str, datatype_module: str):
                             self.converters.append((converter_config, extension, target_datatype))
                     # Add composite files.
                     for composite_file in elem.findall("composite_file"):
-                        name = composite_file.get("name", None)
+                        name = composite_file.get("name")
                         if name is None:
-                            self.log.warning(f"You must provide a name for your composite_file ({composite_file}).")
+                            raise ConfigurationError(
+                                f"You must provide a name for your composite_file ({composite_file})."
+                            )
                         optional = composite_file.get("optional", False)
                         mimetype = composite_file.get("mimetype", None)
                         self.datatypes_by_extension[extension].add_composite_file(
@@ -321,8 +326,8 @@ def __import_module(full_path: str, datatype_module: str):
                         composite_files = datatype_instance.get_composite_files()
                         if composite_files:
                             _composite_files = []
-                            for name, composite_file in composite_files.items():
-                                _composite_file = composite_file.dict()
+                            for name, composite_file_bunch in composite_files.items():
+                                _composite_file = composite_file_bunch.dict()
                                 _composite_file["name"] = name
                                 _composite_files.append(_composite_file)
                             datatype_info_dict["composite_files"] = _composite_files
@@ -332,16 +337,18 @@ def __import_module(full_path: str, datatype_module: str):
                         compressed_extension = f"{extension}.{auto_compressed_type}"
                         upper_compressed_type = auto_compressed_type[0].upper() + auto_compressed_type[1:]
                         auto_compressed_type_name = datatype_class_name + upper_compressed_type
-                        attributes = {}
+                        attributes: Dict[str, Any] = {}
                         if auto_compressed_type == "gz":
-                            dynamic_parent = binary.GzDynamicCompressedArchive
+                            dynamic_parent: Type[binary.DynamicCompressedArchive] = (
+                                binary.GzDynamicCompressedArchive
+                            )
                         elif auto_compressed_type == "bz2":
                             dynamic_parent = binary.Bz2DynamicCompressedArchive
                         else:
-                            raise Exception(f"Unknown auto compression type [{auto_compressed_type}]")
+                            raise ConfigurationError(f"Unknown auto compression type [{auto_compressed_type}]")
                         attributes["file_ext"] = compressed_extension
                         attributes["uncompressed_datatype_instance"] = datatype_instance
-                        compressed_datatype_class = type(
+                        compressed_datatype_class: Type[Data] = type(
                             auto_compressed_type_name,
                             (
                                 datatype_class,
@@ -411,7 +418,7 @@ def __import_module(full_path: str, datatype_module: str):
             self._load_build_sites(root)
         self.set_default_values()

-        def append_to_sniff_order():
+        def append_to_sniff_order() -> None:
             sniff_order_classes = {type(_) for _ in self.sniff_order}
             for datatype in self.datatypes_by_extension.values():
                 # Add a datatype only if it is not already in sniff_order, it
@@ -482,7 +489,9 @@ def get_legacy_sites_by_build(self, site_type, build):
     def get_display_sites(self, site_type):
         return self.display_sites.get(site_type, [])

-    def load_datatype_sniffers(self, root, override=False, compressed_sniffers=None):
+    def load_datatype_sniffers(
+        self, root, override=False, compressed_sniffers: Optional[Dict[Type["Data"], List["Data"]]] = None
+    ):
        """
        Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy
        distributed config) or contained within an installed Tool Shed repository.
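A note on the `StrPath` migration in the registry hunk above: `galaxy.util.path.StrPath` is, to the best of my knowledge, a `Union[str, os.PathLike]` alias, which is why the `isinstance(config, (str, os.PathLike))` test replaces the old `(str, Path)` check. A minimal sketch of the dispatch `load_datatypes` now performs, using the standard-library `xml.etree` as a stand-in for `galaxy.util.parse_xml` (the helper name `resolve_registration_root` is hypothetical):

    import os
    from typing import Union
    from xml.etree.ElementTree import Element, parse

    StrPath = Union[str, "os.PathLike[str]"]  # assumed definition of galaxy.util.path.StrPath

    def resolve_registration_root(config: Union[Element, StrPath]) -> Element:
        # Any path-like (str or os.PathLike) is parsed from disk;
        # an already-parsed Element is used as-is.
        if isinstance(config, (str, os.PathLike)):
            return parse(os.fspath(config)).getroot()
        return config

This also explains the new `assert registration is not None`: `Element.find` returns `Optional[Element]`, so the assert narrows the type for the type checker.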
diff --git a/lib/galaxy/datatypes/sniff.py b/lib/galaxy/datatypes/sniff.py
index 0affcde217da..310ee9d21c7f 100644
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -17,8 +17,10 @@
     Callable,
     Dict,
     IO,
+    Iterable,
     NamedTuple,
     Optional,
+    TYPE_CHECKING,
     Union,
 )

@@ -44,6 +46,8 @@
     pass

 import magic  # isort:skip

+if TYPE_CHECKING:
+    from .data import Data

 log = logging.getLogger(__name__)

@@ -689,7 +693,7 @@ def _get_file_prefix(filename_or_file_prefix: Union[str, FilePrefix], auto_decom
     return filename_or_file_prefix


-def run_sniffers_raw(file_prefix: FilePrefix, sniff_order):
+def run_sniffers_raw(file_prefix: FilePrefix, sniff_order: Iterable["Data"]):
     """Run through sniffers specified by sniff_order, return None of None match."""
     fname = file_prefix.filename
     file_ext = None
@@ -718,15 +722,16 @@ def run_sniffers_raw(file_prefix: FilePrefix, sniff_order):
             continue
         try:
             if hasattr(datatype, "sniff_prefix"):
-                if file_prefix.compressed_format and getattr(datatype, "compressed_format", None):
+                datatype_compressed_format = getattr(datatype, "compressed_format", None)
+                if file_prefix.compressed_format and datatype_compressed_format:
                     # Compare the compressed format detected
                     # to the expected.
-                    if file_prefix.compressed_format != datatype.compressed_format:
+                    if file_prefix.compressed_format != datatype_compressed_format:
                         continue
                 if datatype.sniff_prefix(file_prefix):
                     file_ext = datatype.file_ext
                     break
-            elif datatype.sniff(fname):
+            elif hasattr(datatype, "sniff") and datatype.sniff(fname):
                 file_ext = datatype.file_ext
                 break
         except Exception:
diff --git a/lib/galaxy/files/sources/util.py b/lib/galaxy/files/sources/util.py
index 3704aaa7a71b..a3af04f6bff4 100644
--- a/lib/galaxy/files/sources/util.py
+++ b/lib/galaxy/files/sources/util.py
@@ -1,10 +1,8 @@
 import time
-from os import PathLike
 from typing import (
     List,
     Optional,
     Tuple,
-    Union,
 )

 from galaxy import exceptions
@@ -20,8 +18,7 @@
     requests,
 )
 from galaxy.util.config_parsers import IpAllowedListEntryT
-
-TargetPathT = Union[str, PathLike]
+from galaxy.util.path import StrPath


 def _not_implemented(drs_uri: str, desc: str) -> NotImplementedError:
@@ -79,7 +76,7 @@ def _get_access_info(obj_url: str, access_method: dict, headers=None) -> Tuple[s

 def fetch_drs_to_file(
     drs_uri: str,
-    target_path: TargetPathT,
+    target_path: StrPath,
     user_context: Optional[FileSourcesUserContext],
     force_http=False,
     retry_options: Optional[RetryOptions] = None,
diff --git a/lib/galaxy/jobs/runners/__init__.py b/lib/galaxy/jobs/runners/__init__.py
index 511c431ac8e5..fd2a13804345 100644
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -10,12 +10,18 @@
 import threading
 import time
 import traceback
-import typing
 import uuid
 from queue import (
     Empty,
     Queue,
 )
+from typing import (
+    Any,
+    Dict,
+    Optional,
+    TYPE_CHECKING,
+    Union,
+)

 from sqlalchemy import select
 from sqlalchemy.orm import object_session
@@ -58,7 +64,7 @@
 from galaxy.util.monitors import Monitors
 from .state_handler_factory import build_state_handlers

-if typing.TYPE_CHECKING:
+if TYPE_CHECKING:
     from galaxy.app import GalaxyManagerApplication
     from galaxy.jobs import (
         JobDestination,
@@ -183,7 +189,7 @@ def run_next(self):
                 # Prevent fail_job cycle in the work_queue
                 self.work_queue.put((self.fail_job, job_state))

-    def _ensure_db_session(self, arg: typing.Union["JobWrapper", "JobState"]) -> None:
+    def _ensure_db_session(self, arg: Union["JobWrapper", "JobState"]) -> None:
         """Ensure Job object belongs to current session."""
         try:
             job_wrapper = arg.job_wrapper  # type: ignore[union-attr]
@@ -264,7 +270,7 @@ def url_to_destination(self, url: str):
         """
         return galaxy.jobs.JobDestination(runner=url.split(":")[0])

-    def parse_destination_params(self, params: typing.Dict[str, typing.Any]):
+    def parse_destination_params(self, params: Dict[str, Any]):
         """Parse the JobDestination ``params`` dict and return the runner's native representation of those params."""
         raise NotImplementedError()

@@ -347,8 +353,8 @@ def build_command_line(
     def get_work_dir_outputs(
         self,
         job_wrapper: "MinimalJobWrapper",
-        job_working_directory: typing.Optional[str] = None,
-        tool_working_directory: typing.Optional[str] = None,
+        job_working_directory: Optional[str] = None,
+        tool_working_directory: Optional[str] = None,
     ):
         """
         Returns list of pairs (source_file, destination) describing path
@@ -527,10 +533,10 @@ def write_executable_script(self, path: str, contents: str, job_io: DescribesScr
     def _find_container(
         self,
         job_wrapper: "MinimalJobWrapper",
-        compute_working_directory: typing.Optional[str] = None,
-        compute_tool_directory: typing.Optional[str] = None,
-        compute_job_directory: typing.Optional[str] = None,
-        compute_tmp_directory: typing.Optional[str] = None,
+        compute_working_directory: Optional[str] = None,
+        compute_tool_directory: Optional[str] = None,
+        compute_job_directory: Optional[str] = None,
+        compute_tmp_directory: Optional[str] = None,
     ):
         job_directory_type = "galaxy" if compute_working_directory is None else "pulsar"
         if not compute_working_directory:
@@ -542,7 +548,7 @@ def _find_container(
         tool = job_wrapper.tool
         assert tool
         if not compute_tool_directory:
-            compute_tool_directory = tool.tool_dir
+            compute_tool_directory = str(tool.tool_dir) if tool.tool_dir is not None else None

         if not compute_tmp_directory:
             compute_tmp_directory = job_wrapper.tmp_directory()
@@ -600,7 +606,7 @@ def fail_job(self, job_state: "JobState", exception=False, message="Job failed",
                 fail_message, tool_stdout=tool_stdout, tool_stderr=tool_stderr, exception=exception
             )

-    def mark_as_resubmitted(self, job_state: "JobState", info: typing.Optional[str] = None):
+    def mark_as_resubmitted(self, job_state: "JobState", info: Optional[str] = None):
         job_state.job_wrapper.mark_as_resubmitted(info=info)
         if not self.app.config.track_jobs_in_database:
             job_state.job_wrapper.change_state(model.Job.states.QUEUED)
diff --git a/lib/galaxy/tool_util/deps/mulled/mulled_update_singularity_containers.py b/lib/galaxy/tool_util/deps/mulled/mulled_update_singularity_containers.py
index deb62f0b24b8..b1fd62feed80 100644
--- a/lib/galaxy/tool_util/deps/mulled/mulled_update_singularity_containers.py
+++ b/lib/galaxy/tool_util/deps/mulled/mulled_update_singularity_containers.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python

 import argparse
-import os
 import os.path
 import subprocess
 import tempfile
@@ -11,10 +10,10 @@
     Any,
     Dict,
     List,
-    Union,
 )

 from galaxy.util import unicodify
+from galaxy.util.path import StrPath
 from .get_tests import (
     hashed_test_search,
     import_test_to_command_list,
@@ -46,7 +45,7 @@ def docker_to_singularity(container, installation, filepath, no_sudo=False):


 def singularity_container_test(
-    tests: Dict[str, Dict[str, Any]], installation: str, filepath: Union[str, os.PathLike]
+    tests: Dict[str, Dict[str, Any]], installation: str, filepath: StrPath
 ) -> Dict[str, List]:
     """
     Run tests, record if they pass or fail
diff --git a/lib/galaxy/tool_util/parser/factory.py b/lib/galaxy/tool_util/parser/factory.py
index 4dda4589e596..f36647dfd40d 100644
--- a/lib/galaxy/tool_util/parser/factory.py
+++ b/lib/galaxy/tool_util/parser/factory.py
@@ -1,13 +1,11 @@
 """Constructors for concrete tool and input source objects."""

 import logging
-from pathlib import PurePath
 from typing import (
     Callable,
     Dict,
     List,
     Optional,
-    Union,
 )

 from yaml import safe_load
@@ -17,6 +15,7 @@
     ElementTree,
     parse_xml_string_to_etree,
 )
+from galaxy.util.path import StrPath
 from galaxy.util.yaml_util import ordered_load
 from .cwl import (
     CwlToolSource,
@@ -61,7 +60,7 @@ def build_yaml_tool_source(yaml_string: str) -> YamlToolSource:


 def get_tool_source(
-    config_file: Optional[Union[str, PurePath]] = None,
+    config_file: Optional[StrPath] = None,
     xml_tree: Optional[ElementTree] = None,
     enable_beta_formats: bool = True,
     tool_location_fetcher: Optional[ToolLocationFetcher] = None,
@@ -87,8 +86,7 @@ def get_tool_source(
         tool_location_fetcher = ToolLocationFetcher()

     assert config_file
-    if isinstance(config_file, PurePath):
-        config_file = str(config_file)
+    config_file = str(config_file)
     config_file = tool_location_fetcher.to_tool_path(config_file)

     if not enable_beta_formats:
diff --git a/lib/galaxy/tool_util/parser/interface.py b/lib/galaxy/tool_util/parser/interface.py
index 19c557cd53de..5565ff13ae42 100644
--- a/lib/galaxy/tool_util/parser/interface.py
+++ b/lib/galaxy/tool_util/parser/interface.py
@@ -155,7 +155,7 @@ def parse_id(self) -> Optional[str]:
     def parse_version(self) -> Optional[str]:
         """Parse a version describing the abstract tool."""

-    def parse_tool_module(self):
+    def parse_tool_module(self) -> Optional[Tuple[str, str]]:
         """Load Tool class from a custom module. (Optional).

         If not None, return pair containing module and class (as strings).
@@ -169,7 +169,7 @@ def parse_action_module(self):
         """
         return None

-    def parse_tool_type(self):
+    def parse_tool_type(self) -> Optional[str]:
         """Load simple tool type string (e.g. 'data_source', 'default')."""
         return None

diff --git a/lib/galaxy/tool_util/parser/xml.py b/lib/galaxy/tool_util/parser/xml.py
index a89754553f84..ed1dda01c30d 100644
--- a/lib/galaxy/tool_util/parser/xml.py
+++ b/lib/galaxy/tool_util/parser/xml.py
@@ -190,8 +190,7 @@ def parse_action_module(self):

     def parse_tool_type(self):
         root = self.root
-        if root.get("tool_type", None) is not None:
-            return root.get("tool_type")
+        return root.get("tool_type")

     def parse_name(self):
         return self.root.get("name") or self.parse_id()
diff --git a/lib/galaxy/tool_util/verify/codegen.py b/lib/galaxy/tool_util/verify/codegen.py
index 2e93d29ccbf0..18132fb078e8 100644
--- a/lib/galaxy/tool_util/verify/codegen.py
+++ b/lib/galaxy/tool_util/verify/codegen.py
@@ -3,8 +3,6 @@
 # how to use this function...
 # PYTHONPATH=lib python lib/galaxy/tool_util/verify/codegen.py

-from __future__ import annotations
-
 import argparse
 import inspect
 import os
@@ -34,7 +32,7 @@
 Children = Literal["allowed", "required", "forbidden"]

-DESCRIPTION = """This script synchronizes dynamic code aritfacts against models in Galaxy.
+DESCRIPTION = """This script synchronizes dynamic code artifacts against models in Galaxy.

 Right now this just synchronizes Galaxy's XSD file against documentation in Galaxy's assertion
 modules but in the future it will also build Pydantic models for these functions.
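The `get_tool_source` change above can drop the `PurePath`-only special case because any `StrPath` can be stringified up front. A sketch of that normalization idiom under the same alias assumption (`normalize_config_path` is a hypothetical stand-in for the inlined `str(config_file)` plus `ToolLocationFetcher.to_tool_path` steps):

    import os
    from pathlib import Path
    from typing import Union

    StrPath = Union[str, "os.PathLike[str]"]  # assumed alias from galaxy.util.path

    def normalize_config_path(config_file: StrPath) -> str:
        # os.fspath accepts both str and PathLike and always returns the string form.
        return os.fspath(config_file)

    print(normalize_config_path("tools/cat.xml"))             # -> tools/cat.xml
    print(normalize_config_path(Path("tools") / "cat.xml"))   # -> tools/cat.xml (POSIX)

Typing parser entry points against `StrPath` this way lets callers pass `pathlib.Path` objects, plain strings, or any other `os.PathLike` without per-type branches.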
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index d006f5c2d435..c085ac512699 100644
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -176,6 +176,7 @@
     safe_loads,
     swap_inf_nan,
 )
+from galaxy.util.path import StrPath
 from galaxy.util.rules_dsl import RuleSet
 from galaxy.util.template import (
     fill_template,
@@ -370,15 +371,16 @@ class ToolNotFoundException(Exception):
     pass


-def create_tool_from_source(app, tool_source, config_file=None, **kwds):
+def create_tool_from_source(app, tool_source: ToolSource, config_file: Optional[StrPath] = None, **kwds):
     # Allow specifying a different tool subclass to instantiate
     if (tool_module := tool_source.parse_tool_module()) is not None:
         module, cls = tool_module
         mod = __import__(module, globals(), locals(), [cls])
         ToolClass = getattr(mod, cls)
-    elif tool_source.parse_tool_type():
-        tool_type = tool_source.parse_tool_type()
+    elif tool_type := tool_source.parse_tool_type():
         ToolClass = tool_types.get(tool_type)
+        if not ToolClass:
+            raise ValueError(f"Unrecognized tool type: {tool_type}")
     else:
         # Normal tool
         root = getattr(tool_source, "root", None)
@@ -388,7 +390,7 @@ def create_tool_from_source(app, tool_source, config_file=None, **kwds):


 def create_tool_from_representation(
-    app, raw_tool_source: str, tool_dir: str, tool_source_class="XmlToolSource"
+    app, raw_tool_source: str, tool_dir: Optional[StrPath] = None, tool_source_class="XmlToolSource"
 ) -> "Tool":
     tool_source = get_tool_source(tool_source_class=tool_source_class, raw_tool_source=raw_tool_source)
     return create_tool_from_source(app, tool_source=tool_source, tool_dir=tool_dir)
@@ -560,7 +562,7 @@ def get_cache_region(self, tool_cache_data_dir):
             self.cache_regions[tool_cache_data_dir] = ToolDocumentCache(cache_dir=tool_cache_data_dir)
         return self.cache_regions[tool_cache_data_dir]

-    def create_tool(self, config_file, tool_cache_data_dir=None, **kwds):
+    def create_tool(self, config_file: str, tool_cache_data_dir=None, **kwds):
         cache = self.get_cache_region(tool_cache_data_dir)
         if config_file.endswith(".xml") and cache and not cache.disabled:
             tool_document = cache.get(config_file)
@@ -617,7 +619,6 @@ def get_tool_components(self, tool_id, tool_version=None, get_loaded_tools_by_li
         Retrieve all loaded versions of a tool from the toolbox and return a select list enabling
         selection of a different version, the list of the tool's loaded versions, and the specified tool.
         """
-        toolbox = self
         tool_version_select_field = None
         tools = []
         tool = None
@@ -629,11 +630,11 @@ def get_tool_components(self, tool_id, tool_version=None, get_loaded_tools_by_li
                 # Some data sources send back redirects ending with `/`, this takes care of that case
                 tool_id = tool_id[:-1]
             if get_loaded_tools_by_lineage:
-                tools = toolbox.get_loaded_tools_by_lineage(tool_id)
+                tools = self.get_loaded_tools_by_lineage(tool_id)
             else:
-                tools = toolbox.get_tool(tool_id, tool_version=tool_version, get_all_versions=True)
+                tools = self.get_tool(tool_id, tool_version=tool_version, get_all_versions=True)
             if tools:
-                tool = toolbox.get_tool(tool_id, tool_version=tool_version, get_all_versions=False)
+                tool = self.get_tool(tool_id, tool_version=tool_version, get_all_versions=False)
                 if len(tools) > 1:
                     tool_version_select_field = self.__build_tool_version_select_field(tools, tool.id, set_selected)
                 break
@@ -775,24 +776,22 @@ class Tool(UsesDictVisibleKeys):

     def __init__(
         self,
-        config_file,
+        config_file: Optional[StrPath],
         tool_source: ToolSource,
         app: "UniverseApplication",
-        guid=None,
+        guid: Optional[str] = None,
         repository_id=None,
         tool_shed_repository=None,
-        allow_code_files=True,
-        dynamic=False,
-        tool_dir=None,
+        allow_code_files: bool = True,
+        dynamic: bool = False,
+        tool_dir: Optional[StrPath] = None,
     ):
         """Load a tool from the config named by `config_file`"""
+        self.config_file = config_file
         # Determine the full path of the directory where the tool config is
         if config_file is not None:
-            self.config_file = config_file
-            self.tool_dir = tool_dir or os.path.dirname(config_file)
-        else:
-            self.config_file = None
-            self.tool_dir = tool_dir
+            tool_dir = tool_dir or os.path.dirname(config_file)
+        self.tool_dir = tool_dir

         self.app = app
         self.repository_id = repository_id
@@ -1032,7 +1031,7 @@ def allow_user_access(self, user, attempting_access=True):
                 return False
         return True

-    def parse(self, tool_source: ToolSource, guid=None, dynamic=False):
+    def parse(self, tool_source: ToolSource, guid: Optional[str] = None, dynamic: bool = False) -> None:
         """
         Read tool configuration from the element `root` and fill in `self`.
         """
@@ -1130,6 +1129,7 @@ def parse(self, tool_source: ToolSource, guid=None, dynamic=False):
         version_cmd_interpreter = tool_source.parse_version_command_interpreter()
         if version_cmd_interpreter:
             executable = self.version_string_cmd.split()[0]
+            assert self.tool_dir is not None
             abs_executable = os.path.abspath(os.path.join(self.tool_dir, executable))
             command_line = self.version_string_cmd.replace(executable, abs_executable, 1)
             self.version_string_cmd = f"{version_cmd_interpreter} {command_line}"
@@ -1249,7 +1249,7 @@ def parse(self, tool_source: ToolSource, guid=None, dynamic=False):

         self._is_workflow_compatible = self.check_workflow_compatible(self.tool_source)

-    def __parse_legacy_features(self, tool_source):
+    def __parse_legacy_features(self, tool_source: ToolSource):
         self.code_namespace: Dict[str, str] = {}
         self.hook_map: Dict[str, str] = {}
         self.uihints: Dict[str, str] = {}
@@ -1268,6 +1268,7 @@ def __parse_legacy_features(self, tool_source):
                 # map hook to function
                 self.hook_map[key] = value
             file_name = code_elem.get("file")
+            assert self.tool_dir is not None
             code_path = os.path.join(self.tool_dir, file_name)
             if self._allow_code_files:
                 with open(code_path) as f:
@@ -1349,9 +1350,8 @@ def tests(self):
     @property
     def _repository_dir(self):
         """If tool shed installed tool, the base directory of the repository installed."""
-        repository_base_dir = None
-
         if getattr(self, "tool_shed", None):
+            assert self.tool_dir is not None
             tool_dir = Path(self.tool_dir)
             for repo_dir in itertools.chain([tool_dir], tool_dir.parents):
                 if repo_dir.name == self.repository_name and repo_dir.parent.name == self.installed_changeset_revision:
@@ -1359,7 +1359,7 @@ def _repository_dir(self):
             else:
                 log.error(f"Problem finding repository dir for tool '{self.id}'")

-        return repository_base_dir
+        return None

     def test_data_path(self, filename):
         test_data = None
@@ -2402,16 +2402,16 @@ def discover_outputs(
         return collected

     def to_archive(self):
-        tool = self
         tarball_files = []
         temp_files = []
-        with open(os.path.abspath(tool.config_file)) as fh1:
+        assert self.config_file
+        with open(os.path.abspath(self.config_file)) as fh1:
             tool_xml = fh1.read()
         # Retrieve tool help images and rewrite the tool's xml into a temporary file with the path
         # modified to be relative to the repository root.
         image_found = False
-        if tool.help is not None:
-            tool_help = tool.help._source
+        if self.help is not None:
+            tool_help = self.help._source
             # Check each line of the rendered tool help for an image tag that points to a location under static/
             for help_line in tool_help.split("\n"):
                 image_regex = re.compile(r'img alt="[^"]+" src="\${static_path}/([^"]+)"')
@@ -2429,25 +2429,25 @@ def to_archive(self):
             with tempfile.NamedTemporaryFile(mode="w", suffix=".xml", delete=False) as fh2:
                 new_tool_config = fh2.name
                 fh2.write(tool_xml)
-            tool_tup = (new_tool_config, os.path.split(tool.config_file)[-1])
+            tool_tup = (new_tool_config, os.path.split(self.config_file)[-1])
             temp_files.append(new_tool_config)
         else:
-            tool_tup = (os.path.abspath(tool.config_file), os.path.split(tool.config_file)[-1])
+            tool_tup = (os.path.abspath(self.config_file), os.path.split(self.config_file)[-1])
         tarball_files.append(tool_tup)
         # TODO: This feels hacky.
-        tool_command = tool.command.strip().split()[0]
-        tool_path = os.path.dirname(os.path.abspath(tool.config_file))
+        tool_command = self.command.strip().split()[0]
+        tool_path = os.path.dirname(os.path.abspath(self.config_file))
         # Add the tool XML to the tuple that will be used to populate the tarball.
         if os.path.exists(os.path.join(tool_path, tool_command)):
             tarball_files.append((os.path.join(tool_path, tool_command), tool_command))
         # Find and add macros and code files.
-        for external_file in tool.get_externally_referenced_paths(os.path.abspath(tool.config_file)):
+        for external_file in self.get_externally_referenced_paths(os.path.abspath(self.config_file)):
             external_file_abspath = os.path.abspath(os.path.join(tool_path, external_file))
             tarball_files.append((external_file_abspath, external_file))
         if os.path.exists(os.path.join(tool_path, "Dockerfile")):
             tarball_files.append((os.path.join(tool_path, "Dockerfile"), "Dockerfile"))
         # Find tests, and check them for test data.
-        if (tests := tool.tests) is not None:
+        if (tests := self.tests) is not None:
             for test in tests:
                 # Add input file tuples to the list.
                 for input in test.inputs:
@@ -2463,7 +2463,7 @@ def to_archive(self):
                     if os.path.exists(output_filepath):
                         td_tup = (output_filepath, os.path.join("test-data", filename))
                         tarball_files.append(td_tup)
-        for param in tool.input_params:
+        for param in self.input_params:
             # Check for tool data table definitions.
             param_options = getattr(param, "options", None)
             if param_options is not None:
@@ -4168,7 +4168,6 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history


 # Populate tool_type to ToolClass mappings
-tool_types = {}
 TOOL_CLASSES: List[Type[Tool]] = [
     Tool,
     SetMetadataTool,
@@ -4188,9 +4187,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
     ExtractDatasetCollectionTool,
     DataDestinationTool,
 ]
-for tool_class in TOOL_CLASSES:
-    tool_types[tool_class.tool_type] = tool_class
-
+tool_types = {tool_class.tool_type: tool_class for tool_class in TOOL_CLASSES}

 # ---- Utility classes to be factored out -----------------------------------
diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py
index d6e65f592a6e..31369c948052 100644
--- a/lib/galaxy/tools/execute.py
+++ b/lib/galaxy/tools/execute.py
@@ -285,7 +285,7 @@ def __init__(
         self.collection_info = collection_info
         self.completed_jobs = completed_jobs

-        self._on_text = None
+        self._on_text: Optional[str] = None

         # Populated as we go...
         self.failed_jobs = 0
@@ -322,7 +322,7 @@ def record_error(self, error):
         self.execution_errors.append(error)

     @property
-    def on_text(self):
+    def on_text(self) -> Optional[str]:
         collection_info = self.collection_info
         if self._on_text is None and collection_info is not None:
             collection_names = ["collection %d" % c.hid for c in collection_info.collections.values()]
diff --git a/lib/galaxy/tools/execution_helpers.py b/lib/galaxy/tools/execution_helpers.py
index 66ae3c853681..76413a5f6370 100644
--- a/lib/galaxy/tools/execution_helpers.py
+++ b/lib/galaxy/tools/execution_helpers.py
@@ -5,6 +5,7 @@
 """

 import logging
+from typing import Collection

 log = logging.getLogger(__name__)

@@ -47,7 +48,7 @@ def filter_output(tool, output, incoming):
     return False


-def on_text_for_names(input_names):
+def on_text_for_names(input_names: Collection[str]) -> str:
     # input_names may contain duplicates... this is because the first value in
     # multiple input dataset parameters will appear twice once as param_name
     # and once as param_name1.
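`on_text_for_names` is annotated with `Collection[str]` because it only needs to iterate and size-check its input, and, as the comment above notes, the names may contain duplicates. An illustrative sketch of duplicate-tolerant name joining (this is only the shape of the behavior, not the actual Galaxy implementation):

    from typing import Collection

    def on_text_for_names(input_names: Collection[str]) -> str:
        # dict.fromkeys drops duplicates while keeping first-seen order.
        names = list(dict.fromkeys(input_names))
        if not names:
            return ""
        if len(names) == 1:
            return names[0]
        return ", ".join(names[:-1]) + " and " + names[-1]

    print(on_text_for_names(["data 1", "data 1", "data 2"]))  # -> data 1 and data 2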
diff --git a/lib/galaxy/tools/flatten_collection.xml b/lib/galaxy/tools/flatten_collection.xml
index 67a96252dfeb..a9cb4f04f075 100644
--- a/lib/galaxy/tools/flatten_collection.xml
+++ b/lib/galaxy/tools/flatten_collection.xml
@@ -1,7 +1,7 @@
+      tool_type="flatten_collection">
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
     >>> print(p.name)
     _name
     >>> assert sorted(p.to_dict(trans).items()) == [('argument', None), ('falsevalue', '_falsevalue'), ('help', ''), ('hidden', False), ('is_dynamic', False), ('label', ''), ('model_class', 'BooleanToolParameter'), ('name', '_name'), ('optional', False), ('refresh_on_change', False), ('truevalue', '_truevalue'), ('type', 'boolean'), ('value', True)]
-    >>> print(p.from_json('true'))
+    >>> print(p.from_json('true', trans))
     True
     >>> print(p.to_param_dict_string(True))
     _truevalue
-    >>> print(p.from_json('false'))
+    >>> print(p.from_json('false', trans))
     False
     >>> print(p.to_param_dict_string(False))
     _falsevalue
@@ -615,7 +615,7 @@ def __init__(self, tool, input_source):
         self.optional = input_source.get_bool("optional", False)
         self.checked = boolean_is_checked(input_source)

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         return self.to_python(value)

     def to_python(self, value, app=None):
@@ -666,7 +666,7 @@ class FileToolParameter(ToolParameter):
     def __init__(self, tool, input_source):
         super().__init__(tool, input_source)

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         # Middleware or proxies may encode files in special ways (TODO: this
         # should be pluggable)
         if isinstance(value, FilesPayload):
@@ -765,7 +765,7 @@ def to_param_dict_string(self, value, other_values=None):
         else:
             return lst[0]

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         return self.to_python(value, trans.app, validate=True)

     def to_json(self, value, app, use_security):
@@ -885,7 +885,7 @@ def __init__(self, tool, input_source):
     def get_initial_value(self, trans, other_values):
         return self._get_value(trans)

-    def from_json(self, value=None, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         return self._get_value(trans)

     def _get_value(self, trans):
@@ -1004,7 +1004,7 @@ def get_legal_names(self, trans, other_values):
         """
         return {n: v for n, v, _ in self.get_options(trans, other_values)}

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         return self._select_from_json(value, trans, other_values=other_values, require_legal_value=True)

     def _select_from_json(self, value, trans, other_values=None, require_legal_value=True):
@@ -1284,7 +1284,7 @@ def __init__(self, tool, input_source):
         self.default_value = input_source.get("value", None)
         self.is_dynamic = True

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         other_values = other_values or {}
         if self.multiple:
             tag_list = []
@@ -1412,7 +1412,7 @@ def to_json(self, value, app, use_security):
             return value.strip()
         return value

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         """
         Label convention prepends column number with a 'c', but tool uses the
         integer. This removes the 'c' when entered into a workflow.
@@ -1701,7 +1701,7 @@ def recurse_options(legal_values, options):
                 recurse_options(legal_values, self.get_options(trans=trans, other_values=other_values))
         return legal_values

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         other_values = other_values or {}
         legal_values = self.get_legal_values(trans, other_values, value)
         if not legal_values and trans.workflow_building_mode:
@@ -2102,7 +2102,7 @@ def __init__(self, tool, input_source, trans=None):
             )
             self.conversions.append((name, conv_extension, [conv_type]))

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         session = trans.sa_session

         other_values = other_values or {}
@@ -2459,7 +2459,7 @@ def match_multirun_collections(self, trans, history, dataset_collection_matcher)
                 if match:
                     yield history_dataset_collection, match.implicit_conversion

-    def from_json(self, value, trans=None, other_values=None):
+    def from_json(self, value, trans, other_values=None):
         session = trans.sa_session

         other_values = other_values or {}
diff --git a/lib/galaxy/tools/remote_tool_eval.py b/lib/galaxy/tools/remote_tool_eval.py
index b07c6cf6a875..6f2e273ffbe4 100644
--- a/lib/galaxy/tools/remote_tool_eval.py
+++ b/lib/galaxy/tools/remote_tool_eval.py
@@ -72,7 +72,7 @@ def __init__(
         self.security = None  # type: ignore[assignment]


-def main(TMPDIR, WORKING_DIRECTORY, IMPORT_STORE_DIRECTORY):
+def main(TMPDIR, WORKING_DIRECTORY, IMPORT_STORE_DIRECTORY) -> None:
     metadata_params = get_metadata_params(WORKING_DIRECTORY)
     datatypes_config = metadata_params["datatypes_config"]
     if not os.path.exists(datatypes_config):
diff --git a/lib/galaxy/tours/_impl.py b/lib/galaxy/tours/_impl.py
index 60ab97bfc80a..ab6f280f45e5 100644
--- a/lib/galaxy/tours/_impl.py
+++ b/lib/galaxy/tours/_impl.py
@@ -4,10 +4,7 @@

 import logging
 import os
-from typing import (
-    List,
-    Union,
-)
+from typing import List

 import yaml
 from pydantic import parse_obj_as
@@ -15,6 +12,7 @@
 from galaxy.exceptions import ObjectNotFound
 from galaxy.navigation.data import load_root_component
 from galaxy.util import config_directories_from_setting
+from galaxy.util.path import StrPath
 from ._interface import ToursRegistry
 from ._schema import TourList

@@ -61,12 +59,12 @@ def load_tour_steps(contents_dict, warn=None, resolve_components=True):
             step["title"] = title_default


-def get_tour_id_from_path(tour_path: Union[str, os.PathLike]) -> str:
+def get_tour_id_from_path(tour_path: StrPath) -> str:
     filename = os.path.basename(tour_path)
     return os.path.splitext(filename)[0]


-def load_tour_from_path(tour_path: Union[str, os.PathLike], warn=None, resolve_components=True) -> dict:
+def load_tour_from_path(tour_path: StrPath, warn=None, resolve_components=True) -> dict:
     with open(tour_path) as f:
         tour = yaml.safe_load(f)
     load_tour_steps(tour, warn=warn, resolve_components=resolve_components)
@@ -80,7 +78,7 @@ def is_yaml(filename: str) -> bool:
     return False


-def tour_paths(target_path: Union[str, os.PathLike]) -> List[str]:
+def tour_paths(target_path: StrPath) -> List[str]:
     paths = []
     if os.path.isdir(target_path):
         for filename in os.listdir(target_path):
diff --git a/lib/galaxy/util/hash_util.py b/lib/galaxy/util/hash_util.py
index 100adc23bcc4..505e42687785 100644
--- a/lib/galaxy/util/hash_util.py
+++ b/lib/galaxy/util/hash_util.py
@@ -6,7 +6,6 @@
 import hashlib
 import hmac
 import logging
-import os
 from enum import Enum
 from typing import (
     Any,
@@ -19,6 +18,7 @@
 )

 from . import smart_str
+from .path import StrPath

 log = logging.getLogger(__name__)

@@ -82,7 +82,7 @@ def memory_bound_hexdigest(
         file.close()


-def md5_hash_file(path: Union[str, os.PathLike]) -> Optional[str]:
+def md5_hash_file(path: StrPath) -> Optional[str]:
     """
     Return a md5 hashdigest for a file or None if path could not be read.
     """
diff --git a/lib/galaxy/util/plugin_config.py b/lib/galaxy/util/plugin_config.py
index 3b4bc405ebd4..50852627e3e7 100644
--- a/lib/galaxy/util/plugin_config.py
+++ b/lib/galaxy/util/plugin_config.py
@@ -1,4 +1,3 @@
-from pathlib import Path
 from types import ModuleType
 from typing import (
     Any,
@@ -16,9 +15,9 @@
 import yaml

 from galaxy.util import parse_xml
+from galaxy.util.path import StrPath
 from galaxy.util.submodules import import_submodules

-PathT = Union[str, Path]
 PluginDictConfigT = Dict[str, Any]
 PluginConfigsT = Union[PluginDictConfigT, List[PluginDictConfigT]]

@@ -132,7 +131,7 @@ def __load_plugins_from_dicts(
     return plugins


-def plugin_source_from_path(path: PathT) -> PluginConfigSource:
+def plugin_source_from_path(path: StrPath) -> PluginConfigSource:
     filename = str(path)
     if (
         filename.endswith(".yaml")
@@ -149,7 +148,7 @@ def plugin_source_from_dict(as_dict: PluginConfigsT) -> PluginConfigSource:
     return PluginConfigSource("dict", as_dict)


-def __read_yaml(path: PathT):
+def __read_yaml(path: StrPath):
     if yaml is None:
         raise ImportError("Attempting to read YAML configuration file - but PyYAML dependency unavailable.")
diff --git a/lib/galaxy/webapps/base/api.py b/lib/galaxy/webapps/base/api.py
index b6d12d834014..9df5b838eea2 100644
--- a/lib/galaxy/webapps/base/api.py
+++ b/lib/galaxy/webapps/base/api.py
@@ -1,8 +1,16 @@
 import os
 import stat
-import typing
 import uuid
 from logging import getLogger
+from typing import (
+    Any,
+    Dict,
+    Mapping,
+    Optional,
+    Tuple,
+    TYPE_CHECKING,
+    Union,
+)

 import anyio
 from fastapi import (
@@ -26,10 +34,11 @@
 from galaxy.exceptions import MessageException
 from galaxy.exceptions.utils import api_error_to_dict
+from galaxy.util.path import StrPath
 from galaxy.web.framework.base import walk_controller_modules
 from galaxy.web.framework.decorators import validation_error_to_message_exception

-if typing.TYPE_CHECKING:
+if TYPE_CHECKING:
     from starlette.background import BackgroundTask
     from starlette.types import (
         Receive,
@@ -43,7 +52,7 @@


 # Copied from https://github.com/tiangolo/fastapi/issues/1240#issuecomment-1055396884
-def _get_range_header(range_header: str, file_size: int) -> typing.Tuple[int, int]:
+def _get_range_header(range_header: str, file_size: int) -> Tuple[int, int]:
     def _invalid_range():
         return HTTPException(
             status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
@@ -67,19 +76,19 @@ class GalaxyFileResponse(FileResponse):
     Augments starlette FileResponse with x-accel-redirect/x-sendfile and byte-range handling.
     """

-    nginx_x_accel_redirect_base: typing.Optional[str] = None
-    apache_xsendfile: typing.Optional[bool] = None
+    nginx_x_accel_redirect_base: Optional[str] = None
+    apache_xsendfile: Optional[bool] = None

     def __init__(
         self,
-        path: typing.Union[str, "os.PathLike[str]"],
+        path: StrPath,
         status_code: int = 200,
-        headers: typing.Optional[typing.Mapping[str, str]] = None,
-        media_type: typing.Optional[str] = None,
-        background: typing.Optional["BackgroundTask"] = None,
-        filename: typing.Optional[str] = None,
-        stat_result: typing.Optional[os.stat_result] = None,
-        method: typing.Optional[str] = None,
+        headers: Optional[Mapping[str, str]] = None,
+        media_type: Optional[str] = None,
+        background: Optional["BackgroundTask"] = None,
+        filename: Optional[str] = None,
+        stat_result: Optional[os.stat_result] = None,
+        method: Optional[str] = None,
         content_disposition_type: str = "attachment",
     ) -> None:
         super().__init__(
@@ -184,8 +193,8 @@ def get_error_response_for_request(request: Request, exc: MessageException) -> J
     else:
         content = error_dict

-    retry_after: typing.Optional[int] = getattr(exc, "retry_after", None)
-    headers: typing.Dict[str, str] = {}
+    retry_after: Optional[int] = getattr(exc, "retry_after", None)
+    headers: Dict[str, str] = {}
     if retry_after:
         headers["Retry-After"] = str(retry_after)
     return JSONResponse(status_code=status_code, content=content, headers=headers)
@@ -237,7 +246,7 @@ def add_request_id_middleware(app: FastAPI):


 def include_all_package_routers(app: FastAPI, package_name: str):
-    responses: typing.Dict[typing.Union[int, str], typing.Dict[str, typing.Any]] = {
+    responses: Dict[Union[int, str], Dict[str, Any]] = {
         "4XX": {
             "description": "Request Error",
             "model": MessageExceptionModel,
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index 5bbd28dbbc77..4aa520da2ec2 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -101,6 +101,7 @@
     galaxy_root_path,
     UNKNOWN,
 )
+from galaxy.util.path import StrPath
 from galaxy.util.resources import resource_string
 from galaxy.util.unittest_utils import skip_if_site_down
 from galaxy_test.base.decorators import (
@@ -1701,7 +1702,7 @@ def _test_history(


 # Things gxformat2 knows how to upload as workflows
-YamlContentT = Union[str, os.PathLike, dict]
+YamlContentT = Union[StrPath, dict]


 class BaseWorkflowPopulator(BasePopulator):
diff --git a/lib/tool_shed/webapp/app.py b/lib/tool_shed/webapp/app.py
index 6778d9028463..1a10551ed0f7 100644
--- a/lib/tool_shed/webapp/app.py
+++ b/lib/tool_shed/webapp/app.py
@@ -1,10 +1,7 @@
 import logging
 import sys
 import time
-from typing import (
-    Any,
-    Optional,
-)
+from typing import Optional

 from sqlalchemy.orm.scoping import scoped_session

@@ -54,7 +51,7 @@ def __init__(self, **kwd) -> None:
         # will be overwritten when building WSGI app
         self.is_webapp = False
         # Read the tool_shed.ini configuration file and check for errors.
-        self.config: Any = config.Configuration(**kwd)
+        self.config = config.Configuration(**kwd)
         self.config.check()
         configure_logging(self.config)
         self.application_stack = application_stack_instance()
diff --git a/lib/tool_shed/webapp/frontend/.eslintignore b/lib/tool_shed/webapp/frontend/.eslintignore
index b22c816bfd5e..a220c535ece5 100644
--- a/lib/tool_shed/webapp/frontend/.eslintignore
+++ b/lib/tool_shed/webapp/frontend/.eslintignore
@@ -3,5 +3,5 @@ node_modules
 # don't lint build output (make sure it's set to your correct build folder name)
 dist

-# Ignore codegen aritfacts
+# Ignore codegen artifacts
 src/gql/*.ts
diff --git a/test/unit/data/datatypes/test_check_required.py b/test/unit/data/datatypes/test_check_required.py
index e5f8d1c880e3..8351f1c08bf5 100644
--- a/test/unit/data/datatypes/test_check_required.py
+++ b/test/unit/data/datatypes/test_check_required.py
@@ -22,21 +22,21 @@ class CheckRequiredInherited(CheckRequiredTrue):

 def test_check_required_metadata_false():
     app = GalaxyDataTestApp()
-    app.datatypes_registry.datatypes_by_extension["false"] = CheckRequiredFalse
+    app.datatypes_registry.datatypes_by_extension["false"] = CheckRequiredFalse()
     hda = HistoryDatasetAssociation(sa_session=app.model.session, extension="false")
     assert not hda.metadata.spec["columns"].check_required_metadata


 def test_check_required_metadata_true():
     app = GalaxyDataTestApp()
-    app.datatypes_registry.datatypes_by_extension["true"] = CheckRequiredTrue
+    app.datatypes_registry.datatypes_by_extension["true"] = CheckRequiredTrue()
     hda = HistoryDatasetAssociation(sa_session=app.model.session, extension="true")
     assert hda.metadata.spec["columns"].check_required_metadata


 def test_check_required_metadata_inherited():
     app = GalaxyDataTestApp()
-    app.datatypes_registry.datatypes_by_extension["inherited"] = CheckRequiredInherited
+    app.datatypes_registry.datatypes_by_extension["inherited"] = CheckRequiredInherited()
     hda = HistoryDatasetAssociation(sa_session=app.model.session, extension="inherited")
     assert hda.metadata.spec["columns"].check_required_metadata
     assert not hda.metadata.spec["something"].check_required_metadata
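The three test fixes above follow from the new `Dict[str, Data]` annotation on `Registry.datatypes_by_extension`: the registry maps extensions to datatype instances, not classes, so registering `CheckRequiredFalse` (the class object) was a latent type error that the annotation now surfaces. A minimal self-contained sketch of why the annotation catches it (class names reused here for illustration only):

    from typing import Dict

    class Data:
        file_ext = "data"

    class CheckRequiredFalse(Data):
        file_ext = "false"

    datatypes_by_extension: Dict[str, Data] = {}
    datatypes_by_extension["false"] = CheckRequiredFalse()  # ok: an instance of a Data subclass
    # datatypes_by_extension["false"] = CheckRequiredFalse  # mypy error: Type[CheckRequiredFalse] is not Data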