From f46d69eddd7a8a0f4defcea0939dc096b04aafdf Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 12 Dec 2023 18:53:19 -0500 Subject: [PATCH 01/71] Upgrade SQLAlchemy to 2.0 This conflicts with dependency requirements for sqlalchemy-graphene (used only in toolshed, new WIP client) --- lib/galaxy/dependencies/pinned-requirements.txt | 2 +- packages/app/setup.cfg | 2 +- packages/data/setup.cfg | 2 +- packages/job_execution/setup.cfg | 2 +- packages/web_apps/setup.cfg | 2 +- packages/web_framework/setup.cfg | 2 +- packages/web_stack/setup.cfg | 2 +- pyproject.toml | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt index 9b51bb5490a2..bf5d947867a4 100644 --- a/lib/galaxy/dependencies/pinned-requirements.txt +++ b/lib/galaxy/dependencies/pinned-requirements.txt @@ -186,7 +186,7 @@ sniffio==1.3.1 ; python_version >= "3.8" and python_version < "3.13" social-auth-core[openidconnect]==4.0.3 ; python_version >= "3.8" and python_version < "3.13" sortedcontainers==2.4.0 ; python_version >= "3.8" and python_version < "3.13" spython==0.3.13 ; python_version >= "3.8" and python_version < "3.13" -sqlalchemy==1.4.51 ; python_version >= "3.8" and python_version < "3.13" +sqlalchemy==2.0.25 ; python_version >= "3.8" and python_version < "3.13" sqlitedict==2.1.0 ; python_version >= "3.8" and python_version < "3.13" sqlparse==0.4.4 ; python_version >= "3.8" and python_version < "3.13" starlette-context==0.3.6 ; python_version >= "3.8" and python_version < "3.13" diff --git a/packages/app/setup.cfg b/packages/app/setup.cfg index 4360d7181811..1879c3437ee9 100644 --- a/packages/app/setup.cfg +++ b/packages/app/setup.cfg @@ -68,7 +68,7 @@ install_requires = refgenconf>=0.12.0 regex requests - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 sqlitedict starlette svgwrite diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 157324e102a8..3e3e0e085e5c 100644 --- 
a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -58,7 +58,7 @@ install_requires = pysam>=0.21 rocrate social-auth-core[openidconnect]==4.0.3 - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 tifffile typing-extensions WebOb diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg index 40c3be56f524..05394dca8e27 100644 --- a/packages/job_execution/setup.cfg +++ b/packages/job_execution/setup.cfg @@ -38,7 +38,7 @@ install_requires = galaxy-tool-util galaxy-util MarkupSafe - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 packages = find: python_requires = >=3.8 diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg index 20ac5878220b..53ef00625581 100644 --- a/packages/web_apps/setup.cfg +++ b/packages/web_apps/setup.cfg @@ -58,7 +58,7 @@ install_requires = PyYAML requests Routes - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 sqlalchemy-migrate starlette starlette-context diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg index e04d3f101f32..55d3421dade9 100644 --- a/packages/web_framework/setup.cfg +++ b/packages/web_framework/setup.cfg @@ -40,7 +40,7 @@ install_requires = pydantic>=2,!=2.6.0,!=2.6.1 requests Routes - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 WebOb packages = find: diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg index f7bc6adc5d54..16e6d08f1c15 100644 --- a/packages/web_stack/setup.cfg +++ b/packages/web_stack/setup.cfg @@ -34,7 +34,7 @@ include_package_data = True install_requires = galaxy-data galaxy-util - SQLAlchemy>=1.4.25,<2 + SQLAlchemy>=2.0,<2.1 packages = find: python_requires = >=3.8 diff --git a/pyproject.toml b/pyproject.toml index a1b0be2fda3d..86a696525145 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -108,7 +108,7 @@ Routes = "*" schema-salad = "!=8.3.20220721194857" # https://github.com/common-workflow-language/schema_salad/issues/575 social-auth-core = {version = "==4.0.3", extras = ["openidconnect"]} sortedcontainers = "*" 
-SQLAlchemy = ">=1.4.25,<2" +SQLAlchemy = "^2.0" sqlitedict = "*" sqlparse = "*" starlette = "*" From 5dc6e0731c911314b94cf55679cb7e4851c9da03 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 19 Dec 2023 13:00:19 -0500 Subject: [PATCH 02/71] Remove RemovedIn20Warning from config This does not exist in SQLAlchemy 2.0 --- lib/galaxy/config/__init__.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index ff3ea85a5ae1..a68c692f1cff 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -743,7 +743,6 @@ class GalaxyAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin): def __init__(self, **kwargs): super().__init__(**kwargs) self._override_tempdir(kwargs) - self._configure_sqlalchemy20_warnings(kwargs) self._process_config(kwargs) self._set_dependent_defaults() @@ -760,36 +759,6 @@ def _set_dependent_defaults(self): f"{dependent_config_param}, {config_param}" ) - def _configure_sqlalchemy20_warnings(self, kwargs): - """ - This method should be deleted after migration to SQLAlchemy 2.0 is complete. - To enable warnings, set `GALAXY_CONFIG_SQLALCHEMY_WARN_20=1`, - """ - warn = string_as_bool(kwargs.get("sqlalchemy_warn_20", False)) - if warn: - import sqlalchemy - - sqlalchemy.util.deprecations.SQLALCHEMY_WARN_20 = True - self._setup_sqlalchemy20_warnings_filters() - - def _setup_sqlalchemy20_warnings_filters(self): - import warnings - - from sqlalchemy.exc import RemovedIn20Warning - - # Always display RemovedIn20Warning warnings. - warnings.filterwarnings("always", category=RemovedIn20Warning) - # Optionally, enable filters for specific warnings (raise error, or log, etc.) 
- # messages = [ - # r"replace with warning text to match", - # ] - # for msg in messages: - # warnings.filterwarnings('error', message=msg, category=RemovedIn20Warning) - # - # See documentation: - # https://docs.python.org/3.7/library/warnings.html#the-warnings-filter - # https://docs.sqlalchemy.org/en/14/changelog/migration_20.html#migration-to-2-0-step-three-resolve-all-removedin20warnings - def _load_schema(self): return AppSchema(GALAXY_CONFIG_SCHEMA_PATH, GALAXY_APP_NAME) From 0b75e555ab637a1228d7ffabb87b995b8808112f Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 9 Jan 2024 16:17:09 -0500 Subject: [PATCH 03/71] Update import path for DeclarativeMeta --- lib/galaxy/model/__init__.py | 4 ++-- lib/galaxy/model/tool_shed_install/__init__.py | 4 ++-- lib/tool_shed/webapp/model/__init__.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 97c41e7ee2e1..0a79864fe0a0 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -216,7 +216,7 @@ if TYPE_CHECKING: # Workaround for https://github.com/python/mypy/issues/14182 - from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta class DeclarativeMeta(_DeclarativeMeta, type): pass @@ -230,7 +230,7 @@ class _HasTable: __table__: Table else: - from sqlalchemy.orm.decl_api import DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta _HasTable = object diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py index ab5a7d9a41b8..edec5b1f5c82 100644 --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -44,13 +44,13 @@ if TYPE_CHECKING: # Workaround for https://github.com/python/mypy/issues/14182 - from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta as 
_DeclarativeMeta class DeclarativeMeta(_DeclarativeMeta, type): pass else: - from sqlalchemy.orm.decl_api import DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta class HasToolBox(common_util.HasToolShedRegistry, Protocol): diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 2f2be9dccfbe..3b2003d3e1be 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -62,13 +62,13 @@ if TYPE_CHECKING: # Workaround for https://github.com/python/mypy/issues/14182 - from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta class DeclarativeMeta(_DeclarativeMeta, type): pass else: - from sqlalchemy.orm.decl_api import DeclarativeMeta + from sqlalchemy.orm import DeclarativeMeta mapper_registry = registry() From 54578a0425c21a72cbc93a161aac8945bc1304f5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 12 Dec 2023 18:40:19 -0500 Subject: [PATCH 04/71] Move declaration of injected attrs into constructor Remove unused import For context: https://github.com/galaxyproject/galaxy/pull/14717/files#r1486979280 Also, remove model attr type hints that conflict with SA2.0 --- lib/galaxy/model/__init__.py | 52 +++++++++++++++++------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 0a79864fe0a0..a4bec20b9c47 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -140,7 +140,6 @@ ) from galaxy.model.orm.now import now from galaxy.model.orm.util import add_object_to_object_session -from galaxy.objectstore import ObjectStore from galaxy.schema.invocation import ( InvocationCancellationUserRequest, InvocationState, @@ -737,7 +736,7 @@ class User(Base, Dictifiable, RepresentById): "FormValues", primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] ) # Add type hint (will 
this work w/SA?) - api_keys: "List[APIKeys]" = relationship( + api_keys = relationship( "APIKeys", back_populates="user", order_by=lambda: desc(APIKeys.create_time), @@ -767,7 +766,7 @@ class User(Base, Dictifiable, RepresentById): ), ) - preferences: association_proxy # defined at the end of this module + preferences = None # attributes that will be accessed and returned when calling to_dict( view='collection' ) dict_collection_visible_keys = ["id", "email", "username", "deleted", "active", "last_password_change"] @@ -1386,8 +1385,8 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): "WorkflowInvocationStep", back_populates="job", uselist=False, cascade_backrefs=False ) - any_output_dataset_collection_instances_deleted: column_property # defined at the end of this module - any_output_dataset_deleted: column_property # defined at the end of this module + any_output_dataset_collection_instances_deleted = None + any_output_dataset_deleted = None dict_collection_visible_keys = ["id", "state", "exit_code", "update_time", "create_time", "galaxy_version"] dict_element_visible_keys = [ @@ -3032,8 +3031,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable update_time = column_property( select(func.max(HistoryAudit.update_time)).where(HistoryAudit.history_id == id).scalar_subquery(), ) - users_shared_with_count: column_property # defined at the end of this module - average_rating: column_property # defined at the end of this module + users_shared_with_count = None + average_rating = None # Set up proxy so that # History.users_shared_with @@ -3972,7 +3971,7 @@ class conversion_messages(str, Enum): permitted_actions = get_permitted_actions(filter="DATASET") file_path = "/tmp/" - object_store: Optional[ObjectStore] = None # This get initialized in mapping.py (method init) by app.py + object_store = None # This get initialized in mapping.py (method init) by app.py engine = None def __init__( @@ -7427,7 +7426,7 @@ 
class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): ) users_shared_with = relationship("StoredWorkflowUserShareAssociation", back_populates="stored_workflow") - average_rating: column_property + average_rating = None # Set up proxy so that # StoredWorkflow.users_shared_with @@ -7555,7 +7554,7 @@ class Workflow(Base, Dictifiable, RepresentById): source_metadata = Column(JSONType) uuid = Column(UUIDType, nullable=True) - steps: List["WorkflowStep"] = relationship( + steps = relationship( "WorkflowStep", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), # type: ignore[has-type] @@ -7563,7 +7562,7 @@ class Workflow(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", lazy=False, ) - comments: List["WorkflowComment"] = relationship( + comments = relationship( "WorkflowComment", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), # type: ignore[has-type] @@ -7582,7 +7581,7 @@ class Workflow(Base, Dictifiable, RepresentById): back_populates="workflows", ) - step_count: column_property + step_count = None dict_collection_visible_keys = ["name", "has_cycles", "has_errors"] dict_element_visible_keys = ["name", "has_cycles", "has_errors"] @@ -7740,7 +7739,7 @@ class WorkflowStep(Base, RepresentById): when_expression = Column(JSONType) uuid = Column(UUIDType) label = Column(Unicode(255)) - temp_input_connections: Optional[InputConnDictType] + temp_input_connections = None parent_comment_id = Column(Integer, ForeignKey("workflow_comment.id"), nullable=True) parent_comment = relationship( @@ -7749,7 +7748,7 @@ class WorkflowStep(Base, RepresentById): back_populates="child_steps", ) - subworkflow: Optional[Workflow] = relationship( + subworkflow = relationship( "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), back_populates="parent_workflow_steps", @@ -7776,12 +7775,6 @@ class WorkflowStep(Base, RepresentById): cascade_backrefs=False, ) - # 
Injected attributes - # TODO: code using these should be refactored to not depend on these non-persistent fields - module: Optional["WorkflowModule"] - state: Optional["DefaultToolState"] - upgrade_messages: Optional[Dict] - STEP_TYPE_TO_INPUT_TYPE = { "data_input": "dataset", "data_collection_input": "dataset_collection", @@ -7793,6 +7786,11 @@ def __init__(self): self.uuid = uuid4() self._input_connections_by_name = None self._inputs_by_name = None + # Injected attributes + # TODO: code using these should be refactored to not depend on these non-persistent fields + self.module: Optional["WorkflowModule"] + self.state: Optional["DefaultToolState"] + self.upgrade_messages: Optional[Dict] @reconstructor def init_on_load(self): @@ -8196,20 +8194,20 @@ class WorkflowComment(Base, RepresentById): back_populates="comments", ) - child_steps: List["WorkflowStep"] = relationship( + child_steps = relationship( "WorkflowStep", primaryjoin=(lambda: WorkflowStep.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", ) - parent_comment: "WorkflowComment" = relationship( + parent_comment = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowComment.parent_comment_id), back_populates="child_comments", remote_side=[id], ) - child_comments: List["WorkflowComment"] = relationship( + child_comments = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", @@ -8321,7 +8319,7 @@ class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializabl order_by=lambda: WorkflowInvocationStep.order_index, cascade_backrefs=False, ) - workflow: Workflow = relationship("Workflow") + workflow = relationship("Workflow") output_dataset_collections = relationship( "WorkflowInvocationOutputDatasetCollectionAssociation", back_populates="workflow_invocation", @@ -8968,7 +8966,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): 
select(WorkflowStep.order_index).where(WorkflowStep.id == workflow_step_id).scalar_subquery() ) - subworkflow_invocation_id: column_property + subworkflow_invocation_id = None dict_collection_visible_keys = [ "id", @@ -10054,7 +10052,7 @@ class Page(Base, HasTags, Dictifiable, RepresentById): ) users_shared_with = relationship("PageUserShareAssociation", back_populates="page") - average_rating: column_property # defined at the end of this module + average_rating = None # Set up proxy so that # Page.users_shared_with @@ -10181,7 +10179,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): ) users_shared_with = relationship("VisualizationUserShareAssociation", back_populates="visualization") - average_rating: column_property # defined at the end of this module + average_rating = None # Set up proxy so that # Visualization.users_shared_with From 1f207ece9f594723c6c27a038e41fd5ab4126674 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 20 Dec 2023 16:58:01 -0500 Subject: [PATCH 05/71] Apply Mapped/mapped_column to model definitions Included models: galaxy, tool shed, tool shed install Column types: DateTime Integer Boolean Unicode String (Text/TEXT/TrimmedString/VARCHAR) UUID Numeric NOTE on typing of nullability: db schema != python app - Mapped[datetime] specifies correct type for the python app; - nullable=True specifies correct mapping to the db schema (that's what the CREATE TABLE sql statement will reflect). mapped_column.nullable takes precedence over typing annotation of Mapped. So, if we have: foo: Mapped[str] = mapped_column(String, nullable=True) - that means that the foo db field will allow NULL, but the python app will not allow foo = None. And vice-versa: bar: Mapped[Optional[str]] = mapped_column(String, nullable=False) - the bar db field is NOT NULL, but bar = None is OK. This might need to be applied to other column definitions, but for now this addresses specific mypy errors. 
Ref: https://docs.sqlalchemy.org/en/20/orm/declarative_tables.html#mapped-column-derives-the-datatype-and-nullability-from-the-mapped-annotation --- lib/galaxy/model/__init__.py | 2124 +++++++++-------- .../model/tool_shed_install/__init__.py | 93 +- lib/tool_shed/webapp/model/__init__.py | 193 +- 3 files changed, 1294 insertions(+), 1116 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index a4bec20b9c47..25dcb625cf9f 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -18,7 +18,11 @@ import string from collections import defaultdict from collections.abc import Callable -from datetime import timedelta +from datetime import ( + datetime, + timedelta, +) +from decimal import Decimal from enum import Enum from secrets import token_hex from string import Template @@ -100,6 +104,8 @@ column_property, deferred, joinedload, + Mapped, + mapped_column, object_session, Query, reconstructor, @@ -434,11 +440,11 @@ class WorkerProcess(Base, UsesCreateAndUpdateTime): __tablename__ = "worker_process" __table_args__ = (UniqueConstraint("server_name", "hostname"),) - id = Column(Integer, primary_key=True) - server_name = Column(String(255), index=True) - hostname = Column(String(255)) - pid = Column(Integer) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + server_name: Mapped[Optional[str]] = mapped_column(String(255), index=True) + hostname: Mapped[Optional[str]] = mapped_column(String(255)) + pid: Mapped[Optional[int]] = mapped_column(Integer) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) def cached_id(galaxy_model_object): @@ -681,22 +687,22 @@ class User(Base, Dictifiable, RepresentById): __tablename__ = "galaxy_user" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - email = 
Column(TrimmedString(255), index=True, nullable=False) - username = Column(TrimmedString(255), index=True, unique=True) - password = Column(TrimmedString(255), nullable=False) - last_password_change = Column(DateTime, default=now) - external = Column(Boolean, default=False) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - preferred_object_store_id = Column(String(255), nullable=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - disk_usage = Column(Numeric(15, 0), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + email: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=False) + username: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) + password: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + last_password_change: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + external: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + preferred_object_store_id: Mapped[str] = mapped_column(String(255), nullable=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True) # Column("person_metadata", JSONType), # TODO: add persistent, configurable metadata rep for workflow creator - active = Column(Boolean, index=True, default=True, nullable=False) - activation_token = Column(TrimmedString(64), nullable=True, index=True) + active: Mapped[bool] = mapped_column(Boolean, 
index=True, default=True, nullable=False) + activation_token: Mapped[Optional[str]] = mapped_column(TrimmedString(64), nullable=True, index=True) addresses = relationship( "UserAddress", back_populates="user", order_by=lambda: desc(UserAddress.update_time), cascade_backrefs=False @@ -1210,10 +1216,10 @@ def count_stored_workflow_user_assocs(self, stored_workflow) -> int: class PasswordResetToken(Base): __tablename__ = "password_reset_token" - token = Column(String(32), primary_key=True, unique=True, index=True) - expiration_time = Column(DateTime) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True) + expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user: Mapped["User"] = relationship("User") def __init__(self, user, token=None): if token: @@ -1227,17 +1233,17 @@ def __init__(self, user, token=None): class DynamicTool(Base, Dictifiable, RepresentById): __tablename__ = "dynamic_tool" - id = Column(Integer, primary_key=True) - uuid = Column(UUIDType()) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - tool_id = Column(Unicode(255)) - tool_version = Column(Unicode(255)) - tool_format = Column(Unicode(255)) - tool_path = Column(Unicode(255)) - tool_directory = Column(Unicode(255)) - hidden = Column(Boolean, default=True) - active = Column(Boolean, default=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType()) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + tool_id: Mapped[Optional[str]] = mapped_column(Unicode(255)) + 
tool_version: Mapped[Optional[str]] = mapped_column(Unicode(255)) + tool_format: Mapped[Optional[str]] = mapped_column(Unicode(255)) + tool_path: Mapped[Optional[str]] = mapped_column(Unicode(255)) + tool_directory: Mapped[Optional[str]] = mapped_column(Unicode(255)) + hidden: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + active: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) value = Column(MutableJSONType) dict_collection_visible_keys = ("id", "tool_id", "tool_format", "tool_version", "uuid", "active", "hidden") @@ -1264,41 +1270,41 @@ def __init__(self, plugin, metric_name, metric_value): class JobMetricText(BaseJobMetric, RepresentById): __tablename__ = "job_metric_text" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - plugin = Column(Unicode(255)) - metric_name = Column(Unicode(255)) - metric_value = Column(Unicode(JOB_METRIC_MAX_LENGTH)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + plugin: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_value: Mapped[Optional[str]] = mapped_column(Unicode(JOB_METRIC_MAX_LENGTH)) class JobMetricNumeric(BaseJobMetric, RepresentById): __tablename__ = "job_metric_numeric" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - plugin = Column(Unicode(255)) - metric_name = Column(Unicode(255)) - metric_value = Column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + plugin: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_value: Mapped[Optional[Decimal]] = 
mapped_column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE)) class TaskMetricText(BaseJobMetric, RepresentById): __tablename__ = "task_metric_text" - id = Column(Integer, primary_key=True) - task_id = Column(Integer, ForeignKey("task.id"), index=True) - plugin = Column(Unicode(255)) - metric_name = Column(Unicode(255)) - metric_value = Column(Unicode(JOB_METRIC_MAX_LENGTH)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + task_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("task.id"), index=True) + plugin: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_value: Mapped[Optional[str]] = mapped_column(Unicode(JOB_METRIC_MAX_LENGTH)) class TaskMetricNumeric(BaseJobMetric, RepresentById): __tablename__ = "task_metric_numeric" - id = Column(Integer, primary_key=True) - task_id = Column(Integer, ForeignKey("task.id"), index=True) - plugin = Column(Unicode(255)) - metric_name = Column(Unicode(255)) - metric_value = Column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + task_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("task.id"), index=True) + plugin: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + metric_value: Mapped[Optional[Decimal]] = mapped_column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE)) class IoDicts(NamedTuple): @@ -1315,40 +1321,44 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): __tablename__ = "job" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) - tool_id = Column(String(255)) - tool_version = 
Column(TEXT, default="1.0.0") - galaxy_version = Column(String(64), default=None) - dynamic_tool_id = Column(Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True) - state = Column(String(64), index=True) - info = Column(TrimmedString(255)) - copied_from_job_id = Column(Integer, nullable=True) - command_line = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + library_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) + tool_id: Mapped[Optional[str]] = mapped_column(String(255)) + tool_version: Mapped[Optional[str]] = mapped_column(TEXT, default="1.0.0") + galaxy_version: Mapped[Optional[str]] = mapped_column(String(64), default=None) + dynamic_tool_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True + ) + state: Mapped[Optional[str]] = mapped_column(String(64), index=True) + info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + copied_from_job_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) + command_line: Mapped[Optional[str]] = mapped_column(TEXT) dependencies = Column(MutableJSONType, nullable=True) job_messages = Column(MutableJSONType, nullable=True) - param_filename = Column(String(1024)) - runner_name = Column(String(255)) - job_stdout = Column(TEXT) - job_stderr = Column(TEXT) - tool_stdout = Column(TEXT) - tool_stderr = Column(TEXT) - exit_code = Column(Integer, nullable=True) - traceback = Column(TEXT) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) - 
job_runner_name = Column(String(255)) - job_runner_external_id = Column(String(255), index=True) - destination_id = Column(String(255), nullable=True) + param_filename: Mapped[Optional[str]] = mapped_column(String(1024)) + runner_name: Mapped[Optional[str]] = mapped_column(String(255)) + job_stdout: Mapped[Optional[str]] = mapped_column(TEXT) + job_stderr: Mapped[Optional[str]] = mapped_column(TEXT) + tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT) + tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT) + exit_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) + traceback: Mapped[Optional[str]] = mapped_column(TEXT) + session_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) + job_runner_name: Mapped[Optional[str]] = mapped_column(String(255)) + job_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255), index=True) + destination_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) destination_params = Column(MutableJSONType, nullable=True) - object_store_id = Column(TrimmedString(255), index=True) - imported = Column(Boolean, default=False, index=True) - params = Column(TrimmedString(255), index=True) - handler = Column(TrimmedString(255), index=True) - preferred_object_store_id = Column(String(255), nullable=True) + object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + imported: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True) + params: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) object_store_id_overrides = Column(JSONType) user = relationship("User") @@ -2083,27 
+2093,29 @@ class Task(Base, JobLike, RepresentById): __tablename__ = "task" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - execution_time = Column(DateTime) - update_time = Column(DateTime, default=now, onupdate=now) - state = Column(String(64), index=True) - command_line = Column(TEXT) - param_filename = Column(String(1024)) - runner_name = Column(String(255)) - job_stdout = Column(TEXT) # job_stdout makes sense here because it is short for job script standard out - job_stderr = Column(TEXT) - tool_stdout = Column(TEXT) - tool_stderr = Column(TEXT) - exit_code = Column(Integer, nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + execution_time: Mapped[Optional[datetime]] = mapped_column(DateTime) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + state: Mapped[Optional[str]] = mapped_column(String(64), index=True) + command_line: Mapped[Optional[str]] = mapped_column(TEXT) + param_filename: Mapped[Optional[str]] = mapped_column(String(1024)) + runner_name: Mapped[Optional[str]] = mapped_column(String(255)) + job_stdout: Mapped[Optional[str]] = mapped_column( + TEXT + ) # job_stdout makes sense here because it is short for job script standard out + job_stderr: Mapped[Optional[str]] = mapped_column(TEXT) + tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT) + tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT) + exit_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) job_messages = Column(MutableJSONType, nullable=True) - info = Column(TrimmedString(255)) - traceback = Column(TEXT) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=False) - working_directory = Column(String(1024)) - task_runner_name = Column(String(255)) - task_runner_external_id = Column(String(255)) - prepare_input_files_cmd = Column(TEXT) 
+ info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + traceback: Mapped[Optional[str]] = mapped_column(TEXT) + job_id: Mapped[int] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=False) + working_directory: Mapped[Optional[str]] = mapped_column(String(1024)) + task_runner_name: Mapped[Optional[str]] = mapped_column(String(255)) + task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255)) + prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job", back_populates="tasks") text_metrics = relationship("TaskMetricText") numeric_metrics = relationship("TaskMetricNumeric") @@ -2253,10 +2265,10 @@ def set_prepare_input_files_cmd(self, prepare_input_files_cmd): class JobParameter(Base, RepresentById): __tablename__ = "job_parameter" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - name = Column(String(255)) - value = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) + value: Mapped[Optional[str]] = mapped_column(TEXT) def __init__(self, name, value): self.name = name @@ -2269,11 +2281,11 @@ def copy(self): class JobToInputDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - dataset_version = Column(Integer) - name = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + dataset_version: Mapped[Optional[int]] = 
mapped_column(Integer) + name: Mapped[Optional[str]] = mapped_column(String(255)) dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="dependent_jobs") job = relationship("Job", back_populates="input_datasets") @@ -2287,10 +2299,10 @@ def __init__(self, name, dataset): class JobToOutputDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_output_dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - name = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="creating_job_associations") job = relationship("Job", back_populates="output_datasets") @@ -2307,10 +2319,12 @@ def item(self): class JobToInputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset_collection" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - name = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(String(255)) dataset_collection = relationship("HistoryDatasetCollectionAssociation", lazy="joined") job = relationship("Job", 
back_populates="input_dataset_collections") @@ -2322,10 +2336,12 @@ def __init__(self, name, dataset_collection): class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): __tablename__ = "job_to_input_dataset_collection_element" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_element_id = Column(Integer, ForeignKey("dataset_collection_element.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_element_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dataset_collection_element.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection_element = relationship("DatasetCollectionElement", lazy="joined") job = relationship("Job", back_populates="input_dataset_collection_elements") @@ -2339,10 +2355,12 @@ def __init__(self, name, dataset_collection_element): class JobToOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_output_dataset_collection" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection_instance = relationship("HistoryDatasetCollectionAssociation", lazy="joined") job = relationship("Job", back_populates="output_dataset_collection_instances") @@ -2361,10 +2379,12 
@@ def item(self): class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_implicit_output_dataset_collection" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection = relationship("DatasetCollection") job = relationship("Job", back_populates="output_dataset_collections") @@ -2376,10 +2396,12 @@ def __init__(self, name, dataset_collection): class JobToInputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_input_library_dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) job = relationship("Job", back_populates="input_library_datasets") dataset = relationship("LibraryDatasetDatasetAssociation", lazy="joined", back_populates="dependent_jobs") @@ -2392,10 +2414,12 @@ def __init__(self, name, dataset): class JobToOutputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_output_library_dataset" - id = Column(Integer, primary_key=True) - job_id = 
Column(Integer, ForeignKey("job.id"), index=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) job = relationship("Job", back_populates="output_library_datasets") dataset = relationship( "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="creating_job_associations" @@ -2410,11 +2434,11 @@ def __init__(self, name, dataset): class JobStateHistory(Base, RepresentById): __tablename__ = "job_state_history" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - state = Column(String(64), index=True) - info = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + state: Mapped[Optional[str]] = mapped_column(String(64), index=True) + info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) def __init__(self, job): self.job_id = job.id @@ -2425,10 +2449,14 @@ def __init__(self, job): class ImplicitlyCreatedDatasetCollectionInput(Base, RepresentById): __tablename__ = "implicitly_created_dataset_collection_inputs" - id = Column(Integer, primary_key=True) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - input_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = 
mapped_column(Integer, primary_key=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + input_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) input_dataset_collection = relationship( "HistoryDatasetCollectionAssociation", @@ -2446,8 +2474,8 @@ def __init__(self, name, input_dataset_collection): class ImplicitCollectionJobs(Base, Serializable): __tablename__ = "implicit_collection_jobs" - id = Column(Integer, primary_key=True) - populated_state = Column(TrimmedString(64), default="new", nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="new", nullable=False) jobs = relationship( "ImplicitCollectionJobsJobAssociation", back_populates="implicit_collection_jobs", cascade_backrefs=False ) @@ -2477,10 +2505,14 @@ def _serialize(self, id_encoder, serialization_options): class ImplicitCollectionJobsJobAssociation(Base, RepresentById): __tablename__ = "implicit_collection_jobs_job_association" - id = Column(Integer, primary_key=True) - implicit_collection_jobs_id = Column(Integer, ForeignKey("implicit_collection_jobs.id"), index=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) # Consider making this nullable... - order_index = Column(Integer, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("implicit_collection_jobs.id"), index=True + ) + job_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("job.id"), index=True + ) # Consider making this nullable... 
+ order_index: Mapped[int] = mapped_column(Integer, nullable=False) implicit_collection_jobs = relationship("ImplicitCollectionJobs", back_populates="jobs") job = relationship("Job", back_populates="implicit_collection_jobs_association") @@ -2488,10 +2520,12 @@ class ImplicitCollectionJobsJobAssociation(Base, RepresentById): class PostJobAction(Base, RepresentById): __tablename__ = "post_job_action" - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=True) - action_type = Column(String(255), nullable=False) - output_name = Column(String(255), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id"), index=True, nullable=True + ) + action_type: Mapped[str] = mapped_column(String(255), nullable=False) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) action_arguments = Column(MutableJSONType, nullable=True) workflow_step = relationship( "WorkflowStep", @@ -2510,9 +2544,11 @@ def __init__(self, action_type, workflow_step=None, output_name=None, action_arg class PostJobActionAssociation(Base, RepresentById): __tablename__ = "post_job_action_association" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=False) - post_job_action_id = Column(Integer, ForeignKey("post_job_action.id"), index=True, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[int] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=False) + post_job_action_id: Mapped[int] = mapped_column( + Integer, ForeignKey("post_job_action.id"), index=True, nullable=False + ) post_job_action = relationship("PostJobAction") job = relationship("Job", back_populates="post_job_actions") @@ -2530,21 +2566,21 @@ def __init__(self, pja, job=None, job_id=None): class 
JobExternalOutputMetadata(Base, RepresentById): __tablename__ = "job_external_output_metadata" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - history_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True ) - library_dataset_dataset_association_id = Column( + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True ) - is_valid = Column(Boolean, default=True) - filename_in = Column(String(255)) - filename_out = Column(String(255)) - filename_results_code = Column(String(255)) - filename_kwds = Column(String(255)) - filename_override_metadata = Column(String(255)) - job_runner_external_pid = Column(String(255)) + is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + filename_in: Mapped[Optional[str]] = mapped_column(String(255)) + filename_out: Mapped[Optional[str]] = mapped_column(String(255)) + filename_results_code: Mapped[Optional[str]] = mapped_column(String(255)) + filename_kwds: Mapped[Optional[str]] = mapped_column(String(255)) + filename_override_metadata: Mapped[Optional[str]] = mapped_column(String(255)) + job_runner_external_pid: Mapped[Optional[str]] = mapped_column(String(255)) history_dataset_association = relationship("HistoryDatasetAssociation", lazy="joined") library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", lazy="joined") job = relationship("Job", back_populates="external_output_metadata") @@ -2587,12 +2623,12 @@ def __eq__(self, other): class JobExportHistoryArchive(Base, RepresentById): __tablename__ = "job_export_history_archive" - id = 
Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - compressed = Column(Boolean, index=True, default=False) - history_attrs_filename = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + compressed: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + history_attrs_filename: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job") dataset = relationship("Dataset") history = relationship("History", back_populates="exports") @@ -2675,10 +2711,10 @@ def to_dict(self): class JobImportHistoryArchive(Base, RepresentById): __tablename__ = "job_import_history_archive" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - archive_dir = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + archive_dir: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job") history = relationship("History") @@ -2687,24 +2723,24 @@ class StoreExportAssociation(Base, RepresentById): __tablename__ = "store_export_association" __table_args__ = (Index("ix_store_export_object", "object_id", "object_type"),) - id = Column(Integer, primary_key=True) - task_uuid = Column(UUIDType(), index=True, unique=True) - create_time = Column(DateTime, 
default=now) - object_type = Column(TrimmedString(32)) - object_id = Column(Integer) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + task_uuid: Mapped[Optional[str]] = mapped_column(UUIDType(), index=True, unique=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + object_type: Mapped[Optional[str]] = mapped_column(TrimmedString(32)) + object_id: Mapped[Optional[int]] = mapped_column(Integer) export_metadata = Column(JSONType) class JobContainerAssociation(Base, RepresentById): __tablename__ = "job_container_association" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - container_type = Column(TEXT) - container_name = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + container_type: Mapped[Optional[str]] = mapped_column(TEXT) + container_name: Mapped[Optional[str]] = mapped_column(TEXT) container_info = Column(MutableJSONType, nullable=True) - created_time = Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) job = relationship("Job", back_populates="container") def __init__(self, **kwd): @@ -2717,24 +2753,24 @@ def __init__(self, **kwd): class InteractiveToolEntryPoint(Base, Dictifiable, RepresentById): __tablename__ = "interactivetool_entry_point" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - name = Column(TEXT) - token = Column(TEXT) - tool_port = Column(Integer) - host = Column(TEXT) - port = Column(Integer) - protocol = Column(TEXT) - entry_url = Column(TEXT) - requires_domain = Column(Boolean, default=True) - requires_path_in_url = Column(Boolean, default=False) - 
requires_path_in_header_named = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + token: Mapped[Optional[str]] = mapped_column(TEXT) + tool_port: Mapped[Optional[int]] = mapped_column(Integer) + host: Mapped[Optional[str]] = mapped_column(TEXT) + port: Mapped[Optional[int]] = mapped_column(Integer) + protocol: Mapped[Optional[str]] = mapped_column(TEXT) + entry_url: Mapped[Optional[str]] = mapped_column(TEXT) + requires_domain: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + requires_path_in_url: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + requires_path_in_header_named: Mapped[Optional[str]] = mapped_column(TEXT) info = Column(MutableJSONType, nullable=True) - configured = Column(Boolean, default=False) - deleted = Column(Boolean, default=False) - created_time = Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) - label = Column(TEXT) + configured: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + label: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job", back_populates="interactivetool_entry_points", uselist=False) dict_collection_visible_keys = [ @@ -2793,14 +2829,14 @@ def output_datasets_ids(self): class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost __tablename__ = "genome_index_tool_data" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - fasta_path = Column(String(255)) - created_time = 
Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) - indexer = Column(String(64)) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + fasta_path: Mapped[Optional[str]] = mapped_column(String(255)) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + indexer: Mapped[Optional[str]] = mapped_column(String(64)) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) job = relationship("Job") dataset = relationship("Dataset") user = relationship("User") @@ -2809,11 +2845,11 @@ class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost class Group(Base, Dictifiable, RepresentById): __tablename__ = "galaxy_group" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) quotas = relationship("GroupQuotaAssociation", back_populates="group") roles = relationship("GroupRoleAssociation", back_populates="group", cascade_backrefs=False) users = relationship("UserGroupAssociation", 
back_populates="group") @@ -2829,11 +2865,11 @@ def __init__(self, name=None): class UserGroupAssociation(Base, RepresentById): __tablename__ = "user_group_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="groups") group = relationship("Group", back_populates="users") @@ -2846,20 +2882,20 @@ def __init__(self, user, group): class Notification(Base, Dictifiable, RepresentById): __tablename__ = "notification" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - publication_time = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + publication_time: Mapped[Optional[datetime]] = mapped_column( DateTime, default=now ) # The date of publication, can be a future date to allow scheduling - expiration_time = Column( + expiration_time: Mapped[Optional[datetime]] = mapped_column( DateTime, default=now() + timedelta(days=30 * 6) ) # The expiration date, expired notifications will be permanently removed from DB regularly - source = Column(String(32), 
index=True) # Who (or what) generated the notification - category = Column( + source: Mapped[Optional[str]] = mapped_column(String(32), index=True) # Who (or what) generated the notification + category: Mapped[Optional[str]] = mapped_column( String(64), index=True ) # Category of the notification, defines its contents. Used for filtering, un/subscribing, etc - variant = Column( + variant: Mapped[Optional[str]] = mapped_column( String(16), index=True ) # Defines the 'importance' of the notification ('info', 'warning', 'urgent', etc.). Used for filtering, highlight rendering, etc # A bug in early 23.1 led to values being stored as json string, so we use this special type to process the result value twice. @@ -2878,12 +2914,12 @@ def __init__(self, source: str, category: str, variant: str, content): class UserNotificationAssociation(Base, RepresentById): __tablename__ = "user_notification_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - notification_id = Column(Integer, ForeignKey("notification.id"), index=True) - seen_time = Column(DateTime, nullable=True) - deleted = Column(Boolean, index=True, default=False) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + notification_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("notification.id"), index=True) + seen_time: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) user = relationship("User", back_populates="all_notifications") notification = relationship("Notification", back_populates="user_notification_associations") @@ -2902,8 +2938,8 @@ class HistoryAudit(Base, 
RepresentById): __tablename__ = "history_audit" __table_args__ = (PrimaryKeyConstraint(sqlite_on_conflict="IGNORE"),) - history_id = Column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) - update_time = Column(DateTime, default=now, primary_key=True, nullable=False) + history_id: Mapped[int] = mapped_column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, primary_key=True, nullable=False) # This class should never be instantiated. # See https://github.com/galaxyproject/galaxy/pull/11914 for details. @@ -2937,22 +2973,26 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable __tablename__ = "history" __table_args__ = (Index("ix_history_slug", "slug", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - _update_time = Column("update_time", DateTime, index=True, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - name = Column(TrimmedString(255)) - hid_counter = Column(Integer, default=1) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - importing = Column(Boolean, index=True, default=False) - genome_build = Column(TrimmedString(40)) - importable = Column(Boolean, default=False) - slug = Column(TEXT) - published = Column(Boolean, index=True, default=False) - preferred_object_store_id = Column(String(255), nullable=True) - archived = Column(Boolean, index=True, default=False, server_default=false()) - archive_export_id = Column(Integer, ForeignKey("store_export_association.id"), nullable=True, default=None) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + _update_time: Mapped[datetime] = mapped_column( + "update_time", DateTime, index=True, default=now, onupdate=now, nullable=True
) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + hid_counter: Mapped[Optional[int]] = mapped_column(Integer, default=1) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + importing: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + archived: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False, server_default=false()) + archive_export_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("store_export_association.id"), nullable=True, default=None + ) datasets = relationship( "HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] @@ -3516,9 +3556,9 @@ class UserShareAssociation(RepresentById): class HistoryUserShareAssociation(Base, UserShareAssociation): __tablename__ = "history_user_share_association" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) user = relationship("User") history = 
relationship("History", back_populates="users_shared_with") @@ -3526,11 +3566,11 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): class UserRoleAssociation(Base, RepresentById): __tablename__ = "user_role_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="roles") role = relationship("Role", back_populates="users") @@ -3544,11 +3584,11 @@ def __init__(self, user, role): class GroupRoleAssociation(Base, RepresentById): __tablename__ = "group_role_association" - id = Column(Integer, primary_key=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) group = relationship("Group", back_populates="roles") role = 
relationship("Role", back_populates="groups") @@ -3561,13 +3601,13 @@ def __init__(self, group, role): class Role(Base, Dictifiable, RepresentById): __tablename__ = "role" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - description = Column(TEXT) - type = Column(String(40), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(String(40), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) dataset_actions = relationship("DatasetPermissions", back_populates="role") groups = relationship("GroupRoleAssociation", back_populates="role") users = relationship("UserRoleAssociation", back_populates="role") @@ -3596,22 +3636,22 @@ class UserQuotaSourceUsage(Base, Dictifiable, RepresentById): dict_element_visible_keys = ["disk_usage", "quota_source_label"] - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - quota_source_label = Column(String(32), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), index=True) # user had an index on disk_usage - does that make any sense? 
-John - disk_usage = Column(Numeric(15, 0), default=0, nullable=False) + disk_usage: Mapped[Decimal] = mapped_column(Numeric(15, 0), default=0, nullable=False) user = relationship("User", back_populates="quota_source_usages") class UserQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "user_quota_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="quotas") quota = relationship("Quota", back_populates="users") @@ -3626,11 +3666,11 @@ def __init__(self, user, quota): class GroupQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "group_quota_association" - id = Column(Integer, primary_key=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = 
mapped_column(DateTime, default=now, onupdate=now, nullable=True) group = relationship("Group", back_populates="quotas") quota = relationship("Quota", back_populates="groups") @@ -3646,15 +3686,15 @@ class Quota(Base, Dictifiable, RepresentById): __tablename__ = "quota" __table_args__ = (Index("ix_quota_quota_source_label", "quota_source_label"),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - description = Column(TEXT) - bytes = Column(BigInteger) - operation = Column(String(8)) - deleted = Column(Boolean, index=True, default=False) - quota_source_label = Column(String(32), default=None) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + bytes: Mapped[Optional[int]] = mapped_column(BigInteger) + operation: Mapped[Optional[str]] = mapped_column(String(8)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) default = relationship("DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False) groups = relationship("GroupQuotaAssociation", back_populates="quota") users = relationship("UserQuotaAssociation", back_populates="quota") @@ -3708,11 +3748,11 @@ def display_amount(self): class DefaultQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "default_quota_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - type = 
Column(String(32)) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + type: Mapped[Optional[str]] = mapped_column(String(32)) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) quota = relationship("Quota", back_populates="default") dict_element_visible_keys = ["type"] @@ -3731,12 +3771,12 @@ def __init__(self, type, quota): class DatasetPermissions(Base, RepresentById): __tablename__ = "dataset_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) dataset = relationship("Dataset", back_populates="actions") role = relationship("Role", back_populates="dataset_actions") @@ -3753,12 +3793,12 @@ def __init__(self, action, dataset, role=None, role_id=None): class LibraryPermissions(Base, RepresentById): __tablename__ = "library_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_id = 
Column(Integer, ForeignKey("library.id"), nullable=True, index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), nullable=True, index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) library = relationship("Library", back_populates="actions") role = relationship("Role") @@ -3775,12 +3815,14 @@ def __init__(self, action, library_item, role): class LibraryFolderPermissions(Base, RepresentById): __tablename__ = "library_folder_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_folder_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) folder = relationship("LibraryFolder", back_populates="actions") role = relationship("Role") @@ -3797,12 +3839,14 @@ def __init__(self, action, library_item, role): class LibraryDatasetPermissions(Base, RepresentById): __tablename__ = 
"library_dataset_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_dataset_id = Column(Integer, ForeignKey("library_dataset.id"), nullable=True, index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_dataset_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset.id"), nullable=True, index=True + ) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) library_dataset = relationship("LibraryDataset", back_populates="actions") role = relationship("Role") @@ -3819,14 +3863,14 @@ def __init__(self, action, library_item, role): class LibraryDatasetDatasetAssociationPermissions(Base, RepresentById): __tablename__ = "library_dataset_dataset_association_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_dataset_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), nullable=True, index=True ) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + role_id: Mapped[Optional[int]] = 
mapped_column(Integer, ForeignKey("role.id"), index=True) library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", back_populates="actions") role = relationship("Role") @@ -3843,10 +3887,10 @@ def __init__(self, action, library_item, role): class DefaultUserPermissions(Base, RepresentById): __tablename__ = "default_user_permissions" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - action = Column(TEXT) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) user = relationship("User", back_populates="default_permissions") role = relationship("Role") @@ -3860,10 +3904,10 @@ def __init__(self, user, action, role): class DefaultHistoryPermissions(Base, RepresentById): __tablename__ = "default_history_permissions" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - action = Column(TEXT) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) history = relationship("History", back_populates="default_permissions") role = relationship("Role") @@ -3884,21 +3928,21 @@ def flush(self): class Dataset(Base, StorableObject, Serializable): __tablename__ = "dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=True) - create_time = 
Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - state = Column(TrimmedString(64), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - purgable = Column(Boolean, default=True) - object_store_id = Column(TrimmedString(255), index=True) - external_filename = Column(TEXT) - _extra_files_path = Column(TEXT) - created_from_basename = Column(TEXT) - file_size = Column(Numeric(15, 0)) - total_size = Column(Numeric(15, 0)) - uuid = Column(UUIDType()) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purgable: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + external_filename: Mapped[Optional[str]] = mapped_column(TEXT) + _extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) + created_from_basename: Mapped[Optional[str]] = mapped_column(TEXT) + file_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) + total_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType()) actions = relationship("DatasetPermissions", back_populates="dataset") job = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) @@ -4264,10 +4308,10 @@ def to_int(n) -> Optional[int]: class DatasetSource(Base, Dictifiable, 
Serializable): __tablename__ = "dataset_source" - id = Column(Integer, primary_key=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - source_uri = Column(TEXT) - extra_files_path = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + source_uri: Mapped[Optional[str]] = mapped_column(TEXT) + extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) transform = Column(MutableJSONType) dataset = relationship("Dataset", back_populates="sources") hashes = relationship("DatasetSourceHash", back_populates="source") @@ -4302,10 +4346,10 @@ def copy(self) -> "DatasetSource": class DatasetSourceHash(Base, Serializable): __tablename__ = "dataset_source_hash" - id = Column(Integer, primary_key=True) - dataset_source_id = Column(Integer, ForeignKey("dataset_source.id"), index=True) - hash_function = Column(TEXT) - hash_value = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_source_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_source.id"), index=True) + hash_function: Mapped[Optional[str]] = mapped_column(TEXT) + hash_value: Mapped[Optional[str]] = mapped_column(TEXT) source = relationship("DatasetSource", back_populates="hashes") def _serialize(self, id_encoder, serialization_options): @@ -4327,11 +4371,11 @@ def copy(self) -> "DatasetSourceHash": class DatasetHash(Base, Dictifiable, Serializable): __tablename__ = "dataset_hash" - id = Column(Integer, primary_key=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - hash_function = Column(TEXT) - hash_value = Column(TEXT) - extra_files_path = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + hash_function: Mapped[Optional[str]] = mapped_column(TEXT) + hash_value: 
Mapped[Optional[str]] = mapped_column(TEXT) + extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) dataset = relationship("Dataset", back_populates="hashes") dict_collection_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] dict_element_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] @@ -5357,14 +5401,16 @@ def type_id(cls): class HistoryDatasetAssociationHistory(Base, Serializable): __tablename__ = "history_dataset_association_history" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - update_time = Column(DateTime, default=now) - version = Column(Integer) - name = Column(TrimmedString(255)) - extension = Column(TrimmedString(64)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + version: Mapped[Optional[int]] = mapped_column(Integer) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + extension: Mapped[Optional[str]] = mapped_column(TrimmedString(64)) _metadata = Column("metadata", MetadataType) - extended_metadata_id = Column(Integer, ForeignKey("extended_metadata.id"), index=True) + extended_metadata_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("extended_metadata.id"), index=True) def __init__( self, @@ -5391,12 +5437,14 @@ def __init__( class HistoryDatasetAssociationDisplayAtAuthorization(Base, RepresentById): __tablename__ = "history_dataset_association_display_at_authorization" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), 
index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - site = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + site: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) history_dataset_association = relationship("HistoryDatasetAssociation") user = relationship("User") @@ -5409,10 +5457,14 @@ def __init__(self, hda=None, user=None, site=None): class HistoryDatasetAssociationSubset(Base, RepresentById): __tablename__ = "history_dataset_association_subset" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - history_dataset_association_subset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - location = Column(Unicode(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + history_dataset_association_subset_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + location: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) hda = relationship( "HistoryDatasetAssociation", @@ -5437,15 +5489,15 @@ def __init__(self, hda, subset, location): class Library(Base, Dictifiable, HasName, Serializable): __tablename__ = "library" - id = Column(Integer, primary_key=True) - root_folder_id = Column(Integer, 
ForeignKey("library_folder.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - description = Column(TEXT) - synopsis = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + root_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + description: Mapped[Optional[str]] = mapped_column(TEXT) + synopsis: Mapped[Optional[str]] = mapped_column(TEXT) root_folder = relationship("LibraryFolder", back_populates="library_root") actions = relationship("LibraryPermissions", back_populates="library", cascade_backrefs=False) @@ -5515,17 +5567,19 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): __tablename__ = "library_folder" __table_args__ = (Index("ix_library_folder_name", "name", mysql_length=200),) - id = Column(Integer, primary_key=True) - parent_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TEXT) - description = Column(TEXT) - order_id = Column(Integer) # not currently being used, but for possible future use - item_count = Column(Integer) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - genome_build = Column(TrimmedString(40)) + id: 
Mapped[int] = mapped_column(Integer, primary_key=True) + parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + description: Mapped[Optional[str]] = mapped_column(TEXT) + order_id: Mapped[Optional[int]] = mapped_column(Integer) # not currently being used, but for possible future use + item_count: Mapped[Optional[int]] = mapped_column(Integer) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) folders = relationship( "LibraryFolder", @@ -5656,9 +5710,9 @@ def parent_library(self): class LibraryDataset(Base, Serializable): __tablename__ = "library_dataset" - id = Column(Integer, primary_key=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) # current version of dataset, if null, there is not a current version selected - library_dataset_dataset_association_id = Column( + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" @@ -5666,17 +5720,17 @@ class LibraryDataset(Base, Serializable): nullable=True, index=True, ) - folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) + folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) # not currently being used, but for possible future use - order_id = Column(Integer) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + order_id: 
Mapped[Optional[int]] = mapped_column(Integer) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) # when not None/null this will supercede display in library (but not when imported into user's history?) - _name = Column("name", TrimmedString(255), index=True) + _name: Mapped[Optional[str]] = mapped_column("name", TrimmedString(255), index=True) # when not None/null this will supercede display in library (but not when imported into user's history?) - _info = Column("info", TrimmedString(255)) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + _info: Mapped[Optional[str]] = mapped_column("info", TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) folder = relationship("LibraryFolder") library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True @@ -5962,7 +6016,7 @@ def update_parent_folder_update_times(self): class ExtendedMetadata(Base, RepresentById): __tablename__ = "extended_metadata" - id = Column(Integer, primary_key=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) data = Column(MutableJSONType) children = relationship("ExtendedMetadataIndex", back_populates="extended_metadata") @@ -5973,12 +6027,12 @@ def __init__(self, data): class ExtendedMetadataIndex(Base, RepresentById): __tablename__ = "extended_metadata_index" - id = Column(Integer, primary_key=True) - extended_metadata_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + extended_metadata_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("extended_metadata.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - path = 
Column(String(255)) - value = Column(TEXT) + path: Mapped[Optional[str]] = mapped_column(String(255)) + value: Mapped[Optional[str]] = mapped_column(TEXT) extended_metadata = relationship("ExtendedMetadata", back_populates="children") def __init__(self, extended_metadata, path, value): @@ -5990,12 +6044,12 @@ def __init__(self, extended_metadata, path, value): class LibraryInfoAssociation(Base, RepresentById): __tablename__ = "library_info_association" - id = Column(Integer, primary_key=True) - library_id = Column(Integer, ForeignKey("library.id"), index=True) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - inheritable = Column(Boolean, index=True, default=False) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), index=True) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + inheritable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) library = relationship( "Library", @@ -6020,12 +6074,14 @@ def __init__(self, library, form_definition, info, inheritable=False): class LibraryFolderInfoAssociation(Base, RepresentById): __tablename__ = "library_folder_info_association" - id = Column(Integer, primary_key=True) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - inheritable = Column(Boolean, index=True, default=False) - 
deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_folder_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + inheritable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) folder = relationship( "LibraryFolder", @@ -6051,13 +6107,13 @@ def __init__(self, folder, form_definition, info, inheritable=False): class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): __tablename__ = "library_dataset_dataset_info_association" - id = Column(Integer, primary_key=True) - library_dataset_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), nullable=True, index=True ) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - deleted = Column(Boolean, index=True, default=False) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", @@ -6091,16 +6147,24 @@ def inheritable(self): class ImplicitlyConvertedDatasetAssociation(Base, Serializable): __tablename__ = 
"implicitly_converted_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - hda_parent_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - ldda_parent_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - deleted = Column(Boolean, index=True, default=False) - metadata_safe = Column(Boolean, index=True, default=True) - type = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + hda_parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + ldda_parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + metadata_safe: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=True) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) parent_hda = relationship( "HistoryDatasetAssociation", @@ -6201,13 +6265,13 @@ def produce_filter(self, table): class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): __tablename__ = 
"dataset_collection" - id = Column(Integer, primary_key=True) - collection_type = Column(Unicode(255), nullable=False) - populated_state = Column(TrimmedString(64), default="ok", nullable=False) - populated_state_message = Column(TEXT) - element_count = Column(Integer, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_type: Mapped[str] = mapped_column(Unicode(255), nullable=False) + populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="ok", nullable=False) + populated_state_message: Mapped[Optional[str]] = mapped_column(TEXT) + element_count: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) elements = relationship( "DatasetCollectionElement", @@ -6659,21 +6723,23 @@ class HistoryDatasetCollectionAssociation( __tablename__ = "history_dataset_collection_association" - id = Column(Integer, primary_key=True) - collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - name = Column(TrimmedString(255)) - hid = Column(Integer) - visible = Column(Boolean) - deleted = Column(Boolean, default=False) - copied_from_history_dataset_collection_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_collection.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + hid: Mapped[Optional[int]] = mapped_column(Integer) + visible: Mapped[Optional[bool]] = mapped_column(Boolean) + deleted: 
Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + copied_from_history_dataset_collection_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True ) - implicit_output_name = Column(Unicode(255), nullable=True) - job_id = Column(ForeignKey("job.id"), index=True, nullable=True) - implicit_collection_jobs_id = Column(ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) + implicit_output_name: Mapped[Optional[str]] = mapped_column(Unicode(255), nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( + ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) collection = relationship("DatasetCollection") history = relationship("History", back_populates="dataset_collections") @@ -7039,11 +7105,11 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre __tablename__ = "library_dataset_collection_association" - id = Column(Integer, primary_key=True) - collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) - name = Column(TrimmedString(255)) - deleted = Column(Boolean, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_collection.id"), index=True) + folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) + name: Mapped[Optional[str]] = 
mapped_column(TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) collection = relationship("DatasetCollection") folder = relationship("LibraryFolder") @@ -7083,16 +7149,24 @@ class DatasetCollectionElement(Base, Dictifiable, Serializable): __tablename__ = "dataset_collection_element" - id = Column(Integer, primary_key=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) # Parent collection id describing what collection this element belongs to. - dataset_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True, nullable=False) + dataset_collection_id: Mapped[int] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True, nullable=False + ) # Child defined by this association - HDA, LDDA, or another dataset association... - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - child_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True, nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + child_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True, nullable=True + ) # Element index and identifier to define this parent-child relationship. 
- element_index = Column(Integer) - element_identifier = Column(Unicode(255)) + element_index: Mapped[Optional[int]] = mapped_column(Integer) + element_identifier: Mapped[Optional[str]] = mapped_column(Unicode(255)) hda = relationship( "HistoryDatasetAssociation", @@ -7271,14 +7345,16 @@ def _serialize(self, id_encoder, serialization_options): class Event(Base, RepresentById): __tablename__ = "event" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - history_id = Column(Integer, ForeignKey("history.id"), index=True, nullable=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) - message = Column(TrimmedString(1024)) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True) - tool_id = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) + message: Mapped[Optional[str]] = mapped_column(TrimmedString(1024)) + session_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True + ) + tool_id: Mapped[Optional[str]] = mapped_column(String(255)) history = relationship("History") user = relationship("User") @@ -7288,21 +7364,21 @@ class Event(Base, RepresentById): class GalaxySession(Base, RepresentById): __tablename__ = "galaxy_session" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, 
ForeignKey("galaxy_user.id"), index=True, nullable=True) - remote_host = Column(String(255)) - remote_addr = Column(String(255)) - referer = Column(TEXT) - current_history_id = Column(Integer, ForeignKey("history.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) + remote_host: Mapped[Optional[str]] = mapped_column(String(255)) + remote_addr: Mapped[Optional[str]] = mapped_column(String(255)) + referer: Mapped[Optional[str]] = mapped_column(TEXT) + current_history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), nullable=True) # unique 128 bit random number coerced to a string - session_key = Column(TrimmedString(255), index=True, unique=True) - is_valid = Column(Boolean, default=False) + session_key: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) + is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) # saves a reference to the previous session so we have a way to chain them together - prev_session_id = Column(Integer) - disk_usage = Column(Numeric(15, 0), index=True) - last_action = Column(DateTime) + prev_session_id: Mapped[Optional[int]] = mapped_column(Integer) + disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True) + last_action: Mapped[Optional[datetime]] = mapped_column(DateTime) current_history = relationship("History") histories = relationship( "GalaxySessionToHistoryAssociation", back_populates="galaxy_session", cascade_backrefs=False @@ -7334,10 +7410,10 @@ def set_disk_usage(self, bytes): class GalaxySessionToHistoryAssociation(Base, RepresentById): __tablename__ = "galaxy_session_to_history" - id = Column(Integer, 
primary_key=True) - create_time = Column(DateTime, default=now) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + session_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_session.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) galaxy_session = relationship("GalaxySession", back_populates="histories") history = relationship("History", back_populates="galaxy_sessions") @@ -7366,20 +7442,20 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): __tablename__ = "stored_workflow" __table_args__ = (Index("ix_stored_workflow_slug", "slug", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) - latest_workflow_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) + latest_workflow_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow.id", use_alter=True, name="stored_workflow_latest_workflow_id_fk"), index=True ) - name = Column(TEXT) - deleted = Column(Boolean, default=False) - hidden = Column(Boolean, default=False) - importable = Column(Boolean, default=False) - slug = Column(TEXT) - from_path = Column(TEXT) - published = Column(Boolean, index=True, default=False) + name: 
Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + hidden: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + from_path: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) user = relationship( "User", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" @@ -7539,20 +7615,24 @@ class Workflow(Base, Dictifiable, RepresentById): __tablename__ = "workflow" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) # workflows will belong to either a stored workflow or a parent/nesting workflow. 
- stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True, nullable=True) - parent_workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) - name = Column(TEXT) - has_cycles = Column(Boolean) - has_errors = Column(Boolean) + stored_workflow_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("stored_workflow.id"), index=True, nullable=True + ) + parent_workflow_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow.id"), index=True, nullable=True + ) + name: Mapped[Optional[str]] = mapped_column(TEXT) + has_cycles: Mapped[Optional[bool]] = mapped_column(Boolean) + has_errors: Mapped[Optional[bool]] = mapped_column(Boolean) reports_config = Column(JSONType) creator_metadata = Column(JSONType) - license = Column(TEXT) + license: Mapped[Optional[str]] = mapped_column(TEXT) source_metadata = Column(JSONType) - uuid = Column(UUIDType, nullable=True) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType, nullable=True) steps = relationship( "WorkflowStep", @@ -7722,25 +7802,27 @@ class WorkflowStep(Base, RepresentById): __tablename__ = "workflow_step" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - subworkflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) - dynamic_tool_id = Column(Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True) - type: str = Column(String(64)) - tool_id = Column(TEXT) - tool_version = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, 
nullable=False) + subworkflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) + dynamic_tool_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True + ) + type: Mapped[Optional[str]] = mapped_column(String(64)) + tool_id: Mapped[Optional[str]] = mapped_column(TEXT) + tool_version: Mapped[Optional[str]] = mapped_column(TEXT) tool_inputs = Column(JSONType) tool_errors = Column(JSONType) position = Column(MutableJSONType) config = Column(JSONType) - order_index: int = Column(Integer) + order_index: Mapped[Optional[int]] = mapped_column(Integer) when_expression = Column(JSONType) - uuid = Column(UUIDType) - label = Column(Unicode(255)) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType) + label: Mapped[Optional[str]] = mapped_column(Unicode(255)) temp_input_connections = None - parent_comment_id = Column(Integer, ForeignKey("workflow_comment.id"), nullable=True) + parent_comment_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_comment.id"), nullable=True) parent_comment = relationship( "WorkflowComment", @@ -8037,16 +8119,16 @@ class WorkflowStepInput(Base, RepresentById): ), ) - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - name = Column(TEXT) - merge_type = Column(TEXT) - scatter_type = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + merge_type: Mapped[Optional[str]] = mapped_column(TEXT) + scatter_type: Mapped[Optional[str]] = mapped_column(TEXT) value_from = Column(MutableJSONType) - value_from_type = Column(TEXT) + value_from_type: Mapped[Optional[str]] = mapped_column(TEXT) default_value = Column(MutableJSONType) - default_value_set = Column(Boolean, default=False) 
- runtime_value = Column(Boolean, default=False) + default_value_set: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + runtime_value: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) workflow_step = relationship( "WorkflowStep", @@ -8081,11 +8163,15 @@ def copy(self, copied_step): class WorkflowStepConnection(Base, RepresentById): __tablename__ = "workflow_step_connection" - id = Column(Integer, primary_key=True) - output_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - input_step_input_id = Column(Integer, ForeignKey("workflow_step_input.id"), index=True) - output_name = Column(TEXT) - input_subworkflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + output_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + input_step_input_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step_input.id"), index=True + ) + output_name: Mapped[Optional[str]] = mapped_column(TEXT) + input_subworkflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id"), index=True + ) input_step_input = relationship( "WorkflowStepInput", @@ -8137,11 +8223,11 @@ def copy(self): class WorkflowOutput(Base, Serializable): __tablename__ = "workflow_output" - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) - output_name = Column(String(255), nullable=True) - label = Column(Unicode(255)) - uuid = Column(UUIDType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + label: Mapped[Optional[str]] = mapped_column(Unicode(255)) + uuid: Mapped[Optional[str]] = 
mapped_column(UUIDType) workflow_step = relationship( "WorkflowStep", back_populates="workflow_outputs", @@ -8178,15 +8264,15 @@ class WorkflowComment(Base, RepresentById): __tablename__ = "workflow_comment" - id = Column(Integer, primary_key=True) - order_index: int = Column(Integer) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + order_index: Mapped[Optional[int]] = mapped_column(Integer) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) position = Column(MutableJSONType) size = Column(JSONType) - type = Column(String(16)) - color = Column(String(16)) + type: Mapped[Optional[str]] = mapped_column(String(16)) + color: Mapped[Optional[str]] = mapped_column(String(16)) data = Column(JSONType) - parent_comment_id = Column(Integer, ForeignKey("workflow_comment.id"), nullable=True) + parent_comment_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_comment.id"), nullable=True) workflow = relationship( "Workflow", @@ -8249,9 +8335,9 @@ def from_dict(dict): class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): __tablename__ = "stored_workflow_user_share_connection" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) user = relationship("User") stored_workflow = relationship("StoredWorkflow", back_populates="users_shared_with") @@ -8259,10 +8345,10 @@ class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): class StoredWorkflowMenuEntry(Base, 
RepresentById): __tablename__ = "stored_workflow_menu_entry" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - order_index = Column(Integer) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + order_index: Mapped[Optional[int]] = mapped_column(Integer) stored_workflow = relationship("StoredWorkflow") user = relationship( @@ -8279,15 +8365,15 @@ class StoredWorkflowMenuEntry(Base, RepresentById): class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializable): __tablename__ = "workflow_invocation" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - state = Column(TrimmedString(64), index=True) - scheduler = Column(TrimmedString(255), index=True) - handler = Column(TrimmedString(255), index=True) - uuid = Column(UUIDType()) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + scheduler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + 
uuid: Mapped[Optional[str]] = mapped_column(UUIDType()) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) history = relationship("History", back_populates="workflow_invocations") input_parameters = relationship( @@ -8825,10 +8911,16 @@ def log_str(self): class WorkflowInvocationToSubworkflowInvocationAssociation(Base, Dictifiable, RepresentById): __tablename__ = "workflow_invocation_to_subworkflow_invocation_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_wfi"), index=True) - subworkflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_swi"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wfi_swi_ws")) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_wfi"), index=True + ) + subworkflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_swi"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wfi_swi_ws") + ) subworkflow_invocation = relationship( "WorkflowInvocation", @@ -8853,16 +8945,22 @@ class WorkflowInvocationToSubworkflowInvocationAssociation(Base, Dictifiable, Re class WorkflowInvocationMessage(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_message" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False) - reason = Column(String(32)) - details = Column(TrimmedString(255), nullable=True) - output_name = Column(String(255), nullable=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), nullable=True) - 
dependent_workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), nullable=True) - job_id = Column(Integer, ForeignKey("job.id"), nullable=True) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), nullable=True) - hdca_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[int] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False + ) + reason: Mapped[Optional[str]] = mapped_column(String(32)) + details: Mapped[Optional[str]] = mapped_column(TrimmedString(255), nullable=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), nullable=True) + dependent_workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id"), nullable=True + ) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), nullable=True) + hdca_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True + ) workflow_invocation = relationship("WorkflowInvocation", back_populates="messages", lazy=True) workflow_step = relationship("WorkflowStep", foreign_keys=workflow_step_id, lazy=True) @@ -8926,14 +9024,18 @@ def is_split_configuration(self): class WorkflowInvocationStep(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_step" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False) - workflow_step_id = Column(Integer, 
ForeignKey("workflow_step.id"), index=True, nullable=False) - state = Column(TrimmedString(64), index=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=True) - implicit_collection_jobs_id = Column(Integer, ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + workflow_invocation_id: Mapped[int] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False + ) + workflow_step_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=True) + implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True + ) action = Column(MutableJSONType, nullable=True) workflow_step = relationship("WorkflowStep") @@ -9134,13 +9236,13 @@ class WorkflowRequestInputParameter(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_input_parameters" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - name = Column(Unicode(255)) - value = Column(TEXT) - type = Column(Unicode(255)) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + value: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(Unicode(255)) workflow_invocation = relationship("WorkflowInvocation", 
back_populates="input_parameters") dict_collection_visible_keys = ["id", "name", "value", "type"] @@ -9164,11 +9266,11 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_step_states" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) value = Column(MutableJSONType) workflow_step = relationship("WorkflowStep") workflow_invocation = relationship("WorkflowInvocation", back_populates="step_states") @@ -9187,11 +9289,13 @@ class WorkflowRequestToInputDatasetAssociation(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_to_input_dataset" - id = Column(Integer, primary_key=True) - name = Column(String(255)) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) workflow_step = relationship("WorkflowStep") dataset = relationship("HistoryDatasetAssociation") @@ -9215,11 +9319,15 @@ class WorkflowRequestToInputDatasetCollectionAssociation(Base, 
Dictifiable, Seri __tablename__ = "workflow_request_to_input_collection_dataset" - id = Column(Integer, primary_key=True) - name = Column(String(255)) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) workflow_step = relationship("WorkflowStep") dataset_collection = relationship("HistoryDatasetCollectionAssociation") workflow_invocation = relationship("WorkflowInvocation", back_populates="input_dataset_collections") @@ -9242,9 +9350,11 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_input_step_parameter" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) parameter_value = Column(MutableJSONType) workflow_step = relationship("WorkflowStep") @@ -9264,11 +9374,13 @@ class WorkflowInvocationOutputDatasetAssociation(Base, Dictifiable, Serializable __tablename__ = 
"workflow_invocation_output_dataset_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id"), index=True) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_datasets") workflow_step = relationship("WorkflowStep") @@ -9291,13 +9403,17 @@ class WorkflowInvocationOutputDatasetCollectionAssociation(Base, Dictifiable, Se __tablename__ = "workflow_invocation_output_dataset_collection_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wiodca_wii"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wiodca_wsi"), index=True) - dataset_collection_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wiodca_wii"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wiodca_wsi"), index=True + ) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( 
Integer, ForeignKey("history_dataset_collection_association.id", name="fk_wiodca_dci"), index=True ) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) + workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_dataset_collections") workflow_step = relationship("WorkflowStep") @@ -9322,10 +9438,12 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_output_value" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id"), index=True) value = Column(MutableJSONType) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_values") @@ -9361,10 +9479,12 @@ class WorkflowInvocationStepOutputDatasetAssociation(Base, Dictifiable, Represen __tablename__ = "workflow_invocation_step_output_dataset_association" - id = Column(Integer, primary_key=True) - workflow_invocation_step_id = Column(Integer, ForeignKey("workflow_invocation_step.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - output_name = Column(String(255), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_step_id: Mapped[Optional[int]]
= mapped_column( + Integer, ForeignKey("workflow_invocation_step.id"), index=True + ) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_datasets") dataset = relationship("HistoryDatasetAssociation") @@ -9376,15 +9496,17 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable __tablename__ = "workflow_invocation_step_output_dataset_collection_association" - id = Column(Integer, primary_key=True) - workflow_invocation_step_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_step_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation_step.id", name="fk_wisodca_wisi"), index=True ) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wisodca_wsi"), index=True) - dataset_collection_id = Column( + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wisodca_wsi"), index=True + ) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id", name="fk_wisodca_dci"), index=True ) - output_name = Column(String(255), nullable=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_dataset_collections") dataset_collection = relationship("HistoryDatasetCollectionAssociation") @@ -9395,16 +9517,20 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable class MetadataFile(Base, StorableObject, Serializable): __tablename__ = "metadata_file" - id = Column(Integer, primary_key=True) - name = Column(TEXT) - hda_id = Column(Integer, 
ForeignKey("history_dataset_association.id"), index=True, nullable=True) - lda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - object_store_id = Column(TrimmedString(255), index=True) - uuid = Column(UUIDType(), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + lda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType(), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) history_dataset = relationship("HistoryDatasetAssociation") library_dataset = relationship("LibraryDatasetDatasetAssociation") @@ -9477,16 +9603,16 @@ def _serialize(self, id_encoder, serialization_options): class FormDefinition(Base, Dictifiable, RepresentById): __tablename__ = "form_definition" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TrimmedString(255), nullable=False) - desc = Column(TEXT) - form_definition_current_id = Column( + id: 
Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + desc: Mapped[Optional[str]] = mapped_column(TEXT) + form_definition_current_id: Mapped[int] = mapped_column( Integer, ForeignKey("form_definition_current.id", use_alter=True), index=True, nullable=False ) fields = Column(MutableJSONType) - type = Column(TrimmedString(255), index=True) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) layout = Column(MutableJSONType) form_definition_current = relationship( "FormDefinitionCurrent", @@ -9549,11 +9675,11 @@ def grid_fields(self, grid_index): class FormDefinitionCurrent(Base, RepresentById): __tablename__ = "form_definition_current" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - latest_form_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + latest_form_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) forms = relationship( "FormDefinition", back_populates="form_definition_current", @@ -9573,10 +9699,10 @@ def __init__(self, form_definition=None): class FormValues(Base, RepresentById): __tablename__ = "form_values" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, 
onupdate=now) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) content = Column(MutableJSONType) form_definition = relationship( "FormDefinition", primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) @@ -9590,21 +9716,21 @@ def __init__(self, form_def=None, content=None): class UserAddress(Base, RepresentById): __tablename__ = "user_address" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - desc = Column(TrimmedString(255)) - name = Column(TrimmedString(255), nullable=False) - institution = Column(TrimmedString(255)) - address = Column(TrimmedString(255), nullable=False) - city = Column(TrimmedString(255), nullable=False) - state = Column(TrimmedString(255), nullable=False) - postal_code = Column(TrimmedString(255), nullable=False) - country = Column(TrimmedString(255), nullable=False) - phone = Column(TrimmedString(255)) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + desc: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + name: Mapped[str] = mapped_column(TrimmedString(255), 
nullable=False) + institution: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + address: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + city: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + state: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + postal_code: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + country: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + phone: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) # `desc` needs to be fully qualified because it is shadowed by `desc` Column defined above # TODO: db migration to rename column, then use `desc` user = relationship("User", back_populates="addresses", order_by=sqlalchemy.desc("update_time")) @@ -9627,13 +9753,13 @@ def to_dict(self, trans): class PSAAssociation(Base, AssociationMixin, RepresentById): __tablename__ = "psa_association" - id = Column(Integer, primary_key=True) - server_url = Column(VARCHAR(255)) - handle = Column(VARCHAR(255)) - secret = Column(VARCHAR(255)) - issued = Column(Integer) - lifetime = Column(Integer) - assoc_type = Column(VARCHAR(64)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + server_url: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + handle: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + secret: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + issued: Mapped[Optional[int]] = mapped_column(Integer) + lifetime: Mapped[Optional[int]] = mapped_column(Integer) + assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9691,9 +9817,9 @@ class PSACode(Base, CodeMixin, RepresentById): __tablename__ = "psa_code" __table_args__ = (UniqueConstraint("code", "email"),) - id = 
Column(Integer, primary_key=True) - email = Column(VARCHAR(200)) - code = Column(VARCHAR(32)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + email: Mapped[Optional[str]] = mapped_column(VARCHAR(200)) + code: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9719,10 +9845,10 @@ def get_code(cls, code): class PSANonce(Base, NonceMixin, RepresentById): __tablename__ = "psa_nonce" - id = Column(Integer, primary_key=True) - server_url = Column(VARCHAR(255)) - timestamp = Column(Integer) - salt = Column(VARCHAR(40)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + server_url: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + timestamp: Mapped[Optional[int]] = mapped_column(Integer) + salt: Mapped[Optional[str]] = mapped_column(VARCHAR(40)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9757,11 +9883,11 @@ def use(cls, server_url, timestamp, salt): class PSAPartial(Base, PartialMixin, RepresentById): __tablename__ = "psa_partial" - id = Column(Integer, primary_key=True) - token = Column(VARCHAR(32)) - data = Column(TEXT) - next_step = Column(Integer) - backend = Column(VARCHAR(32)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + token: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) + data: Mapped[Optional[str]] = mapped_column(TEXT) + next_step: Mapped[Optional[int]] = mapped_column(Integer) + backend: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9801,13 +9927,13 @@ class UserAuthnzToken(Base, UserMixin, RepresentById): __tablename__ = "oidc_user_authnz_tokens" __table_args__ = (UniqueConstraint("provider", "uid"),) - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - uid = Column(VARCHAR(255)) - provider = Column(VARCHAR(32)) - 
extra_data = Column(MutableJSONType, nullable=True) - lifetime = Column(Integer) - assoc_type = Column(VARCHAR(64)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + uid: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + provider: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) + extra_data: Mapped[Optional[str]] = mapped_column(MutableJSONType, nullable=True) + lifetime: Mapped[Optional[int]] = mapped_column(Integer) + assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) user = relationship("User", back_populates="social_auth") # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz @@ -9964,31 +10090,31 @@ class CustosAuthnzToken(Base, RepresentById): UniqueConstraint("external_user_id", "provider"), ) - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id")) - external_user_id = Column(String(255)) - provider = Column(String(255)) - access_token = Column(Text) - id_token = Column(Text) - refresh_token = Column(Text) - expiration_time = Column(DateTime) - refresh_expiration_time = Column(DateTime) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id")) + external_user_id: Mapped[Optional[str]] = mapped_column(String(255)) + provider: Mapped[Optional[str]] = mapped_column(String(255)) + access_token: Mapped[Optional[str]] = mapped_column(Text) + id_token: Mapped[Optional[str]] = mapped_column(Text) + refresh_token: Mapped[Optional[str]] = mapped_column(Text) + expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime) + refresh_expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime) user = relationship("User", back_populates="custos_auth") class CloudAuthz(Base): __tablename__ = "cloudauthz" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, 
ForeignKey("galaxy_user.id"), index=True) - provider = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + provider: Mapped[Optional[str]] = mapped_column(String(255)) config = Column(MutableJSONType) - authn_id = Column(Integer, ForeignKey("oidc_user_authnz_tokens.id"), index=True) + authn_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("oidc_user_authnz_tokens.id"), index=True) tokens = Column(MutableJSONType) - last_update = Column(DateTime) - last_activity = Column(DateTime) - description = Column(TEXT) - create_time = Column(DateTime, default=now) + last_update: Mapped[Optional[datetime]] = mapped_column(DateTime) + last_activity: Mapped[Optional[datetime]] = mapped_column(DateTime) + description: Mapped[Optional[str]] = mapped_column(TEXT) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) user = relationship("User", back_populates="cloudauthz") authn = relationship("UserAuthnzToken") @@ -10016,18 +10142,18 @@ class Page(Base, HasTags, Dictifiable, RepresentById): __tablename__ = "page" __table_args__ = (Index("ix_page_slug", "slug", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) - latest_revision_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) + latest_revision_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("page_revision.id", use_alter=True, 
name="page_latest_revision_id_fk"), index=True ) - title = Column(TEXT) - deleted = Column(Boolean, index=True, default=False) - importable = Column(Boolean, index=True, default=False) - slug = Column(TEXT) - published = Column(Boolean, index=True, default=False) + title: Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) user = relationship("User") revisions = relationship( "PageRevision", @@ -10095,13 +10221,13 @@ def email_hash(self): class PageRevision(Base, Dictifiable, RepresentById): __tablename__ = "page_revision" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - page_id = Column(Integer, ForeignKey("page.id"), index=True, nullable=False) - title = Column(TEXT) - content = Column(TEXT) - content_format = Column(TrimmedString(32)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + page_id: Mapped[int] = mapped_column(Integer, ForeignKey("page.id"), index=True, nullable=False) + title: Mapped[Optional[str]] = mapped_column(TEXT) + content: Mapped[Optional[str]] = mapped_column(TEXT) + content_format: Mapped[Optional[str]] = mapped_column(TrimmedString(32)) page = relationship("Page", primaryjoin=(lambda: Page.id == PageRevision.page_id)) DEFAULT_CONTENT_FORMAT = "html" dict_element_visible_keys = ["id", "page_id", "title", "content", "content_format"] @@ -10119,9 +10245,9 @@ def to_dict(self, view="element"): class PageUserShareAssociation(Base, 
UserShareAssociation): __tablename__ = "page_user_share_association" - id = Column(Integer, primary_key=True) - page_id = Column(Integer, ForeignKey("page.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) user = relationship("User") page = relationship("Page", back_populates="users_shared_with") @@ -10133,22 +10259,22 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): Index("ix_visualization_slug", "slug", mysql_length=200), ) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) - latest_revision_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) + latest_revision_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("visualization_revision.id", use_alter=True, name="visualization_latest_revision_id_fk"), index=True, ) - title = Column(TEXT) - type = Column(TEXT) - dbkey = Column(TEXT) - deleted = Column(Boolean, default=False, index=True) - importable = Column(Boolean, default=False, index=True) - slug = Column(TEXT) - published = Column(Boolean, default=False, index=True) + title: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(TEXT) + dbkey: Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = 
mapped_column(Boolean, default=False, index=True) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True) user = relationship("User") revisions = relationship( @@ -10247,12 +10373,12 @@ class VisualizationRevision(Base, RepresentById): __tablename__ = "visualization_revision" __table_args__ = (Index("ix_visualization_revision_dbkey", "dbkey", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True, nullable=False) - title = Column(TEXT) - dbkey = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + visualization_id: Mapped[int] = mapped_column(Integer, ForeignKey("visualization.id"), index=True, nullable=False) + title: Mapped[Optional[str]] = mapped_column(TEXT) + dbkey: Mapped[Optional[str]] = mapped_column(TEXT) config = Column(MutableJSONType) visualization = relationship( "Visualization", @@ -10275,9 +10401,9 @@ def copy(self, visualization=None): class VisualizationUserShareAssociation(Base, UserShareAssociation): __tablename__ = "visualization_user_share_association" - id = Column(Integer, primary_key=True) - visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, 
ForeignKey("galaxy_user.id"), index=True) user = relationship("User") visualization = relationship("Visualization", back_populates="users_shared_with") @@ -10286,10 +10412,10 @@ class Tag(Base, RepresentById): __tablename__ = "tag" __table_args__ = (UniqueConstraint("name"),) - id = Column(Integer, primary_key=True) - type = Column(Integer) - parent_id = Column(Integer, ForeignKey("tag.id")) - name = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + type: Mapped[Optional[int]] = mapped_column(Integer) + parent_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id")) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) children = relationship("Tag", back_populates="parent") parent = relationship("Tag", back_populates="children", remote_side=[id]) @@ -10300,8 +10426,8 @@ def __str__(self): class ItemTagAssociation(Dictifiable): dict_collection_visible_keys = ["id", "user_tname", "user_value"] dict_element_visible_keys = dict_collection_visible_keys - user_tname: Column - user_value = Column(TrimmedString(255), index=True) + user_tname: Mapped[Optional[str]] + user_value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) @@ -10321,12 +10447,12 @@ def copy(self, cls=None): class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "history_tag_association" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + tag_id: Mapped[Optional[int]] 
= mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) history = relationship("History", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10335,12 +10461,14 @@ class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById): class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "history_dataset_association_tag_association" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10349,14 +10477,14 @@ class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, Represen class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = 
"library_dataset_dataset_association_tag_association" - id = Column(Integer, primary_key=True) - library_dataset_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), index=True ) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10365,12 +10493,12 @@ class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, R class PageTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "page_tag_association" - id = Column(Integer, primary_key=True) - page_id = Column(Integer, ForeignKey("page.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), 
index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) page = relationship("Page", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10379,12 +10507,12 @@ class PageTagAssociation(Base, ItemTagAssociation, RepresentById): class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "workflow_step_tag_association" - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) workflow_step = relationship("WorkflowStep", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10393,12 +10521,12 @@ class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById): class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "stored_workflow_tag_association" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = 
Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) stored_workflow = relationship("StoredWorkflow", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10407,12 +10535,12 @@ class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById): class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "visualization_tag_association" - id = Column(Integer, primary_key=True) - visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) visualization = relationship("Visualization", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10421,12 +10549,14 
@@ class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById): class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "history_dataset_collection_tag_association" - id = Column(Integer, primary_key=True) - history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10435,12 +10565,14 @@ class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "library_dataset_collection_tag_association" - id = Column(Integer, primary_key=True) - library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: 
Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_collection_association.id"), index=True + ) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="tags") tag = relationship("Tag") user = relationship("User") @@ -10449,12 +10581,12 @@ class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent class ToolTagAssociation(Base, ItemTagAssociation, RepresentById): __tablename__ = "tool_tag_association" - id = Column(Integer, primary_key=True) - tool_id = Column(TrimmedString(255), index=True) - tag_id = Column(Integer, ForeignKey("tag.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user_tname = Column(TrimmedString(255), index=True) - value = Column(TrimmedString(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + tool_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) tag = relationship("Tag") user = relationship("User") @@ -10464,10 +10596,10 @@ class HistoryAnnotationAssociation(Base, RepresentById): __tablename__ = "history_annotation_association" __table_args__ = (Index("ix_history_anno_assoc_annotation", 
"annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) history = relationship("History", back_populates="annotations") user = relationship("User") @@ -10476,10 +10608,12 @@ class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById): __tablename__ = "history_dataset_association_annotation_association" __table_args__ = (Index("ix_history_dataset_anno_assoc_annotation", "annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) hda = relationship("HistoryDatasetAssociation", back_populates="annotations") user = relationship("User") @@ -10488,10 +10622,10 @@ class StoredWorkflowAnnotationAssociation(Base, RepresentById): __tablename__ = "stored_workflow_annotation_association" __table_args__ = (Index("ix_stored_workflow_ann_assoc_annotation", "annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, 
ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) stored_workflow = relationship("StoredWorkflow", back_populates="annotations") user = relationship("User") @@ -10500,10 +10634,10 @@ class WorkflowStepAnnotationAssociation(Base, RepresentById): __tablename__ = "workflow_step_annotation_association" __table_args__ = (Index("ix_workflow_step_ann_assoc_annotation", "annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) workflow_step = relationship("WorkflowStep", back_populates="annotations") user = relationship("User") @@ -10512,10 +10646,10 @@ class PageAnnotationAssociation(Base, RepresentById): __tablename__ = "page_annotation_association" __table_args__ = (Index("ix_page_annotation_association_annotation", "annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - page_id = Column(Integer, ForeignKey("page.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + page_id: 
Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) page = relationship("Page", back_populates="annotations") user = relationship("User") @@ -10524,10 +10658,10 @@ class VisualizationAnnotationAssociation(Base, RepresentById): __tablename__ = "visualization_annotation_association" __table_args__ = (Index("ix_visualization_annotation_association_annotation", "annotation", mysql_length=200),) - id = Column(Integer, primary_key=True) - visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) visualization = relationship("Visualization", back_populates="annotations") user = relationship("User") @@ -10535,10 +10669,12 @@ class VisualizationAnnotationAssociation(Base, RepresentById): class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentById): __tablename__ = "history_dataset_collection_annotation_association" - id = Column(Integer, primary_key=True) - history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = 
mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) history_dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="annotations") user = relationship("User") @@ -10546,10 +10682,12 @@ class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentBy class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): __tablename__ = "library_dataset_collection_annotation_association" - id = Column(Integer, primary_key=True) - library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - annotation = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + annotation: Mapped[Optional[str]] = mapped_column(TEXT) dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="annotations") user = relationship("User") @@ -10557,13 +10695,13 @@ class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById): class Vault(Base): __tablename__ = "vault" - key = Column(Text, primary_key=True) - parent_key = Column(Text, ForeignKey(key), index=True, nullable=True) + key: Mapped[str] = mapped_column(Text, primary_key=True) + parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True, nullable=True) children = relationship("Vault", back_populates="parent") parent = relationship("Vault", back_populates="children", remote_side=[key]) - value = Column(Text, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + value: Mapped[Optional[str]] 
= mapped_column(Text, nullable=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) # Item rating classes. @@ -10583,10 +10721,10 @@ def _set_item(self, item): class HistoryRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_rating_association" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) history = relationship("History", back_populates="ratings") user = relationship("User") @@ -10598,10 +10736,12 @@ def _set_item(self, history): class HistoryDatasetAssociationRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_dataset_association_rating_association" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="ratings") user = 
relationship("User") @@ -10613,10 +10753,10 @@ def _set_item(self, history_dataset_association): class StoredWorkflowRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "stored_workflow_rating_association" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) stored_workflow = relationship("StoredWorkflow", back_populates="ratings") user = relationship("User") @@ -10628,10 +10768,10 @@ def _set_item(self, stored_workflow): class PageRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "page_rating_association" - id = Column(Integer, primary_key=True) - page_id = Column(Integer, ForeignKey("page.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) page = relationship("Page", back_populates="ratings") user = relationship("User") @@ -10643,10 +10783,10 @@ def _set_item(self, page): class VisualizationRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "visualization_rating_association" - id = Column(Integer, primary_key=True) - visualization_id = Column(Integer, ForeignKey("visualization.id"), 
index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) visualization = relationship("Visualization", back_populates="ratings") user = relationship("User") @@ -10658,10 +10798,12 @@ def _set_item(self, visualization): class HistoryDatasetCollectionRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_dataset_collection_rating_association" - id = Column(Integer, primary_key=True) - history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="ratings") user = relationship("User") @@ -10673,10 +10815,12 @@ def _set_item(self, dataset_collection): class LibraryDatasetCollectionRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "library_dataset_collection_rating_association" - id = Column(Integer, primary_key=True) - library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), 
index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="ratings") user = relationship("User") @@ -10689,11 +10833,11 @@ def _set_item(self, dataset_collection): class DataManagerHistoryAssociation(Base, RepresentById): __tablename__ = "data_manager_history_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) history = relationship("History") user = relationship("User", back_populates="data_manager_histories") @@ -10702,21 +10846,21 @@ class DataManagerJobAssociation(Base, RepresentById): __tablename__ = "data_manager_job_association" __table_args__ = (Index("ix_data_manager_job_association_data_manager_id", "data_manager_id", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - 
job_id = Column(Integer, ForeignKey("job.id"), index=True) - data_manager_id = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + data_manager_id: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job", back_populates="data_manager_association", uselist=False) class UserPreference(Base, RepresentById): __tablename__ = "user_preference" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - name = Column(Unicode(255), index=True) - value = Column(Text) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) + value: Mapped[Optional[str]] = mapped_column(Text) def __init__(self, name=None, value=None): # Do not remove this constructor: it is set as the creator for the User.preferences @@ -10728,25 +10872,25 @@ def __init__(self, name=None, value=None): class UserAction(Base, RepresentById): __tablename__ = "user_action" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True) - action = Column(Unicode(255)) - context = Column(Unicode(512)) - params = Column(Unicode(1024)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + session_id: 
Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_session.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(Unicode(255)) + context: Mapped[Optional[str]] = mapped_column(Unicode(512)) + params: Mapped[Optional[str]] = mapped_column(Unicode(1024)) user = relationship("User") class APIKeys(Base, RepresentById): __tablename__ = "api_keys" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - key = Column(TrimmedString(32), index=True, unique=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True, unique=True) user = relationship("User", back_populates="api_keys") - deleted = Column(Boolean, index=True, server_default=false(), nullable=False) + deleted: Mapped[bool] = mapped_column(Boolean, index=True, server_default=false(), nullable=False) def copy_list(lst, *args, **kwds): @@ -10779,90 +10923,112 @@ def _prepare_metadata_for_serialization(id_encoder, serialization_options, metad class CleanupEvent(Base): __tablename__ = "cleanup_event" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - message = Column(TrimmedString(1024)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + message: Mapped[Optional[str]] = mapped_column(TrimmedString(1024)) class CleanupEventDatasetAssociation(Base): __tablename__ = "cleanup_event_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - dataset_id = 
Column(Integer, ForeignKey("dataset.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) class CleanupEventMetadataFileAssociation(Base): __tablename__ = "cleanup_event_metadata_file_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - metadata_file_id = Column(Integer, ForeignKey("metadata_file.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + metadata_file_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("metadata_file.id"), index=True) class CleanupEventHistoryAssociation(Base): __tablename__ = "cleanup_event_history_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) class CleanupEventHistoryDatasetAssociationAssociation(Base): __tablename__ = 
"cleanup_event_hda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + hda_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) class CleanupEventLibraryAssociation(Base): __tablename__ = "cleanup_event_library_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_id = Column(Integer, ForeignKey("library.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), index=True) class CleanupEventLibraryFolderAssociation(Base): __tablename__ = "cleanup_event_library_folder_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = 
mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) class CleanupEventLibraryDatasetAssociation(Base): __tablename__ = "cleanup_event_library_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_dataset_id = Column(Integer, ForeignKey("library_dataset.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_dataset.id"), index=True) class CleanupEventLibraryDatasetDatasetAssociationAssociation(Base): __tablename__ = "cleanup_event_ldda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) class CleanupEventImplicitlyConvertedDatasetAssociationAssociation(Base): __tablename__ = "cleanup_event_icda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - 
cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - icda_id = Column(Integer, ForeignKey("implicitly_converted_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + icda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("implicitly_converted_dataset_association.id"), index=True + ) class CeleryUserRateLimit(Base): @@ -10873,8 +11039,8 @@ class CeleryUserRateLimit(Base): __tablename__ = "celery_user_rate_limit" - user_id = Column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) - last_scheduled_time = Column(DateTime, nullable=False) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) + last_scheduled_time: Mapped[datetime] = mapped_column(DateTime, nullable=False) def __repr__(self): return ( diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py index edec5b1f5c82..edd922600b1d 100644 --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -1,5 +1,6 @@ import logging import os +from datetime import datetime from enum import Enum from typing import ( Any, @@ -19,6 +20,8 @@ TEXT, ) from sqlalchemy.orm import ( + Mapped, + mapped_column, registry, relationship, ) @@ -75,24 +78,24 @@ def __declare_last__(cls): class ToolShedRepository(Base): __tablename__ = "tool_shed_repository" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - tool_shed = Column(TrimmedString(255), index=True) - name = Column(TrimmedString(255), index=True) - description = Column(TEXT) - owner = 
Column(TrimmedString(255), index=True) - installed_changeset_revision = Column(TrimmedString(255)) - changeset_revision = Column(TrimmedString(255), index=True) - ctx_rev = Column(TrimmedString(10)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + tool_shed: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True) + name: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + owner: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True) + installed_changeset_revision: Mapped[str] = mapped_column(TrimmedString(255), nullable=True) + changeset_revision: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True) + ctx_rev: Mapped[Optional[str]] = mapped_column(TrimmedString(10)) metadata_ = Column("metadata", MutableJSONType, nullable=True) - includes_datatypes = Column(Boolean, index=True, default=False) + includes_datatypes: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) tool_shed_status = Column(MutableJSONType, nullable=True) - deleted = Column(Boolean, index=True, default=False) - uninstalled = Column(Boolean, default=False) - dist_to_shed = Column(Boolean, default=False) - status = Column(TrimmedString(255)) - error_message = Column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + uninstalled: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + dist_to_shed: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + status: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + error_message: Mapped[Optional[str]] = mapped_column(TEXT) tool_versions = relationship("ToolVersion", back_populates="tool_shed_repository") tool_dependencies = 
relationship( "ToolDependency", order_by="ToolDependency.name", back_populates="tool_shed_repository" @@ -654,11 +657,11 @@ def upgrade_available(self): class RepositoryRepositoryDependencyAssociation(Base): __tablename__ = "repository_repository_dependency_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True) - repository_dependency_id = Column(ForeignKey("repository_dependency.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + tool_shed_repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tool_shed_repository.id"), index=True) + repository_dependency_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository_dependency.id"), index=True) repository = relationship("ToolShedRepository", back_populates="required_repositories") repository_dependency = relationship("RepositoryDependency") @@ -670,10 +673,12 @@ def __init__(self, tool_shed_repository_id=None, repository_dependency_id=None): class RepositoryDependency(Base): __tablename__ = "repository_dependency" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + tool_shed_repository_id: Mapped[int] = mapped_column( + ForeignKey("tool_shed_repository.id"), index=True, nullable=False 
+ ) repository = relationship("ToolShedRepository") def __init__(self, tool_shed_repository_id=None): @@ -683,15 +688,17 @@ def __init__(self, tool_shed_repository_id=None): class ToolDependency(Base): __tablename__ = "tool_dependency" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=False) - name = Column(TrimmedString(255)) - version = Column(TEXT) - type = Column(TrimmedString(40)) - status = Column(TrimmedString(255), nullable=False) - error_message = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + tool_shed_repository_id: Mapped[int] = mapped_column( + ForeignKey("tool_shed_repository.id"), index=True, nullable=False + ) + name: Mapped[str] = mapped_column(TrimmedString(255), nullable=True) + version: Mapped[str] = mapped_column(TEXT, nullable=True) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) + status: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + error_message: Mapped[Optional[str]] = mapped_column(TEXT) tool_shed_repository = relationship("ToolShedRepository", back_populates="tool_dependencies") # converting this one to Enum breaks the tool shed tests, @@ -773,11 +780,13 @@ def is_installed(self): class ToolVersion(Base, Dictifiable): __tablename__ = "tool_version" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - tool_id = Column(String(255)) - tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: 
Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + tool_id: Mapped[Optional[str]] = mapped_column(String(255)) + tool_shed_repository_id: Mapped[Optional[int]] = mapped_column( + ForeignKey("tool_shed_repository.id"), index=True, nullable=True + ) parent_tool_association = relationship( "ToolVersionAssociation", primaryjoin=(lambda: ToolVersion.id == ToolVersionAssociation.tool_id) ) @@ -801,6 +810,6 @@ def to_dict(self, view="element"): class ToolVersionAssociation(Base): __tablename__ = "tool_version_association" - id = Column(Integer, primary_key=True) - tool_id = Column(ForeignKey("tool_version.id"), index=True, nullable=False) - parent_id = Column(ForeignKey("tool_version.id"), index=True, nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + tool_id: Mapped[int] = mapped_column(ForeignKey("tool_version.id"), index=True, nullable=False) + parent_id: Mapped[int] = mapped_column(ForeignKey("tool_version.id"), index=True, nullable=False) diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 3b2003d3e1be..4181757d81f0 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -10,6 +10,7 @@ from typing import ( Any, Mapping, + Optional, TYPE_CHECKING, ) @@ -32,6 +33,8 @@ UniqueConstraint, ) from sqlalchemy.orm import ( + Mapped, + mapped_column, registry, relationship, ) @@ -87,27 +90,27 @@ def __declare_last__(cls): class APIKeys(Base): __tablename__ = "api_keys" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - key = Column(TrimmedString(32), index=True, unique=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + user_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("galaxy_user.id"), index=True) + key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True, unique=True) user = relationship("User", back_populates="api_keys") - deleted = Column(Boolean, index=True, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) class User(Base, Dictifiable): __tablename__ = "galaxy_user" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - email = Column(TrimmedString(255), nullable=False) - username = Column(String(255), index=True) - password = Column(TrimmedString(40), nullable=False) - external = Column(Boolean, default=False) - new_repo_alert = Column(Boolean, default=False) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + email: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + username: Mapped[Optional[str]] = mapped_column(String(255), index=True) + password: Mapped[str] = mapped_column(TrimmedString(40), nullable=False) + external: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + new_repo_alert: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) active_repositories = relationship( "Repository", primaryjoin=(lambda: (Repository.user_id == User.id) & (not_(Repository.deleted))), # type: ignore[has-type] @@ -186,9 +189,9 @@ def set_random_password(self, length=16): class PasswordResetToken(Base): __tablename__ = "password_reset_token" - token = 
Column(String(32), primary_key=True, unique=True, index=True) - expiration_time = Column(DateTime) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) + token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True) + expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) user = relationship("User", back_populates="reset_tokens") def __init__(self, user, token=None): @@ -204,11 +207,11 @@ def __init__(self, user, token=None): class Group(Base, Dictifiable): __tablename__ = "galaxy_group" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) roles = relationship("GroupRoleAssociation", back_populates="group") users = relationship("UserGroupAssociation", back_populates="group") @@ -223,13 +226,13 @@ def __init__(self, name=None): class Role(Base, Dictifiable): __tablename__ = "role" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - description = Column(TEXT) - type = Column(String(40), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, 
default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(String(40), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) repositories = relationship("RepositoryRoleAssociation", back_populates="role") groups = relationship("GroupRoleAssociation", back_populates="role") users = relationship("UserRoleAssociation", back_populates="role") @@ -258,11 +261,11 @@ def is_repository_admin_role(self): class UserGroupAssociation(Base): __tablename__ = "user_group_association" - id = Column(Integer, primary_key=True) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - group_id = Column(ForeignKey("galaxy_group.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) user = relationship("User", back_populates="groups") group = relationship("Group", back_populates="users") @@ -275,11 +278,11 @@ def __init__(self, user, group): class UserRoleAssociation(Base): __tablename__ = "user_role_association" - id = Column(Integer, primary_key=True) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - role_id = Column(ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, 
primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) user = relationship("User", back_populates="roles") role = relationship("Role", back_populates="users") @@ -293,11 +296,11 @@ def __init__(self, user, role): class GroupRoleAssociation(Base): __tablename__ = "group_role_association" - id = Column(Integer, primary_key=True) - group_id = Column(ForeignKey("galaxy_group.id"), index=True) - role_id = Column(ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) group = relationship("Group", back_populates="roles") role = relationship("Role", back_populates="groups") @@ -309,11 +312,11 @@ def __init__(self, group, role): class RepositoryRoleAssociation(Base): __tablename__ = "repository_role_association" - id = Column(Integer, primary_key=True) - repository_id = Column(ForeignKey("repository.id"), index=True) - role_id = Column(ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True) + role_id: Mapped[Optional[int]] = 
mapped_column(ForeignKey("role.id"), index=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) repository = relationship("Repository", back_populates="roles") role = relationship("Role", back_populates="repositories") @@ -326,19 +329,19 @@ def __init__(self, repository, role): class GalaxySession(Base): __tablename__ = "galaxy_session" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(ForeignKey("galaxy_user.id"), index=True, nullable=True) - remote_host = Column(String(255)) - remote_addr = Column(String(255)) - referer = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) + remote_host: Mapped[Optional[str]] = mapped_column(String(255)) + remote_addr: Mapped[Optional[str]] = mapped_column(String(255)) + referer: Mapped[Optional[str]] = mapped_column(TEXT) # unique 128 bit random number coerced to a string - session_key = Column(TrimmedString(255), index=True, unique=True) - is_valid = Column(Boolean, default=False) + session_key: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) + is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) # saves a reference to the previous session so we have a way to chain them together - prev_session_id = Column(Integer) - last_action = Column(DateTime) + prev_session_id: Mapped[Optional[int]] = mapped_column(Integer) + last_action: Mapped[Optional[datetime]] = mapped_column(DateTime) user = relationship("User", 
back_populates="galaxy_sessions") def __init__(self, is_valid=False, **kwd): @@ -350,21 +353,21 @@ def __init__(self, is_valid=False, **kwd): class Repository(Base, Dictifiable): __tablename__ = "repository" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TrimmedString(255), index=True) - type = Column(TrimmedString(255), index=True) - remote_repository_url = Column(TrimmedString(255)) - homepage_url = Column(TrimmedString(255)) - description = Column(TEXT) - long_description = Column(TEXT) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - private = Column(Boolean, default=False) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + remote_repository_url: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + homepage_url: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + description: Mapped[Optional[str]] = mapped_column(TEXT) + long_description: Mapped[Optional[str]] = mapped_column(TEXT) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + private: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) email_alerts = Column(MutableJSONType, nullable=True) - times_downloaded = Column(Integer) - deprecated = Column(Boolean, default=False) + times_downloaded: Mapped[Optional[int]] = mapped_column(Integer) + deprecated: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) categories = 
relationship("RepositoryCategoryAssociation", back_populates="repository") ratings = relationship( "RepositoryRatingAssociation", @@ -564,13 +567,13 @@ def set_item(self, item): class RepositoryRatingAssociation(Base, ItemRatingAssociation): __tablename__ = "repository_rating_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - repository_id = Column(ForeignKey("repository.id"), index=True) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) - comment = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) + comment: Mapped[Optional[str]] = mapped_column(TEXT) repository = relationship("Repository", back_populates="ratings") user = relationship("User") @@ -581,12 +584,12 @@ def set_item(self, repository): class Category(Base, Dictifiable): __tablename__ = "category" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TrimmedString(255), index=True, unique=True) - description = Column(TEXT) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, 
unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) repositories = relationship("RepositoryCategoryAssociation", back_populates="category") dict_collection_visible_keys = ["id", "name", "description", "deleted"] @@ -600,9 +603,9 @@ def __init__(self, deleted=False, **kwd): class RepositoryCategoryAssociation(Base): __tablename__ = "repository_category_association" - id = Column(Integer, primary_key=True) - repository_id = Column(ForeignKey("repository.id"), index=True) - category_id = Column(ForeignKey("category.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True) + category_id: Mapped[Optional[int]] = mapped_column(ForeignKey("category.id"), index=True) category = relationship("Category", back_populates="repositories") repository = relationship("Repository", back_populates="categories") @@ -615,10 +618,10 @@ class Tag(Base): __tablename__ = "tag" __table_args__ = (UniqueConstraint("name"),) - id = Column(Integer, primary_key=True) - type = Column(Integer) - parent_id = Column(ForeignKey("tag.id")) - name = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + type: Mapped[Optional[int]] = mapped_column(Integer) + parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) children = relationship("Tag", back_populates="parent") parent = relationship("Tag", back_populates="children", remote_side=[id]) From f8f2400ba3bcd8db44fe9383b739ffdd1aaba004 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 2 Feb 2024 13:18:14 -0500 Subject: [PATCH 06/71] Add typing to JSON columns, fix related mypy errors Columns: MutableJSONType JSONType DoubleEncodedJsonType TODO: I think we need a type alias for json-typed columns: bytes understand 
iteration, but not access by key. --- lib/galaxy/model/__init__.py | 74 +++++++++++------------ lib/galaxy/model/deferred.py | 4 +- lib/galaxy/model/store/discover.py | 2 +- lib/galaxy/tools/actions/upload_common.py | 2 +- lib/galaxy/workflow/run_request.py | 2 +- lib/tool_shed/webapp/model/__init__.py | 2 +- 6 files changed, 43 insertions(+), 43 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 25dcb625cf9f..985ad66c484b 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1244,7 +1244,7 @@ class DynamicTool(Base, Dictifiable, RepresentById): tool_directory: Mapped[Optional[str]] = mapped_column(Unicode(255)) hidden: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) active: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) - value = Column(MutableJSONType) + value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) dict_collection_visible_keys = ("id", "tool_id", "tool_format", "tool_version", "uuid", "active", "hidden") dict_element_visible_keys = ("id", "tool_id", "tool_format", "tool_version", "uuid", "active", "hidden") @@ -1336,8 +1336,8 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable): info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) copied_from_job_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) command_line: Mapped[Optional[str]] = mapped_column(TEXT) - dependencies = Column(MutableJSONType, nullable=True) - job_messages = Column(MutableJSONType, nullable=True) + dependencies: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) + job_messages: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) param_filename: Mapped[Optional[str]] = mapped_column(String(1024)) runner_name: Mapped[Optional[str]] = mapped_column(String(255)) job_stdout: Mapped[Optional[str]] = mapped_column(TEXT) @@ -1353,13 +1353,13 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, 
Dictifiable, Serializable): job_runner_name: Mapped[Optional[str]] = mapped_column(String(255)) job_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255), index=True) destination_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) - destination_params = Column(MutableJSONType, nullable=True) + destination_params: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) imported: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True) params: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) - object_store_id_overrides = Column(JSONType) + object_store_id_overrides: Mapped[Optional[bytes]] = mapped_column(JSONType) user = relationship("User") galaxy_session = relationship("GalaxySession") @@ -2108,7 +2108,7 @@ class Task(Base, JobLike, RepresentById): tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT) tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT) exit_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) - job_messages = Column(MutableJSONType, nullable=True) + job_messages: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) traceback: Mapped[Optional[str]] = mapped_column(TEXT) job_id: Mapped[int] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=False) @@ -2526,7 +2526,7 @@ class PostJobAction(Base, RepresentById): ) action_type: Mapped[str] = mapped_column(String(255), nullable=False) output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) - action_arguments = Column(MutableJSONType, nullable=True) + action_arguments: Mapped[Optional[bytes]] = 
mapped_column(MutableJSONType, nullable=True) workflow_step = relationship( "WorkflowStep", back_populates="post_job_actions", @@ -2728,7 +2728,7 @@ class StoreExportAssociation(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) object_type: Mapped[Optional[str]] = mapped_column(TrimmedString(32)) object_id: Mapped[Optional[int]] = mapped_column(Integer) - export_metadata = Column(JSONType) + export_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType) class JobContainerAssociation(Base, RepresentById): @@ -2738,7 +2738,7 @@ class JobContainerAssociation(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) container_type: Mapped[Optional[str]] = mapped_column(TEXT) container_name: Mapped[Optional[str]] = mapped_column(TEXT) - container_info = Column(MutableJSONType, nullable=True) + container_info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) job = relationship("Job", back_populates="container") @@ -2765,7 +2765,7 @@ class InteractiveToolEntryPoint(Base, Dictifiable, RepresentById): requires_domain: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) requires_path_in_url: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) requires_path_in_header_named: Mapped[Optional[str]] = mapped_column(TEXT) - info = Column(MutableJSONType, nullable=True) + info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) configured: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) @@ -2900,7 +2900,7 @@ class Notification(Base, Dictifiable, RepresentById): ) # 
Defines the 'importance' of the notification ('info', 'warning', 'urgent', etc.). Used for filtering, highlight rendering, etc # A bug in early 23.1 led to values being stored as json string, so we use this special type to process the result value twice. # content should always be a dict - content = Column(DoubleEncodedJsonType) + content: Mapped[Optional[bytes]] = mapped_column(DoubleEncodedJsonType) user_notification_associations = relationship("UserNotificationAssociation", back_populates="notification") @@ -4312,7 +4312,7 @@ class DatasetSource(Base, Dictifiable, Serializable): dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) source_uri: Mapped[Optional[str]] = mapped_column(TEXT) extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) - transform = Column(MutableJSONType) + transform: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) dataset = relationship("Dataset", back_populates="sources") hashes = relationship("DatasetSourceHash", back_populates="source") dict_collection_visible_keys = ["id", "source_uri", "extra_files_path", "transform"] @@ -6017,7 +6017,7 @@ class ExtendedMetadata(Base, RepresentById): __tablename__ = "extended_metadata" id: Mapped[int] = mapped_column(Integer, primary_key=True) - data = Column(MutableJSONType) + data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) children = relationship("ExtendedMetadataIndex", back_populates="extended_metadata") def __init__(self, data): @@ -7628,10 +7628,10 @@ class Workflow(Base, Dictifiable, RepresentById): name: Mapped[Optional[str]] = mapped_column(TEXT) has_cycles: Mapped[Optional[bool]] = mapped_column(Boolean) has_errors: Mapped[Optional[bool]] = mapped_column(Boolean) - reports_config = Column(JSONType) - creator_metadata = Column(JSONType) + reports_config: Mapped[Optional[bytes]] = mapped_column(JSONType) + creator_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType) license: Mapped[Optional[str]] = 
mapped_column(TEXT) - source_metadata = Column(JSONType) + source_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType) uuid: Mapped[Optional[str]] = mapped_column(UUIDType, nullable=True) steps = relationship( @@ -7813,12 +7813,12 @@ class WorkflowStep(Base, RepresentById): type: Mapped[Optional[str]] = mapped_column(String(64)) tool_id: Mapped[Optional[str]] = mapped_column(TEXT) tool_version: Mapped[Optional[str]] = mapped_column(TEXT) - tool_inputs = Column(JSONType) - tool_errors = Column(JSONType) - position = Column(MutableJSONType) - config = Column(JSONType) + tool_inputs: Mapped[Optional[bytes]] = mapped_column(JSONType) + tool_errors: Mapped[Optional[bytes]] = mapped_column(JSONType) + position: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + config: Mapped[Optional[bytes]] = mapped_column(JSONType) order_index: Mapped[Optional[int]] = mapped_column(Integer) - when_expression = Column(JSONType) + when_expression: Mapped[Optional[bytes]] = mapped_column(JSONType) uuid: Mapped[Optional[str]] = mapped_column(UUIDType) label: Mapped[Optional[str]] = mapped_column(Unicode(255)) temp_input_connections = None @@ -8124,9 +8124,9 @@ class WorkflowStepInput(Base, RepresentById): name: Mapped[Optional[str]] = mapped_column(TEXT) merge_type: Mapped[Optional[str]] = mapped_column(TEXT) scatter_type: Mapped[Optional[str]] = mapped_column(TEXT) - value_from = Column(MutableJSONType) + value_from: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) value_from_type: Mapped[Optional[str]] = mapped_column(TEXT) - default_value = Column(MutableJSONType) + default_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) default_value_set: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) runtime_value: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) @@ -8267,11 +8267,11 @@ class WorkflowComment(Base, RepresentById): id: Mapped[int] = mapped_column(Integer, primary_key=True) order_index: Mapped[Optional[int]] = 
mapped_column(Integer) workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - position = Column(MutableJSONType) - size = Column(JSONType) + position: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + size: Mapped[Optional[bytes]] = mapped_column(JSONType) type: Mapped[Optional[str]] = mapped_column(String(16)) color: Mapped[Optional[str]] = mapped_column(String(16)) - data = Column(JSONType) + data: Mapped[Optional[bytes]] = mapped_column(JSONType) parent_comment_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_comment.id"), nullable=True) workflow = relationship( @@ -9036,7 +9036,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True ) - action = Column(MutableJSONType, nullable=True) + action: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) workflow_step = relationship("WorkflowStep") job = relationship("Job", back_populates="workflow_invocation_step", uselist=False) @@ -9271,7 +9271,7 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable): Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) - value = Column(MutableJSONType) + value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") workflow_invocation = relationship("WorkflowInvocation", back_populates="step_states") @@ -9355,7 +9355,7 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): Integer, ForeignKey("workflow_invocation.id"), index=True ) workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) - parameter_value = Column(MutableJSONType) + parameter_value: 
Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") workflow_invocation = relationship("WorkflowInvocation", back_populates="input_step_parameters") @@ -9444,7 +9444,7 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): ) workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id"), index=True) - value = Column(MutableJSONType) + value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_values") @@ -9611,9 +9611,9 @@ class FormDefinition(Base, Dictifiable, RepresentById): form_definition_current_id: Mapped[int] = mapped_column( Integer, ForeignKey("form_definition_current.id", use_alter=True), index=True, nullable=False ) - fields = Column(MutableJSONType) + fields: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) - layout = Column(MutableJSONType) + layout: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition_current = relationship( "FormDefinitionCurrent", back_populates="forms", @@ -9703,7 +9703,7 @@ class FormValues(Base, RepresentById): create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) - content = Column(MutableJSONType) + content: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition = relationship( "FormDefinition", primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) ) @@ -9931,7 +9931,7 @@ class UserAuthnzToken(Base, UserMixin, RepresentById): user_id: Mapped[Optional[int]] = 
mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) uid: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) provider: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) - extra_data: Mapped[Optional[str]] = mapped_column(MutableJSONType, nullable=True) + extra_data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) lifetime: Mapped[Optional[int]] = mapped_column(Integer) assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) user = relationship("User", back_populates="social_auth") @@ -10108,9 +10108,9 @@ class CloudAuthz(Base): id: Mapped[int] = mapped_column(Integer, primary_key=True) user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) provider: Mapped[Optional[str]] = mapped_column(String(255)) - config = Column(MutableJSONType) + config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) authn_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("oidc_user_authnz_tokens.id"), index=True) - tokens = Column(MutableJSONType) + tokens: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) last_update: Mapped[Optional[datetime]] = mapped_column(DateTime) last_activity: Mapped[Optional[datetime]] = mapped_column(DateTime) description: Mapped[Optional[str]] = mapped_column(TEXT) @@ -10379,7 +10379,7 @@ class VisualizationRevision(Base, RepresentById): visualization_id: Mapped[int] = mapped_column(Integer, ForeignKey("visualization.id"), index=True, nullable=False) title: Mapped[Optional[str]] = mapped_column(TEXT) dbkey: Mapped[Optional[str]] = mapped_column(TEXT) - config = Column(MutableJSONType) + config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) visualization = relationship( "Visualization", back_populates="revisions", diff --git a/lib/galaxy/model/deferred.py b/lib/galaxy/model/deferred.py index e72d4e126aca..18f5804375c8 100644 --- a/lib/galaxy/model/deferred.py +++ b/lib/galaxy/model/deferred.py @@ -174,12 +174,12 @@ def 
ensure_materialized( def _stream_source(self, target_source: DatasetSource, datatype) -> str: path = stream_url_to_file(target_source.source_uri, file_sources=self._file_sources) - transform = target_source.transform or [] + transform = target_source.transform or [] # type:ignore[var-annotated] to_posix_lines = False spaces_to_tabs = False datatype_groom = False for transform_action in transform: - action = transform_action["action"] + action = transform_action["action"] # type:ignore[index] if action == "to_posix_lines": to_posix_lines = True elif action == "spaces_to_tabs": diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py index 4ec5d103158d..05b30ec97ab4 100644 --- a/lib/galaxy/model/store/discover.py +++ b/lib/galaxy/model/store/discover.py @@ -463,7 +463,7 @@ def override_object_store_id(self, output_name: Optional[str] = None) -> Optiona return None default_object_store_id = job.object_store_id object_store_id_overrides = job.object_store_id_overrides or {} - return object_store_id_overrides.get(output_name, default_object_store_id) + return object_store_id_overrides.get(output_name, default_object_store_id) # type:ignore[union-attr] @property @abc.abstractmethod diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py index a345abde954a..9c7412156499 100644 --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -104,7 +104,7 @@ def handle_library_params( template = session.get(FormDefinition, template_id) assert template for field in template.fields: - field_name = field["name"] + field_name = field["name"] # type:ignore[index] if params.get(field_name, False): field_value = util.restore_text(params.get(field_name, "")) template_field_contents[field_name] = field_value diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py index 029b1e7630c6..a45e7e441ec0 100644 --- a/lib/galaxy/workflow/run_request.py +++ 
b/lib/galaxy/workflow/run_request.py @@ -113,7 +113,7 @@ def _normalize_inputs( elif inputs_by_el == "step_uuid": possible_input_keys.append(str(step.uuid)) elif inputs_by_el == "name": - possible_input_keys.append(step.label or step.tool_inputs.get("name")) + possible_input_keys.append(step.label or step.tool_inputs.get("name")) # type:ignore[union-attr] else: raise exceptions.MessageException( "Workflow cannot be run because unexpected inputs_by value specified." diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 4181757d81f0..00379b0b9ce4 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -365,7 +365,7 @@ class Repository(Base, Dictifiable): user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) private: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) - email_alerts = Column(MutableJSONType, nullable=True) + email_alerts: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) times_downloaded: Mapped[Optional[int]] = mapped_column(Integer) deprecated: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) categories = relationship("RepositoryCategoryAssociation", back_populates="repository") From eb1aa47861a5a693fdec4ff4c5c8a2576cb99b2a Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 19 Jan 2024 10:45:58 -0500 Subject: [PATCH 07/71] Use correct type hints to define common model attrs --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 985ad66c484b..3421ef8967a6 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -261,7 +261,7 @@ def __declare_last__(cls): class RepresentById: - id: int + id: Mapped[int] def __repr__(self): try: @@ -420,7 +420,7 @@ def 
get_display_name(self): class UsesCreateAndUpdateTime: - update_time: DateTime + update_time: Mapped[Optional[datetime]] @property def seconds_since_updated(self): From ab406e7e6b23fc8f945f19c58cdc798f1756da59 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 16 Jan 2024 18:45:39 -0500 Subject: [PATCH 08/71] Start applying Mapped to relationship definitions in the model --- lib/galaxy/model/__init__.py | 76 +++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 31 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3421ef8967a6..156c28ab0a3b 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -307,7 +307,7 @@ def set_datatypes_registry(d_registry): class HasTags: dict_collection_visible_keys = ["tags"] dict_element_visible_keys = ["tags"] - tags: List["ItemTagAssociation"] + tags: Mapped[List["ItemTagAssociation"]] def to_dict(self, *args, **kwargs): rval = super().to_dict(*args, **kwargs) @@ -704,29 +704,33 @@ class User(Base, Dictifiable, RepresentById): active: Mapped[bool] = mapped_column(Boolean, index=True, default=True, nullable=False) activation_token: Mapped[Optional[str]] = mapped_column(TrimmedString(64), nullable=True, index=True) - addresses = relationship( + addresses: Mapped[List["UserAddress"]] = relationship( "UserAddress", back_populates="user", order_by=lambda: desc(UserAddress.update_time), cascade_backrefs=False ) cloudauthz = relationship("CloudAuthz", back_populates="user") custos_auth = relationship("CustosAuthnzToken", back_populates="user") - default_permissions = relationship("DefaultUserPermissions", back_populates="user") - groups = relationship("UserGroupAssociation", back_populates="user") - histories = relationship( + default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship( + "DefaultUserPermissions", back_populates="user" + ) + groups: Mapped[List["UserGroupAssociation"]] = relationship("UserGroupAssociation", 
back_populates="user") + histories: Mapped[List["History"]] = relationship( "History", back_populates="user", order_by=lambda: desc(History.update_time), cascade_backrefs=False # type: ignore[has-type] ) - active_histories = relationship( + active_histories: Mapped[List["History"]] = relationship( "History", primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), # type: ignore[has-type] viewonly=True, order_by=lambda: desc(History.update_time), # type: ignore[has-type] ) - galaxy_sessions = relationship( + galaxy_sessions: Mapped[List["GalaxySession"]] = relationship( "GalaxySession", back_populates="user", order_by=lambda: desc(GalaxySession.update_time), cascade_backrefs=False # type: ignore[has-type] ) - quotas = relationship("UserQuotaAssociation", back_populates="user") - quota_source_usages = relationship("UserQuotaSourceUsage", back_populates="user") + quotas: Mapped[List["UserQuotaAssociation"]] = relationship("UserQuotaAssociation", back_populates="user") + quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship( + "UserQuotaSourceUsage", back_populates="user" + ) social_auth = relationship("UserAuthnzToken", back_populates="user") - stored_workflow_menu_entries = relationship( + stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship( "StoredWorkflowMenuEntry", primaryjoin=( lambda: (StoredWorkflowMenuEntry.user_id == User.id) @@ -737,12 +741,14 @@ class User(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", collection_class=ordering_list("order_index"), ) - _preferences = relationship("UserPreference", collection_class=attribute_mapped_collection("name")) - values = relationship( + _preferences: Mapped[List["UserPreference"]] = relationship( + "UserPreference", collection_class=attribute_mapped_collection("name") + ) + values: Mapped[List["FormValues"]] = relationship( "FormValues", primaryjoin=(lambda: User.form_values_id == FormValues.id) # 
type: ignore[has-type] ) # Add type hint (will this work w/SA?) - api_keys = relationship( + api_keys: Mapped[List["APIKeys"]] = relationship( "APIKeys", back_populates="user", order_by=lambda: desc(APIKeys.create_time), @@ -753,16 +759,20 @@ class User(Base, Dictifiable, RepresentById): ) ), ) - data_manager_histories = relationship("DataManagerHistoryAssociation", back_populates="user") - roles = relationship("UserRoleAssociation", back_populates="user") - stored_workflows = relationship( + data_manager_histories: Mapped[List["DataManagerHistoryAssociation"]] = relationship( + "DataManagerHistoryAssociation", back_populates="user" + ) + roles: Mapped[List["UserRoleAssociation"]] = relationship("UserRoleAssociation", back_populates="user") + stored_workflows: Mapped[List["StoredWorkflow"]] = relationship( "StoredWorkflow", back_populates="user", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), # type: ignore[has-type] cascade_backrefs=False, ) - all_notifications = relationship("UserNotificationAssociation", back_populates="user", cascade_backrefs=False) - non_private_roles = relationship( + all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship( + "UserNotificationAssociation", back_populates="user", cascade_backrefs=False + ) + non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship( "UserRoleAssociation", viewonly=True, primaryjoin=( @@ -3052,7 +3062,9 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] viewonly=True, ) - tags = relationship("HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history") + tags: Mapped[List["HistoryTagAssociation"]] = relationship( + "HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history" + ) annotations = relationship( "HistoryAnnotationAssociation", order_by=lambda: HistoryAnnotationAssociation.id, 
back_populates="history" ) @@ -3550,7 +3562,7 @@ def __filter_contents(self, content_class, **kwds): class UserShareAssociation(RepresentById): - user: Optional[User] + user: Mapped[User] class HistoryUserShareAssociation(Base, UserShareAssociation): @@ -3559,7 +3571,7 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(Integer, primary_key=True) history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + user: Mapped[User] = relationship("User") history = relationship("History", back_populates="users_shared_with") @@ -6768,7 +6780,7 @@ class HistoryDatasetCollectionAssociation( back_populates="history_dataset_collection_associations", uselist=False, ) - tags = relationship( + tags: Mapped[List["HistoryDatasetCollectionTagAssociation"]] = relationship( "HistoryDatasetCollectionTagAssociation", order_by=lambda: HistoryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", @@ -7114,7 +7126,7 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre collection = relationship("DatasetCollection") folder = relationship("LibraryFolder") - tags = relationship( + tags: Mapped[List["LibraryDatasetCollectionTagAssociation"]] = relationship( "LibraryDatasetCollectionTagAssociation", order_by=lambda: LibraryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", @@ -7474,12 +7486,12 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): primaryjoin=(lambda: StoredWorkflow.latest_workflow_id == Workflow.id), # type: ignore[has-type] lazy=False, ) - tags = relationship( + tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( "StoredWorkflowTagAssociation", order_by=lambda: StoredWorkflowTagAssociation.id, back_populates="stored_workflow", ) - owner_tags = relationship( + owner_tags: 
Mapped[List["StoredWorkflowTagAssociation"]] = relationship( "StoredWorkflowTagAssociation", primaryjoin=( lambda: and_( @@ -7836,7 +7848,7 @@ class WorkflowStep(Base, RepresentById): back_populates="parent_workflow_steps", ) dynamic_tool = relationship("DynamicTool", primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id)) - tags = relationship( + tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( "WorkflowStepTagAssociation", order_by=lambda: WorkflowStepTagAssociation.id, back_populates="workflow_step" ) annotations = relationship( @@ -8338,7 +8350,7 @@ class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(Integer, primary_key=True) stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + user: Mapped[User] = relationship("User") stored_workflow = relationship("StoredWorkflow", back_populates="users_shared_with") @@ -10167,7 +10179,9 @@ class Page(Base, HasTags, Dictifiable, RepresentById): primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), # type: ignore[has-type] lazy=False, ) - tags = relationship("PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page") + tags: Mapped[List["PageTagAssociation"]] = relationship( + "PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page" + ) annotations = relationship( "PageAnnotationAssociation", order_by=lambda: PageAnnotationAssociation.id, back_populates="page" ) @@ -10248,7 +10262,7 @@ class PageUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(Integer, primary_key=True) page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = 
relationship("User") + user: Mapped[User] = relationship("User") page = relationship("Page", back_populates="users_shared_with") @@ -10290,7 +10304,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById): primaryjoin=(lambda: Visualization.latest_revision_id == VisualizationRevision.id), lazy=False, ) - tags = relationship( + tags: Mapped[List["VisualizationTagAssociation"]] = relationship( "VisualizationTagAssociation", order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization" ) annotations = relationship( @@ -10404,7 +10418,7 @@ class VisualizationUserShareAssociation(Base, UserShareAssociation): id: Mapped[int] = mapped_column(Integer, primary_key=True) visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + user: Mapped[User] = relationship("User") visualization = relationship("Visualization", back_populates="users_shared_with") From 3441e9609637e6c805c76d82a6026c686d4dab57 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 16:10:42 -0500 Subject: [PATCH 09/71] Remove column declaration from HasTags parent class --- lib/galaxy/model/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 156c28ab0a3b..5f3f83a7edca 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -307,7 +307,6 @@ def set_datatypes_registry(d_registry): class HasTags: dict_collection_visible_keys = ["tags"] dict_element_visible_keys = ["tags"] - tags: Mapped[List["ItemTagAssociation"]] def to_dict(self, *args, **kwargs): rval = super().to_dict(*args, **kwargs) From 84f9fb7e18d72a3c5490865796293c9b17b34db0 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 19 Dec 2023 13:44:25 -0500 Subject: [PATCH 10/71] Fix SA2.0 error: wrap sql in text() --- 
lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5f3f83a7edca..d7d1f3f73518 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -942,7 +942,7 @@ def get_disk_usage(self, nice_size=False, quota_source_label=None): "user_id": self.id, "label": quota_source_label, } - row = sa_session.execute(statement, params).fetchone() + row = sa_session.execute(text(statement), params).fetchone() if row is not None: rval = row[0] else: From 595af2efabf08161c0128f70ca1abe2b6b31b218 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 19 Dec 2023 15:43:49 -0500 Subject: [PATCH 11/71] Fix SA2.0 error: pass bind to create_all --- lib/tool_shed/webapp/model/mapping.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/tool_shed/webapp/model/mapping.py b/lib/tool_shed/webapp/model/mapping.py index 0da07ef195c1..0ef27ff7f8b9 100644 --- a/lib/tool_shed/webapp/model/mapping.py +++ b/lib/tool_shed/webapp/model/mapping.py @@ -35,13 +35,11 @@ def init( engine_options = engine_options or {} # Create the database engine engine = build_engine(url, engine_options) - # Connect the metadata to the database. 
- metadata.bind = engine result = ToolShedModelMapping([tool_shed.webapp.model], engine=engine) if create_tables: - metadata.create_all() + metadata.create_all(bind=engine) result.create_tables = create_tables From d08a047b508a27b5d2384b17b61a4950855781da Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 19 Dec 2023 15:57:59 -0500 Subject: [PATCH 12/71] Fix SA2.0 error: use Row._mapping for keyed attribute access Ref: https://docs.sqlalchemy.org/en/20/changelog/migration_20.html#result-rows-act-like-named-tuples --- test/unit/app/managers/test_NotificationManager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/unit/app/managers/test_NotificationManager.py b/test/unit/app/managers/test_NotificationManager.py index 82b339f05792..c8c011160a83 100644 --- a/test/unit/app/managers/test_NotificationManager.py +++ b/test/unit/app/managers/test_NotificationManager.py @@ -264,8 +264,8 @@ def test_get_user_notification(self): actual_user_notification = self.notification_manager.get_user_notification(user, notification.id) self._assert_notification_expected(actual_user_notification, expected_user_notification) - assert actual_user_notification["seen_time"] is None - assert actual_user_notification["deleted"] is False + assert actual_user_notification._mapping["seen_time"] is None + assert actual_user_notification._mapping["deleted"] is False def test_update_user_notifications(self): user = self._create_test_user() From 03ad73ccecb4616ac47cb629018c8aaa14500272 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 20 Dec 2023 11:39:46 -0500 Subject: [PATCH 13/71] Fix SA2.0 error: show password in url SA 1.4: str(url) renders connection string with password SA 2.0: str(url) renders connection string WITHOUT password Solution: Use render_as_string(hide_password=False) --- lib/galaxy/model/unittest_utils/model_testing_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/unittest_utils/model_testing_utils.py 
b/lib/galaxy/model/unittest_utils/model_testing_utils.py index 3f13dd79f731..2f8d71babc54 100644 --- a/lib/galaxy/model/unittest_utils/model_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/model_testing_utils.py @@ -263,4 +263,4 @@ def _make_sqlite_db_url(tmpdir: str, database: str) -> DbUrl: def _make_postgres_db_url(connection_url: DbUrl, database: str) -> DbUrl: url = make_url(connection_url) url = url.set(database=database) - return DbUrl(str(url)) + return DbUrl(url.render_as_string(hide_password=False)) From 0c46858a57c59f0f75cb892b1a31f8904a0f1387 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 11:52:45 -0500 Subject: [PATCH 14/71] Fix SA2.0 error: use attribute_keyed_dict Replaces attribute_mapped_collection (SA20) --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index d7d1f3f73518..ffd1d37540bd 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -113,7 +113,7 @@ relationship, ) from sqlalchemy.orm.attributes import flag_modified -from sqlalchemy.orm.collections import attribute_mapped_collection +from sqlalchemy.orm.collections import attribute_keyed_dict from sqlalchemy.sql import exists from typing_extensions import ( Literal, @@ -741,7 +741,7 @@ class User(Base, Dictifiable, RepresentById): collection_class=ordering_list("order_index"), ) _preferences: Mapped[List["UserPreference"]] = relationship( - "UserPreference", collection_class=attribute_mapped_collection("name") + "UserPreference", collection_class=attribute_keyed_dict("name") ) values: Mapped[List["FormValues"]] = relationship( "FormValues", primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] From b9d2886dacad699dae7fce83a5cc9e1e3251ec02 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 13:06:09 -0500 Subject: [PATCH 15/71] Fix SA2.0 error: make select stmt a subquery Rename variable to
fix mypy --- lib/galaxy/managers/jobs.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 102d8b04f591..97073e9df46d 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -360,7 +360,7 @@ def replace_dataset_ids(path, key, value): # build one subquery that selects a job with correct job parameters - subq = select(model.Job.id).where( + stmt = select(model.Job.id).where( and_( model.Job.tool_id == tool_id, model.Job.user_id == user.id, @@ -368,22 +368,22 @@ def replace_dataset_ids(path, key, value): ) ) if tool_version: - subq = subq.where(Job.tool_version == str(tool_version)) + stmt = stmt.where(Job.tool_version == str(tool_version)) if job_state is None: - subq = subq.where( + stmt = stmt.where( Job.state.in_( [Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, Job.states.RUNNING, Job.states.OK] ) ) else: if isinstance(job_state, str): - subq = subq.where(Job.state == job_state) + stmt = stmt.where(Job.state == job_state) elif isinstance(job_state, list): - subq = subq.where(or_(*[Job.state == s for s in job_state])) + stmt = stmt.where(or_(*[Job.state == s for s in job_state])) # exclude jobs with deleted outputs - subq = subq.where( + stmt = stmt.where( and_( model.Job.any_output_dataset_collection_instances_deleted == false(), model.Job.any_output_dataset_deleted == false(), @@ -403,7 +403,7 @@ def replace_dataset_ids(path, key, value): wildcard_value = value_dump.replace('"id": "__id_wildcard__"', '"id": %') a = aliased(JobParameter) if value_dump == wildcard_value: - subq = subq.join(a).where( + stmt = stmt.join(a).where( and_( Job.id == a.job_id, a.name == k, @@ -411,7 +411,7 @@ def replace_dataset_ids(path, key, value): ) ) else: - subq = subq.join(a).where( + stmt = stmt.join(a).where( and_( Job.id == a.job_id, a.name == k, @@ -419,7 +419,8 @@ def replace_dataset_ids(path, key, value): ) ) - query = 
select(Job.id).select_from(Job.table.join(subq, subq.c.id == Job.id)) + stmt_sq = stmt.subquery() + query = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) data_conditions = [] From 6b3bca48b5df8ee3d926b0b6933972dd88fc1ca1 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 7 Feb 2024 14:16:46 -0500 Subject: [PATCH 16/71] Fix SA2.0 error: explicitly use subquery() for select-from argument --- lib/galaxy/model/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index ffd1d37540bd..3cdfa1f3eb4b 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1216,8 +1216,8 @@ def quota_source_usage_for(self, quota_source_label: Optional[str]) -> Optional[ return None def count_stored_workflow_user_assocs(self, stored_workflow) -> int: - stmt = select(StoredWorkflowUserShareAssociation).filter_by(user=self, stored_workflow=stored_workflow) - stmt = select(func.count()).select_from(stmt) + sq = select(StoredWorkflowUserShareAssociation).filter_by(user=self, stored_workflow=stored_workflow).subquery() + stmt = select(func.count()).select_from(sq) session = object_session(self) return session.scalar(stmt) From d478e0800d48d7b52e6fdd9e87f5f3d66ed3dc5e Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 12 Feb 2024 13:41:12 -0500 Subject: [PATCH 17/71] Fix SA2.0 error: replace session.bind with session.get_bind() --- lib/galaxy/quota/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/quota/__init__.py b/lib/galaxy/quota/__init__.py index 0c81d75f9466..3f0b55cda645 100644 --- a/lib/galaxy/quota/__init__.py +++ b/lib/galaxy/quota/__init__.py @@ -4,7 +4,6 @@ from typing import Optional from sqlalchemy import select -from sqlalchemy.orm import object_session from sqlalchemy.sql import text import galaxy.util @@ -198,7 +197,7 @@ def relabel_quota_for_dataset(self, dataset, from_label: Optional[str], 
to_label WHERE dataset_id = :dataset_id )""" - engine = object_session(dataset).bind + engine = self.sa_session.get_bind() # Hack for older sqlite, would work on newer sqlite - 3.24.0 for_sqlite = "sqlite" in engine.dialect.name From 2d49d6995ef09ab35a0c17624fe7d8802152c70a Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 12 Feb 2024 14:18:02 -0500 Subject: [PATCH 18/71] Fix SA2.0 error: joinedload does not take str args --- scripts/cleanup_datasets/cleanup_datasets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/cleanup_datasets/cleanup_datasets.py b/scripts/cleanup_datasets/cleanup_datasets.py index a373e551ff14..81fd7801f5da 100755 --- a/scripts/cleanup_datasets/cleanup_datasets.py +++ b/scripts/cleanup_datasets/cleanup_datasets.py @@ -261,7 +261,7 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_r histories = ( app.sa_session.query(app.model.History) .filter(and_(app.model.History.__table__.c.deleted == true(), app.model.History.update_time < cutoff_time)) - .options(joinedload("datasets")) + .options(joinedload(app.model.History.datasets)) ) else: histories = ( @@ -273,7 +273,7 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_r app.model.History.update_time < cutoff_time, ) ) - .options(joinedload("datasets")) + .options(joinedload(app.model.History.datasets)) ) for history in histories: log.info("### Processing history id %d (%s)", history.id, unicodify(history.name)) From fb526d45d3d19d1f2065160d1d0055eff11a6549 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 17 Jan 2024 10:42:33 -0500 Subject: [PATCH 19/71] Fix use of table model attribute - Use __table__ (SA attr) instead of table (galaxy attr) on mapped classes - Drop .table and .table.c where redundant --- lib/galaxy/model/__init__.py | 22 ++++----- lib/galaxy/model/item_attrs.py | 6 +-- .../galaxy_install/install_manager.py | 2 +- .../installed_repository_metadata_manager.py | 8 ++-- 
.../update_repository_manager.py | 2 +- lib/galaxy/tool_shed/util/repository_util.py | 26 +++++------ lib/galaxy/web/framework/helpers/grids.py | 2 +- lib/galaxy/web/legacy_framework/grids.py | 10 ++-- .../galaxy/services/tool_shed_repositories.py | 2 +- lib/tool_shed/grids/repository_grids.py | 46 +++++++++---------- lib/tool_shed/webapp/model/__init__.py | 2 + 11 files changed, 63 insertions(+), 65 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3cdfa1f3eb4b..24701f176c29 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1700,7 +1700,7 @@ def set_state(self, state: JobState) -> bool: if session and self.id and state not in Job.finished_states: # generate statement that will not revert DELETING or DELETED back to anything non-terminal rval = session.execute( - update(Job.table) + update(Job) .where(Job.id == self.id, ~Job.state.in_((Job.states.DELETING, Job.states.DELETED))) .values(state=state) ) @@ -1941,12 +1941,12 @@ def update_hdca_update_time_for_job(self, update_time, sa_session, supports_skip subq = subq.with_for_update(skip_locked=True).subquery() implicit_statement = ( HistoryDatasetCollectionAssociation.table.update() - .where(HistoryDatasetCollectionAssociation.table.c.id.in_(select(subq))) + .where(HistoryDatasetCollectionAssociation.id.in_(select(subq))) .values(update_time=update_time) ) explicit_statement = ( HistoryDatasetCollectionAssociation.table.update() - .where(HistoryDatasetCollectionAssociation.table.c.job_id == self.id) + .where(HistoryDatasetCollectionAssociation.job_id == self.id) .values(update_time=update_time) ) sa_session.execute(explicit_statement) @@ -3429,17 +3429,17 @@ def disk_size(cls): # .expression acts as a column_property and should return a scalar # first, get the distinct datasets within a history that are not purged hda_to_dataset_join = join( - HistoryDatasetAssociation, Dataset, HistoryDatasetAssociation.table.c.dataset_id == Dataset.table.c.id 
+ HistoryDatasetAssociation, Dataset, HistoryDatasetAssociation.dataset_id == Dataset.id ) distinct_datasets = ( select( # use labels here to better access from the query above - HistoryDatasetAssociation.table.c.history_id.label("history_id"), + HistoryDatasetAssociation.history_id.label("history_id"), Dataset.total_size.label("dataset_size"), Dataset.id.label("dataset_id"), ) - .where(HistoryDatasetAssociation.table.c.purged != true()) - .where(Dataset.table.c.purged != true()) + .where(HistoryDatasetAssociation.purged != true()) + .where(Dataset.purged != true()) .select_from(hda_to_dataset_join) # TODO: slow (in general) but most probably here - index total_size for easier sorting/distinct? .distinct() @@ -8498,7 +8498,7 @@ def set_state(self, state: InvocationState): if session and self.id and state not in priority_states: # generate statement that will not revert CANCELLING or CANCELLED back to anything non-terminal session.execute( - update(WorkflowInvocation.table) + update(WorkflowInvocation) .where( WorkflowInvocation.id == self.id, or_(~WorkflowInvocation.state.in_(priority_states), WorkflowInvocation.state.is_(None)), @@ -8527,7 +8527,7 @@ def cancel_invocation_steps(self): .filter(~Job.state.in_(Job.finished_states)) .with_for_update() ) - sa_session.execute(update(Job.table).where(Job.id.in_(job_subq)).values({"state": Job.states.DELETING})) + sa_session.execute(update(Job).where(Job.id.in_(job_subq)).values({"state": Job.states.DELETING})) job_collection_subq = ( select(Job.id) @@ -8543,9 +8543,7 @@ def cancel_invocation_steps(self): ) sa_session.execute( - update(Job.table) - .where(Job.table.c.id.in_(job_collection_subq.element)) - .values({"state": Job.states.DELETING}) + update(Job).where(Job.id.in_(job_collection_subq.element)).values({"state": Job.states.DELETING}) ) for invocation in self.subworkflow_invocations: diff --git a/lib/galaxy/model/item_attrs.py b/lib/galaxy/model/item_attrs.py index e361d5458441..37a467f5dde7 100644 --- 
a/lib/galaxy/model/item_attrs.py +++ b/lib/galaxy/model/item_attrs.py @@ -180,12 +180,12 @@ def _get_annotation_assoc_class(item): def get_foreign_key(source_class, target_class): """Returns foreign key in source class that references target class.""" target_fk = None - for fk in source_class.table.foreign_keys: - if fk.references(target_class.table): + for fk in source_class.__table__.foreign_keys: + if fk.references(target_class.__table__): target_fk = fk break if not target_fk: - raise Exception(f"No foreign key found between objects: {source_class.table}, {target_class.table}") + raise Exception(f"No foreign key found between objects: {source_class.__table__}, {target_class.__table__}") return target_fk diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py index d97a586c08f0..9c45ea79e7fc 100644 --- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py @@ -328,7 +328,7 @@ def initiate_repository_installation(self, installation_dict): tool_shed_repositories.append(tsr) clause_list = [] for tsr_id in tsr_ids: - clause_list.append(self.install_model.ToolShedRepository.table.c.id == tsr_id) + clause_list.append(self.install_model.ToolShedRepository.id == tsr_id) query = self.install_model.context.query(self.install_model.ToolShedRepository).filter(or_(*clause_list)) return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index aea14cacdd93..bf7e3ccbd33e 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -86,15 +86,15 @@ def get_query_for_setting_metadata_on_repositories(self, order=True): if 
order: return ( self.app.install_model.context.query(self.app.install_model.ToolShedRepository) - .filter(self.app.install_model.ToolShedRepository.table.c.uninstalled == false()) + .filter(self.app.install_model.ToolShedRepository.uninstalled == false()) .order_by( - self.app.install_model.ToolShedRepository.table.c.name, - self.app.install_model.ToolShedRepository.table.c.owner, + self.app.install_model.ToolShedRepository.name, + self.app.install_model.ToolShedRepository.owner, ) ) else: return self.app.install_model.context.query(self.app.install_model.ToolShedRepository).filter( - self.app.install_model.ToolShedRepository.table.c.uninstalled == false() + self.app.install_model.ToolShedRepository.uninstalled == false() ) def get_repository_tools_tups(self): diff --git a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py index 42f86250f563..e7ac8399b88b 100644 --- a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py @@ -75,7 +75,7 @@ def __restarter(self) -> None: # the repository revision is the latest installable revision, and whether the repository # has been deprecated in the Tool Shed. 
for repository in self.context.query(self.app.install_model.ToolShedRepository).filter( - self.app.install_model.ToolShedRepository.table.c.deleted == false() + self.app.install_model.ToolShedRepository.deleted == false() ): tool_shed_status_dict = get_tool_shed_status_for_installed_repository(self.app, repository) if tool_shed_status_dict: diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index 11e9a675039d..dc5734a089aa 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -48,9 +48,7 @@ def check_for_updates( success_count = 0 repository_names_not_updated = [] updated_count = 0 - for repository in install_model_context.query(ToolShedRepository).filter( - ToolShedRepository.table.c.deleted == false() - ): + for repository in install_model_context.query(ToolShedRepository).filter(ToolShedRepository.deleted == false()): ok, updated = _check_or_update_tool_shed_status_for_installed_repository( tool_shed_registry, install_model_context, repository ) @@ -298,18 +296,18 @@ def get_installed_repository( ) query = app.install_model.context.query(app.install_model.ToolShedRepository) if repository_id: - clause_list = [app.install_model.ToolShedRepository.table.c.id == repository_id] + clause_list = [app.install_model.ToolShedRepository.id == repository_id] else: clause_list = [ - app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed, - app.install_model.ToolShedRepository.table.c.name == name, - app.install_model.ToolShedRepository.table.c.owner == owner, + app.install_model.ToolShedRepository.tool_shed == tool_shed, + app.install_model.ToolShedRepository.name == name, + app.install_model.ToolShedRepository.owner == owner, ] if changeset_revision is not None: - clause_list.append(app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision) + clause_list.append(app.install_model.ToolShedRepository.changeset_revision == 
changeset_revision) if installed_changeset_revision is not None: clause_list.append( - app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision + app.install_model.ToolShedRepository.installed_changeset_revision == installed_changeset_revision ) return query.filter(and_(*clause_list)).first() @@ -443,16 +441,16 @@ def get_repository_by_name_and_owner(app, name, owner, eagerload_columns=None): if is_tool_shed_client(app): return repository_query.filter( and_( - app.install_model.ToolShedRepository.table.c.name == name, - app.install_model.ToolShedRepository.table.c.owner == owner, + app.install_model.ToolShedRepository.name == name, + app.install_model.ToolShedRepository.owner == owner, ) ).first() # We're in the tool shed. q = repository_query.filter( and_( - app.model.Repository.table.c.name == name, - app.model.User.table.c.username == owner, - app.model.Repository.table.c.user_id == app.model.User.table.c.id, + app.model.Repository.name == name, + app.model.User.username == owner, + app.model.Repository.user_id == app.model.User.id, ) ) if eagerload_columns: diff --git a/lib/galaxy/web/framework/helpers/grids.py b/lib/galaxy/web/framework/helpers/grids.py index 98795511186d..ea01f1efd098 100644 --- a/lib/galaxy/web/framework/helpers/grids.py +++ b/lib/galaxy/web/framework/helpers/grids.py @@ -52,7 +52,7 @@ def sort(self, trans, query, ascending, column_name=None): column_name = self.key column = getattr(self.model_class, column_name) if column is None: - column = self.model_class.table.c.get(column_name) + column = self.model_class.__table__.c.get(column_name) if ascending: query = query.order_by(column.asc()) else: diff --git a/lib/galaxy/web/legacy_framework/grids.py b/lib/galaxy/web/legacy_framework/grids.py index 3b62e8665012..dcaff6fa12e6 100644 --- a/lib/galaxy/web/legacy_framework/grids.py +++ b/lib/galaxy/web/legacy_framework/grids.py @@ -112,9 +112,9 @@ def sort(self, trans, query, ascending, 
column_name=None): if column_name is None: column_name = self.key if ascending: - query = query.order_by(self.model_class.table.c.get(column_name).asc()) + query = query.order_by(self.model_class.__table__.c.get(column_name).asc()) else: - query = query.order_by(self.model_class.table.c.get(column_name).desc()) + query = query.order_by(self.model_class.__table__.c.get(column_name).desc()) return query @@ -165,9 +165,9 @@ def sort(self, trans, query, ascending, column_name=None): if column_name is None: column_name = self.key if ascending: - query = query.order_by(func.lower(self.model_class.table.c.get(column_name)).asc()) + query = query.order_by(func.lower(self.model_class.__table__.c.get(column_name)).asc()) else: - query = query.order_by(func.lower(self.model_class.table.c.get(column_name)).desc()) + query = query.order_by(func.lower(self.model_class.__table__.c.get(column_name)).desc()) return query @@ -236,7 +236,7 @@ def sort(self, trans, query, ascending, column_name=None): item_rating_assoc_class = getattr(trans.model, f"{self.model_class.__name__}RatingAssociation") foreign_key = get_foreign_key(item_rating_assoc_class, self.model_class) fk_col = foreign_key.parent - referent_col = foreign_key.get_referent(self.model_class.table) + referent_col = foreign_key.get_referent(self.model_class.__table__) # Do sorting using a subquery. # Subquery to get average rating for each item. 
ave_rating_subquery = ( diff --git a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py index 630ad5325bd9..f827c0ee999b 100644 --- a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py @@ -74,7 +74,7 @@ def _get_tool_shed_repositories(self, **kwd): stmt = select(ToolShedRepository) for key, value in kwd.items(): if value is not None: - column = ToolShedRepository.table.c[key] + column = ToolShedRepository.__table__.c[key] stmt = stmt.filter(column == value) stmt = stmt.order_by(ToolShedRepository.name).order_by(cast(ToolShedRepository.ctx_rev, Integer).desc()) session = self._install_model_context diff --git a/lib/tool_shed/grids/repository_grids.py b/lib/tool_shed/grids/repository_grids.py index 5c80c185ff38..691959f32916 100644 --- a/lib/tool_shed/grids/repository_grids.py +++ b/lib/tool_shed/grids/repository_grids.py @@ -237,7 +237,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES: @@ -247,7 +247,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) else: @@ -258,7 +258,7 @@ def build_initial_query(self, trans, **kwd): and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.deprecated == false()) ) .join(model.User.table) - 
.outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -458,7 +458,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES: @@ -473,7 +473,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) else: @@ -488,7 +488,7 @@ def build_initial_query(self, trans, **kwd): ) ) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -538,7 +538,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.Category.table.c.name == category.name) ) @@ -547,7 +547,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES: @@ 
-560,7 +560,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.Category.table.c.name == category.name) ) @@ -570,7 +570,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) else: @@ -587,7 +587,7 @@ def build_initial_query(self, trans, **kwd): ) ) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.Category.table.c.name == category.name) ) @@ -597,7 +597,7 @@ def build_initial_query(self, trans, **kwd): and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.deprecated == false()) ) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -632,7 +632,7 @@ def build_initial_query(self, trans, **kwd): and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.user_id == trans.user.id) ) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -680,11 +680,11 @@ def build_initial_query(self, trans, **kwd): return ( trans.sa_session.query(model.Repository) .filter(model.Repository.table.c.deleted == false()) - .outerjoin(model.RepositoryRoleAssociation.table) + .outerjoin(model.RepositoryRoleAssociation) .outerjoin(model.Role.table) 
.filter(or_(*clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -835,7 +835,7 @@ def build_initial_query(self, trans, **kwd): ) ) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) ) @@ -1477,11 +1477,11 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .join(model.RepositoryCategoryAssociation.table) + .join(model.RepositoryCategoryAssociation) .join(model.Category.table) .filter( and_( - model.Category.table.c.id == trans.security.decode_id(kwd["id"]), + model.Category.__table__.c.id == trans.security.decode_id(kwd["id"]), model.RepositoryMetadata.table.c.downloadable == true(), ) ) @@ -1493,7 +1493,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .join(model.RepositoryCategoryAssociation.table) + .join(model.RepositoryCategoryAssociation) .join(model.Category.table) .filter( and_( @@ -1513,7 +1513,7 @@ def build_initial_query(self, trans, **kwd): ) .join(model.RepositoryMetadata.table) .join(model.User.table) - .join(model.RepositoryCategoryAssociation.table) + .join(model.RepositoryCategoryAssociation) .join(model.Category.table) .filter( and_( @@ -1529,7 +1529,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.RepositoryMetadata.table.c.downloadable == true()) 
) @@ -1540,7 +1540,7 @@ def build_initial_query(self, trans, **kwd): .join(model.RepositoryMetadata.table) .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list)) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.RepositoryMetadata.table.c.downloadable == true()) ) @@ -1553,7 +1553,7 @@ def build_initial_query(self, trans, **kwd): ) .join(model.RepositoryMetadata.table) .join(model.User.table) - .outerjoin(model.RepositoryCategoryAssociation.table) + .outerjoin(model.RepositoryCategoryAssociation) .outerjoin(model.Category.table) .filter(model.RepositoryMetadata.table.c.downloadable == true()) ) diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 00379b0b9ce4..84dee54ccf93 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -81,6 +81,8 @@ class Base(metaclass=DeclarativeMeta): registry = mapper_registry metadata = mapper_registry.metadata __init__ = mapper_registry.constructor + table: Table + __table__: Table @classmethod def __declare_last__(cls): From e2ded65701b8eb32bfa02893ea36c1f87e9f7f66 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 19 Jan 2024 12:08:29 -0500 Subject: [PATCH 20/71] Fix bug: fix HistoryAudit model It is not a child of RepresentById because it does not and should not have an id attr. Duplicating the __repr__ definition in the HistoryAudit class is a temporary fix: a proper fix requires changing all models (id and __repr__ should be split into 2 mixins): to be done in a follow-up PR.
--- lib/galaxy/model/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 24701f176c29..3320a8297b4f 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2943,7 +2943,7 @@ def is_hda(d): return isinstance(d, HistoryDatasetAssociation) -class HistoryAudit(Base, RepresentById): +class HistoryAudit(Base): __tablename__ = "history_audit" __table_args__ = (PrimaryKeyConstraint(sqlite_on_conflict="IGNORE"),) @@ -2954,6 +2954,14 @@ class HistoryAudit(Base): # See https://github.com/galaxyproject/galaxy/pull/11914 for details. __init__ = None # type: ignore[assignment] + def __repr__(self): + try: + r = f"<galaxy.model.{self.__class__.__name__}({cached_id(self)}) at {hex(id(self))}>" + except Exception: + r = object.__repr__(self) + log.exception("Caught exception attempting to generate repr for: %s", r) + return r + @classmethod def prune(cls, sa_session): latest_subq = ( From d679453feea2401e6c64f1ac440989477923efae Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 5 Feb 2024 11:59:16 -0500 Subject: [PATCH 21/71] Fix bug: check if template.fields is not null before iterating --- lib/galaxy/tools/actions/upload_common.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py index 9c7412156499..ab7cc544f787 100644 --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -102,12 +102,12 @@ def handle_library_params( template: Optional[FormDefinition] = None if template_id not in [None, "None"]: template = session.get(FormDefinition, template_id) - assert template - for field in template.fields: - field_name = field["name"] # type:ignore[index] - if params.get(field_name, False): - field_value = util.restore_text(params.get(field_name, "")) - template_field_contents[field_name] = field_value + if template and template.fields: + for field in template.fields:
+ field_name = field["name"] # type:ignore[index] + if params.get(field_name, False): + field_value = util.restore_text(params.get(field_name, "")) + template_field_contents[field_name] = field_value roles: List[Role] = [] for role_id in util.listify(params.get("roles", [])): role = session.get(Role, role_id) From 1551a23f80e77ae96386026a5e5a48d10a6f3dad Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 16:07:03 -0500 Subject: [PATCH 22/71] Fix bug: call unique() on result, not select stmt --- lib/galaxy/tools/actions/upload_common.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py index ab7cc544f787..b1f9a8f22a44 100644 --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -441,7 +441,6 @@ def active_folders(trans, folder): select(LibraryFolder) .filter_by(parent=folder, deleted=False) .options(joinedload(LibraryFolder.actions)) - .unique() .order_by(LibraryFolder.name) ) - return trans.sa_session.scalars(stmt).all() + return trans.sa_session.scalars(stmt).unique().all() From fbdfaa6027fe079e7a4f9e51e9563055f51206bf Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 16:12:56 -0500 Subject: [PATCH 23/71] Fix bug: do not pass subquery to in_ --- lib/galaxy/managers/notification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/managers/notification.py b/lib/galaxy/managers/notification.py index e2e78a793029..4f11742dec69 100644 --- a/lib/galaxy/managers/notification.py +++ b/lib/galaxy/managers/notification.py @@ -304,7 +304,7 @@ def cleanup_expired_notifications(self) -> CleanupResultSummary: expired_notifications_stmt = select(Notification.id).where(notification_has_expired) delete_stmt = delete(UserNotificationAssociation).where( - UserNotificationAssociation.notification_id.in_(expired_notifications_stmt.subquery()) + 
UserNotificationAssociation.notification_id.in_(expired_notifications_stmt) ) result = self.sa_session.execute(delete_stmt, execution_options={"synchronize_session": False}) deleted_associations_count = result.rowcount From 3883f34f996e5b88f1587055de76ce41dc905332 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 16:16:18 -0500 Subject: [PATCH 24/71] Fix bug/typo: use select_from --- lib/galaxy/webapps/galaxy/controllers/tag.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/webapps/galaxy/controllers/tag.py b/lib/galaxy/webapps/galaxy/controllers/tag.py index 04f315202da9..05dda97f1ebe 100644 --- a/lib/galaxy/webapps/galaxy/controllers/tag.py +++ b/lib/galaxy/webapps/galaxy/controllers/tag.py @@ -140,7 +140,7 @@ def _get_tag_autocomplete_values(self, trans, q, limit, timestamp, user=None, it # Do query and get result set. query = ( select(item_tag_assoc_class.table.c.value, func.count()) - .select_from_obj(from_obj) + .select_from(from_obj) .where(where_clause) .group_by(item_tag_assoc_class.table.c.value) .order_by(func.count().desc(), item_tag_assoc_class.table.c.value) From 4a8cf032df7af1c9cacd84c24b3720dec1b7e593 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 16:40:52 -0500 Subject: [PATCH 25/71] Fix bug: if using alias on ORM entity, use __table__ as valid FromClause --- lib/galaxy/model/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 3320a8297b4f..afde68ca3fe9 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -6338,8 +6338,8 @@ def _build_nested_collection_attributes_stmt( dataset_permission_attributes = dataset_permission_attributes or () return_entities = return_entities or () dataset_collection = self - dc = alias(DatasetCollection) - dce = alias(DatasetCollectionElement) + dc = alias(DatasetCollection.__table__) + dce = alias(DatasetCollectionElement.__table__) 
depth_collection_type = dataset_collection.collection_type order_by_columns = [dce.c.element_index] nesting_level = 0 @@ -6360,8 +6360,8 @@ def attribute_columns(column_collection, attributes, nesting_level=None): while ":" in depth_collection_type: nesting_level += 1 - inner_dc = alias(DatasetCollection) - inner_dce = alias(DatasetCollectionElement) + inner_dc = alias(DatasetCollection.__table__) + inner_dce = alias(DatasetCollectionElement.__table__) order_by_columns.append(inner_dce.c.element_index) q = q.join( inner_dc, and_(inner_dc.c.id == dce.c.child_collection_id, dce.c.dataset_collection_id == dc.c.id) From efce1c71d54d8c08981be2c0fb2aeb739194a7c9 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 17:06:47 -0500 Subject: [PATCH 26/71] Fix bug: HDAH model is not serializable (caught by mypy) --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index afde68ca3fe9..31ac81b25969 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -5417,7 +5417,7 @@ def type_id(cls): return (type_coerce(cls.content_type, Unicode) + "-" + type_coerce(cls.id, Unicode)).label("type_id") -class HistoryDatasetAssociationHistory(Base, Serializable): +class HistoryDatasetAssociationHistory(Base): __tablename__ = "history_dataset_association_history" id: Mapped[int] = mapped_column(Integer, primary_key=True) From f6b22691426cf7cd8156365f7756666c0fd4b5c1 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 23 Jan 2024 14:57:13 -0500 Subject: [PATCH 27/71] Fix typing error: migrations.base --- lib/galaxy/model/migrations/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/migrations/base.py b/lib/galaxy/model/migrations/base.py index 973f12224936..e03257d3f522 100644 --- a/lib/galaxy/model/migrations/base.py +++ b/lib/galaxy/model/migrations/base.py @@ -33,7 +33,6 @@ ) from sqlalchemy.engine import ( 
Connection, - CursorResult, Engine, ) @@ -398,10 +397,11 @@ def _load_db_metadata(self, conn: Connection) -> MetaData: metadata.reflect(bind=conn) return metadata - def _load_sqlalchemymigrate_version(self, conn: Connection) -> CursorResult: + def _load_sqlalchemymigrate_version(self, conn: Connection) -> Optional[int]: if self.has_sqlalchemymigrate_version_table(): sql = text(f"select version from {SQLALCHEMYMIGRATE_TABLE}") return conn.execute(sql).scalar() + return None def pop_arg_from_args(args: List[str], arg_name) -> Optional[str]: From 28b25eabe5b969f1b2026dd50dbcce4ee599c12c Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 16:27:39 -0500 Subject: [PATCH 28/71] Fix typing error: managers.secured This fixed 58 mypy errors! --- lib/galaxy/managers/secured.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/managers/secured.py b/lib/galaxy/managers/secured.py index 30958ceb8c92..20d95b8a8217 100644 --- a/lib/galaxy/managers/secured.py +++ b/lib/galaxy/managers/secured.py @@ -40,7 +40,7 @@ def is_accessible(self, item: "Query", user: model.User, **kwargs: Any) -> bool: # override in subclasses raise exceptions.NotImplemented("Abstract interface Method") - def get_accessible(self, id: int, user: model.User, **kwargs: Any) -> "Query": + def get_accessible(self, id: int, user: model.User, **kwargs: Any): """ Return the item with the given id if it's accessible to user, otherwise raise an error. 
From 0f4ad52bf757737e700fe79d04bd58dd5bf94b6b Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 10:07:46 -0500 Subject: [PATCH 29/71] Fix typing error: session type --- lib/galaxy/celery/base_task.py | 7 +++---- lib/galaxy/managers/dbkeys.py | 4 ++-- lib/galaxy/managers/group_roles.py | 4 ++-- lib/galaxy/managers/group_users.py | 4 ++-- lib/galaxy/managers/groups.py | 5 ++--- lib/galaxy/managers/jobs.py | 7 ++----- lib/galaxy/managers/pages.py | 14 ++++++-------- lib/galaxy/managers/roles.py | 8 +++----- lib/galaxy/managers/users.py | 4 ++-- lib/galaxy/webapps/galaxy/services/histories.py | 4 ++-- lib/galaxy/webapps/galaxy/services/quotas.py | 4 ++-- 11 files changed, 28 insertions(+), 37 deletions(-) diff --git a/lib/galaxy/celery/base_task.py b/lib/galaxy/celery/base_task.py index 410d6d4d2bfe..af032fec1821 100644 --- a/lib/galaxy/celery/base_task.py +++ b/lib/galaxy/celery/base_task.py @@ -11,7 +11,6 @@ ) from sqlalchemy.dialects.postgresql import insert as ps_insert from sqlalchemy.exc import IntegrityError -from sqlalchemy.orm import Session from galaxy.model import CeleryUserRateLimit from galaxy.model.base import transaction @@ -70,7 +69,7 @@ def __call__(self, task: Task, task_id, args, kwargs): @abstractmethod def calculate_task_start_time( - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: return now @@ -99,7 +98,7 @@ class GalaxyTaskBeforeStartUserRateLimitPostgres(GalaxyTaskBeforeStartUserRateLi ) def calculate_task_start_time( # type: ignore - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: with transaction(sa_session): result = sa_session.execute( @@ -138,7 +137,7 @@ class 
GalaxyTaskBeforeStartUserRateLimitStandard(GalaxyTaskBeforeStartUserRateLi ) def calculate_task_start_time( - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: last_scheduled_time = None with transaction(sa_session): diff --git a/lib/galaxy/managers/dbkeys.py b/lib/galaxy/managers/dbkeys.py index 8ec01e3d57bd..99168133a69e 100644 --- a/lib/galaxy/managers/dbkeys.py +++ b/lib/galaxy/managers/dbkeys.py @@ -14,9 +14,9 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy.model import HistoryDatasetAssociation +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.util import ( galaxy_directory, sanitize_lists_to_string, @@ -166,6 +166,6 @@ def get_chrom_info(self, dbkey, trans=None, custom_build_hack_get_len_from_fasta return (chrom_info, db_dataset) -def get_len_files_by_history(session: Session, history_id: int): +def get_len_files_by_history(session: galaxy_scoped_session, history_id: int): stmt = select(HistoryDatasetAssociation).filter_by(history_id=history_id, extension="len", deleted=False) return session.scalars(stmt) diff --git a/lib/galaxy/managers/group_roles.py b/lib/galaxy/managers/group_roles.py index 984a5c48f9ab..c942bbe7431a 100644 --- a/lib/galaxy/managers/group_roles.py +++ b/lib/galaxy/managers/group_roles.py @@ -5,13 +5,13 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ObjectNotFound from galaxy.managers.context import ProvidesAppContext from galaxy.model import GroupRoleAssociation from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.structured_app import MinimalManagerApp log = logging.getLogger(__name__) @@ -93,7 +93,7 @@ def _remove_role_from_group(self, trans: ProvidesAppContext, 
group_role: model.G trans.sa_session.commit() -def get_group_role(session: Session, group, role) -> Optional[GroupRoleAssociation]: +def get_group_role(session: galaxy_scoped_session, group, role) -> Optional[GroupRoleAssociation]: stmt = ( select(GroupRoleAssociation).where(GroupRoleAssociation.group == group).where(GroupRoleAssociation.role == role) ) diff --git a/lib/galaxy/managers/group_users.py b/lib/galaxy/managers/group_users.py index e71eb8ecadcf..6bcd088fcc3f 100644 --- a/lib/galaxy/managers/group_users.py +++ b/lib/galaxy/managers/group_users.py @@ -5,7 +5,6 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ObjectNotFound @@ -15,6 +14,7 @@ UserGroupAssociation, ) from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.structured_app import MinimalManagerApp log = logging.getLogger(__name__) @@ -96,7 +96,7 @@ def _remove_user_from_group(self, trans: ProvidesAppContext, group_user: model.U trans.sa_session.commit() -def get_group_user(session: Session, user, group) -> Optional[UserGroupAssociation]: +def get_group_user(session: galaxy_scoped_session, user, group) -> Optional[UserGroupAssociation]: stmt = ( select(UserGroupAssociation).where(UserGroupAssociation.user == user).where(UserGroupAssociation.group == group) ) diff --git a/lib/galaxy/managers/groups.py b/lib/galaxy/managers/groups.py index 33de69670cf5..f600369e3184 100644 --- a/lib/galaxy/managers/groups.py +++ b/lib/galaxy/managers/groups.py @@ -2,7 +2,6 @@ false, select, ) -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ( @@ -152,11 +151,11 @@ def _get_group(self, sa_session: galaxy_scoped_session, group_id: int) -> model. 
return group -def get_group_by_name(session: Session, name: str): +def get_group_by_name(session: galaxy_scoped_session, name: str): stmt = select(Group).filter(Group.name == name).limit(1) return session.scalars(stmt).first() -def get_not_deleted_groups(session: Session): +def get_not_deleted_groups(session: galaxy_scoped_session): stmt = select(Group).where(Group.deleted == false()) return session.scalars(stmt) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 97073e9df46d..7b11d63f7d26 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -20,10 +20,7 @@ or_, true, ) -from sqlalchemy.orm import ( - aliased, - Session, -) +from sqlalchemy.orm import aliased from sqlalchemy.sql import select from galaxy import model @@ -1069,7 +1066,7 @@ def summarize_job_outputs(job: model.Job, tool, params): return outputs -def get_jobs_to_check_at_startup(session: Session, track_jobs_in_database: bool, config): +def get_jobs_to_check_at_startup(session: galaxy_scoped_session, track_jobs_in_database: bool, config): if track_jobs_in_database: in_list = (Job.states.QUEUED, Job.states.RUNNING, Job.states.STOPPED) else: diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index da0d076305b0..e37866a61fb1 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -24,10 +24,7 @@ select, true, ) -from sqlalchemy.orm import ( - aliased, - Session, -) +from sqlalchemy.orm import aliased from galaxy import ( exceptions, @@ -64,6 +61,7 @@ text_column_filter, ) from galaxy.model.item_attrs import UsesAnnotations +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.schema.schema import ( CreatePagePayload, PageContentFormat, @@ -644,12 +642,12 @@ def placeholderRenderForSave(trans: ProvidesHistoryContext, item_class, item_id, ) -def get_page_revision(session: Session, page_id: int): +def get_page_revision(session: galaxy_scoped_session, page_id: int): stmt = 
select(PageRevision).filter_by(page_id=page_id) return session.scalars(stmt) -def get_shared_pages(session: Session, user: User): +def get_shared_pages(session: galaxy_scoped_session, user: User): stmt = ( select(PageUserShareAssociation) .where(PageUserShareAssociation.user == user) @@ -660,12 +658,12 @@ def get_shared_pages(session: Session, user: User): return session.scalars(stmt) -def get_page(session: Session, user: User, slug: str): +def get_page(session: galaxy_scoped_session, user: User, slug: str): stmt = _build_page_query(select(Page), user, slug) return session.scalars(stmt).first() -def page_exists(session: Session, user: User, slug: str) -> bool: +def page_exists(session: galaxy_scoped_session, user: User, slug: str) -> bool: stmt = _build_page_query(select(Page.id), user, slug) return session.scalars(stmt).first() is not None diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py index 89bf69815e2c..a12de13be181 100644 --- a/lib/galaxy/managers/roles.py +++ b/lib/galaxy/managers/roles.py @@ -9,10 +9,7 @@ false, select, ) -from sqlalchemy.orm import ( - exc as sqlalchemy_exceptions, - Session, -) +from sqlalchemy.orm import exc as sqlalchemy_exceptions from galaxy import model from galaxy.exceptions import ( @@ -26,6 +23,7 @@ from galaxy.managers.context import ProvidesUserContext from galaxy.model import Role from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.schema.schema import RoleDefinitionModel from galaxy.util import unicodify @@ -162,6 +160,6 @@ def undelete(self, trans: ProvidesUserContext, role: model.Role) -> model.Role: return role -def get_roles_by_ids(session: Session, role_ids): +def get_roles_by_ids(session: galaxy_scoped_session, role_ids): stmt = select(Role).where(Role.id.in_(role_ids)) return session.scalars(stmt).all() diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 8f4b9c428e82..40c32292f9fa 100644 --- 
a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -24,7 +24,6 @@ select, true, ) -from sqlalchemy.orm import Session from sqlalchemy.orm.exc import NoResultFound from galaxy import ( @@ -46,6 +45,7 @@ UserQuotaUsage, ) from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.security.validate_user_input import ( VALID_EMAIL_RE, validate_email, @@ -873,7 +873,7 @@ def _add_parsers(self): self.fn_filter_parsers.update({}) -def get_users_by_ids(session: Session, user_ids): +def get_users_by_ids(session: galaxy_scoped_session, user_ids): stmt = select(User).where(User.id.in_(user_ids)) return session.scalars(stmt).all() diff --git a/lib/galaxy/webapps/galaxy/services/histories.py b/lib/galaxy/webapps/galaxy/services/histories.py index ab3fe0e69dc0..dd192b860f21 100644 --- a/lib/galaxy/webapps/galaxy/services/histories.py +++ b/lib/galaxy/webapps/galaxy/services/histories.py @@ -20,7 +20,6 @@ select, true, ) -from sqlalchemy.orm import Session from galaxy import ( exceptions as glx_exceptions, @@ -45,6 +44,7 @@ from galaxy.managers.users import UserManager from galaxy.model import HistoryDatasetAssociation from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.model.store import payload_to_source_uri from galaxy.schema import ( FilterQueryParams, @@ -820,7 +820,7 @@ def _get_export_record_data(self, history: model.History) -> Optional[WriteStore return None -def get_fasta_hdas_by_history(session: Session, history_id: int): +def get_fasta_hdas_by_history(session: galaxy_scoped_session, history_id: int): stmt = ( select(HistoryDatasetAssociation) .filter_by(history_id=history_id, extension="fasta", deleted=False) diff --git a/lib/galaxy/webapps/galaxy/services/quotas.py b/lib/galaxy/webapps/galaxy/services/quotas.py index 290ee8056668..37ac0a3ceaf8 100644 --- a/lib/galaxy/webapps/galaxy/services/quotas.py +++ 
b/lib/galaxy/webapps/galaxy/services/quotas.py @@ -6,7 +6,6 @@ select, true, ) -from sqlalchemy.orm import Session from galaxy import util from galaxy.managers.context import ProvidesUserContext @@ -14,6 +13,7 @@ from galaxy.managers.quotas import QuotaManager from galaxy.managers.users import get_user_by_email from galaxy.model import Quota +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.quota._schema import ( CreateQuotaParams, CreateQuotaResult, @@ -161,7 +161,7 @@ def get_group_id(item): payload["in_groups"] = list(map(str, new_in_groups)) -def get_quotas(session: Session, deleted: bool = False): +def get_quotas(session: galaxy_scoped_session, deleted: bool = False): is_deleted = true() if not deleted: is_deleted = false() From 36f3051fc36ce277ec9963c7695da7e46e4ec8d5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 2 Feb 2024 11:05:22 -0500 Subject: [PATCH 30/71] Fix typing error: use Session instead of scoped_session No need to pass around scoped_session as arguments --- lib/galaxy/managers/hdas.py | 2 +- lib/galaxy/model/deferred.py | 3 +-- test/unit/data/test_dataset_materialization.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index c6176e2cac8d..863d6d9986c7 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -178,7 +178,7 @@ def materialize(self, request: MaterializeDatasetInstanceTaskRequest) -> None: True, # attached... 
object_store=self.app.object_store, file_sources=self.app.file_sources, - sa_session=self.app.model.context, + sa_session=self.app.model.session(), ) user = self.user_manager.by_id(request_user.user_id) if request.source == DatasetSourceType.hda: diff --git a/lib/galaxy/model/deferred.py b/lib/galaxy/model/deferred.py index 18f5804375c8..66a4c23f1d4a 100644 --- a/lib/galaxy/model/deferred.py +++ b/lib/galaxy/model/deferred.py @@ -11,7 +11,6 @@ from sqlalchemy.orm import object_session from sqlalchemy.orm.exc import DetachedInstanceError -from sqlalchemy.orm.scoping import scoped_session from galaxy.datatypes.sniff import ( convert_function, @@ -278,7 +277,7 @@ def materializer_factory( transient_path_mapper: Optional[TransientPathMapper] = None, transient_directory: Optional[str] = None, file_sources: Optional[ConfiguredFileSources] = None, - sa_session: Optional[scoped_session] = None, + sa_session: Optional[Session] = None, ) -> DatasetInstanceMaterializer: if object_store_populator is None and object_store is not None: object_store_populator = ObjectStorePopulator(object_store, None) diff --git a/test/unit/data/test_dataset_materialization.py b/test/unit/data/test_dataset_materialization.py index c107f2eb6bc8..9015b107539f 100644 --- a/test/unit/data/test_dataset_materialization.py +++ b/test/unit/data/test_dataset_materialization.py @@ -134,7 +134,7 @@ def test_deferred_hdas_basic_attached_from_detached_hda(): assert deferred_hda.dataset.state == "deferred" materializer = materializer_factory( - True, object_store=fixture_context.app.object_store, sa_session=fixture_context.sa_session + True, object_store=fixture_context.app.object_store, sa_session=fixture_context.sa_session() ) materialized_hda = materializer.ensure_materialized(deferred_hda) materialized_dataset = materialized_hda.dataset From 6dde02fa5adacf2d651fe14b843e501784ff9892 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 14:08:07 -0500 Subject: [PATCH 31/71] Fix typing error: 
sharable --- lib/galaxy/managers/sharable.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py index 0d106a9ae1b7..053c11562fe4 100644 --- a/lib/galaxy/managers/sharable.py +++ b/lib/galaxy/managers/sharable.py @@ -28,7 +28,10 @@ true, ) -from galaxy import exceptions +from galaxy import ( + exceptions, + model, +) from galaxy.managers import ( annotatable, base, @@ -91,7 +94,7 @@ def by_user(self, user: User, **kwargs: Any) -> List[Any]: return self.list(filters=filters, **kwargs) # .... owned/accessible interfaces - def is_owner(self, item: "Query", user: Optional[User], **kwargs: Any) -> bool: + def is_owner(self, item: model.Base, user: Optional[User], **kwargs: Any) -> bool: """ Return true if this sharable belongs to `user` (or `user` is an admin). """ From 5aca5b425f3936802f4ccc4708378a9765ab5545 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 5 Feb 2024 23:23:39 -0500 Subject: [PATCH 32/71] Fix SA2.0 error: sqlalchemy exceptions import; minor mypy fix --- lib/galaxy/managers/base.py | 4 ++-- lib/galaxy/managers/export_tracker.py | 2 +- lib/galaxy/managers/folders.py | 4 ++-- lib/galaxy/managers/forms.py | 9 ++++++--- lib/galaxy/managers/libraries.py | 4 ++-- lib/galaxy/managers/roles.py | 9 ++++++--- lib/galaxy/managers/users.py | 2 +- lib/galaxy/model/store/_bco_convert_utils.py | 1 + lib/galaxy/model/tags.py | 1 + lib/galaxy/tool_shed/util/repository_util.py | 4 +++- lib/galaxy/webapps/base/webapp.py | 2 +- lib/galaxy/webapps/galaxy/controllers/user.py | 2 +- lib/tool_shed/managers/groups.py | 2 +- 13 files changed, 28 insertions(+), 18 deletions(-) diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py index c151852ee054..be487b4da675 100644 --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -318,9 +318,9 @@ def _one_with_recast_errors(self, query: Query) -> U: # overridden to raise serializable errors try: return query.one() 
- except sqlalchemy.orm.exc.NoResultFound: + except sqlalchemy.exc.NoResultFound: raise exceptions.ObjectNotFound(f"{self.model_class.__name__} not found") - except sqlalchemy.orm.exc.MultipleResultsFound: + except sqlalchemy.exc.MultipleResultsFound: raise exceptions.InconsistentDatabase(f"found more than one {self.model_class.__name__}") # NOTE: at this layer, all ids are expected to be decoded and in int form diff --git a/lib/galaxy/managers/export_tracker.py b/lib/galaxy/managers/export_tracker.py index 29ec780e0261..f0d6ae0d33f5 100644 --- a/lib/galaxy/managers/export_tracker.py +++ b/lib/galaxy/managers/export_tracker.py @@ -8,7 +8,7 @@ and_, select, ) -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from sqlalchemy.orm.scoping import scoped_session from galaxy.exceptions import ObjectNotFound diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py index 97f0f3ad4cac..29c465e95566 100644 --- a/lib/galaxy/managers/folders.py +++ b/lib/galaxy/managers/folders.py @@ -20,11 +20,11 @@ or_, select, ) -from sqlalchemy.orm import aliased -from sqlalchemy.orm.exc import ( +from sqlalchemy.exc import ( MultipleResultsFound, NoResultFound, ) +from sqlalchemy.orm import aliased from galaxy import ( model, diff --git a/lib/galaxy/managers/forms.py b/lib/galaxy/managers/forms.py index 989a7310fe65..fdbbd4985e7c 100644 --- a/lib/galaxy/managers/forms.py +++ b/lib/galaxy/managers/forms.py @@ -1,5 +1,8 @@ from sqlalchemy import select -from sqlalchemy.orm import exc as sqlalchemy_exceptions +from sqlalchemy.exc import ( + MultipleResultsFound, + NoResultFound, +) from galaxy.exceptions import ( InconsistentDatabase, @@ -59,9 +62,9 @@ def get(self, trans: ProvidesUserContext, form_id: int) -> FormDefinitionCurrent try: stmt = select(FormDefinitionCurrent).where(FormDefinitionCurrent.id == form_id) form = self.session().execute(stmt).scalar_one() - except sqlalchemy_exceptions.MultipleResultsFound: + except 
MultipleResultsFound: raise InconsistentDatabase("Multiple forms found with the same id.") - except sqlalchemy_exceptions.NoResultFound: + except NoResultFound: raise RequestParameterInvalidException("No accessible form found with the id provided.") except Exception as e: raise InternalServerError(f"Error loading from the database.{unicodify(e)}") diff --git a/lib/galaxy/managers/libraries.py b/lib/galaxy/managers/libraries.py index 45c1c582a074..2e7ff0efa128 100644 --- a/lib/galaxy/managers/libraries.py +++ b/lib/galaxy/managers/libraries.py @@ -19,11 +19,11 @@ select, true, ) -from sqlalchemy.orm import Query -from sqlalchemy.orm.exc import ( +from sqlalchemy.exc import ( MultipleResultsFound, NoResultFound, ) +from sqlalchemy.orm import Query from galaxy import exceptions from galaxy.managers.folders import FolderManager diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py index a12de13be181..8dfdc7e2f11b 100644 --- a/lib/galaxy/managers/roles.py +++ b/lib/galaxy/managers/roles.py @@ -9,7 +9,10 @@ false, select, ) -from sqlalchemy.orm import exc as sqlalchemy_exceptions +from sqlalchemy.exc import ( + MultipleResultsFound, + NoResultFound, +) from galaxy import model from galaxy.exceptions import ( @@ -56,9 +59,9 @@ def get(self, trans: ProvidesUserContext, role_id: int) -> model.Role: try: stmt = select(self.model_class).where(self.model_class.id == role_id) role = self.session().execute(stmt).scalar_one() - except sqlalchemy_exceptions.MultipleResultsFound: + except MultipleResultsFound: raise InconsistentDatabase("Multiple roles found with the same id.") - except sqlalchemy_exceptions.NoResultFound: + except NoResultFound: raise ObjectNotFound("No accessible role found with the id provided.") except Exception as e: raise InternalServerError(f"Error loading from the database.{unicodify(e)}") diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 40c32292f9fa..f92b82b68c10 100644 --- a/lib/galaxy/managers/users.py +++ 
b/lib/galaxy/managers/users.py @@ -24,7 +24,7 @@ select, true, ) -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from galaxy import ( exceptions, diff --git a/lib/galaxy/model/store/_bco_convert_utils.py b/lib/galaxy/model/store/_bco_convert_utils.py index 82bb952b47f5..1763b2851665 100644 --- a/lib/galaxy/model/store/_bco_convert_utils.py +++ b/lib/galaxy/model/store/_bco_convert_utils.py @@ -27,6 +27,7 @@ def register_step(self, step: WorkflowStep) -> None: return tool_version = step.tool_version + assert tool_id self._recorded_tools.add(tool_id) uri_safe_tool_id = urllib.parse.quote(tool_id) if "repos/" in tool_id: diff --git a/lib/galaxy/model/tags.py b/lib/galaxy/model/tags.py index a62051b79eef..f62b3cbb3b67 100644 --- a/lib/galaxy/model/tags.py +++ b/lib/galaxy/model/tags.py @@ -194,6 +194,7 @@ def _ensure_user_owns_item(self, user: Optional["User"], item): def item_has_tag(self, user, item, tag): """Returns true if item is has a given tag.""" # Get tag name. 
+ tag_name = None if isinstance(tag, str): tag_name = tag elif isinstance(tag, galaxy.model.Tag): diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index dc5734a089aa..ce03847a7aa9 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -635,7 +635,9 @@ def get_tool_shed_repository_by_id(app, repository_id) -> ToolShedRepository: def get_tool_shed_status_for(tool_shed_registry: Registry, repository: ToolShedRepository): tool_shed_url = tool_shed_registry.get_tool_shed_url(str(repository.tool_shed)) assert tool_shed_url - params = dict(name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision) + params: Dict[str, Any] = dict( + name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision + ) pathspec = ["repository", "status_for_installed_repository"] try: encoded_tool_shed_status_dict = util.url_get( diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py index 155511afb585..9191d6a65913 100644 --- a/lib/galaxy/webapps/base/webapp.py +++ b/lib/galaxy/webapps/base/webapp.py @@ -25,7 +25,7 @@ select, true, ) -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from galaxy import util from galaxy.exceptions import ( diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index e661f3c40afe..f0a82c7f7519 100644 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -10,7 +10,7 @@ from urllib.parse import unquote from markupsafe import escape -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from galaxy import ( util, diff --git a/lib/tool_shed/managers/groups.py b/lib/tool_shed/managers/groups.py index 280f1e685995..21f2a6ce0e62 100644 --- a/lib/tool_shed/managers/groups.py +++ 
b/lib/tool_shed/managers/groups.py @@ -9,7 +9,7 @@ select, true, ) -from sqlalchemy.orm.exc import ( +from sqlalchemy.exc import ( MultipleResultsFound, NoResultFound, ) From e8a9e6e3fcb07fc4d74766d93d94712558b5b30d Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 23 Jan 2024 15:02:54 -0500 Subject: [PATCH 33/71] Mypy: type-ignore: this is never SessionlessContext --- lib/galaxy/model/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py index 707dc486a734..8890349e9b9c 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -53,8 +53,8 @@ def transaction(session: Union[scoped_session, Session, "SessionlessContext"]): yield return # exit: can't use as a Session - if not session.in_transaction(): - with session.begin(): + if not session.in_transaction(): # type:ignore[union-attr] + with session.begin(): # type:ignore[union-attr] yield else: yield From 25d91eb00ba4df1e1f735f126f80202874dce0d1 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 23 Jan 2024 15:07:04 -0500 Subject: [PATCH 34/71] Mypy: use verbose assignment to help mypy --- lib/galaxy/model/base.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py index 8890349e9b9c..8cfe259a4dc0 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -197,7 +197,9 @@ def ensure_object_added_to_session(object_to_add, *, object_in_session=None, ses if session: session.add(object_to_add) return True - if object_in_session and object_session(object_in_session): - object_session(object_in_session).add(object_to_add) - return True + if object_in_session: + session = object_session(object_in_session) + if session: + session.add(object_to_add) + return True return False From 1a2eb39f8938baa61c5c9544bb0921bcaf60f5f4 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 23 Jan 2024 15:37:42 -0500 Subject: [PATCH 35/71] Mypy: add assert stmt --- 
lib/galaxy/tool_shed/util/repository_util.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index ce03847a7aa9..dae1c02ccd6c 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -64,7 +64,8 @@ def check_for_updates( message += "Unable to retrieve status from the tool shed for the following repositories:\n" message += ", ".join(repository_names_not_updated) else: - repository = install_model_context.get(ToolShedRepository, repository_id) + repository = install_model_context.get(ToolShedRepository, repository_id) # type:ignore[assignment] + assert repository ok, updated = _check_or_update_tool_shed_status_for_installed_repository( tool_shed_registry, install_model_context, repository ) From f960416c018dc5feea2c813aff327fa4f83b19d0 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 15:27:56 -0500 Subject: [PATCH 36/71] Mypy: add assert to ensure session is not None Calling that method when a User obj is not attached to a session should not happen.
--- lib/galaxy/model/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 31ac81b25969..719203ce27b2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1219,6 +1219,7 @@ def count_stored_workflow_user_assocs(self, stored_workflow) -> int: sq = select(StoredWorkflowUserShareAssociation).filter_by(user=self, stored_workflow=stored_workflow).subquery() stmt = select(func.count()).select_from(sq) session = object_session(self) + assert session return session.scalar(stmt) From 4c2b4d67e3362e34453a1028a8522f892bc1e54e Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 15:48:13 -0500 Subject: [PATCH 37/71] Mypy: return 0 if no results --- lib/galaxy/model/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 719203ce27b2..2ed0e2d796e2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -1220,7 +1220,7 @@ def count_stored_workflow_user_assocs(self, stored_workflow) -> int: stmt = select(func.count()).select_from(sq) session = object_session(self) assert session - return session.scalar(stmt) + return session.scalar(stmt) or 0 class PasswordResetToken(Base): From dd090bf39383301a509537ad8e161f484a1377d4 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 24 Jan 2024 17:10:54 -0500 Subject: [PATCH 38/71] Mypy: type-ignore: scoped_session vs. install_model_session We use the distinction for DI.
--- test/unit/app/test_galaxy_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/app/test_galaxy_install.py b/test/unit/app/test_galaxy_install.py index 1695e8faebc2..f7c44908ff58 100644 --- a/test/unit/app/test_galaxy_install.py +++ b/test/unit/app/test_galaxy_install.py @@ -57,7 +57,7 @@ def test_against_production_shed(tmp_path: Path): assert tsr message, status = check_for_updates( install_target.tool_shed_registry, - install_model_context, + install_model_context, # type:ignore[arg-type] tsr.id, ) assert status From fae5ed7d09cc5eea22d2e34821aed8a5d2e9aff9 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 10:18:25 -0500 Subject: [PATCH 39/71] Mypy: refactor to one-liner --- lib/galaxy/webapps/galaxy/services/quotas.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/services/quotas.py b/lib/galaxy/webapps/galaxy/services/quotas.py index 37ac0a3ceaf8..38b6a69fe849 100644 --- a/lib/galaxy/webapps/galaxy/services/quotas.py +++ b/lib/galaxy/webapps/galaxy/services/quotas.py @@ -162,8 +162,6 @@ def get_group_id(item): def get_quotas(session: galaxy_scoped_session, deleted: bool = False): - is_deleted = true() - if not deleted: - is_deleted = false() + is_deleted = true() if deleted else false() stmt = select(Quota).where(Quota.deleted == is_deleted) return session.scalars(stmt) From 2270d1758b9ff9931b237f815d0ba9936ace18a1 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 11:13:33 -0500 Subject: [PATCH 40/71] Mypy: add assert stmts where we know session returns an object --- lib/galaxy/managers/model_stores.py | 1 + lib/galaxy/managers/pages.py | 4 ++++ lib/galaxy/webapps/galaxy/api/tool_entry_points.py | 1 + lib/galaxy/webapps/galaxy/api/users.py | 1 + lib/galaxy/webapps/galaxy/services/tools.py | 1 + lib/galaxy/workflow/run_request.py | 2 ++ 6 files changed, 10 insertions(+) diff --git a/lib/galaxy/managers/model_stores.py 
b/lib/galaxy/managers/model_stores.py index 48a70117604f..395d9dbade4c 100644 --- a/lib/galaxy/managers/model_stores.py +++ b/lib/galaxy/managers/model_stores.py @@ -100,6 +100,7 @@ def setup_history_export_job(self, request: SetupHistoryExportJob): with DirectoryModelExportStore(store_directory, app=self._app, export_files="symlink") as export_store: export_store.export_history(history, include_hidden=include_hidden, include_deleted=include_deleted) job = self._sa_session.get(model.Job, job_id) + assert job job.state = model.Job.states.NEW with transaction(self._sa_session): self._sa_session.commit() diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index e37866a61fb1..c773786c4d2b 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -614,19 +614,23 @@ def placeholderRenderForSave(trans: ProvidesHistoryContext, item_class, item_id, if item_class == "History": history = trans.sa_session.get(History, decoded_item_id) history = base.security_check(trans, history, False, True) + assert history item_name = history.name elif item_class == "HistoryDatasetAssociation": hda = trans.sa_session.get(HistoryDatasetAssociation, decoded_item_id) hda_manager = trans.app.hda_manager hda = hda_manager.get_accessible(decoded_item_id, trans.user) + assert hda item_name = hda.name elif item_class == "StoredWorkflow": wf = trans.sa_session.get(StoredWorkflow, decoded_item_id) wf = base.security_check(trans, wf, False, True) + assert wf item_name = wf.name elif item_class == "Visualization": visualization = trans.sa_session.get(Visualization, decoded_item_id) visualization = base.security_check(trans, visualization, False, True) + assert visualization item_name = visualization.title class_shorthand = PAGE_CLASS_MAPPING[item_class] if encode: diff --git a/lib/galaxy/webapps/galaxy/api/tool_entry_points.py b/lib/galaxy/webapps/galaxy/api/tool_entry_points.py index fcaffd3049de..f173a8b46ecf 100644 --- 
a/lib/galaxy/webapps/galaxy/api/tool_entry_points.py +++ b/lib/galaxy/webapps/galaxy/api/tool_entry_points.py @@ -54,6 +54,7 @@ def index(self, trans: ProvidesUserContext, running=False, job_id=None, **kwd): if job_id is not None: job = trans.sa_session.get(Job, self.decode_id(job_id)) + assert job if not self.interactivetool_manager.can_access_job(trans, job): raise exceptions.ItemAccessibilityException() entry_points = job.interactivetool_entry_points diff --git a/lib/galaxy/webapps/galaxy/api/users.py b/lib/galaxy/webapps/galaxy/api/users.py index 03e1511c732d..bf74edf20f58 100644 --- a/lib/galaxy/webapps/galaxy/api/users.py +++ b/lib/galaxy/webapps/galaxy/api/users.py @@ -515,6 +515,7 @@ def add_custom_builds( else: build_dict["fasta"] = trans.security.decode_id(len_value) dataset = trans.sa_session.get(HistoryDatasetAssociation, int(build_dict["fasta"])) + assert dataset try: new_len = dataset.get_converted_dataset(trans, "len") new_linecount = new_len.get_converted_dataset(trans, "linecount") diff --git a/lib/galaxy/webapps/galaxy/services/tools.py b/lib/galaxy/webapps/galaxy/services/tools.py index 2266c6c58597..3b2e3ff9f6dc 100644 --- a/lib/galaxy/webapps/galaxy/services/tools.py +++ b/lib/galaxy/webapps/galaxy/services/tools.py @@ -280,6 +280,7 @@ def _patch_library_inputs(self, trans: ProvidesHistoryContext, inputs, target_hi def _patch_library_dataset(self, trans: ProvidesHistoryContext, v, target_history): if isinstance(v, dict) and "id" in v and v.get("src") == "ldda": ldda = trans.sa_session.get(LibraryDatasetDatasetAssociation, self.decode_id(v["id"])) + assert ldda if trans.user_is_admin or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset ): diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py index a45e7e441ec0..dd041531bd1c 100644 --- a/lib/galaxy/workflow/run_request.py +++ b/lib/galaxy/workflow/run_request.py @@ -379,6 +379,7 @@ def build_workflow_run_configs( try: if 
input_source == "ldda": ldda = trans.sa_session.get(LibraryDatasetDatasetAssociation, trans.security.decode_id(input_id)) + assert ldda assert trans.user_is_admin or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset ) @@ -387,6 +388,7 @@ def build_workflow_run_configs( ldda = trans.sa_session.get( LibraryDataset, trans.security.decode_id(input_id) ).library_dataset_dataset_association + assert ldda assert trans.user_is_admin or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset ) From ddb3101527d28a1adc98bddd16b121f0393c8f06 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 25 Jan 2024 11:24:37 -0500 Subject: [PATCH 41/71] Mypy: rename wfi_step > wfi_step_sq when it becomes a subquery --- lib/galaxy/managers/jobs.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 7b11d63f7d26..ce0688eec41b 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -137,13 +137,13 @@ def add_workflow_jobs(): ) elif invocation_id is not None: wfi_step = wfi_step.where(WorkflowInvocationStep.workflow_invocation_id == invocation_id) - wfi_step = wfi_step.subquery() + wfi_step_sq = wfi_step.subquery() - stmt1 = stmt.join(wfi_step) + stmt1 = stmt.join(wfi_step_sq) stmt2 = stmt.join(ImplicitCollectionJobsJobAssociation).join( - wfi_step, + wfi_step_sq, ImplicitCollectionJobsJobAssociation.implicit_collection_jobs_id - == wfi_step.c.implicit_collection_jobs_id, + == wfi_step_sq.c.implicit_collection_jobs_id, ) # Ensure the result is models, not tuples sq = stmt1.union(stmt2).subquery() From 4e84e83e8087e287a77c3ce3d133c0a3d12d7462 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 18:18:27 -0500 Subject: [PATCH 42/71] Job search refactor: factor out build_job_subquery --- lib/galaxy/managers/jobs.py | 125 ++++++++++++++++++------------------ 1 file changed, 64 insertions(+), 61 
deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index ce0688eec41b..da01b666cda4 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -355,68 +355,8 @@ def replace_dataset_ids(path, key, value): return key, value return key, value - # build one subquery that selects a job with correct job parameters + stmt_sq = self._build_job_subquery(tool_id, user.id, tool_version, job_state, wildcard_param_dump) - stmt = select(model.Job.id).where( - and_( - model.Job.tool_id == tool_id, - model.Job.user_id == user.id, - model.Job.copied_from_job_id.is_(None), # Always pick original job - ) - ) - if tool_version: - stmt = stmt.where(Job.tool_version == str(tool_version)) - - if job_state is None: - stmt = stmt.where( - Job.state.in_( - [Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, Job.states.RUNNING, Job.states.OK] - ) - ) - else: - if isinstance(job_state, str): - stmt = stmt.where(Job.state == job_state) - elif isinstance(job_state, list): - stmt = stmt.where(or_(*[Job.state == s for s in job_state])) - - # exclude jobs with deleted outputs - stmt = stmt.where( - and_( - model.Job.any_output_dataset_collection_instances_deleted == false(), - model.Job.any_output_dataset_deleted == false(), - ) - ) - - for k, v in wildcard_param_dump.items(): - if v == {"__class__": "RuntimeValue"}: - # TODO: verify this is always None. e.g. 
run with runtime input input - v = None - elif k.endswith("|__identifier__"): - # We've taken care of this while constructing the conditions based on ``input_data`` above - continue - elif k == "chromInfo" and "?.len" in v: - continue - value_dump = json.dumps(v, sort_keys=True) - wildcard_value = value_dump.replace('"id": "__id_wildcard__"', '"id": %') - a = aliased(JobParameter) - if value_dump == wildcard_value: - stmt = stmt.join(a).where( - and_( - Job.id == a.job_id, - a.name == k, - a.value == value_dump, - ) - ) - else: - stmt = stmt.join(a).where( - and_( - Job.id == a.job_id, - a.name == k, - a.value.like(wildcard_value), - ) - ) - - stmt_sq = stmt.subquery() query = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) data_conditions = [] @@ -627,6 +567,69 @@ def replace_dataset_ids(path, key, value): log.info("No equivalent jobs found %s", search_timer) return None + def _build_job_subquery(self, tool_id, user_id, tool_version, job_state, wildcard_param_dump): + """Build subquery that selects a job with correct job parameters.""" + stmt = select(model.Job.id).where( + and_( + model.Job.tool_id == tool_id, + model.Job.user_id == user_id, + model.Job.copied_from_job_id.is_(None), # Always pick original job + ) + ) + if tool_version: + stmt = stmt.where(Job.tool_version == str(tool_version)) + + if job_state is None: + stmt = stmt.where( + Job.state.in_( + [Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, Job.states.RUNNING, Job.states.OK] + ) + ) + else: + if isinstance(job_state, str): + stmt = stmt.where(Job.state == job_state) + elif isinstance(job_state, list): + stmt = stmt.where(or_(*[Job.state == s for s in job_state])) + + # exclude jobs with deleted outputs + stmt = stmt.where( + and_( + model.Job.any_output_dataset_collection_instances_deleted == false(), + model.Job.any_output_dataset_deleted == false(), + ) + ) + + for k, v in wildcard_param_dump.items(): + if v == {"__class__": "RuntimeValue"}: + # TODO: verify 
this is always None. e.g. run with runtime input input + v = None + elif k.endswith("|__identifier__"): + # We've taken care of this while constructing the conditions based on ``input_data`` above + continue + elif k == "chromInfo" and "?.len" in v: + continue + value_dump = json.dumps(v, sort_keys=True) + wildcard_value = value_dump.replace('"id": "__id_wildcard__"', '"id": %') + a = aliased(JobParameter) + if value_dump == wildcard_value: + stmt = stmt.join(a).where( + and_( + Job.id == a.job_id, + a.name == k, + a.value == value_dump, + ) + ) + else: + stmt = stmt.join(a).where( + and_( + Job.id == a.job_id, + a.name == k, + a.value.like(wildcard_value), + ) + ) + + return stmt.subquery() + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = trans.user_is_admin From 295da0c459da39a7038a25ff95e04ee0e9132d6f Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 20:43:53 -0500 Subject: [PATCH 43/71] Job search refactor: build_stmt_for_hda --- lib/galaxy/managers/jobs.py | 118 +++++++++++++++++++----------------- 1 file changed, 61 insertions(+), 57 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index da01b666cda4..4ae4eed3f636 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -379,63 +379,7 @@ def replace_dataset_ids(path, key, value): data_types.append(t) identifier = type_values["identifier"] if t == "hda": - a = aliased(model.JobToInputDatasetAssociation) - b = aliased(model.HistoryDatasetAssociation) - c = aliased(model.HistoryDatasetAssociation) - d = aliased(model.JobParameter) - e = aliased(model.HistoryDatasetAssociationHistory) - query = query.add_columns(a.dataset_id) - used_ids.append(a.dataset_id) - query = query.join(a, a.job_id == model.Job.id) - stmt = select(model.HistoryDatasetAssociation.id).where( - model.HistoryDatasetAssociation.id == e.history_dataset_association_id - ) - # b is the HDA used for the job - query = query.join(b, a.dataset_id == 
b.id).join(c, c.dataset_id == b.dataset_id) - name_condition = [] - if identifier: - query = query.join(d) - data_conditions.append( - and_( - d.name.in_({f"{_}|__identifier__" for _ in k}), - d.value == json.dumps(identifier), - ) - ) - else: - stmt = stmt.where(e.name == c.name) - name_condition.append(b.name == c.name) - stmt = ( - stmt.where( - e.extension == c.extension, - ) - .where( - a.dataset_version == e.version, - ) - .where( - e._metadata == c._metadata, - ) - ) - data_conditions.append( - and_( - a.name.in_(k), - c.id == v, # c is the requested job input HDA - # We need to make sure that the job we are looking for has been run with identical inputs. - # Here we deal with 3 requirements: - # - the jobs' input dataset (=b) version is 0, meaning the job's input dataset is not yet ready - # - b's update_time is older than the job create time, meaning no changes occurred - # - the job has a dataset_version recorded, and that versions' metadata matches c's metadata. - or_( - and_( - or_(a.dataset_version.in_([0, b.version]), b.update_time < model.Job.create_time), - b.extension == c.extension, - b.metadata == c.metadata, - *name_condition, - ), - b.id.in_(stmt), - ), - or_(b.deleted == false(), c.deleted == false()), - ) - ) + query = self._build_stmt_for_hda(query, data_conditions, used_ids, k, v, identifier) elif t == "ldda": a = aliased(model.JobToInputLibraryDatasetAssociation) query = query.add_columns(a.ldda_id) @@ -630,6 +574,66 @@ def _build_job_subquery(self, tool_id, user_id, tool_version, job_state, wildcar return stmt.subquery() + def _build_stmt_for_hda(self, stmt, data_conditions, used_ids, k, v, identifier): + a = aliased(model.JobToInputDatasetAssociation) + b = aliased(model.HistoryDatasetAssociation) + c = aliased(model.HistoryDatasetAssociation) + d = aliased(model.JobParameter) + e = aliased(model.HistoryDatasetAssociationHistory) + stmt = stmt.add_columns(a.dataset_id) + used_ids.append(a.dataset_id) + stmt = stmt.join(a, a.job_id == 
model.Job.id) + hda_stmt = select(model.HistoryDatasetAssociation.id).where( + model.HistoryDatasetAssociation.id == e.history_dataset_association_id + ) + # b is the HDA used for the job + stmt = stmt.join(b, a.dataset_id == b.id).join(c, c.dataset_id == b.dataset_id) + name_condition = [] + if identifier: + stmt = stmt.join(d) + data_conditions.append( + and_( + d.name.in_({f"{_}|__identifier__" for _ in k}), + d.value == json.dumps(identifier), + ) + ) + else: + hda_stmt = hda_stmt.where(e.name == c.name) + name_condition.append(b.name == c.name) + hda_stmt = ( + hda_stmt.where( + e.extension == c.extension, + ) + .where( + a.dataset_version == e.version, + ) + .where( + e._metadata == c._metadata, + ) + ) + data_conditions.append( + and_( + a.name.in_(k), + c.id == v, # c is the requested job input HDA + # We need to make sure that the job we are looking for has been run with identical inputs. + # Here we deal with 3 requirements: + # - the jobs' input dataset (=b) version is 0, meaning the job's input dataset is not yet ready + # - b's update_time is older than the job create time, meaning no changes occurred + # - the job has a dataset_version recorded, and that versions' metadata matches c's metadata. 
+ or_( + and_( + or_(a.dataset_version.in_([0, b.version]), b.update_time < model.Job.create_time), + b.extension == c.extension, + b.metadata == c.metadata, + *name_condition, + ), + b.id.in_(hda_stmt), + ), + or_(b.deleted == false(), c.deleted == false()), + ) + ) + return stmt + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = trans.user_is_admin From 78f26fd9acf344bf44f312b837c5c3449fbfe1c5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 20:50:42 -0500 Subject: [PATCH 44/71] Job search refactor: build_stmt_for_ldda --- lib/galaxy/managers/jobs.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 4ae4eed3f636..298d105b52e9 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -381,11 +381,7 @@ def replace_dataset_ids(path, key, value): if t == "hda": query = self._build_stmt_for_hda(query, data_conditions, used_ids, k, v, identifier) elif t == "ldda": - a = aliased(model.JobToInputLibraryDatasetAssociation) - query = query.add_columns(a.ldda_id) - query = query.join(a, a.job_id == model.Job.id) - data_conditions.append(and_(a.name.in_(k), a.ldda_id == v)) - used_ids.append(a.ldda_id) + query = self._build_stmt_for_ldda(query, data_conditions, used_ids, k, v) elif t == "hdca": a = aliased(model.JobToInputDatasetCollectionAssociation) b = aliased(model.HistoryDatasetCollectionAssociation) @@ -634,6 +630,14 @@ def _build_stmt_for_hda(self, stmt, data_conditions, used_ids, k, v, identifier) ) return stmt + def _build_stmt_for_ldda(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputLibraryDatasetAssociation) + stmt = stmt.add_columns(a.ldda_id) + stmt = stmt.join(a, a.job_id == model.Job.id) + data_conditions.append(and_(a.name.in_(k), a.ldda_id == v)) + used_ids.append(a.ldda_id) + return stmt + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = 
trans.user_is_admin From 03925e2a1a3b58a3b6982e131114b3c1d58414cc Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 21:06:00 -0500 Subject: [PATCH 45/71] Job search refactor: build_stmt_for_hdca --- lib/galaxy/managers/jobs.py | 52 ++++++++++++++++++------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 298d105b52e9..5253b3c3b34e 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -383,32 +383,7 @@ def replace_dataset_ids(path, key, value): elif t == "ldda": query = self._build_stmt_for_ldda(query, data_conditions, used_ids, k, v) elif t == "hdca": - a = aliased(model.JobToInputDatasetCollectionAssociation) - b = aliased(model.HistoryDatasetCollectionAssociation) - c = aliased(model.HistoryDatasetCollectionAssociation) - query = query.add_columns(a.dataset_collection_id) - query = ( - query.join(a, a.job_id == model.Job.id) - .join(b, b.id == a.dataset_collection_id) - .join(c, b.name == c.name) - ) - data_conditions.append( - and_( - a.name.in_(k), - c.id == v, - or_( - and_(b.deleted == false(), b.id == v), - and_( - or_( - c.copied_from_history_dataset_collection_association_id == b.id, - b.copied_from_history_dataset_collection_association_id == c.id, - ), - c.deleted == false(), - ), - ), - ) - ) - used_ids.append(a.dataset_collection_id) + query = self._build_stmt_for_hdca(query, data_conditions, used_ids, k, v) elif t == "dce": a = aliased(model.JobToInputDatasetCollectionElementAssociation) b = aliased(model.DatasetCollectionElement) @@ -638,6 +613,31 @@ def _build_stmt_for_ldda(self, stmt, data_conditions, used_ids, k, v): used_ids.append(a.ldda_id) return stmt + def _build_stmt_for_hdca(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputDatasetCollectionAssociation) + b = aliased(model.HistoryDatasetCollectionAssociation) + c = aliased(model.HistoryDatasetCollectionAssociation) + stmt = 
stmt.add_columns(a.dataset_collection_id) + stmt = stmt.join(a, a.job_id == model.Job.id).join(b, b.id == a.dataset_collection_id).join(c, b.name == c.name) + data_conditions.append( + and_( + a.name.in_(k), + c.id == v, + or_( + and_(b.deleted == false(), b.id == v), + and_( + or_( + c.copied_from_history_dataset_collection_association_id == b.id, + b.copied_from_history_dataset_collection_association_id == c.id, + ), + c.deleted == false(), + ), + ), + ) + ) + used_ids.append(a.dataset_collection_id) + return stmt + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = trans.user_is_admin From ad9023cac7cd2bb3f5a399de730f9eb349c0b900 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 21:09:10 -0500 Subject: [PATCH 46/71] Job search refactor: build_stmt_for_dce --- lib/galaxy/managers/jobs.py | 72 +++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 5253b3c3b34e..3a3304cf6caf 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -385,40 +385,7 @@ def replace_dataset_ids(path, key, value): elif t == "hdca": query = self._build_stmt_for_hdca(query, data_conditions, used_ids, k, v) elif t == "dce": - a = aliased(model.JobToInputDatasetCollectionElementAssociation) - b = aliased(model.DatasetCollectionElement) - c = aliased(model.DatasetCollectionElement) - d = aliased(model.HistoryDatasetAssociation) - e = aliased(model.HistoryDatasetAssociation) - query = query.add_columns(a.dataset_collection_element_id) - query = ( - query.join(a, a.job_id == model.Job.id) - .join(b, b.id == a.dataset_collection_element_id) - .join( - c, - and_( - c.element_identifier == b.element_identifier, - or_(c.hda_id == b.hda_id, c.child_collection_id == b.child_collection_id), - ), - ) - .outerjoin(d, d.id == c.hda_id) - .outerjoin(e, e.dataset_id == d.dataset_id) - ) - data_conditions.append( - and_( - a.name.in_(k), - 
or_( - c.child_collection_id == b.child_collection_id, - and_( - c.hda_id == b.hda_id, - d.id == c.hda_id, - e.dataset_id == d.dataset_id, - ), - ), - c.id == v, - ) - ) - used_ids.append(a.dataset_collection_element_id) + query = self._build_stmt_for_dce(query, data_conditions, used_ids, k, v) else: return [] @@ -638,6 +605,43 @@ def _build_stmt_for_hdca(self, stmt, data_conditions, used_ids, k, v): used_ids.append(a.dataset_collection_id) return stmt + def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputDatasetCollectionElementAssociation) + b = aliased(model.DatasetCollectionElement) + c = aliased(model.DatasetCollectionElement) + d = aliased(model.HistoryDatasetAssociation) + e = aliased(model.HistoryDatasetAssociation) + stmt = stmt.add_columns(a.dataset_collection_element_id) + stmt = ( + stmt.join(a, a.job_id == model.Job.id) + .join(b, b.id == a.dataset_collection_element_id) + .join( + c, + and_( + c.element_identifier == b.element_identifier, + or_(c.hda_id == b.hda_id, c.child_collection_id == b.child_collection_id), + ), + ) + .outerjoin(d, d.id == c.hda_id) + .outerjoin(e, e.dataset_id == d.dataset_id) + ) + data_conditions.append( + and_( + a.name.in_(k), + or_( + c.child_collection_id == b.child_collection_id, + and_( + c.hda_id == b.hda_id, + d.id == c.hda_id, + e.dataset_id == d.dataset_id, + ), + ), + c.id == v, + ) + ) + used_ids.append(a.dataset_collection_element_id) + return stmt + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = trans.user_is_admin From 55dd20f2320b013aa3747dc4df3c203e003b6f0b Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 21:11:04 -0500 Subject: [PATCH 47/71] Job search refactor: rename query >> stmt --- lib/galaxy/managers/jobs.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 3a3304cf6caf..91c770c387f2 100644 --- 
a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -357,11 +357,11 @@ def replace_dataset_ids(path, key, value): stmt_sq = self._build_job_subquery(tool_id, user.id, tool_version, job_state, wildcard_param_dump) - query = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) + stmt = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) data_conditions = [] - # We now build the query filters that relate to the input datasets + # We now build the stmt filters that relate to the input datasets # that this job uses. We keep track of the requested dataset id in `requested_ids`, # the type (hda, hdca or lda) in `data_types` # and the ids that have been used in the job that has already been run in `used_ids`. @@ -379,19 +379,19 @@ def replace_dataset_ids(path, key, value): data_types.append(t) identifier = type_values["identifier"] if t == "hda": - query = self._build_stmt_for_hda(query, data_conditions, used_ids, k, v, identifier) + stmt = self._build_stmt_for_hda(stmt, data_conditions, used_ids, k, v, identifier) elif t == "ldda": - query = self._build_stmt_for_ldda(query, data_conditions, used_ids, k, v) + stmt = self._build_stmt_for_ldda(stmt, data_conditions, used_ids, k, v) elif t == "hdca": - query = self._build_stmt_for_hdca(query, data_conditions, used_ids, k, v) + stmt = self._build_stmt_for_hdca(stmt, data_conditions, used_ids, k, v) elif t == "dce": - query = self._build_stmt_for_dce(query, data_conditions, used_ids, k, v) + stmt = self._build_stmt_for_dce(stmt, data_conditions, used_ids, k, v) else: return [] - query = query.where(*data_conditions).group_by(model.Job.id, *used_ids).order_by(model.Job.id.desc()) + stmt = stmt.where(*data_conditions).group_by(model.Job.id, *used_ids).order_by(model.Job.id.desc()) - for job in self.sa_session.execute(query): + for job in self.sa_session.execute(stmt): # We found a job that is equal in terms of tool_id, user, state and input datasets, # but to be able 
to verify that the parameters match we need to modify all instances of # dataset_ids (HDA, LDDA, HDCA) in the incoming param_dump to point to those used by the From 7d4a46739444ef6b3c90add674895735a21338ff Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 26 Jan 2024 21:13:37 -0500 Subject: [PATCH 48/71] Mypy: add anno for Lists; type-ignore for HDAs Note: type-ignore is due to imperative mapping of HDAs (and LDDAs). This will be removed once we map those models declaratively --- lib/galaxy/managers/jobs.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 91c770c387f2..70957f89e076 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -5,6 +5,7 @@ date, datetime, ) +from typing import List import sqlalchemy from boltons.iterutils import remap @@ -359,7 +360,7 @@ def replace_dataset_ids(path, key, value): stmt = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) - data_conditions = [] + data_conditions: List = [] # We now build the stmt filters that relate to the input datasets # that this job uses. We keep track of the requested dataset id in `requested_ids`, @@ -367,7 +368,7 @@ def replace_dataset_ids(path, key, value): # and the ids that have been used in the job that has already been run in `used_ids`. requested_ids = [] data_types = [] - used_ids = [] + used_ids: List = [] for k, input_list in input_data.items(): # k will be matched against the JobParameter.name column. 
This can be prefixed depending on whethter # the input is in a repeat, or not (section and conditional) @@ -525,7 +526,7 @@ def _build_stmt_for_hda(self, stmt, data_conditions, used_ids, k, v, identifier) model.HistoryDatasetAssociation.id == e.history_dataset_association_id ) # b is the HDA used for the job - stmt = stmt.join(b, a.dataset_id == b.id).join(c, c.dataset_id == b.dataset_id) + stmt = stmt.join(b, a.dataset_id == b.id).join(c, c.dataset_id == b.dataset_id) # type:ignore[attr-defined] name_condition = [] if identifier: stmt = stmt.join(d) @@ -623,7 +624,7 @@ def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v): ), ) .outerjoin(d, d.id == c.hda_id) - .outerjoin(e, e.dataset_id == d.dataset_id) + .outerjoin(e, e.dataset_id == d.dataset_id) # type:ignore[attr-defined] ) data_conditions.append( and_( @@ -633,7 +634,7 @@ def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v): and_( c.hda_id == b.hda_id, d.id == c.hda_id, - e.dataset_id == d.dataset_id, + e.dataset_id == d.dataset_id, # type:ignore[attr-defined] ), ), c.id == v, From c7948516016c6bc342ac02bed0bc7cfc663581d0 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 2 Feb 2024 01:15:45 -0500 Subject: [PATCH 49/71] Mypy: managers.histories --- lib/galaxy/managers/histories.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py index bebd03446786..4e5b20bf5ba8 100644 --- a/lib/galaxy/managers/histories.py +++ b/lib/galaxy/managers/histories.py @@ -213,6 +213,8 @@ def p_tag_filter(term_text: str, quoted: bool): total_matches = get_count(trans.sa_session, stmt) else: total_matches = None + + sort_column: Any if payload.sort_by == "username": sort_column = model.User.username else: @@ -220,6 +222,7 @@ def p_tag_filter(term_text: str, quoted: bool): if payload.sort_desc: sort_column = sort_column.desc() stmt = stmt.order_by(sort_column) + if payload.limit is not None: stmt = 
stmt.limit(payload.limit) if payload.offset is not None: @@ -479,7 +482,7 @@ def is_history_shared_with(self, history: model.History, user: model.User) -> bo .where(HistoryUserShareAssociation.user_id == user.id) .where(HistoryUserShareAssociation.history_id == history.id) ) - return self.session().scalar(stmt) + return bool(self.session().scalar(stmt)) def make_members_public(self, trans, item): """Make the non-purged datasets in history public. @@ -560,6 +563,7 @@ def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary: model.History.purged == false(), ) result = self.history_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) @@ -594,6 +598,7 @@ def get_archived_summary(self, user: model.User) -> CleanableItemsSummary: model.History.purged == false(), ) result = self.history_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) From bc329c58916527bcdfe3bf1d7a5ba0d6c6bc340f Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 2 Feb 2024 10:56:38 -0500 Subject: [PATCH 50/71] Mypy: model.deferred --- lib/galaxy/model/deferred.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/deferred.py b/lib/galaxy/model/deferred.py index 66a4c23f1d4a..d57622f700ba 100644 --- a/lib/galaxy/model/deferred.py +++ b/lib/galaxy/model/deferred.py @@ -9,7 +9,10 @@ Union, ) -from sqlalchemy.orm import object_session +from sqlalchemy.orm import ( + object_session, + Session, +) from sqlalchemy.orm.exc import DetachedInstanceError from galaxy.datatypes.sniff import ( @@ -74,7 +77,7 @@ def __init__( object_store_populator: Optional[ObjectStorePopulator] = None, transient_path_mapper: Optional[TransientPathMapper] = None, file_sources: Optional[ConfiguredFileSources] = 
None, - sa_session: Optional[scoped_session] = None, + sa_session: Optional[Session] = None, ): """Constructor for DatasetInstanceMaterializer. @@ -122,6 +125,7 @@ def ensure_materialized( sa_session = self._sa_session if sa_session is None: sa_session = object_session(dataset_instance) + assert sa_session sa_session.add(materialized_dataset) with transaction(sa_session): sa_session.commit() @@ -152,6 +156,7 @@ def ensure_materialized( sa_session = self._sa_session if sa_session is None: sa_session = object_session(dataset_instance) + assert sa_session sa_session.add(materialized_dataset_instance) materialized_dataset_instance.copy_from( dataset_instance, new_dataset=materialized_dataset, include_tags=attached, include_metadata=True From c84f52b51fc0449a0b27530dc0decd3608ca1769 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 5 Feb 2024 11:52:30 -0500 Subject: [PATCH 51/71] Mypy: arg passed to template can be None --- lib/galaxy/managers/pages.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index c773786c4d2b..5e058a623a13 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -12,6 +12,7 @@ from html.parser import HTMLParser from typing import ( Callable, + Optional, Tuple, ) @@ -610,7 +611,7 @@ def placeholderRenderForEdit(trans: ProvidesHistoryContext, item_class, item_id) def placeholderRenderForSave(trans: ProvidesHistoryContext, item_class, item_id, encode=False): encoded_item_id, decoded_item_id = get_page_identifiers(item_id, trans.app) - item_name = "" + item_name: Optional[str] = "" if item_class == "History": history = trans.sa_session.get(History, decoded_item_id) history = base.security_check(trans, history, False, True) From 51d94931676cacd95facaf9b43e3ac0b7b8fdc6a Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 5 Feb 2024 12:17:33 -0500 Subject: [PATCH 52/71] Mypy: celery tasks type ignore arg: we need to map DatasetInstance classes 
declaratively for that to work correctly. --- lib/galaxy/celery/tasks.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/celery/tasks.py b/lib/galaxy/celery/tasks.py index fc60f6921327..e49008fb9cec 100644 --- a/lib/galaxy/celery/tasks.py +++ b/lib/galaxy/celery/tasks.py @@ -195,7 +195,7 @@ def set_metadata( try: if overwrite: hda_manager.overwrite_metadata(dataset_instance) - dataset_instance.datatype.set_meta(dataset_instance) + dataset_instance.datatype.set_meta(dataset_instance) # type:ignore [arg-type] dataset_instance.set_peek() # Reset SETTING_METADATA state so the dataset instance getter picks the dataset state dataset_instance.set_metadata_success_state() @@ -228,6 +228,7 @@ def setup_fetch_data( ): tool = cached_create_tool_from_representation(app=app, raw_tool_source=raw_tool_source) job = sa_session.get(Job, job_id) + assert job # self.request.hostname is the actual worker name given by the `-n` argument, not the hostname as you might think. job.handler = self.request.hostname job.job_runner_name = "celery" @@ -260,6 +261,7 @@ def finish_job( ): tool = cached_create_tool_from_representation(app=app, raw_tool_source=raw_tool_source) job = sa_session.get(Job, job_id) + assert job # TODO: assert state ? 
mini_job_wrapper = MinimalJobWrapper(job=job, app=app, tool=tool) mini_job_wrapper.finish("", "") @@ -320,6 +322,7 @@ def fetch_data( task_user_id: Optional[int] = None, ) -> str: job = sa_session.get(Job, job_id) + assert job mini_job_wrapper = MinimalJobWrapper(job=job, app=app) mini_job_wrapper.change_state(model.Job.states.RUNNING, flush=True, job=job) return abort_when_job_stops(_fetch_data, session=sa_session, job_id=job_id, setup_return=setup_return) From f19f65b0a14cea013851ca9ceea333d7321e5131 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 00:00:14 -0500 Subject: [PATCH 53/71] Mypy: type-ignore hda attr-defined error Need to map declaratively to remove this --- lib/galaxy/managers/hdas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index 863d6d9986c7..00196dc61597 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -201,7 +201,7 @@ def copy( if not isinstance(item, model.HistoryDatasetAssociation): raise TypeError() hda = item - copy = hda.copy(parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush) + copy = hda.copy(parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush) # type:ignore[attr-defined] if hide_copy: copy.visible = False if history: From 41b737fb906ce0b2a7a597007a3c9f757755b4a6 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 00:25:01 -0500 Subject: [PATCH 54/71] Convert visualization manager index query to SA Core --- lib/galaxy/managers/hdas.py | 4 ++- lib/galaxy/managers/visualizations.py | 49 +++++++++++++++------------ 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index 00196dc61597..04adacb687ea 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -201,7 +201,9 @@ def copy( if not isinstance(item, model.HistoryDatasetAssociation): raise TypeError() 
hda = item - copy = hda.copy(parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush) # type:ignore[attr-defined] + copy = hda.copy( + parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush + ) # type:ignore[attr-defined] if hide_copy: copy.visible = False if history: diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py index 1f03e3900ff1..bd097b99e9cc 100644 --- a/lib/galaxy/managers/visualizations.py +++ b/lib/galaxy/managers/visualizations.py @@ -14,7 +14,9 @@ from sqlalchemy import ( false, + func, or_, + select, true, ) from sqlalchemy.orm import aliased @@ -86,7 +88,7 @@ def index_query( message = "Requires user to log in." raise exceptions.RequestParameterInvalidException(message) - query = trans.sa_session.query(self.model_class) + stmt = select(self.model_class) filters = [] if show_own or (not show_published and not show_shared and not is_admin): @@ -95,20 +97,20 @@ def index_query( filters.append(self.model_class.published == true()) if user and show_shared: filters.append(self.user_share_model.user == user) - query = query.outerjoin(self.model_class.users_shared_with) - query = query.filter(or_(*filters)) + stmt = stmt.outerjoin(self.model_class.users_shared_with) + stmt = stmt.where(or_(*filters)) if payload.user_id: - query = query.filter(self.model_class.user_id == payload.user_id) + stmt = stmt.where(self.model_class.user_id == payload.user_id) if payload.search: search_query = payload.search parsed_search = parse_filters_structured(search_query, INDEX_SEARCH_FILTERS) def p_tag_filter(term_text: str, quoted: bool): - nonlocal query + nonlocal stmt alias = aliased(model.VisualizationTagAssociation) - query = query.outerjoin(self.model_class.tags.of_type(alias)) + stmt = stmt.outerjoin(self.model_class.tags.of_type(alias)) return tag_filter(alias, term_text, quoted) for term in parsed_search.terms: @@ -117,30 +119,30 @@ def p_tag_filter(term_text: str, quoted: 
bool): q = term.text if key == "tag": pg = p_tag_filter(term.text, term.quoted) - query = query.filter(pg) + stmt = stmt.where(pg) elif key == "title": - query = query.filter(text_column_filter(self.model_class.title, term)) + stmt = stmt.where(text_column_filter(self.model_class.title, term)) elif key == "slug": - query = query.filter(text_column_filter(self.model_class.slug, term)) + stmt = stmt.where(text_column_filter(self.model_class.slug, term)) elif key == "user": - query = append_user_filter(query, self.model_class, term) + stmt = append_user_filter(stmt, self.model_class, term) elif key == "is": if q == "deleted": show_deleted = True if q == "published": - query = query.filter(self.model_class.published == true()) + stmt = stmt.where(self.model_class.published == true()) if q == "importable": - query = query.filter(self.model_class.importable == true()) + stmt = stmt.where(self.model_class.importable == true()) elif q == "shared_with_me": if not show_shared: message = "Can only use tag is:shared_with_me if show_shared parameter also true." 
raise exceptions.RequestParameterInvalidException(message) - query = query.filter(self.user_share_model.user == user) + stmt = stmt.where(self.user_share_model.user == user) elif isinstance(term, RawTextTerm): tf = p_tag_filter(term.text, False) alias = aliased(model.User) - query = query.outerjoin(self.model_class.user.of_type(alias)) - query = query.filter( + stmt = stmt.outerjoin(self.model_class.user.of_type(alias)) + stmt = stmt.where( raw_text_column_filter( [ self.model_class.title, @@ -155,21 +157,21 @@ def p_tag_filter(term_text: str, quoted: bool): if (show_published or show_shared) and not is_admin: show_deleted = False - query = query.filter(self.model_class.deleted == (true() if show_deleted else false())).distinct() + stmt = stmt.where(self.model_class.deleted == (true() if show_deleted else false())).distinct() if include_total_count: - total_matches = query.count() + total_matches = get_count(trans.sa_session, stmt) else: total_matches = None sort_column = getattr(model.Visualization, payload.sort_by) if payload.sort_desc: sort_column = sort_column.desc() - query = query.order_by(sort_column) + stmt = stmt.order_by(sort_column) if payload.limit is not None: - query = query.limit(payload.limit) + stmt = stmt.limit(payload.limit) if payload.offset is not None: - query = query.offset(payload.offset) - return query, total_matches + stmt = stmt.offset(payload.offset) + return trans.sa_session.scalars(stmt), total_matches class VisualizationSerializer(sharable.SharableModelSerializer): @@ -210,3 +212,8 @@ def add_deserializers(self): super().add_deserializers() self.deserializers.update({}) self.deserializable_keyset.update(self.deserializers.keys()) + + +def get_count(session, statement): + stmt = select(func.count()).select_from(statement) + return session.scalar(stmt) From 3bb43ec6545e5e76e3e6e90f2383d2a51267e035 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 01:15:00 -0500 Subject: [PATCH 55/71] Mypy: session is not none --- 
lib/galaxy/managers/hdas.py | 2 ++ lib/galaxy/model/__init__.py | 2 +- lib/galaxy/model/database_utils.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index 04adacb687ea..f0acddc138b4 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -217,6 +217,7 @@ def copy( if history: history.add_pending_items() session = object_session(copy) + assert session with transaction(session): session.commit() @@ -253,6 +254,7 @@ def _purge(self, hda, flush=True): user.adjust_total_disk_usage(-quota_amount_reduction, quota_source_info.label) # TODO: don't flush above if we're going to re-flush here session = object_session(user) + assert session with transaction(session): session.commit() diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 2ed0e2d796e2..5f99411be19d 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -6492,7 +6492,7 @@ def element_identifiers_extensions_paths_and_metadata_files( hda_attributes=("extension",), return_entities=(HistoryDatasetAssociation, Dataset), ) - tuples = object_session(self).execute(stmt) + tuples = object_session(self).execute(stmt) # type:ignore[union-attr] # element_identifiers, extension, path for row in tuples: result = [row[:-3], row.extension, row.Dataset.get_file_name()] diff --git a/lib/galaxy/model/database_utils.py b/lib/galaxy/model/database_utils.py index 123bd22a12e6..a3ec08cd46dc 100644 --- a/lib/galaxy/model/database_utils.py +++ b/lib/galaxy/model/database_utils.py @@ -192,6 +192,6 @@ def ensure_object_added_to_session(object_to_add, *, object_in_session=None, ses session.add(object_to_add) return True if object_in_session and object_session(object_in_session): - object_session(object_in_session).add(object_to_add) + object_session(object_in_session).add(object_to_add) # type:ignore[union-attr] return True return False From 9b4080a2ff41ae16018bdc992ed2f5acd6280f32 Mon 
Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 01:40:33 -0500 Subject: [PATCH 56/71] Mypy: type-ignore what requires more refactoring --- lib/galaxy/managers/collections.py | 2 +- lib/galaxy/managers/hdas.py | 7 +++++-- lib/galaxy/managers/history_contents.py | 12 ++++++------ lib/galaxy/model/store/discover.py | 2 +- lib/galaxy/tools/parameters/basic.py | 1 + 5 files changed, 14 insertions(+), 10 deletions(-) diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py index 8db9037b55a5..772009aa6464 100644 --- a/lib/galaxy/managers/collections.py +++ b/lib/galaxy/managers/collections.py @@ -839,7 +839,7 @@ def get_collection_contents(self, trans: ProvidesAppContext, parent_id, limit=No def _get_collection_contents_qry(self, parent_id, limit=None, offset=None): """Build query to find first level of collection contents by containing collection parent_id""" DCE = model.DatasetCollectionElement - qry = Query(DCE).filter(DCE.dataset_collection_id == parent_id) + qry = Query(DCE).filter(DCE.dataset_collection_id == parent_id) # type:ignore[var-annotated] qry = qry.order_by(DCE.element_index) qry = qry.options( joinedload(model.DatasetCollectionElement.child_collection), joinedload(model.DatasetCollectionElement.hda) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index f0acddc138b4..9b6bd50222c8 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -202,8 +202,11 @@ def copy( raise TypeError() hda = item copy = hda.copy( - parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush - ) # type:ignore[attr-defined] + parent_id=kwargs.get("parent_id"), + copy_hid=False, + copy_tags=hda.tags, # type:ignore[attr-defined] + flush=flush, + ) if hide_copy: copy.visible = False if history: diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index 0a4a09f08f00..b8380f3f31f3 100644 --- a/lib/galaxy/managers/history_contents.py +++ 
b/lib/galaxy/managers/history_contents.py @@ -22,8 +22,10 @@ nullsfirst, nullslast, select, + Select, sql, true, + UnaryExpression, ) from sqlalchemy.orm import ( joinedload, @@ -134,7 +136,7 @@ def parse_order_by(self, order_by_string, default=None): attribute_dsc = f"{attribute}-dsc" attribute_asc = f"{attribute}-asc" if order_by_string in (attribute, attribute_dsc): - order_by = desc(attribute) + order_by: UnaryExpression = desc(attribute) if attribute == "size": return nullslast(order_by) return order_by @@ -163,12 +165,10 @@ def state_counts(self, history): base.ModelFilterParser.parsed_filter("orm", sql.column("visible") == true()), ] contents_subquery = self._union_of_contents_query(history, filters=filters).subquery() - statement = ( - sql.select(sql.column("state"), func.count("*")) - .select_from(contents_subquery) - .group_by(sql.column("state")) + statement: Select = ( + select(sql.column("state"), func.count("*")).select_from(contents_subquery).group_by(sql.column("state")) ) - counts = self.app.model.context.execute(statement).fetchall() + counts = self.app.model.session().execute(statement).fetchall() return dict(counts) def active_counts(self, history): diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py index 05b30ec97ab4..8664e09adca0 100644 --- a/lib/galaxy/model/store/discover.py +++ b/lib/galaxy/model/store/discover.py @@ -462,7 +462,7 @@ def override_object_store_id(self, output_name: Optional[str] = None) -> Optiona if not job: return None default_object_store_id = job.object_store_id - object_store_id_overrides = job.object_store_id_overrides or {} + object_store_id_overrides = job.object_store_id_overrides or {} # type:ignore[var-annotated] return object_store_id_overrides.get(output_name, default_object_store_id) # type:ignore[union-attr] @property diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index b5a8c30a8754..57084e3c5b4b 100644 --- 
a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -2044,6 +2044,7 @@ def src_id_to_item( item = sa_session.get(src_to_class[value["src"]], decoded_id) except KeyError: raise ValueError(f"Unknown input source {value['src']} passed to job submission API.") + assert item item.extra_params = {k: v for k, v in value.items() if k not in ("src", "id")} return item From 575279e080607d0ec623ded4a3f5a07fecf5acee Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 02:01:25 -0500 Subject: [PATCH 57/71] Mypy: type-ignore hda, ldda attrs: need declarative mapping Also, minor SA2.0 syntax fix --- lib/galaxy/managers/folders.py | 2 +- lib/galaxy/managers/history_contents.py | 2 +- lib/galaxy/managers/roles.py | 3 ++- lib/galaxy/tools/wrappers.py | 4 +++- lib/galaxy/workflow/run_request.py | 6 +++--- 5 files changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py index 29c465e95566..04762990cf9b 100644 --- a/lib/galaxy/managers/folders.py +++ b/lib/galaxy/managers/folders.py @@ -505,7 +505,7 @@ def _get_contained_datasets_statement( stmt = stmt.where( or_( func.lower(ldda.name).contains(search_text, autoescape=True), - func.lower(ldda.message).contains(search_text, autoescape=True), + func.lower(ldda.message).contains(search_text, autoescape=True), # type:ignore[attr-defined] ) ) sort_column = LDDA_SORT_COLUMN_MAP[payload.order_by](ldda, associated_dataset) diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index b8380f3f31f3..4bf0240ca623 100644 --- a/lib/galaxy/managers/history_contents.py +++ b/lib/galaxy/managers/history_contents.py @@ -418,7 +418,7 @@ def _contained_id_map(self, id_list): .where(component_class.id.in_(id_list)) # type: ignore[attr-defined] .options(undefer(component_class._metadata)) .options(joinedload(component_class.dataset).joinedload(model.Dataset.actions)) - .options(joinedload(component_class.tags)) + 
.options(joinedload(component_class.tags)) # type: ignore[attr-defined] .options(joinedload(component_class.annotations)) # type: ignore[attr-defined] ) result = self._session().scalars(stmt).unique() diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py index 8dfdc7e2f11b..1f8ef428b101 100644 --- a/lib/galaxy/managers/roles.py +++ b/lib/galaxy/managers/roles.py @@ -128,7 +128,8 @@ def purge(self, trans: ProvidesUserContext, role: model.Role) -> model.Role: raise RequestParameterInvalidException(f"Role '{role.name}' has not been deleted, so it cannot be purged.") # Delete UserRoleAssociations for ura in role.users: - user = sa_session.query(trans.app.model.User).get(ura.user_id) + user = sa_session.get(trans.app.model.User, ura.user_id) + assert user # Delete DefaultUserPermissions for associated users for dup in user.default_permissions: if role == dup.role: diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index b8591ea749d5..0059fc29092a 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -387,7 +387,9 @@ def __init__( self.dataset = wrap_with_safe_string(dataset_instance, no_wrap_classes=ToolParameterValueWrapper) self.metadata = self.MetadataWrapper(dataset_instance, compute_environment) if isinstance(dataset_instance, HasTags): - self.groups = {tag.user_value.lower() for tag in dataset_instance.tags if tag.user_tname == "group"} + self.groups = { + tag.user_value.lower() for tag in dataset_instance.tags if tag.user_tname == "group" + } # type-ignore[attr-defined] else: # May be a 'FakeDatasetAssociation' self.groups = set() diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py index dd041531bd1c..516c85d07eb1 100644 --- a/lib/galaxy/workflow/run_request.py +++ b/lib/galaxy/workflow/run_request.py @@ -385,9 +385,9 @@ def build_workflow_run_configs( ) content = ldda.to_history_dataset_association(history, add_to_history=add_to_history) elif input_source == "ld": 
- ldda = trans.sa_session.get( - LibraryDataset, trans.security.decode_id(input_id) - ).library_dataset_dataset_association + library_dataset = trans.sa_session.get(LibraryDataset, trans.security.decode_id(input_id)) + assert library_dataset + ldda = library_dataset.library_dataset_dataset_association assert ldda assert trans.user_is_admin or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), ldda.dataset From a541a10ffecebc4f227d4051b4ea53859f4daa0e Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 7 Feb 2024 16:14:56 -0500 Subject: [PATCH 58/71] Mypy: type-ignores to handle late evaluation of relationship arguments --- lib/galaxy/model/__init__.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5f99411be19d..da587b59def1 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3039,7 +3039,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable lambda: ( and_( HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type] + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] ) ) ), @@ -3063,8 +3063,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable primaryjoin=( lambda: and_( HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type] - HistoryDatasetCollectionAssociation.visible, # type: ignore[has-type] + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] + HistoryDatasetCollectionAssociation.visible, # type: ignore[has-type, arg-type] ) ), order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] @@ -3972,7 +3972,7 @@ class Dataset(Base, 
StorableObject, Serializable): lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] HistoryDatasetAssociation.deleted == false(), # type: ignore[has-type] - HistoryDatasetAssociation.purged == false(), # type: ignore[attr-defined] + HistoryDatasetAssociation.purged == false(), # type: ignore[attr-defined, arg-type] ) ), viewonly=True, @@ -3982,7 +3982,7 @@ class Dataset(Base, StorableObject, Serializable): primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] - HistoryDatasetAssociation.purged == true(), # type: ignore[attr-defined] + HistoryDatasetAssociation.purged == true(), # type: ignore[attr-defined, arg-type] ) ), viewonly=True, @@ -6074,7 +6074,10 @@ class LibraryInfoAssociation(Base, RepresentById): library = relationship( "Library", primaryjoin=( - lambda: and_(LibraryInfoAssociation.library_id == Library.id, not_(LibraryInfoAssociation.deleted)) + lambda: and_( + LibraryInfoAssociation.library_id == Library.id, + not_(LibraryInfoAssociation.deleted), # type:ignore[arg-type] + ) ), ) template = relationship( @@ -6395,7 +6398,7 @@ def attribute_columns(column_collection, attributes, nesting_level=None): for entity in return_entities: q = q.add_columns(entity) if entity == DatasetCollectionElement: - q = q.filter(entity.id == dce.c.id) + q = q.filter(entity.id == dce.c.id) # type:ignore[arg-type] q = q.order_by(*order_by_columns) return q From c58ff7375d1b4c015eee1cf618896c969abaa194 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 8 Feb 2024 15:17:19 -0500 Subject: [PATCH 59/71] Mypy: type-ignore column property assignments (type is correct) --- lib/galaxy/model/__init__.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index da587b59def1..e6754f14ec83 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -11335,7 +11335,7 @@ def 
__repr__(self): # ---------------------------------------------------------------------------------------- # The following statements must not precede the mapped models defined above. -Job.any_output_dataset_collection_instances_deleted = column_property( +Job.any_output_dataset_collection_instances_deleted = column_property( # type:ignore[assignment] exists(HistoryDatasetCollectionAssociation.id).where( and_( Job.id == JobToOutputDatasetCollectionAssociation.job_id, @@ -11345,7 +11345,7 @@ def __repr__(self): ) ) -Job.any_output_dataset_deleted = column_property( +Job.any_output_dataset_deleted = column_property( # type:ignore[assignment] exists(HistoryDatasetAssociation.id).where( and_( Job.id == JobToOutputDatasetAssociation.job_id, @@ -11355,44 +11355,44 @@ def __repr__(self): ) ) -History.average_rating = column_property( +History.average_rating = column_property( # type:ignore[assignment] select(func.avg(HistoryRatingAssociation.rating)) .where(HistoryRatingAssociation.history_id == History.id) .scalar_subquery(), deferred=True, ) -History.users_shared_with_count = column_property( +History.users_shared_with_count = column_property( # type:ignore[assignment] select(func.count(HistoryUserShareAssociation.id)) .where(History.id == HistoryUserShareAssociation.history_id) .scalar_subquery(), deferred=True, ) -Page.average_rating = column_property( +Page.average_rating = column_property( # type:ignore[assignment] select(func.avg(PageRatingAssociation.rating)).where(PageRatingAssociation.page_id == Page.id).scalar_subquery(), deferred=True, ) -StoredWorkflow.average_rating = column_property( +StoredWorkflow.average_rating = column_property( # type:ignore[assignment] select(func.avg(StoredWorkflowRatingAssociation.rating)) .where(StoredWorkflowRatingAssociation.stored_workflow_id == StoredWorkflow.id) .scalar_subquery(), deferred=True, ) -Visualization.average_rating = column_property( +Visualization.average_rating = column_property( # type:ignore[assignment] 
select(func.avg(VisualizationRatingAssociation.rating)) .where(VisualizationRatingAssociation.visualization_id == Visualization.id) .scalar_subquery(), deferred=True, ) -Workflow.step_count = column_property( +Workflow.step_count = column_property( # type:ignore[assignment] select(func.count(WorkflowStep.id)).where(Workflow.id == WorkflowStep.workflow_id).scalar_subquery(), deferred=True ) -WorkflowInvocationStep.subworkflow_invocation_id = column_property( +WorkflowInvocationStep.subworkflow_invocation_id = column_property( # type:ignore[assignment] select(WorkflowInvocationToSubworkflowInvocationAssociation.subworkflow_invocation_id) .where( and_( From c1ec3ae1afc4c617d4cd8a3fcfed4f017b9b26a6 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 6 Feb 2024 14:04:15 -0500 Subject: [PATCH 60/71] Mypy: typing errors, misc. fixes --- lib/galaxy/app_unittest_utils/galaxy_mock.py | 2 +- lib/galaxy/celery/base_task.py | 3 +-- lib/galaxy/jobs/__init__.py | 4 ++-- lib/galaxy/managers/datasets.py | 4 ++-- lib/galaxy/managers/export_tracker.py | 4 ++-- lib/galaxy/managers/folders.py | 3 ++- lib/galaxy/managers/genomes.py | 2 +- lib/galaxy/managers/hdas.py | 5 +++-- lib/galaxy/managers/histories.py | 2 +- lib/galaxy/managers/history_contents.py | 2 +- lib/galaxy/managers/interactivetool.py | 4 ++-- lib/galaxy/managers/job_connections.py | 2 +- lib/galaxy/managers/jobs.py | 4 ++-- lib/galaxy/managers/model_stores.py | 1 + lib/galaxy/managers/notification.py | 18 +++++++++--------- lib/galaxy/managers/pages.py | 2 +- lib/galaxy/managers/users.py | 2 +- lib/galaxy/managers/visualizations.py | 2 +- lib/galaxy/managers/workflows.py | 13 ++++++------- lib/galaxy/model/__init__.py | 13 ++++++++----- lib/galaxy/model/database_utils.py | 4 ++-- lib/galaxy/model/store/__init__.py | 12 ++++++------ lib/galaxy/model/unittest_utils/data_app.py | 2 +- .../migration_scripts_testing_utils.py | 1 + .../unittest_utils/model_testing_utils.py | 2 +- .../installed_repository_manager.py | 8 
+++++--- .../installed_repository_metadata_manager.py | 2 +- .../update_repository_manager.py | 2 +- .../tool_shed/metadata/metadata_generator.py | 2 +- lib/galaxy/tools/wrappers.py | 6 ++++-- lib/galaxy/visualization/genomes.py | 2 +- .../galaxy/services/tool_shed_repositories.py | 3 ++- lib/galaxy/workflow/extract.py | 4 ++-- lib/galaxy/workflow/modules.py | 7 ++++--- lib/galaxy/workflow/run.py | 2 ++ lib/tool_shed/managers/repositories.py | 4 +++- .../metadata/repository_metadata_manager.py | 2 +- lib/tool_shed/test/base/twilltestcase.py | 7 +++++-- lib/tool_shed/util/shed_util_common.py | 2 +- test/unit/app/tools/test_metadata.py | 2 +- .../data/model/test_mapping_testing_utils.py | 2 +- test/unit/data/model/test_model_store.py | 4 ++-- test/unit/data/test_mutable_json_column.py | 2 +- 43 files changed, 97 insertions(+), 79 deletions(-) diff --git a/lib/galaxy/app_unittest_utils/galaxy_mock.py b/lib/galaxy/app_unittest_utils/galaxy_mock.py index c33e3b433326..d50842158020 100644 --- a/lib/galaxy/app_unittest_utils/galaxy_mock.py +++ b/lib/galaxy/app_unittest_utils/galaxy_mock.py @@ -125,7 +125,7 @@ def __init__(self, config=None, **kwargs) -> None: self[ShortTermStorageMonitor] = sts_manager # type: ignore[type-abstract] self[galaxy_scoped_session] = self.model.context self.visualizations_registry = MockVisualizationsRegistry() - self.tag_handler = tags.GalaxyTagHandler(self.model.context) + self.tag_handler = tags.GalaxyTagHandler(self.model.session) self[tags.GalaxyTagHandler] = self.tag_handler self.quota_agent = quota.DatabaseQuotaAgent(self.model) self.job_config = Bunch( diff --git a/lib/galaxy/celery/base_task.py b/lib/galaxy/celery/base_task.py index af032fec1821..724472765393 100644 --- a/lib/galaxy/celery/base_task.py +++ b/lib/galaxy/celery/base_task.py @@ -86,14 +86,13 @@ class GalaxyTaskBeforeStartUserRateLimitPostgres(GalaxyTaskBeforeStartUserRateLi .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) ")) 
.returning(CeleryUserRateLimit.last_scheduled_time) ) - _insert_stmt = ( ps_insert(CeleryUserRateLimit) .values(user_id=bindparam("userid"), last_scheduled_time=bindparam("now")) .returning(CeleryUserRateLimit.last_scheduled_time) ) - _upsert_stmt = _insert_stmt.on_conflict_do_update( + _upsert_stmt = _insert_stmt.on_conflict_do_update( # type:ignore[attr-defined] index_elements=["user_id"], set_=dict(last_scheduled_time=bindparam("sched_time")) ) diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index ae9a2dddca06..570d1d72b99b 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -1181,7 +1181,7 @@ def galaxy_url(self): return self.get_destination_configuration("galaxy_infrastructure_url") def get_job(self) -> model.Job: - return self.sa_session.get(Job, self.job_id) + return self.sa_session.get(Job, self.job_id) # type:ignore[return-value] def get_id_tag(self): # For compatibility with drmaa, which uses job_id right now, and TaskWrapper @@ -1551,7 +1551,7 @@ def change_state(self, state, info=False, flush=True, job=None): def get_state(self) -> str: job = self.get_job() self.sa_session.refresh(job) - return job.state + return job.state # type:ignore[return-value] def set_runner(self, runner_url, external_id): log.warning("set_runner() is deprecated, use set_job_destination()") diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py index 612a69989deb..fbe82e56e996 100644 --- a/lib/galaxy/managers/datasets.py +++ b/lib/galaxy/managers/datasets.py @@ -112,8 +112,8 @@ def purge_datasets(self, request: PurgeDatasetsTaskRequest): self.error_unless_dataset_purge_allowed() with self.session().begin(): for dataset_id in request.dataset_ids: - dataset: Dataset = self.session().get(Dataset, dataset_id) - if dataset.user_can_purge: + dataset: Optional[Dataset] = self.session().get(Dataset, dataset_id) + if dataset and dataset.user_can_purge: try: dataset.full_delete() except Exception: diff --git 
a/lib/galaxy/managers/export_tracker.py b/lib/galaxy/managers/export_tracker.py index f0d6ae0d33f5..c2f9719f2441 100644 --- a/lib/galaxy/managers/export_tracker.py +++ b/lib/galaxy/managers/export_tracker.py @@ -44,7 +44,7 @@ def set_export_association_metadata(self, export_association_id: int, export_met export_association: StoreExportAssociation = self.session.execute(stmt).scalars().one() except NoResultFound: raise ObjectNotFound("Cannot set export metadata. Reason: Export association not found") - export_association.export_metadata = export_metadata.json() + export_association.export_metadata = export_metadata.json() # type:ignore[assignment] with transaction(self.session): self.session.commit() @@ -72,4 +72,4 @@ def get_object_exports( stmt = stmt.offset(offset) if limit: stmt = stmt.limit(limit) - return self.session.execute(stmt).scalars() + return self.session.execute(stmt).scalars() # type:ignore[return-value] diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py index 04762990cf9b..fcbbed0cb9c5 100644 --- a/lib/galaxy/managers/folders.py +++ b/lib/galaxy/managers/folders.py @@ -536,7 +536,7 @@ def _filter_by_include_deleted( def build_folder_path( self, sa_session: galaxy_scoped_session, folder: model.LibraryFolder - ) -> List[Tuple[str, str]]: + ) -> List[Tuple[int, Optional[str]]]: """ Returns the folder path from root to the given folder. 
@@ -546,6 +546,7 @@ def build_folder_path( path_to_root = [(current_folder.id, current_folder.name)] while current_folder.parent_id is not None: parent_folder = sa_session.get(LibraryFolder, current_folder.parent_id) + assert parent_folder current_folder = parent_folder path_to_root.insert(0, (current_folder.id, current_folder.name)) return path_to_root diff --git a/lib/galaxy/managers/genomes.py b/lib/galaxy/managers/genomes.py index 82cd1131d9f4..ce742e7c6d96 100644 --- a/lib/galaxy/managers/genomes.py +++ b/lib/galaxy/managers/genomes.py @@ -95,7 +95,7 @@ def _create_genome_filter(model_class=None): if self.database_connection.startswith("postgres"): column = text("convert_from(metadata, 'UTF8')::json ->> 'dbkey'") else: - column = func.json_extract(model_class.table.c._metadata, "$.dbkey") + column = func.json_extract(model_class.table.c._metadata, "$.dbkey") # type:ignore[assignment] lower_val = val.lower() # Ignore case # dbkey can either be "hg38" or '["hg38"]', so we need to check both if op == "eq": diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index 9b6bd50222c8..956f73425bcc 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -364,12 +364,13 @@ def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary: .where( and_( model.HistoryDatasetAssociation.deleted == true(), - model.HistoryDatasetAssociation.purged == false(), + model.HistoryDatasetAssociation.purged == false(), # type:ignore[arg-type] model.History.user_id == user.id, ) ) ) result = self.hda_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) @@ -393,7 +394,7 @@ def get_discarded( .where( and_( model.HistoryDatasetAssociation.deleted == true(), - model.HistoryDatasetAssociation.purged == false(), + model.HistoryDatasetAssociation.purged == false(), # type:ignore[arg-type] model.History.user_id == 
user.id, ) ) diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py index 4e5b20bf5ba8..1f3df4fccf87 100644 --- a/lib/galaxy/managers/histories.py +++ b/lib/galaxy/managers/histories.py @@ -227,7 +227,7 @@ def p_tag_filter(term_text: str, quoted: bool): stmt = stmt.limit(payload.limit) if payload.offset is not None: stmt = stmt.offset(payload.offset) - return trans.sa_session.scalars(stmt), total_matches + return trans.sa_session.scalars(stmt), total_matches # type:ignore[return-value] def copy(self, history, user, **kwargs): """ diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index 4bf0240ca623..3dbcecfcdbda 100644 --- a/lib/galaxy/managers/history_contents.py +++ b/lib/galaxy/managers/history_contents.py @@ -166,7 +166,7 @@ def state_counts(self, history): ] contents_subquery = self._union_of_contents_query(history, filters=filters).subquery() statement: Select = ( - select(sql.column("state"), func.count("*")).select_from(contents_subquery).group_by(sql.column("state")) + select(sql.column("state"), func.count()).select_from(contents_subquery).group_by(sql.column("state")) ) counts = self.app.model.session().execute(statement).fetchall() return dict(counts) diff --git a/lib/galaxy/managers/interactivetool.py b/lib/galaxy/managers/interactivetool.py index ac05892bef7c..4892091eee99 100644 --- a/lib/galaxy/managers/interactivetool.py +++ b/lib/galaxy/managers/interactivetool.py @@ -243,8 +243,8 @@ def build_subquery(): filters = [] for state in Job.non_ready_states: filters.append(Job.state == state) - stmt = stmt.where(or_(*filters)).subquery() - return stmt + stmt = stmt.where(or_(*filters)) + return stmt.subquery() stmt = select(InteractiveToolEntryPoint).where(InteractiveToolEntryPoint.job_id.in_(build_subquery())) return trans.sa_session.scalars(stmt) diff --git a/lib/galaxy/managers/job_connections.py b/lib/galaxy/managers/job_connections.py index c9000869cce3..c3317a0e8a52 100644 --- 
a/lib/galaxy/managers/job_connections.py +++ b/lib/galaxy/managers/job_connections.py @@ -54,7 +54,7 @@ def get_related_hids(self, history_id, hid: int): for val in graph["outputs"] + graph["inputs"]: item_class = get_class(val["src"]) item_hid = self.sa_session.execute(select(item_class.hid).where(item_class.id == val["id"])).scalar() - result.append(item_hid) + result.append(item_hid) # type:ignore[arg-type] return result def _get_union_results(self, *selects): diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 70957f89e076..f34d8a2dda2e 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -188,7 +188,7 @@ def add_search_criteria(stmt): elif key == "runner": stmt = stmt.where(text_column_filter(Job.job_runner_name, term)) elif isinstance(term, RawTextTerm): - columns = [Job.tool_id] + columns: List = [Job.tool_id] if user_details: columns.append(User.email) if is_admin: @@ -847,7 +847,7 @@ def summarize_jobs_to_dict(sa_session, jobs_source): model.ImplicitCollectionJobsJobAssociation.table.join(model.Job) ) statement = ( - select(model.Job.state, func.count("*")) + select(model.Job.state, func.count()) .select_from(join) .where(model.ImplicitCollectionJobs.id == jobs_source.id) .group_by(model.Job.state) diff --git a/lib/galaxy/managers/model_stores.py b/lib/galaxy/managers/model_stores.py index 395d9dbade4c..1e4a82c89363 100644 --- a/lib/galaxy/managers/model_stores.py +++ b/lib/galaxy/managers/model_stores.py @@ -96,6 +96,7 @@ def setup_history_export_job(self, request: SetupHistoryExportJob): store_directory = request.store_directory history = self._sa_session.get(model.History, history_id) + assert history # symlink files on export, on worker files will tarred up in a dereferenced manner. 
with DirectoryModelExportStore(store_directory, app=self._app, export_files="symlink") as export_store: export_store.export_history(history, include_hidden=include_hidden, include_deleted=include_deleted) diff --git a/lib/galaxy/managers/notification.py b/lib/galaxy/managers/notification.py index 4f11742dec69..47b62142a43b 100644 --- a/lib/galaxy/managers/notification.py +++ b/lib/galaxy/managers/notification.py @@ -18,6 +18,7 @@ union, update, ) +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.sql import Select from typing_extensions import Protocol @@ -64,7 +65,7 @@ def __init__(self, sa_session: galaxy_scoped_session, config: GalaxyAppConfigura self.sa_session = sa_session self.config = config self.recipient_resolver = NotificationRecipientResolver(strategy=DefaultStrategy(sa_session)) - self.user_notification_columns = [ + self.user_notification_columns: List[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -77,7 +78,7 @@ def __init__(self, sa_session: galaxy_scoped_session, config: GalaxyAppConfigura UserNotificationAssociation.seen_time, UserNotificationAssociation.deleted, ] - self.broadcast_notification_columns = [ + self.broadcast_notification_columns: List[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -126,7 +127,7 @@ def send_notification_to_recipients(self, request: NotificationCreateRequest) -> def _send_to_users(self, notification: Notification, users: List[User]): # TODO: Move this potentially expensive operation to a task? 
for user in users: - if self._user_is_subscribed_to_notification(user, notification.category): + if self._user_is_subscribed_to_notification(user, notification.category): # type:ignore[arg-type] user_notification_association = UserNotificationAssociation(user, notification) self.sa_session.add(user_notification_association) @@ -199,8 +200,7 @@ def get_user_total_unread_notification_count(self, user: User) -> int: ) ) ) - result = self.sa_session.execute(stmt).scalar() - return result + return self.sa_session.execute(stmt).scalar() or 0 def get_broadcasted_notification(self, notification_id: int, active_only: Optional[bool] = True): stmt = ( @@ -275,8 +275,8 @@ def update_broadcasted_notification(self, notification_id: int, request: Notific def get_user_notification_preferences(self, user: User) -> UserNotificationPreferences: """Gets the user's current notification preferences or the default ones if no preferences are set.""" current_notification_preferences = ( - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] - if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] # type:ignore[index] + if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences # type:ignore[operator] else None ) try: @@ -291,7 +291,7 @@ def update_user_notification_preferences( """Updates the user's notification preferences with the requested changes.""" notification_preferences = self.get_user_notification_preferences(user) notification_preferences.update(request.preferences) - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = notification_preferences.json() + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = notification_preferences.json() # type:ignore[index] with transaction(self.sa_session): self.sa_session.commit() return notification_preferences @@ -413,7 +413,7 @@ def resolve_users(self, recipients: NotificationRecipients) -> List[User]: unique_user_ids.update(user_ids_from_groups_and_roles) stmt = 
select(User).where(User.id.in_(unique_user_ids)) - return self.sa_session.scalars(stmt).all() + return self.sa_session.scalars(stmt).all() # type:ignore[return-value] def _get_all_user_ids_from_roles_query(self, role_ids: Set[int]) -> Select: stmt = ( diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index 5e058a623a13..581b5b93562b 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -239,7 +239,7 @@ def p_tag_filter(term_text: str, quoted: bool): stmt = stmt.limit(payload.limit) if payload.offset is not None: stmt = stmt.offset(payload.offset) - return trans.sa_session.scalars(stmt), total_matches + return trans.sa_session.scalars(stmt), total_matches # type:ignore[return-value] def create_page(self, trans, payload: CreatePagePayload): user = trans.get_user() diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index f92b82b68c10..edf557e35a90 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -176,7 +176,7 @@ def _get_all_active_jobs_from_user(self, user: User) -> List[Job]: """Get all jobs that are not ready yet and belong to the given user.""" stmt = select(Job).where(and_(Job.user_id == user.id, Job.state.in_(Job.non_ready_states))) jobs = self.session().scalars(stmt) - return jobs + return jobs # type:ignore[return-value] def undelete(self, user, flush=True): """Remove the deleted flag for the given user.""" diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py index bd097b99e9cc..80fbf13348c8 100644 --- a/lib/galaxy/managers/visualizations.py +++ b/lib/galaxy/managers/visualizations.py @@ -171,7 +171,7 @@ def p_tag_filter(term_text: str, quoted: bool): stmt = stmt.limit(payload.limit) if payload.offset is not None: stmt = stmt.offset(payload.offset) - return trans.sa_session.scalars(stmt), total_matches + return trans.sa_session.scalars(stmt), total_matches # type:ignore[return-value] class 
VisualizationSerializer(sharable.SharableModelSerializer): diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py index 054f5f1e78a9..b5085d02fb27 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -39,7 +39,6 @@ from sqlalchemy.orm import ( aliased, joinedload, - Query, subqueryload, ) from typing_extensions import Annotated @@ -192,7 +191,7 @@ def index_query( latest_workflow_load = joinedload(StoredWorkflow.latest_workflow) if not payload.skip_step_counts: - latest_workflow_load = latest_workflow_load.undefer(Workflow.step_count) + latest_workflow_load = latest_workflow_load.undefer(Workflow.step_count) # type:ignore[arg-type] latest_workflow_load = latest_workflow_load.lazyload(Workflow.steps) stmt = stmt.options(joinedload(StoredWorkflow.annotations)) @@ -269,7 +268,7 @@ def name_filter(term): if payload.offset is not None: stmt = stmt.offset(payload.offset) result = trans.sa_session.scalars(stmt).unique() - return result, total_matches + return result, total_matches # type:ignore[return-value] def get_stored_workflow(self, trans, workflow_id, by_stored_id=True) -> StoredWorkflow: """Use a supplied ID (UUID or encoded stored workflow ID) to find @@ -488,7 +487,7 @@ def build_invocations_query( sort_by=None, sort_desc=None, include_nested_invocations=True, - ) -> Tuple[Query, int]: + ) -> Tuple[List, int]: """Get invocations owned by the current user.""" stmt = select(WorkflowInvocation) @@ -536,7 +535,7 @@ def build_invocations_query( for inv in trans.sa_session.scalars(stmt) if self.check_security(trans, inv, check_ownership=True, check_accessible=False) ] - return invocations, total_matches + return invocations, total_matches # type:ignore[return-value] MissingToolsT = List[Tuple[str, str, Optional[str], str]] @@ -792,7 +791,7 @@ def _workflow_from_raw_description( elif not workflow_state_resolution_options.archive_source.startswith("file://"): # URL import source_metadata["url"] = 
workflow_state_resolution_options.archive_source workflow_state_resolution_options.archive_source = None # so trs_id is not set for subworkflows - workflow.source_metadata = source_metadata + workflow.source_metadata = source_metadata # type:ignore[assignment] # Assume no errors until we find a step that has some workflow.has_errors = False @@ -1785,7 +1784,7 @@ def __module_from_dict( temp_input_connections: Dict[str, Union[List[DictConnection], DictConnection]] = step_dict.get( "input_connections", {} ) - step.temp_input_connections = temp_input_connections + step.temp_input_connections = temp_input_connections # type: ignore[assignment] # Create the model class for the step steps.append(step) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index e6754f14ec83..29463c29f64b 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -28,6 +28,7 @@ from string import Template from typing import ( Any, + cast, Dict, Iterable, List, @@ -4199,7 +4200,7 @@ def get_size(self, nice_size: bool = False, calculate_size: bool = True) -> Unio if nice_size: return galaxy.util.nice_size(self.file_size) else: - return self.file_size + return cast(int, self.file_size) elif calculate_size: # Hopefully we only reach this branch in sessionless mode if nice_size: @@ -4207,7 +4208,7 @@ def get_size(self, nice_size: bool = False, calculate_size: bool = True) -> Unio else: return self._calculate_size() else: - return self.file_size or 0 + return cast(int, self.file_size) or 0 def set_size(self, no_extra_files=False): """Sets the size of the data on disk. 
@@ -4855,7 +4856,9 @@ def find_conversion_destination( self, accepted_formats: List[str], **kwd ) -> Tuple[bool, Optional[str], Optional["DatasetInstance"]]: """Returns ( target_ext, existing converted dataset )""" - return self.datatype.find_conversion_destination(self, accepted_formats, _get_datatypes_registry(), **kwd) + return self.datatype.find_conversion_destination( + self, accepted_formats, _get_datatypes_registry(), **kwd # type:ignore[arg-type] + ) def add_validation_error(self, validation_error): self.validation_errors.append(validation_error) @@ -6897,7 +6900,7 @@ def build_statement(): col = func.sum(case((column(state_label) == state, 1), else_=0)).label(state) stm = stm.add_columns(col) # Add aggregate column for all jobs - col = func.count("*").label("all_jobs") + col = func.count().label("all_jobs") stm = stm.add_columns(col) return stm @@ -11407,7 +11410,7 @@ def __repr__(self): # Set up proxy so that this syntax is possible: # .preferences[pref_name] = pref_value -User.preferences = association_proxy("_preferences", "value", creator=UserPreference) +User.preferences = association_proxy("_preferences", "value", creator=UserPreference) # type:ignore[assignment] # Optimized version of getting the current Galaxy session. 
# See https://github.com/sqlalchemy/sqlalchemy/discussions/7638 for approach diff --git a/lib/galaxy/model/database_utils.py b/lib/galaxy/model/database_utils.py index a3ec08cd46dc..403be8d1242f 100644 --- a/lib/galaxy/model/database_utils.py +++ b/lib/galaxy/model/database_utils.py @@ -15,7 +15,7 @@ from sqlalchemy.orm import object_session from sqlalchemy.sql.compiler import IdentifierPreparer from sqlalchemy.sql.expression import ( - ClauseElement, + Executable, text, ) @@ -163,7 +163,7 @@ def supports_skip_locked(engine: Engine) -> bool: return _statement_executed_without_error(stmt, engine) -def _statement_executed_without_error(statement: ClauseElement, engine: Engine) -> bool: +def _statement_executed_without_error(statement: Executable, engine: Engine) -> bool: # Execute statement against database, then issue a rollback. try: with engine.connect() as conn, conn.begin() as trans: diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 38564174ab34..32ac1215b6cd 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -704,7 +704,7 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs): # Try to set metadata directly. 
@mvdbeek thinks we should only record the datasets try: if dataset_instance.has_metadata_files: - dataset_instance.datatype.set_meta(dataset_instance) + dataset_instance.datatype.set_meta(dataset_instance) # type:ignore[arg-type] except Exception: log.debug(f"Metadata setting failed on {dataset_instance}", exc_info=True) dataset_instance.state = dataset_instance.dataset.states.FAILED_METADATA @@ -1232,7 +1232,7 @@ def _import_jobs(self, object_import_tracker: "ObjectImportTracker", history: Op continue imported_job = model.Job() - imported_job.id = job_attrs.get("id") + imported_job.id = cast(int, job_attrs.get("id")) imported_job.user = self.user add_object_to_session(imported_job, history_sa_session) imported_job.history = history @@ -2159,12 +2159,12 @@ def export_history( sa_session = app.model.session # Write collections' attributes (including datasets list) to file. - stmt = ( + stmt_hdca = ( select(model.HistoryDatasetCollectionAssociation) .where(model.HistoryDatasetCollectionAssociation.history == history) .where(model.HistoryDatasetCollectionAssociation.deleted == expression.false()) ) - collections = sa_session.scalars(stmt) + collections = sa_session.scalars(stmt_hdca) for collection in collections: # filter this ? @@ -2178,7 +2178,7 @@ def export_history( # Write datasets' attributes to file. 
actions_backref = model.Dataset.actions # type: ignore[attr-defined] - stmt = ( + stmt_hda = ( select(model.HistoryDatasetAssociation) .where(model.HistoryDatasetAssociation.history == history) .join(model.Dataset) @@ -2186,7 +2186,7 @@ def export_history( .order_by(model.HistoryDatasetAssociation.hid) .where(model.Dataset.purged == expression.false()) ) - datasets = sa_session.scalars(stmt).unique() + datasets = sa_session.scalars(stmt_hda).unique() for dataset in datasets: dataset.annotation = get_item_annotation_str(sa_session, history.user, dataset) should_include_file = (dataset.visible or include_hidden) and (not dataset.deleted or include_deleted) diff --git a/lib/galaxy/model/unittest_utils/data_app.py b/lib/galaxy/model/unittest_utils/data_app.py index 8942af46a4d7..75307163ae3d 100644 --- a/lib/galaxy/model/unittest_utils/data_app.py +++ b/lib/galaxy/model/unittest_utils/data_app.py @@ -98,7 +98,7 @@ def __init__(self, config: Optional[GalaxyDataTestConfig] = None, **kwd): self.object_store = objectstore.build_object_store_from_config(self.config) self.model = init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store) self.security_agent = self.model.security_agent - self.tag_handler = GalaxyTagHandler(self.model.context) + self.tag_handler = GalaxyTagHandler(self.model.session) self.init_datatypes() def init_datatypes(self): diff --git a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py index 7aad9d474081..0fbfaf947de3 100644 --- a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py @@ -75,6 +75,7 @@ def run_command(cmd: str) -> subprocess.CompletedProcess: def get_db_heads(config: Config) -> Tuple[str, ...]: """Return revision ids (version heads) stored in the database.""" dburl = config.get_main_option("sqlalchemy.url") + assert dburl engine = 
create_engine(dburl, future=True) with engine.connect() as conn: context = MigrationContext.configure(conn) diff --git a/lib/galaxy/model/unittest_utils/model_testing_utils.py b/lib/galaxy/model/unittest_utils/model_testing_utils.py index 2f8d71babc54..960045510abb 100644 --- a/lib/galaxy/model/unittest_utils/model_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/model_testing_utils.py @@ -145,7 +145,7 @@ def drop_database(db_url, database): _drop_database(db_url, database) else: url = make_url(db_url) - os.remove(url.database) + os.remove(url.database) # type:ignore[arg-type] def dbcleanup_wrapper(session, obj, where_clause=None): diff --git a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py index aff38254d183..792b07abc386 100644 --- a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py @@ -117,7 +117,7 @@ def activate_repository(self, repository: ToolShedRepository) -> None: tpm=tpm, repository=repository, changeset_revision=repository.changeset_revision, - metadata_dict=repository.metadata_, + metadata_dict=repository.metadata_, # type:ignore[arg-type] ) repository_tools_tups = irmm.get_repository_tools_tups() # Reload tools into the appropriate tool panel section. 
@@ -139,7 +139,7 @@ def activate_repository(self, repository: ToolShedRepository) -> None: dmh = data_manager.DataManagerHandler(self.app) dmh.install_data_managers( self.app.config.shed_data_manager_config_file, - repository.metadata_, + repository.metadata_, # type:ignore[arg-type] repository.get_shed_config_dict(self.app), data_manager_relative_install_dir, repository, @@ -206,7 +206,9 @@ def add_entry_to_repository_dependencies_of_installed_repositories(self, reposit def get_containing_repository_for_tool_dependency(self, tool_dependency_tup: tuple) -> ToolShedRepository: tool_shed_repository_id, name, version, type = tool_dependency_tup - return self.context.query(ToolShedRepository).get(tool_shed_repository_id) + repository = self.context.query(ToolShedRepository).get(tool_shed_repository_id) + assert repository + return repository def get_dependencies_for_repository( self, diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index bf7e3ccbd33e..e2ed787bdba2 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -131,7 +131,7 @@ def reset_all_metadata_on_installed_repository(self): original_metadata_dict = self.repository.metadata_ self.generate_metadata_for_changeset_revision() if self.metadata_dict != original_metadata_dict: - self.repository.metadata_ = self.metadata_dict + self.repository.metadata_ = self.metadata_dict # type:ignore[assignment] self.update_in_shed_tool_config() session = self.app.install_model.context diff --git a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py index e7ac8399b88b..dfa8bec8b769 100644 --- a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py +++ 
b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py @@ -116,7 +116,7 @@ def update_repository_record( if tool_shed_status_dict: repository.tool_shed_status = tool_shed_status_dict else: - repository.tool_shed_status = None + repository.tool_shed_status = None # type:ignore[assignment] session = self.app.install_model.context session.add(repository) with transaction(session): diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index e040ec0bdf16..b0827bdbf8b0 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -854,7 +854,7 @@ class GalaxyMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on Galaxy's app and repository constructs.""" app: InstallationTarget - repository: Optional[ToolShedRepository] + repository: Optional[ToolShedRepository] # type:ignore[assignment] def __init__( self, diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index 0059fc29092a..95fbd019e0e6 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -388,8 +388,10 @@ def __init__( self.metadata = self.MetadataWrapper(dataset_instance, compute_environment) if isinstance(dataset_instance, HasTags): self.groups = { - tag.user_value.lower() for tag in dataset_instance.tags if tag.user_tname == "group" - } # type-ignore[attr-defined] + tag.user_value.lower() + for tag in dataset_instance.tags # type:ignore[attr-defined] + if tag.user_tname == "group" + } else: # May be a 'FakeDatasetAssociation' self.groups = set() diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py index 98019adec70f..066785feae7a 100644 --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -269,7 +269,7 @@ def get_dbkeys(self, user: Optional[User], chrom_info=False): dbkeys = [] # Add user's custom keys to dbkeys. 
- if user and "dbkeys" in user.preferences: + if user and user.preferences and "dbkeys" in user.preferences: user_keys_dict = loads(user.preferences["dbkeys"]) dbkeys.extend([(attributes["name"], key) for key, attributes in user_keys_dict.items()]) diff --git a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py index f827c0ee999b..594015ca4528 100644 --- a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py @@ -57,6 +57,7 @@ def index(self, request: InstalledToolShedRepositoryIndexRequest) -> List[Instal def show(self, repository_id: DecodedDatabaseIdField) -> InstalledToolShedRepository: tool_shed_repository = self._install_model_context.get(ToolShedRepository, repository_id) + assert tool_shed_repository return self._show(tool_shed_repository) def check_for_updates(self, repository_id: Optional[int]) -> CheckForUpdatesResponse: @@ -74,7 +75,7 @@ def _get_tool_shed_repositories(self, **kwd): stmt = select(ToolShedRepository) for key, value in kwd.items(): if value is not None: - column = ToolShedRepository.__table__.c[key] + column = ToolShedRepository.__table__.c[key] # type:ignore[attr-defined] stmt = stmt.filter(column == value) stmt = stmt.order_by(ToolShedRepository.name).order_by(cast(ToolShedRepository.ctx_rev, Integer).desc()) session = self._install_model_context diff --git a/lib/galaxy/workflow/extract.py b/lib/galaxy/workflow/extract.py index 54899414413c..5d10aa8a8a19 100644 --- a/lib/galaxy/workflow/extract.py +++ b/lib/galaxy/workflow/extract.py @@ -116,7 +116,7 @@ def extract_steps( if name not in step_labels: step.label = name step_labels.add(name) - step.tool_inputs = dict(name=name) + step.tool_inputs = dict(name=name) # type:ignore[assignment] hid_to_output_pair[hid] = (step, "output") steps.append(step) for i, hid in enumerate(dataset_collection_ids): @@ -132,7 +132,7 @@ def extract_steps( if name 
not in step_labels: step.label = name step_labels.add(name) - step.tool_inputs = dict(name=name, collection_type=collection_type) + step.tool_inputs = dict(name=name, collection_type=collection_type) # type:ignore[assignment] hid_to_output_pair[hid] = (step, "output") steps.append(step) # Tool steps diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index da53d9de6159..6374b1a0ab18 100644 --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -1376,7 +1376,7 @@ def get_inputs(self): def restrict_options(self, step, connections: Iterable[WorkflowStepConnection], default_value): try: - static_options = [] + static_options = [] # type:ignore[var-annotated] # Retrieve possible runtime options for 'select' type inputs for connection in connections: # Well this isn't a great assumption... @@ -2421,6 +2421,7 @@ def from_workflow_step(self, trans, step: WorkflowStep, **kwargs) -> WorkflowMod Return module initialized from the WorkflowStep object `step`. """ type = step.type + assert type return self.module_types[type].from_workflow_step(trans, step, **kwargs) @@ -2509,13 +2510,13 @@ def inject(self, step: WorkflowStep, step_args=None, steps=None, **kwargs): If step_args is provided from a web form this is applied to generate 'state' else it is just obtained from the database. """ - step.upgrade_messages = {} + step.upgrade_messages = {} # type: ignore[assignment] # Make connection information available on each step by input name. step.setup_input_connections_by_name() # Populate module. 
- module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs) + module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs) # type: ignore[assignment] # Any connected input needs to have value DummyDataset (these # are not persisted so we need to do it every time) diff --git a/lib/galaxy/workflow/run.py b/lib/galaxy/workflow/run.py index c7eab00a7a1d..02b7a4c8f0c7 100644 --- a/lib/galaxy/workflow/run.py +++ b/lib/galaxy/workflow/run.py @@ -600,6 +600,7 @@ def set_step_outputs( workflow_output = model.WorkflowOutput(step, output_name=output_name) step.workflow_outputs.append(workflow_output) for workflow_output in step.workflow_outputs: + assert workflow_output.output_name output_name = workflow_output.output_name if output_name not in outputs: invocation_step.workflow_invocation.add_message( @@ -632,6 +633,7 @@ def _subworkflow_invocation(self, step: "WorkflowStep") -> WorkflowInvocation: workflow_invocation = self.workflow_invocation subworkflow_invocation = workflow_invocation.get_subworkflow_invocation_for_step(step) if subworkflow_invocation is None: + assert step.order_index raise MessageException(f"Failed to find persisted subworkflow invocation for step [{step.order_index + 1}]") return subworkflow_invocation diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 79fbb68cc0d5..5159c915cd87 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -210,7 +210,8 @@ def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]: repository = guid_to_repository(app, tool_id) owner = repository.user.username name = repository.name - repository = _get_repository_by_name_and_owner(app.model.context.current, name, owner, app.model.User) + assert name + repository = _get_repository_by_name_and_owner(app.model.session().current, name, owner, app.model.User) if not repository: log.warning(f"Repository {owner}/{name} does 
not exist, skipping") continue @@ -527,6 +528,7 @@ def upload_tar_and_set_metadata( app = trans.app user = trans.user assert user + assert user.username repo_dir = repository.repo_path(app) tip = repository.tip() tar_response = upload_tar( diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index a5df2eddcf91..bff8d0a21788 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -48,7 +48,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator): """A MetadataGenerator building on ToolShed's app and repository constructs.""" app: ToolShedApp - repository: Optional[Repository] + repository: Optional[Repository] # type:ignore[assignment] # why is mypy making me re-annotate these things from the base class, it didn't # when they were in the same file diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index 968109ac0235..4579ca1c72f1 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1300,8 +1300,11 @@ def edit_repository_information(self, repository: Repository, revert=True, **kwd self.check_for_strings(strings_displayed) if revert: strings_displayed = [] + # assert original_information[input_elem_name] for input_elem_name in ["repo_name", "description", "long_description"]: - self._browser.fill_form_value("edit_repository", input_elem_name, original_information[input_elem_name]) + self._browser.fill_form_value( + "edit_repository", input_elem_name, original_information[input_elem_name] # type:ignore[arg-type] + ) strings_displayed.append(self.escape_html(original_information[input_elem_name])) self._browser.submit_form_with_name("edit_repository", "edit_repository_button") if self._browser.is_twill: @@ -2050,7 +2053,7 @@ def _assert_repo_has_tool_with_id( ) tools = installed_repository.metadata_["tools"] found_it = False - 
for tool in tools: + for tool in tools: # type:ignore[attr-defined] if "id" not in tool: continue if tool["id"] == tool_id: diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py index 99027ea17fc8..a6cab6cc954d 100644 --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -287,7 +287,7 @@ def handle_email_alerts( email_alerts.append(user.email) else: subject = f"Galaxy tool shed update alert for repository named {str(repository.name)}" - email_alerts = json.loads(repository.email_alerts) + email_alerts = json.loads(repository.email_alerts) # type:ignore[arg-type] for email in email_alerts: to = email.strip() # Send it diff --git a/test/unit/app/tools/test_metadata.py b/test/unit/app/tools/test_metadata.py index 8f83c6b90ef6..7e1784a84caa 100644 --- a/test/unit/app/tools/test_metadata.py +++ b/test/unit/app/tools/test_metadata.py @@ -18,7 +18,7 @@ class TestMetadata(TestCase, tools_support.UsesTools): def setUp(self): super().setUp() self.setup_app() - model.Dataset.object_store = self.app.object_store + model.Dataset.object_store = self.app.object_store # type: ignore[assignment] job = model.Job() sa_session = self.app.model.session sa_session.add(job) diff --git a/test/unit/data/model/test_mapping_testing_utils.py b/test/unit/data/model/test_mapping_testing_utils.py index 6424632422ab..346ec2421403 100644 --- a/test/unit/data/model/test_mapping_testing_utils.py +++ b/test/unit/data/model/test_mapping_testing_utils.py @@ -65,7 +65,7 @@ def test_collection_consists_of_objects(session): # contains wrong number of objects assert not collection_consists_of_objects([stored_foo1, stored_foo1, stored_foo2], foo1, foo2) # if an object's primary key is not set, it cannot be equal to another object - foo1.id, stored_foo1.id = None, None + foo1.id, stored_foo1.id = None, None # type:ignore[assignment] assert not collection_consists_of_objects([stored_foo1], foo1) diff --git 
a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py index 81c3d9f68b5f..e31511e7c272 100644 --- a/test/unit/data/model/test_model_store.py +++ b/test/unit/data/model/test_model_store.py @@ -988,7 +988,7 @@ def _setup_collection_invocation(app): workflow_step_1 = model.WorkflowStep() workflow_step_1.order_index = 0 workflow_step_1.type = "data_collection_input" - workflow_step_1.tool_inputs = {} + workflow_step_1.tool_inputs = {} # type:ignore[assignment] sa_session.add(workflow_step_1) workflow_1 = _workflow_from_steps(u, [workflow_step_1]) workflow_1.license = "MIT" @@ -1014,7 +1014,7 @@ def _setup_simple_invocation(app): workflow_step_1 = model.WorkflowStep() workflow_step_1.order_index = 0 workflow_step_1.type = "data_input" - workflow_step_1.tool_inputs = {} + workflow_step_1.tool_inputs = {} # type:ignore[assignment] sa_session.add(workflow_step_1) workflow = _workflow_from_steps(u, [workflow_step_1]) workflow.license = "MIT" diff --git a/test/unit/data/test_mutable_json_column.py b/test/unit/data/test_mutable_json_column.py index 895438e74b40..9bc67fbc68e0 100644 --- a/test/unit/data/test_mutable_json_column.py +++ b/test/unit/data/test_mutable_json_column.py @@ -20,7 +20,7 @@ def test_metadata_mutable_column(self): session = self.model.session with transaction(session): session.commit() - w.value = {"x": "z"} + w.value = {"x": "z"} # type:ignore[assignment] persisted = self.persist_and_reload(w) assert persisted.value == {"x": "z"} persisted.value["x"] = "1" From 69581d1466f16033d258f0c0e4f4cf6446509bad Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 12 Feb 2024 13:47:18 -0500 Subject: [PATCH 61/71] Mypy: all statements are reachable --- lib/galaxy/managers/workflows.py | 2 +- lib/galaxy/model/__init__.py | 2 +- lib/galaxy/visualization/genomes.py | 4 ++-- lib/galaxy/workflow/modules.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/managers/workflows.py 
b/lib/galaxy/managers/workflows.py index b5085d02fb27..3d454b457f82 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -1882,7 +1882,7 @@ def __connect_workflow_steps(self, steps: List[model.WorkflowStep], steps_by_ext for step in steps: # Input connections if step.temp_input_connections: # populated by __module_from_dict - for input_name, conn_list in step.temp_input_connections.items(): + for input_name, conn_list in step.temp_input_connections.items(): # type:ignore[unreachable] if not conn_list: continue if not isinstance(conn_list, list): # Older style singleton connection diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 29463c29f64b..5565bca37f04 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -4186,7 +4186,7 @@ def _calculate_size(self) -> int: except OSError: return 0 assert self.object_store - return self.object_store.size(self) + return self.object_store.size(self) # type:ignore[unreachable] @overload def get_size(self, nice_size: Literal[False], calculate_size: bool = True) -> int: ... diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py index 066785feae7a..50672c8227b4 100644 --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -269,8 +269,8 @@ def get_dbkeys(self, user: Optional[User], chrom_info=False): dbkeys = [] # Add user's custom keys to dbkeys. - if user and user.preferences and "dbkeys" in user.preferences: - user_keys_dict = loads(user.preferences["dbkeys"]) + if user and user.preferences and "dbkeys" in user.preferences: # type:ignore[unreachable] + user_keys_dict = loads(user.preferences["dbkeys"]) # type:ignore[unreachable] dbkeys.extend([(attributes["name"], key) for key, attributes in user_keys_dict.items()]) # Add app keys to dbkeys. 
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index 6374b1a0ab18..6dfcbc500a68 100644 --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -1383,7 +1383,7 @@ def restrict_options(self, step, connections: Iterable[WorkflowStepConnection], assert connection.input_step module = connection.input_step.module assert isinstance(module, (ToolModule, SubWorkflowModule)) - if isinstance(module, ToolModule): + if isinstance(module, ToolModule): # type:ignore[unreachable] assert module.tool tool_inputs = module.tool.inputs # may not be set, but we're catching the Exception below. @@ -2545,7 +2545,7 @@ def inject_all(self, workflow: Workflow, param_map=None, ignore_tool_missing_exc def compute_runtime_state(self, step: WorkflowStep, step_args=None): assert step.module, "module must be injected before computing runtime state" - state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args) + state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args) # type:ignore[unreachable] step.state = state # Fix any missing parameters From 6396effcb75387d40c7e3d6472ad1e3cb27bb38b Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 12 Feb 2024 13:54:14 -0500 Subject: [PATCH 62/71] Mypy: need to map hda declaratively, then its parent is model.Base --- lib/galaxy/managers/hdas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index 956f73425bcc..62d8e87e9bc6 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -125,7 +125,7 @@ def is_accessible(self, item: model.HistoryDatasetAssociation, user: Optional[mo # return True return super().is_accessible(item, user, **kwargs) - def is_owner(self, item: model.Base, user: Optional[model.User], current_history=None, **kwargs: Any) -> bool: + def is_owner(self, item, user: Optional[model.User], current_history=None, **kwargs: Any) -> bool: """ Use 
history to see if current user owns HDA. """ From 6311c371b6552dfa348c508375f33fb03f113c8b Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 9 Feb 2024 15:07:03 -0500 Subject: [PATCH 63/71] Fix typing errors: sharable, secured --- lib/galaxy/managers/secured.py | 2 +- lib/galaxy/managers/sharable.py | 10 +++------- lib/galaxy/webapps/galaxy/services/sharable.py | 2 +- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/lib/galaxy/managers/secured.py b/lib/galaxy/managers/secured.py index 20d95b8a8217..64721d8b137e 100644 --- a/lib/galaxy/managers/secured.py +++ b/lib/galaxy/managers/secured.py @@ -33,7 +33,7 @@ class AccessibleManagerMixin: def by_id(self, id: int): ... # don't want to override by_id since consumers will also want to fetch w/o any security checks - def is_accessible(self, item: "Query", user: model.User, **kwargs: Any) -> bool: + def is_accessible(self, item, user: model.User, **kwargs: Any) -> bool: """ Return True if the item accessible to user. """ diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py index 053c11562fe4..35e1afec570d 100644 --- a/lib/galaxy/managers/sharable.py +++ b/lib/galaxy/managers/sharable.py @@ -18,7 +18,6 @@ Optional, Set, Type, - TYPE_CHECKING, ) from sqlalchemy import ( @@ -55,9 +54,6 @@ from galaxy.util import ready_name_for_url from galaxy.util.hash_util import md5_hash_str -if TYPE_CHECKING: - from sqlalchemy.orm import Query - log = logging.getLogger(__name__) @@ -101,9 +97,9 @@ def is_owner(self, item: model.Base, user: Optional[User], **kwargs: Any) -> boo # ... 
effectively a good fit to have this here, but not semantically if self.user_manager.is_admin(user, trans=kwargs.get("trans", None)): return True - return item.user == user + return item.user == user # type:ignore[attr-defined] - def is_accessible(self, item: "Query", user: Optional[User], **kwargs: Any) -> bool: + def is_accessible(self, item, user: Optional[User], **kwargs: Any) -> bool: """ If the item is importable, is owned by `user`, or (the valid) `user` is in 'users shared with' list for the item: return True. @@ -183,7 +179,7 @@ def share_with(self, item, user: User, flush: bool = True): """ # precondition: user has been validated # get or create - existing = self.get_share_assocs(item, user=user) + existing = self.get_share_assocs(item, user=user) # type:ignore[dict-item] if existing: return existing.pop(0) return self._create_user_share_assoc(item, user, flush=flush) diff --git a/lib/galaxy/webapps/galaxy/services/sharable.py b/lib/galaxy/webapps/galaxy/services/sharable.py index f940ed4e94ea..73724ba6e28e 100644 --- a/lib/galaxy/webapps/galaxy/services/sharable.py +++ b/lib/galaxy/webapps/galaxy/services/sharable.py @@ -182,7 +182,7 @@ def _send_notification_to_users(self, users_to_notify: Set[User], item: Sharable class SharedItemNotificationFactory: source = "galaxy_sharing_system" - type_map: Dict[SharableItem, SharableItemType] = { + type_map: Dict[Type[SharableItem], SharableItemType] = { History: "history", StoredWorkflow: "workflow", Visualization: "visualization", From 49a2112291524dea35298de86a8c5918dadfd56d Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 12 Feb 2024 20:24:24 -0500 Subject: [PATCH 64/71] Fix package mypy errors --- lib/galaxy/model/store/__init__.py | 2 +- lib/galaxy/tool_util/toolbox/watcher.py | 6 +++--- lib/galaxy/tools/wrappers.py | 2 +- lib/galaxy/util/watcher.py | 6 +++--- lib/galaxy_test/selenium/framework.py | 6 +++--- lib/tool_shed/test/base/playwrightbrowser.py | 6 +++--- lib/tool_shed/test/base/twillbrowser.py | 
4 ++-- test/unit/app/managers/test_HistoryManager.py | 5 ++++- 8 files changed, 20 insertions(+), 17 deletions(-) diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 32ac1215b6cd..58a18dea7d13 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -2161,7 +2161,7 @@ def export_history( # Write collections' attributes (including datasets list) to file. stmt_hdca = ( select(model.HistoryDatasetCollectionAssociation) - .where(model.HistoryDatasetCollectionAssociation.history == history) + .where(model.HistoryDatasetCollectionAssociation.history == history) # type:ignore[arg-type] .where(model.HistoryDatasetCollectionAssociation.deleted == expression.false()) ) collections = sa_session.scalars(stmt_hdca) diff --git a/lib/galaxy/tool_util/toolbox/watcher.py b/lib/galaxy/tool_util/toolbox/watcher.py index e570bb58198d..b885ea964e27 100644 --- a/lib/galaxy/tool_util/toolbox/watcher.py +++ b/lib/galaxy/tool_util/toolbox/watcher.py @@ -9,9 +9,9 @@ can_watch = True except ImportError: - Observer = None - FileSystemEventHandler = object - PollingObserver = None + Observer = None # type:ignore[assignment, misc] + FileSystemEventHandler = object # type:ignore[assignment, misc] + PollingObserver = None # type:ignore[assignment, misc] can_watch = False from galaxy.util.hash_util import md5_hash_file diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index 95fbd019e0e6..3946baf593a2 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -389,7 +389,7 @@ def __init__( if isinstance(dataset_instance, HasTags): self.groups = { tag.user_value.lower() - for tag in dataset_instance.tags # type:ignore[attr-defined] + for tag in dataset_instance.tags # type:ignore[unused-ignore, attr-defined] if tag.user_tname == "group" } else: diff --git a/lib/galaxy/util/watcher.py b/lib/galaxy/util/watcher.py index dd4f0110299a..501a2212201c 100644 --- a/lib/galaxy/util/watcher.py 
+++ b/lib/galaxy/util/watcher.py @@ -13,9 +13,9 @@ can_watch = True except ImportError: - Observer = None - FileSystemEventHandler = object - PollingObserver = None + Observer = None # type:ignore[assignment] + FileSystemEventHandler = object # type:ignore[assignment, misc] + PollingObserver = None # type:ignore[assignment, misc] can_watch = False from galaxy.util.hash_util import md5_hash_file diff --git a/lib/galaxy_test/selenium/framework.py b/lib/galaxy_test/selenium/framework.py index 5b919286dbc3..a3b298d54c19 100644 --- a/lib/galaxy_test/selenium/framework.py +++ b/lib/galaxy_test/selenium/framework.py @@ -536,7 +536,7 @@ def setup_shared_state(self): NavigatesGalaxyMixin = object -class UsesLibraryAssertions(NavigatesGalaxyMixin): +class UsesLibraryAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] @retry_assertion_during_transitions def assert_num_displayed_items_is(self, n): num_displayed = self.num_displayed_items() @@ -546,7 +546,7 @@ def num_displayed_items(self) -> int: return len(self.libraries_table_elements()) -class UsesHistoryItemAssertions(NavigatesGalaxyMixin): +class UsesHistoryItemAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] def assert_item_peek_includes(self, hid, expected): item_body = self.history_panel_item_component(hid=hid) peek_text = item_body.peek.wait_for_text() @@ -584,7 +584,7 @@ def assert_item_hid_text(self, hid): ) -class UsesWorkflowAssertions(NavigatesGalaxyMixin): +class UsesWorkflowAssertions(NavigatesGalaxyMixin): # type:ignore[valid-type, misc] @retry_assertion_during_transitions def _assert_showing_n_workflows(self, n): actual_count = len(self.workflow_card_elements()) diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py index d493e97aa990..4c82a0d7183c 100644 --- a/lib/tool_shed/test/base/playwrightbrowser.py +++ b/lib/tool_shed/test/base/playwrightbrowser.py @@ -132,7 +132,7 @@ def _fill_form_value(self, form: Locator, control_name: 
str, value: FormValueTyp input_s = form.locator(f"select[name='{control_name}']") if input_i.count(): if control_name in ["redirect"]: - input_i.input_value = value + input_i.input_value = value # type:ignore[method-assign, assignment] else: if isinstance(value, bool): if value and not input_i.is_checked(): @@ -142,9 +142,9 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp else: input_i.fill(value) if input_t.count(): - input_t.fill(value) + input_t.fill(value) # type:ignore[arg-type] if input_s.count(): - input_s.select_option(value) + input_s.select_option(value) # type:ignore[arg-type] def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: multi_select = "form[name='categories'] select[name='category_id']" diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py index 1f4af8791678..a73cdf85b299 100644 --- a/lib/tool_shed/test/base/twillbrowser.py +++ b/lib/tool_shed/test/base/twillbrowser.py @@ -5,7 +5,7 @@ ) import twill.commands as tc -from twill.browser import FormElement +from twill.browser import FormElement # type:ignore[attr-defined] from galaxy.util import smart_str from .browser import ( @@ -19,7 +19,7 @@ def visit_url(url: str, allowed_codes: List[int]) -> str: - new_url = tc.go(url) + new_url = tc.go(url) # type:ignore[func-returns-value] return_code = tc.browser.code assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( return_code, diff --git a/test/unit/app/managers/test_HistoryManager.py b/test/unit/app/managers/test_HistoryManager.py index ba1710740540..c3627c87d166 100644 --- a/test/unit/app/managers/test_HistoryManager.py +++ b/test/unit/app/managers/test_HistoryManager.py @@ -85,7 +85,10 @@ def test_base(self): self.log("should be able to order") history3 = self.history_manager.create(name="history3", user=user2) - name_first_then_time = (model.History.name, 
sqlalchemy.desc(model.History.create_time)) + name_first_then_time = ( + model.History.name, + sqlalchemy.desc(model.History.create_time), + ) # type:ignore[var-annotated] assert self.history_manager.list(order_by=name_first_then_time) == [history2, history1, history3] def test_copy(self): From acdb9c65faac3b013a8ea25e9e4169babea935fd Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 13 Feb 2024 02:50:50 -0500 Subject: [PATCH 65/71] Fix SA2.0 error: celery task 1. In 2.0, when the statement contains "returning", the result type is ChunkedIteratorResult, which does not have the rowcount attr, becuase: 2. result.rowcount should not be used for statements containting the returning clause Ref: https://docs.sqlalchemy.org/en/20/core/connections.html#sqlalchemy.engine.CursorResult.rowcount --- lib/galaxy/celery/base_task.py | 37 +++++++++++++--------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/lib/galaxy/celery/base_task.py b/lib/galaxy/celery/base_task.py index 724472765393..6737aba0231c 100644 --- a/lib/galaxy/celery/base_task.py +++ b/lib/galaxy/celery/base_task.py @@ -80,37 +80,28 @@ class GalaxyTaskBeforeStartUserRateLimitPostgres(GalaxyTaskBeforeStartUserRateLi We take advantage of efficiencies in its dialect. 
""" - _update_stmt = ( - update(CeleryUserRateLimit) - .where(CeleryUserRateLimit.user_id == bindparam("userid")) - .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) ")) - .returning(CeleryUserRateLimit.last_scheduled_time) - ) - _insert_stmt = ( - ps_insert(CeleryUserRateLimit) - .values(user_id=bindparam("userid"), last_scheduled_time=bindparam("now")) - .returning(CeleryUserRateLimit.last_scheduled_time) - ) - - _upsert_stmt = _insert_stmt.on_conflict_do_update( # type:ignore[attr-defined] - index_elements=["user_id"], set_=dict(last_scheduled_time=bindparam("sched_time")) - ) - def calculate_task_start_time( # type: ignore self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: with transaction(sa_session): - result = sa_session.execute( - self._update_stmt, {"userid": user_id, "interval": task_interval_secs, "now": now} + update_stmt = ( + update(CeleryUserRateLimit) + .where(CeleryUserRateLimit.user_id == user_id) + .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) ")) + .returning(CeleryUserRateLimit.last_scheduled_time) ) - if result.rowcount == 0: + result = sa_session.execute(update_stmt, {"interval": task_interval_secs, "now": now}).all() + if not result: sched_time = now + datetime.timedelta(seconds=task_interval_secs) - result = sa_session.execute( - self._upsert_stmt, {"userid": user_id, "now": now, "sched_time": sched_time} + upsert_stmt = ( + ps_insert(CeleryUserRateLimit) # type:ignore[attr-defined] + .values(user_id=user_id, last_scheduled_time=now) + .returning(CeleryUserRateLimit.last_scheduled_time) + .on_conflict_do_update(index_elements=["user_id"], set_=dict(last_scheduled_time=sched_time)) ) - for row in result: - return row[0] + result = sa_session.execute(upsert_stmt).all() sa_session.commit() + return result[0][0] class 
GalaxyTaskBeforeStartUserRateLimitStandard(GalaxyTaskBeforeStartUserRateLimit): From 036dad5c8b190ef297111ba86331c4f7505daf14 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 20 Feb 2024 00:28:43 -0500 Subject: [PATCH 66/71] Wrap call to ensure session is closed Otherwise there's an idle transaction left in the database (+locks) --- lib/tool_shed/webapp/controllers/admin.py | 2 +- lib/tool_shed/webapp/model/mapping.py | 5 +++++ lib/tool_shed/webapp/util/shed_statistics.py | 10 ++-------- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lib/tool_shed/webapp/controllers/admin.py b/lib/tool_shed/webapp/controllers/admin.py index 73ddef546528..9b1681bafe24 100644 --- a/lib/tool_shed/webapp/controllers/admin.py +++ b/lib/tool_shed/webapp/controllers/admin.py @@ -324,7 +324,7 @@ def regenerate_statistics(self, trans, **kwd): message = escape(kwd.get("message", "")) status = kwd.get("status", "done") if "regenerate_statistics_button" in kwd: - trans.app.shed_counter.generate_statistics() + trans.app.shed_counter.generate_statistics(trans.sa_session) message = "Successfully regenerated statistics" return trans.fill_template("/webapps/tool_shed/admin/statistics.mako", message=message, status=status) diff --git a/lib/tool_shed/webapp/model/mapping.py b/lib/tool_shed/webapp/model/mapping.py index 0ef27ff7f8b9..94ff0317f3c4 100644 --- a/lib/tool_shed/webapp/model/mapping.py +++ b/lib/tool_shed/webapp/model/mapping.py @@ -45,4 +45,9 @@ def init( result.security_agent = CommunityRBACAgent(result) result.shed_counter = shed_statistics.ShedCounter(result) + + session = result.session() + with session.begin(): + result.shed_counter.generate_statistics(session) + return result diff --git a/lib/tool_shed/webapp/util/shed_statistics.py b/lib/tool_shed/webapp/util/shed_statistics.py index d864420ecb2d..7e21673de19d 100644 --- a/lib/tool_shed/webapp/util/shed_statistics.py +++ b/lib/tool_shed/webapp/util/shed_statistics.py @@ -23,14 +23,8 @@ def __init__(self, model): 
self.unique_owners = 0 self.unique_valid_tools = 0 self.workflows = 0 - self.generate_statistics() - @property - def sa_session(self): - """Returns a SQLAlchemy session""" - return self.model.context - - def generate_statistics(self): + def generate_statistics(self, sa_session): self.custom_datatypes = 0 self.deleted_repositories = 0 self.deprecated_repositories = 0 @@ -42,7 +36,7 @@ def generate_statistics(self): self.unique_valid_tools = 0 self.workflows = 0 unique_user_ids = [] - for repository in self.sa_session.scalars(select(Repository)): + for repository in sa_session.scalars(select(Repository)): self.repositories += 1 self.total_clones += repository.times_downloaded is_deleted = repository.deleted From a701cde23d0b055479179b6b403ba91b444fd337 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 20 Feb 2024 00:45:51 -0500 Subject: [PATCH 67/71] Ensure session is closed on TS Registry load Same as prev. commit: otherwise db locks are left --- lib/tool_shed/repository_registry.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/tool_shed/repository_registry.py b/lib/tool_shed/repository_registry.py index 6927b4e68e74..521ae1a06c6f 100644 --- a/lib/tool_shed/repository_registry.py +++ b/lib/tool_shed/repository_registry.py @@ -44,8 +44,12 @@ def __init__(self, app): self.viewable_suites_by_category = {} self.viewable_valid_repositories_and_suites_by_category = {} self.viewable_valid_suites_by_category = {} - self.load_viewable_repositories_and_suites_by_category() - self.load_repository_and_suite_tuples() + self.load() + + def load(self): + with self.sa_session.begin(): + self.load_viewable_repositories_and_suites_by_category() + self.load_repository_and_suite_tuples() def add_category_entry(self, category): category_name = str(category.name) From 8c88bf9de1c0df6b19082f69f3703656f2fe6a32 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 20 Feb 2024 01:08:42 -0500 Subject: [PATCH 68/71] Fix SA2.0 error: list arg to select; mypy 
--- lib/galaxy/model/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 5565bca37f04..a2f860ff26e5 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -7611,8 +7611,9 @@ def copy_tags_from(self, target_user, source_workflow): def invocation_counts(self) -> InvocationsStateCounts: sa_session = object_session(self) + assert sa_session stmt = ( - select([WorkflowInvocation.state, func.count(WorkflowInvocation.state)]) + select(WorkflowInvocation.state, func.count(WorkflowInvocation.state)) .select_from(StoredWorkflow) .join(Workflow, Workflow.stored_workflow_id == StoredWorkflow.id) .join(WorkflowInvocation, WorkflowInvocation.workflow_id == Workflow.id) @@ -7620,7 +7621,7 @@ def invocation_counts(self) -> InvocationsStateCounts: .where(StoredWorkflow.id == self.id) ) rows = sa_session.execute(stmt).all() - rows_as_dict = dict(r for r in rows if r[0] is not None) + rows_as_dict = dict(r for r in rows if r[0] is not None) # type:ignore[arg-type, var-annotated] return InvocationsStateCounts(rows_as_dict) def to_dict(self, view="collection", value_mapper=None): From b644b1da4e620603e9e6a807c9177ffa46fca86e Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 22 Feb 2024 23:51:23 -0500 Subject: [PATCH 69/71] Use NullPool for sqlite engines This restores the behavior under SQLAlchemy 1.4 (Note that we set the pool for sqlite only if it's not an in-memory db --- lib/galaxy/model/orm/engine_factory.py | 18 +++++++----- test/unit/data/model/test_engine_factory.py | 32 ++++++++++----------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/lib/galaxy/model/orm/engine_factory.py b/lib/galaxy/model/orm/engine_factory.py index 886a4e3462ab..374c53219c73 100644 --- a/lib/galaxy/model/orm/engine_factory.py +++ b/lib/galaxy/model/orm/engine_factory.py @@ -12,6 +12,7 @@ exc, ) from sqlalchemy.engine import Engine +from sqlalchemy.pool import 
NullPool log = logging.getLogger(__name__) @@ -101,8 +102,13 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema pass engine_options = engine_options or {} - engine_options = set_sqlite_connect_args(engine_options, url) - engine = create_engine(url, **engine_options, future=True) + if url.startswith("sqlite://"): + set_sqlite_connect_args(engine_options, url) + + if url.startswith("sqlite://") and url not in ("sqlite:///:memory:", "sqlite://"): + engine = create_engine(url, **engine_options, poolclass=NullPool, future=True) + else: + engine = create_engine(url, **engine_options, future=True) # Prevent sharing connection across fork: https://docs.sqlalchemy.org/en/14/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork register_after_fork(engine, lambda e: e.dispose()) @@ -123,13 +129,11 @@ def checkout(dbapi_connection, connection_record, connection_proxy): return engine -def set_sqlite_connect_args(engine_options: Dict, url: str): +def set_sqlite_connect_args(engine_options: Dict, url: str) -> None: """ Add or update `connect_args` in `engine_options` if db is sqlite. Set check_same_thread to False for sqlite, handled by request-specific session. 
See https://fastapi.tiangolo.com/tutorial/sql-databases/#note """ - if url.startswith("sqlite://"): - connect_args = engine_options.setdefault("connect_args", {}) - connect_args["check_same_thread"] = False - return engine_options + connect_args = engine_options.setdefault("connect_args", {}) + connect_args["check_same_thread"] = False diff --git a/test/unit/data/model/test_engine_factory.py b/test/unit/data/model/test_engine_factory.py index 947664865f3e..361fc1f608d3 100644 --- a/test/unit/data/model/test_engine_factory.py +++ b/test/unit/data/model/test_engine_factory.py @@ -7,28 +7,28 @@ class TestSetSqliteConnectArgs: def test_engine_options_empty(self): engine_options = {} # type: ignore[var-annotated] - updated = set_sqlite_connect_args(engine_options, SQLITE_URL) - assert updated == {"connect_args": {"check_same_thread": False}} + set_sqlite_connect_args(engine_options, SQLITE_URL) + assert engine_options == {"connect_args": {"check_same_thread": False}} def test_update_nonempty_engine_options(self): engine_options = {"foo": "some foo"} - updated = set_sqlite_connect_args(engine_options, SQLITE_URL) - assert len(updated) == 2 - assert updated["foo"] == "some foo" - assert updated["connect_args"] == {"check_same_thread": False} + set_sqlite_connect_args(engine_options, SQLITE_URL) + assert len(engine_options) == 2 + assert engine_options["foo"] == "some foo" + assert engine_options["connect_args"] == {"check_same_thread": False} # type:ignore[comparison-overlap] def test_overwrite_connect_args(self): engine_options = {"foo": "some foo", "connect_args": {"check_same_thread": True}} - updated = set_sqlite_connect_args(engine_options, SQLITE_URL) - assert len(updated) == 2 - assert updated["foo"] == "some foo" - assert updated["connect_args"] == {"check_same_thread": False} + set_sqlite_connect_args(engine_options, SQLITE_URL) + assert len(engine_options) == 2 + assert engine_options["foo"] == "some foo" + assert engine_options["connect_args"] == 
{"check_same_thread": False} def test_update_nonempty_connect_args(self): engine_options = {"foo": "some foo", "connect_args": {"bar": "some bar"}} - updated = set_sqlite_connect_args(engine_options, SQLITE_URL) - assert len(updated) == 2 - assert updated["foo"] == "some foo" - assert len(updated["connect_args"]) == 2 - assert updated["connect_args"]["check_same_thread"] is False - assert updated["connect_args"]["bar"] == "some bar" + set_sqlite_connect_args(engine_options, SQLITE_URL) + assert len(engine_options) == 2 + assert engine_options["foo"] == "some foo" + assert len(engine_options["connect_args"]) == 2 + assert engine_options["connect_args"]["check_same_thread"] is False # type:ignore[index] + assert engine_options["connect_args"]["bar"] == "some bar" # type:ignore[index] From 230c4b48b8571f8d064d9ee4ca87a7f8d085c1fb Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 28 Feb 2024 11:40:28 -0600 Subject: [PATCH 70/71] Help mypy: job is never None --- lib/galaxy/jobs/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index 570d1d72b99b..f15afe2b58e6 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -1181,7 +1181,9 @@ def galaxy_url(self): return self.get_destination_configuration("galaxy_infrastructure_url") def get_job(self) -> model.Job: - return self.sa_session.get(Job, self.job_id) # type:ignore[return-value] + job = self.sa_session.get(Job, self.job_id) + assert job + return job def get_id_tag(self): # For compatibility with drmaa, which uses job_id right now, and TaskWrapper From fd644c3d03cae83ec2f049457159de6517d42431 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 5 Mar 2024 13:40:15 -0500 Subject: [PATCH 71/71] Add Decimal to accpted types by util.nice_size() --- lib/galaxy/util/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py index 
4769a6842ba3..53532c6a57ed 100644 --- a/lib/galaxy/util/__init__.py +++ b/lib/galaxy/util/__init__.py @@ -29,6 +29,7 @@ datetime, timezone, ) +from decimal import Decimal from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from hashlib import md5 @@ -1519,7 +1520,7 @@ def shorten_with_metric_prefix(amount: int) -> str: return str(amount) -def nice_size(size: Union[float, int, str]) -> str: +def nice_size(size: Union[float, int, str, Decimal]) -> str: """ Returns a readably formatted string with the size