diff --git a/lib/galaxy/app_unittest_utils/galaxy_mock.py b/lib/galaxy/app_unittest_utils/galaxy_mock.py index c33e3b433326..d50842158020 100644 --- a/lib/galaxy/app_unittest_utils/galaxy_mock.py +++ b/lib/galaxy/app_unittest_utils/galaxy_mock.py @@ -125,7 +125,7 @@ def __init__(self, config=None, **kwargs) -> None: self[ShortTermStorageMonitor] = sts_manager # type: ignore[type-abstract] self[galaxy_scoped_session] = self.model.context self.visualizations_registry = MockVisualizationsRegistry() - self.tag_handler = tags.GalaxyTagHandler(self.model.context) + self.tag_handler = tags.GalaxyTagHandler(self.model.session) self[tags.GalaxyTagHandler] = self.tag_handler self.quota_agent = quota.DatabaseQuotaAgent(self.model) self.job_config = Bunch( diff --git a/lib/galaxy/celery/base_task.py b/lib/galaxy/celery/base_task.py index 410d6d4d2bfe..6737aba0231c 100644 --- a/lib/galaxy/celery/base_task.py +++ b/lib/galaxy/celery/base_task.py @@ -11,7 +11,6 @@ ) from sqlalchemy.dialects.postgresql import insert as ps_insert from sqlalchemy.exc import IntegrityError -from sqlalchemy.orm import Session from galaxy.model import CeleryUserRateLimit from galaxy.model.base import transaction @@ -70,7 +69,7 @@ def __call__(self, task: Task, task_id, args, kwargs): @abstractmethod def calculate_task_start_time( - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: return now @@ -81,38 +80,28 @@ class GalaxyTaskBeforeStartUserRateLimitPostgres(GalaxyTaskBeforeStartUserRateLi We take advantage of efficiencies in its dialect. """ - _update_stmt = ( - update(CeleryUserRateLimit) - .where(CeleryUserRateLimit.user_id == bindparam("userid")) - .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) ")) - .returning(CeleryUserRateLimit.last_scheduled_time) - ) - - _insert_stmt = ( - ps_insert(CeleryUserRateLimit) - .values(user_id=bindparam("userid"), last_scheduled_time=bindparam("now")) - .returning(CeleryUserRateLimit.last_scheduled_time) - ) - - _upsert_stmt = _insert_stmt.on_conflict_do_update( - index_elements=["user_id"], set_=dict(last_scheduled_time=bindparam("sched_time")) - ) - def calculate_task_start_time( # type: ignore - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: with transaction(sa_session): - result = sa_session.execute( - self._update_stmt, {"userid": user_id, "interval": task_interval_secs, "now": now} + update_stmt = ( + update(CeleryUserRateLimit) + .where(CeleryUserRateLimit.user_id == user_id) + .values(last_scheduled_time=text("greatest(last_scheduled_time + ':interval second', " ":now) ")) + .returning(CeleryUserRateLimit.last_scheduled_time) ) - if result.rowcount == 0: + result = sa_session.execute(update_stmt, {"interval": task_interval_secs, "now": now}).all() + if not result: sched_time = now + datetime.timedelta(seconds=task_interval_secs) - result = sa_session.execute( - self._upsert_stmt, {"userid": user_id, "now": now, "sched_time": sched_time} + upsert_stmt = ( + ps_insert(CeleryUserRateLimit) # type:ignore[attr-defined] + .values(user_id=user_id, last_scheduled_time=now) + .returning(CeleryUserRateLimit.last_scheduled_time) + .on_conflict_do_update(index_elements=["user_id"], 
set_=dict(last_scheduled_time=sched_time)) ) - for row in result: - return row[0] + result = sa_session.execute(upsert_stmt).all() sa_session.commit() + return result[0][0] class GalaxyTaskBeforeStartUserRateLimitStandard(GalaxyTaskBeforeStartUserRateLimit): @@ -138,7 +127,7 @@ class GalaxyTaskBeforeStartUserRateLimitStandard(GalaxyTaskBeforeStartUserRateLi ) def calculate_task_start_time( - self, user_id: int, sa_session: Session, task_interval_secs: float, now: datetime.datetime + self, user_id: int, sa_session: galaxy_scoped_session, task_interval_secs: float, now: datetime.datetime ) -> datetime.datetime: last_scheduled_time = None with transaction(sa_session): diff --git a/lib/galaxy/celery/tasks.py b/lib/galaxy/celery/tasks.py index fc60f6921327..e49008fb9cec 100644 --- a/lib/galaxy/celery/tasks.py +++ b/lib/galaxy/celery/tasks.py @@ -195,7 +195,7 @@ def set_metadata( try: if overwrite: hda_manager.overwrite_metadata(dataset_instance) - dataset_instance.datatype.set_meta(dataset_instance) + dataset_instance.datatype.set_meta(dataset_instance) # type:ignore [arg-type] dataset_instance.set_peek() # Reset SETTING_METADATA state so the dataset instance getter picks the dataset state dataset_instance.set_metadata_success_state() @@ -228,6 +228,7 @@ def setup_fetch_data( ): tool = cached_create_tool_from_representation(app=app, raw_tool_source=raw_tool_source) job = sa_session.get(Job, job_id) + assert job # self.request.hostname is the actual worker name given by the `-n` argument, not the hostname as you might think. job.handler = self.request.hostname job.job_runner_name = "celery" @@ -260,6 +261,7 @@ def finish_job( ): tool = cached_create_tool_from_representation(app=app, raw_tool_source=raw_tool_source) job = sa_session.get(Job, job_id) + assert job # TODO: assert state ? mini_job_wrapper = MinimalJobWrapper(job=job, app=app, tool=tool) mini_job_wrapper.finish("", "") @@ -320,6 +322,7 @@ def fetch_data( task_user_id: Optional[int] = None, ) -> str: job = sa_session.get(Job, job_id) + assert job mini_job_wrapper = MinimalJobWrapper(job=job, app=app) mini_job_wrapper.change_state(model.Job.states.RUNNING, flush=True, job=job) return abort_when_job_stops(_fetch_data, session=sa_session, job_id=job_id, setup_return=setup_return) diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index ff3ea85a5ae1..a68c692f1cff 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -743,7 +743,6 @@ class GalaxyAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin): def __init__(self, **kwargs): super().__init__(**kwargs) self._override_tempdir(kwargs) - self._configure_sqlalchemy20_warnings(kwargs) self._process_config(kwargs) self._set_dependent_defaults() @@ -760,36 +759,6 @@ def _set_dependent_defaults(self): f"{dependent_config_param}, {config_param}" ) - def _configure_sqlalchemy20_warnings(self, kwargs): - """ - This method should be deleted after migration to SQLAlchemy 2.0 is complete. - To enable warnings, set `GALAXY_CONFIG_SQLALCHEMY_WARN_20=1`, - """ - warn = string_as_bool(kwargs.get("sqlalchemy_warn_20", False)) - if warn: - import sqlalchemy - - sqlalchemy.util.deprecations.SQLALCHEMY_WARN_20 = True - self._setup_sqlalchemy20_warnings_filters() - - def _setup_sqlalchemy20_warnings_filters(self): - import warnings - - from sqlalchemy.exc import RemovedIn20Warning - - # Always display RemovedIn20Warning warnings. 
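The inlined statement above replaces the old class-level `_update_stmt`/`_insert_stmt`/`_upsert_stmt` attributes built with `bindparam()`: under SQLAlchemy 2.0 the statements are constructed per call with literal values. A minimal sketch of the same PostgreSQL upsert pattern, using a stand-in Core table (column names assumed) rather than Galaxy's `CeleryUserRateLimit` model:

    import datetime

    from sqlalchemy import Column, DateTime, Integer, MetaData, Table
    from sqlalchemy.dialects.postgresql import insert as ps_insert

    metadata = MetaData()
    # Stand-in for the celery_user_rate_limit table.
    rate_limit = Table(
        "celery_user_rate_limit",
        metadata,
        Column("user_id", Integer, primary_key=True),
        Column("last_scheduled_time", DateTime, nullable=False),
    )

    def upsert_schedule(connection, user_id: int, now: datetime.datetime, sched_time: datetime.datetime):
        # INSERT ... ON CONFLICT (user_id) DO UPDATE ... RETURNING last_scheduled_time
        stmt = (
            ps_insert(rate_limit)
            .values(user_id=user_id, last_scheduled_time=now)
            .on_conflict_do_update(
                index_elements=["user_id"],
                set_=dict(last_scheduled_time=sched_time),
            )
            .returning(rate_limit.c.last_scheduled_time)
        )
        return connection.execute(stmt).scalar_one()

Building the statement inline trades the old prepared-once pattern for simpler code; with the compiled-statement caching in 1.4/2.0 the per-call construction cost is largely negligible.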
- warnings.filterwarnings("always", category=RemovedIn20Warning) - # Optionally, enable filters for specific warnings (raise error, or log, etc.) - # messages = [ - # r"replace with warning text to match", - # ] - # for msg in messages: - # warnings.filterwarnings('error', message=msg, category=RemovedIn20Warning) - # - # See documentation: - # https://docs.python.org/3.7/library/warnings.html#the-warnings-filter - # https://docs.sqlalchemy.org/en/14/changelog/migration_20.html#migration-to-2-0-step-three-resolve-all-removedin20warnings - def _load_schema(self): return AppSchema(GALAXY_CONFIG_SCHEMA_PATH, GALAXY_APP_NAME) diff --git a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt index 9b51bb5490a2..bf5d947867a4 100644 --- a/lib/galaxy/dependencies/pinned-requirements.txt +++ b/lib/galaxy/dependencies/pinned-requirements.txt @@ -186,7 +186,7 @@ sniffio==1.3.1 ; python_version >= "3.8" and python_version < "3.13" social-auth-core[openidconnect]==4.0.3 ; python_version >= "3.8" and python_version < "3.13" sortedcontainers==2.4.0 ; python_version >= "3.8" and python_version < "3.13" spython==0.3.13 ; python_version >= "3.8" and python_version < "3.13" -sqlalchemy==1.4.51 ; python_version >= "3.8" and python_version < "3.13" +sqlalchemy==2.0.25 ; python_version >= "3.8" and python_version < "3.13" sqlitedict==2.1.0 ; python_version >= "3.8" and python_version < "3.13" sqlparse==0.4.4 ; python_version >= "3.8" and python_version < "3.13" starlette-context==0.3.6 ; python_version >= "3.8" and python_version < "3.13" diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index ae9a2dddca06..f15afe2b58e6 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -1181,7 +1181,9 @@ def galaxy_url(self): return self.get_destination_configuration("galaxy_infrastructure_url") def get_job(self) -> model.Job: - return self.sa_session.get(Job, self.job_id) + job = self.sa_session.get(Job, self.job_id) + assert job + return job def get_id_tag(self): # For compatibility with drmaa, which uses job_id right now, and TaskWrapper @@ -1551,7 +1553,7 @@ def change_state(self, state, info=False, flush=True, job=None): def get_state(self) -> str: job = self.get_job() self.sa_session.refresh(job) - return job.state + return job.state # type:ignore[return-value] def set_runner(self, runner_url, external_id): log.warning("set_runner() is deprecated, use set_job_destination()") diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py index c151852ee054..be487b4da675 100644 --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -318,9 +318,9 @@ def _one_with_recast_errors(self, query: Query) -> U: # overridden to raise serializable errors try: return query.one() - except sqlalchemy.orm.exc.NoResultFound: + except sqlalchemy.exc.NoResultFound: raise exceptions.ObjectNotFound(f"{self.model_class.__name__} not found") - except sqlalchemy.orm.exc.MultipleResultsFound: + except sqlalchemy.exc.MultipleResultsFound: raise exceptions.InconsistentDatabase(f"found more than one {self.model_class.__name__}") # NOTE: at this layer, all ids are expected to be decoded and in int form diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py index 8db9037b55a5..772009aa6464 100644 --- a/lib/galaxy/managers/collections.py +++ b/lib/galaxy/managers/collections.py @@ -839,7 +839,7 @@ def get_collection_contents(self, trans: ProvidesAppContext, parent_id, limit=No def 
_get_collection_contents_qry(self, parent_id, limit=None, offset=None): """Build query to find first level of collection contents by containing collection parent_id""" DCE = model.DatasetCollectionElement - qry = Query(DCE).filter(DCE.dataset_collection_id == parent_id) + qry = Query(DCE).filter(DCE.dataset_collection_id == parent_id) # type:ignore[var-annotated] qry = qry.order_by(DCE.element_index) qry = qry.options( joinedload(model.DatasetCollectionElement.child_collection), joinedload(model.DatasetCollectionElement.hda) diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py index 612a69989deb..fbe82e56e996 100644 --- a/lib/galaxy/managers/datasets.py +++ b/lib/galaxy/managers/datasets.py @@ -112,8 +112,8 @@ def purge_datasets(self, request: PurgeDatasetsTaskRequest): self.error_unless_dataset_purge_allowed() with self.session().begin(): for dataset_id in request.dataset_ids: - dataset: Dataset = self.session().get(Dataset, dataset_id) - if dataset.user_can_purge: + dataset: Optional[Dataset] = self.session().get(Dataset, dataset_id) + if dataset and dataset.user_can_purge: try: dataset.full_delete() except Exception: diff --git a/lib/galaxy/managers/dbkeys.py b/lib/galaxy/managers/dbkeys.py index 8ec01e3d57bd..99168133a69e 100644 --- a/lib/galaxy/managers/dbkeys.py +++ b/lib/galaxy/managers/dbkeys.py @@ -14,9 +14,9 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy.model import HistoryDatasetAssociation +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.util import ( galaxy_directory, sanitize_lists_to_string, @@ -166,6 +166,6 @@ def get_chrom_info(self, dbkey, trans=None, custom_build_hack_get_len_from_fasta return (chrom_info, db_dataset) -def get_len_files_by_history(session: Session, history_id: int): +def get_len_files_by_history(session: galaxy_scoped_session, history_id: int): stmt = select(HistoryDatasetAssociation).filter_by(history_id=history_id, extension="len", deleted=False) return session.scalars(stmt) diff --git a/lib/galaxy/managers/export_tracker.py b/lib/galaxy/managers/export_tracker.py index 29ec780e0261..c2f9719f2441 100644 --- a/lib/galaxy/managers/export_tracker.py +++ b/lib/galaxy/managers/export_tracker.py @@ -8,7 +8,7 @@ and_, select, ) -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from sqlalchemy.orm.scoping import scoped_session from galaxy.exceptions import ObjectNotFound @@ -44,7 +44,7 @@ def set_export_association_metadata(self, export_association_id: int, export_met export_association: StoreExportAssociation = self.session.execute(stmt).scalars().one() except NoResultFound: raise ObjectNotFound("Cannot set export metadata. 
Reason: Export association not found") - export_association.export_metadata = export_metadata.json() + export_association.export_metadata = export_metadata.json() # type:ignore[assignment] with transaction(self.session): self.session.commit() @@ -72,4 +72,4 @@ def get_object_exports( stmt = stmt.offset(offset) if limit: stmt = stmt.limit(limit) - return self.session.execute(stmt).scalars() + return self.session.execute(stmt).scalars() # type:ignore[return-value] diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py index 97f0f3ad4cac..fcbbed0cb9c5 100644 --- a/lib/galaxy/managers/folders.py +++ b/lib/galaxy/managers/folders.py @@ -20,11 +20,11 @@ or_, select, ) -from sqlalchemy.orm import aliased -from sqlalchemy.orm.exc import ( +from sqlalchemy.exc import ( MultipleResultsFound, NoResultFound, ) +from sqlalchemy.orm import aliased from galaxy import ( model, @@ -505,7 +505,7 @@ def _get_contained_datasets_statement( stmt = stmt.where( or_( func.lower(ldda.name).contains(search_text, autoescape=True), - func.lower(ldda.message).contains(search_text, autoescape=True), + func.lower(ldda.message).contains(search_text, autoescape=True), # type:ignore[attr-defined] ) ) sort_column = LDDA_SORT_COLUMN_MAP[payload.order_by](ldda, associated_dataset) @@ -536,7 +536,7 @@ def _filter_by_include_deleted( def build_folder_path( self, sa_session: galaxy_scoped_session, folder: model.LibraryFolder - ) -> List[Tuple[str, str]]: + ) -> List[Tuple[int, Optional[str]]]: """ Returns the folder path from root to the given folder. @@ -546,6 +546,7 @@ def build_folder_path( path_to_root = [(current_folder.id, current_folder.name)] while current_folder.parent_id is not None: parent_folder = sa_session.get(LibraryFolder, current_folder.parent_id) + assert parent_folder current_folder = parent_folder path_to_root.insert(0, (current_folder.id, current_folder.name)) return path_to_root diff --git a/lib/galaxy/managers/forms.py b/lib/galaxy/managers/forms.py index 989a7310fe65..fdbbd4985e7c 100644 --- a/lib/galaxy/managers/forms.py +++ b/lib/galaxy/managers/forms.py @@ -1,5 +1,8 @@ from sqlalchemy import select -from sqlalchemy.orm import exc as sqlalchemy_exceptions +from sqlalchemy.exc import ( + MultipleResultsFound, + NoResultFound, +) from galaxy.exceptions import ( InconsistentDatabase, @@ -59,9 +62,9 @@ def get(self, trans: ProvidesUserContext, form_id: int) -> FormDefinitionCurrent try: stmt = select(FormDefinitionCurrent).where(FormDefinitionCurrent.id == form_id) form = self.session().execute(stmt).scalar_one() - except sqlalchemy_exceptions.MultipleResultsFound: + except MultipleResultsFound: raise InconsistentDatabase("Multiple forms found with the same id.") - except sqlalchemy_exceptions.NoResultFound: + except NoResultFound: raise RequestParameterInvalidException("No accessible form found with the id provided.") except Exception as e: raise InternalServerError(f"Error loading from the database.{unicodify(e)}") diff --git a/lib/galaxy/managers/genomes.py b/lib/galaxy/managers/genomes.py index 82cd1131d9f4..ce742e7c6d96 100644 --- a/lib/galaxy/managers/genomes.py +++ b/lib/galaxy/managers/genomes.py @@ -95,7 +95,7 @@ def _create_genome_filter(model_class=None): if self.database_connection.startswith("postgres"): column = text("convert_from(metadata, 'UTF8')::json ->> 'dbkey'") else: - column = func.json_extract(model_class.table.c._metadata, "$.dbkey") + column = func.json_extract(model_class.table.c._metadata, "$.dbkey") # type:ignore[assignment] lower_val = val.lower() # Ignore 
case # dbkey can either be "hg38" or '["hg38"]', so we need to check both if op == "eq": diff --git a/lib/galaxy/managers/group_roles.py b/lib/galaxy/managers/group_roles.py index 984a5c48f9ab..c942bbe7431a 100644 --- a/lib/galaxy/managers/group_roles.py +++ b/lib/galaxy/managers/group_roles.py @@ -5,13 +5,13 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ObjectNotFound from galaxy.managers.context import ProvidesAppContext from galaxy.model import GroupRoleAssociation from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.structured_app import MinimalManagerApp log = logging.getLogger(__name__) @@ -93,7 +93,7 @@ def _remove_role_from_group(self, trans: ProvidesAppContext, group_role: model.G trans.sa_session.commit() -def get_group_role(session: Session, group, role) -> Optional[GroupRoleAssociation]: +def get_group_role(session: galaxy_scoped_session, group, role) -> Optional[GroupRoleAssociation]: stmt = ( select(GroupRoleAssociation).where(GroupRoleAssociation.group == group).where(GroupRoleAssociation.role == role) ) diff --git a/lib/galaxy/managers/group_users.py b/lib/galaxy/managers/group_users.py index e71eb8ecadcf..6bcd088fcc3f 100644 --- a/lib/galaxy/managers/group_users.py +++ b/lib/galaxy/managers/group_users.py @@ -5,7 +5,6 @@ ) from sqlalchemy import select -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ObjectNotFound @@ -15,6 +14,7 @@ UserGroupAssociation, ) from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.structured_app import MinimalManagerApp log = logging.getLogger(__name__) @@ -96,7 +96,7 @@ def _remove_user_from_group(self, trans: ProvidesAppContext, group_user: model.U trans.sa_session.commit() -def get_group_user(session: Session, user, group) -> Optional[UserGroupAssociation]: +def get_group_user(session: galaxy_scoped_session, user, group) -> Optional[UserGroupAssociation]: stmt = ( select(UserGroupAssociation).where(UserGroupAssociation.user == user).where(UserGroupAssociation.group == group) ) diff --git a/lib/galaxy/managers/groups.py b/lib/galaxy/managers/groups.py index 33de69670cf5..f600369e3184 100644 --- a/lib/galaxy/managers/groups.py +++ b/lib/galaxy/managers/groups.py @@ -2,7 +2,6 @@ false, select, ) -from sqlalchemy.orm import Session from galaxy import model from galaxy.exceptions import ( @@ -152,11 +151,11 @@ def _get_group(self, sa_session: galaxy_scoped_session, group_id: int) -> model. 
return group -def get_group_by_name(session: Session, name: str): +def get_group_by_name(session: galaxy_scoped_session, name: str): stmt = select(Group).filter(Group.name == name).limit(1) return session.scalars(stmt).first() -def get_not_deleted_groups(session: Session): +def get_not_deleted_groups(session: galaxy_scoped_session): stmt = select(Group).where(Group.deleted == false()) return session.scalars(stmt) diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index c6176e2cac8d..62d8e87e9bc6 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -125,7 +125,7 @@ def is_accessible(self, item: model.HistoryDatasetAssociation, user: Optional[mo # return True return super().is_accessible(item, user, **kwargs) - def is_owner(self, item: model.Base, user: Optional[model.User], current_history=None, **kwargs: Any) -> bool: + def is_owner(self, item, user: Optional[model.User], current_history=None, **kwargs: Any) -> bool: """ Use history to see if current user owns HDA. """ @@ -178,7 +178,7 @@ def materialize(self, request: MaterializeDatasetInstanceTaskRequest) -> None: True, # attached... object_store=self.app.object_store, file_sources=self.app.file_sources, - sa_session=self.app.model.context, + sa_session=self.app.model.session(), ) user = self.user_manager.by_id(request_user.user_id) if request.source == DatasetSourceType.hda: @@ -201,7 +201,12 @@ def copy( if not isinstance(item, model.HistoryDatasetAssociation): raise TypeError() hda = item - copy = hda.copy(parent_id=kwargs.get("parent_id"), copy_hid=False, copy_tags=hda.tags, flush=flush) + copy = hda.copy( + parent_id=kwargs.get("parent_id"), + copy_hid=False, + copy_tags=hda.tags, # type:ignore[attr-defined] + flush=flush, + ) if hide_copy: copy.visible = False if history: @@ -215,6 +220,7 @@ def copy( if history: history.add_pending_items() session = object_session(copy) + assert session with transaction(session): session.commit() @@ -251,6 +257,7 @@ def _purge(self, hda, flush=True): user.adjust_total_disk_usage(-quota_amount_reduction, quota_source_info.label) # TODO: don't flush above if we're going to re-flush here session = object_session(user) + assert session with transaction(session): session.commit() @@ -357,12 +364,13 @@ def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary: .where( and_( model.HistoryDatasetAssociation.deleted == true(), - model.HistoryDatasetAssociation.purged == false(), + model.HistoryDatasetAssociation.purged == false(), # type:ignore[arg-type] model.History.user_id == user.id, ) ) ) result = self.hda_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) @@ -386,7 +394,7 @@ def get_discarded( .where( and_( model.HistoryDatasetAssociation.deleted == true(), - model.HistoryDatasetAssociation.purged == false(), + model.HistoryDatasetAssociation.purged == false(), # type:ignore[arg-type] model.History.user_id == user.id, ) ) diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py index bebd03446786..1f3df4fccf87 100644 --- a/lib/galaxy/managers/histories.py +++ b/lib/galaxy/managers/histories.py @@ -213,6 +213,8 @@ def p_tag_filter(term_text: str, quoted: bool): total_matches = get_count(trans.sa_session, stmt) else: total_matches = None + + sort_column: Any if payload.sort_by == "username": sort_column = model.User.username else: @@ -220,11 +222,12 @@ def p_tag_filter(term_text: str, 
quoted: bool): if payload.sort_desc: sort_column = sort_column.desc() stmt = stmt.order_by(sort_column) + if payload.limit is not None: stmt = stmt.limit(payload.limit) if payload.offset is not None: stmt = stmt.offset(payload.offset) - return trans.sa_session.scalars(stmt), total_matches + return trans.sa_session.scalars(stmt), total_matches # type:ignore[return-value] def copy(self, history, user, **kwargs): """ @@ -479,7 +482,7 @@ def is_history_shared_with(self, history: model.History, user: model.User) -> bo .where(HistoryUserShareAssociation.user_id == user.id) .where(HistoryUserShareAssociation.history_id == history.id) ) - return self.session().scalar(stmt) + return bool(self.session().scalar(stmt)) def make_members_public(self, trans, item): """Make the non-purged datasets in history public. @@ -560,6 +563,7 @@ def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary: model.History.purged == false(), ) result = self.history_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) @@ -594,6 +598,7 @@ def get_archived_summary(self, user: model.User) -> CleanableItemsSummary: model.History.purged == false(), ) result = self.history_manager.session().execute(stmt).fetchone() + assert result total_size = 0 if result[0] is None else result[0] return CleanableItemsSummary(total_size=total_size, total_items=result[1]) diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index 0a4a09f08f00..3dbcecfcdbda 100644 --- a/lib/galaxy/managers/history_contents.py +++ b/lib/galaxy/managers/history_contents.py @@ -22,8 +22,10 @@ nullsfirst, nullslast, select, + Select, sql, true, + UnaryExpression, ) from sqlalchemy.orm import ( joinedload, @@ -134,7 +136,7 @@ def parse_order_by(self, order_by_string, default=None): attribute_dsc = f"{attribute}-dsc" attribute_asc = f"{attribute}-asc" if order_by_string in (attribute, attribute_dsc): - order_by = desc(attribute) + order_by: UnaryExpression = desc(attribute) if attribute == "size": return nullslast(order_by) return order_by @@ -163,12 +165,10 @@ def state_counts(self, history): base.ModelFilterParser.parsed_filter("orm", sql.column("visible") == true()), ] contents_subquery = self._union_of_contents_query(history, filters=filters).subquery() - statement = ( - sql.select(sql.column("state"), func.count("*")) - .select_from(contents_subquery) - .group_by(sql.column("state")) + statement: Select = ( + select(sql.column("state"), func.count()).select_from(contents_subquery).group_by(sql.column("state")) ) - counts = self.app.model.context.execute(statement).fetchall() + counts = self.app.model.session().execute(statement).fetchall() return dict(counts) def active_counts(self, history): @@ -418,7 +418,7 @@ def _contained_id_map(self, id_list): .where(component_class.id.in_(id_list)) # type: ignore[attr-defined] .options(undefer(component_class._metadata)) .options(joinedload(component_class.dataset).joinedload(model.Dataset.actions)) - .options(joinedload(component_class.tags)) + .options(joinedload(component_class.tags)) # type: ignore[attr-defined] .options(joinedload(component_class.annotations)) # type: ignore[attr-defined] ) result = self._session().scalars(stmt).unique() diff --git a/lib/galaxy/managers/interactivetool.py b/lib/galaxy/managers/interactivetool.py index ac05892bef7c..4892091eee99 100644 --- a/lib/galaxy/managers/interactivetool.py +++ 
b/lib/galaxy/managers/interactivetool.py @@ -243,8 +243,8 @@ def build_subquery(): filters = [] for state in Job.non_ready_states: filters.append(Job.state == state) - stmt = stmt.where(or_(*filters)).subquery() - return stmt + stmt = stmt.where(or_(*filters)) + return stmt.subquery() stmt = select(InteractiveToolEntryPoint).where(InteractiveToolEntryPoint.job_id.in_(build_subquery())) return trans.sa_session.scalars(stmt) diff --git a/lib/galaxy/managers/job_connections.py b/lib/galaxy/managers/job_connections.py index c9000869cce3..c3317a0e8a52 100644 --- a/lib/galaxy/managers/job_connections.py +++ b/lib/galaxy/managers/job_connections.py @@ -54,7 +54,7 @@ def get_related_hids(self, history_id, hid: int): for val in graph["outputs"] + graph["inputs"]: item_class = get_class(val["src"]) item_hid = self.sa_session.execute(select(item_class.hid).where(item_class.id == val["id"])).scalar() - result.append(item_hid) + result.append(item_hid) # type:ignore[arg-type] return result def _get_union_results(self, *selects): diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index 102d8b04f591..f34d8a2dda2e 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -5,6 +5,7 @@ date, datetime, ) +from typing import List import sqlalchemy from boltons.iterutils import remap @@ -20,10 +21,7 @@ or_, true, ) -from sqlalchemy.orm import ( - aliased, - Session, -) +from sqlalchemy.orm import aliased from sqlalchemy.sql import select from galaxy import model @@ -140,13 +138,13 @@ def add_workflow_jobs(): ) elif invocation_id is not None: wfi_step = wfi_step.where(WorkflowInvocationStep.workflow_invocation_id == invocation_id) - wfi_step = wfi_step.subquery() + wfi_step_sq = wfi_step.subquery() - stmt1 = stmt.join(wfi_step) + stmt1 = stmt.join(wfi_step_sq) stmt2 = stmt.join(ImplicitCollectionJobsJobAssociation).join( - wfi_step, + wfi_step_sq, ImplicitCollectionJobsJobAssociation.implicit_collection_jobs_id - == wfi_step.c.implicit_collection_jobs_id, + == wfi_step_sq.c.implicit_collection_jobs_id, ) # Ensure the result is models, not tuples sq = stmt1.union(stmt2).subquery() @@ -190,7 +188,7 @@ def add_search_criteria(stmt): elif key == "runner": stmt = stmt.where(text_column_filter(Job.job_runner_name, term)) elif isinstance(term, RawTextTerm): - columns = [Job.tool_id] + columns: List = [Job.tool_id] if user_details: columns.append(User.email) if is_admin: @@ -358,78 +356,19 @@ def replace_dataset_ids(path, key, value): return key, value return key, value - # build one subquery that selects a job with correct job parameters + stmt_sq = self._build_job_subquery(tool_id, user.id, tool_version, job_state, wildcard_param_dump) - subq = select(model.Job.id).where( - and_( - model.Job.tool_id == tool_id, - model.Job.user_id == user.id, - model.Job.copied_from_job_id.is_(None), # Always pick original job - ) - ) - if tool_version: - subq = subq.where(Job.tool_version == str(tool_version)) - - if job_state is None: - subq = subq.where( - Job.state.in_( - [Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, Job.states.RUNNING, Job.states.OK] - ) - ) - else: - if isinstance(job_state, str): - subq = subq.where(Job.state == job_state) - elif isinstance(job_state, list): - subq = subq.where(or_(*[Job.state == s for s in job_state])) + stmt = select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id)) - # exclude jobs with deleted outputs - subq = subq.where( - and_( - model.Job.any_output_dataset_collection_instances_deleted == false(), - 
model.Job.any_output_dataset_deleted == false(), - ) - ) + data_conditions: List = [] - for k, v in wildcard_param_dump.items(): - if v == {"__class__": "RuntimeValue"}: - # TODO: verify this is always None. e.g. run with runtime input input - v = None - elif k.endswith("|__identifier__"): - # We've taken care of this while constructing the conditions based on ``input_data`` above - continue - elif k == "chromInfo" and "?.len" in v: - continue - value_dump = json.dumps(v, sort_keys=True) - wildcard_value = value_dump.replace('"id": "__id_wildcard__"', '"id": %') - a = aliased(JobParameter) - if value_dump == wildcard_value: - subq = subq.join(a).where( - and_( - Job.id == a.job_id, - a.name == k, - a.value == value_dump, - ) - ) - else: - subq = subq.join(a).where( - and_( - Job.id == a.job_id, - a.name == k, - a.value.like(wildcard_value), - ) - ) - - query = select(Job.id).select_from(Job.table.join(subq, subq.c.id == Job.id)) - - data_conditions = [] - - # We now build the query filters that relate to the input datasets + # We now build the stmt filters that relate to the input datasets # that this job uses. We keep track of the requested dataset id in `requested_ids`, # the type (hda, hdca or lda) in `data_types` # and the ids that have been used in the job that has already been run in `used_ids`. requested_ids = [] data_types = [] - used_ids = [] + used_ids: List = [] for k, input_list in input_data.items(): # k will be matched against the JobParameter.name column. This can be prefixed depending on whethter # the input is in a repeat, or not (section and conditional) @@ -441,137 +380,19 @@ def replace_dataset_ids(path, key, value): data_types.append(t) identifier = type_values["identifier"] if t == "hda": - a = aliased(model.JobToInputDatasetAssociation) - b = aliased(model.HistoryDatasetAssociation) - c = aliased(model.HistoryDatasetAssociation) - d = aliased(model.JobParameter) - e = aliased(model.HistoryDatasetAssociationHistory) - query = query.add_columns(a.dataset_id) - used_ids.append(a.dataset_id) - query = query.join(a, a.job_id == model.Job.id) - stmt = select(model.HistoryDatasetAssociation.id).where( - model.HistoryDatasetAssociation.id == e.history_dataset_association_id - ) - # b is the HDA used for the job - query = query.join(b, a.dataset_id == b.id).join(c, c.dataset_id == b.dataset_id) - name_condition = [] - if identifier: - query = query.join(d) - data_conditions.append( - and_( - d.name.in_({f"{_}|__identifier__" for _ in k}), - d.value == json.dumps(identifier), - ) - ) - else: - stmt = stmt.where(e.name == c.name) - name_condition.append(b.name == c.name) - stmt = ( - stmt.where( - e.extension == c.extension, - ) - .where( - a.dataset_version == e.version, - ) - .where( - e._metadata == c._metadata, - ) - ) - data_conditions.append( - and_( - a.name.in_(k), - c.id == v, # c is the requested job input HDA - # We need to make sure that the job we are looking for has been run with identical inputs. - # Here we deal with 3 requirements: - # - the jobs' input dataset (=b) version is 0, meaning the job's input dataset is not yet ready - # - b's update_time is older than the job create time, meaning no changes occurred - # - the job has a dataset_version recorded, and that versions' metadata matches c's metadata. 
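The rewritten statement above, `select(Job.id).select_from(Job.table.join(stmt_sq, stmt_sq.c.id == Job.id))`, shows the 2.0 idiom for reusing a SELECT as a derived table: call `.subquery()` and join through its `.c` column collection. A self-contained sketch with a stand-in Core table, not Galaxy's `Job` model:

    from sqlalchemy import Column, Integer, MetaData, String, Table, select

    metadata = MetaData()
    job = Table(
        "job",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("state", String),
    )

    # The inner SELECT becomes a named subquery; its columns live on `.c`.
    matching = select(job.c.id).where(job.c.state == "ok").subquery()

    # The outer SELECT joins the subquery exactly as it would a table.
    stmt = select(job.c.id).select_from(job.join(matching, matching.c.id == job.c.id))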
- or_( - and_( - or_(a.dataset_version.in_([0, b.version]), b.update_time < model.Job.create_time), - b.extension == c.extension, - b.metadata == c.metadata, - *name_condition, - ), - b.id.in_(stmt), - ), - or_(b.deleted == false(), c.deleted == false()), - ) - ) + stmt = self._build_stmt_for_hda(stmt, data_conditions, used_ids, k, v, identifier) elif t == "ldda": - a = aliased(model.JobToInputLibraryDatasetAssociation) - query = query.add_columns(a.ldda_id) - query = query.join(a, a.job_id == model.Job.id) - data_conditions.append(and_(a.name.in_(k), a.ldda_id == v)) - used_ids.append(a.ldda_id) + stmt = self._build_stmt_for_ldda(stmt, data_conditions, used_ids, k, v) elif t == "hdca": - a = aliased(model.JobToInputDatasetCollectionAssociation) - b = aliased(model.HistoryDatasetCollectionAssociation) - c = aliased(model.HistoryDatasetCollectionAssociation) - query = query.add_columns(a.dataset_collection_id) - query = ( - query.join(a, a.job_id == model.Job.id) - .join(b, b.id == a.dataset_collection_id) - .join(c, b.name == c.name) - ) - data_conditions.append( - and_( - a.name.in_(k), - c.id == v, - or_( - and_(b.deleted == false(), b.id == v), - and_( - or_( - c.copied_from_history_dataset_collection_association_id == b.id, - b.copied_from_history_dataset_collection_association_id == c.id, - ), - c.deleted == false(), - ), - ), - ) - ) - used_ids.append(a.dataset_collection_id) + stmt = self._build_stmt_for_hdca(stmt, data_conditions, used_ids, k, v) elif t == "dce": - a = aliased(model.JobToInputDatasetCollectionElementAssociation) - b = aliased(model.DatasetCollectionElement) - c = aliased(model.DatasetCollectionElement) - d = aliased(model.HistoryDatasetAssociation) - e = aliased(model.HistoryDatasetAssociation) - query = query.add_columns(a.dataset_collection_element_id) - query = ( - query.join(a, a.job_id == model.Job.id) - .join(b, b.id == a.dataset_collection_element_id) - .join( - c, - and_( - c.element_identifier == b.element_identifier, - or_(c.hda_id == b.hda_id, c.child_collection_id == b.child_collection_id), - ), - ) - .outerjoin(d, d.id == c.hda_id) - .outerjoin(e, e.dataset_id == d.dataset_id) - ) - data_conditions.append( - and_( - a.name.in_(k), - or_( - c.child_collection_id == b.child_collection_id, - and_( - c.hda_id == b.hda_id, - d.id == c.hda_id, - e.dataset_id == d.dataset_id, - ), - ), - c.id == v, - ) - ) - used_ids.append(a.dataset_collection_element_id) + stmt = self._build_stmt_for_dce(stmt, data_conditions, used_ids, k, v) else: return [] - query = query.where(*data_conditions).group_by(model.Job.id, *used_ids).order_by(model.Job.id.desc()) + stmt = stmt.where(*data_conditions).group_by(model.Job.id, *used_ids).order_by(model.Job.id.desc()) - for job in self.sa_session.execute(query): + for job in self.sa_session.execute(stmt): # We found a job that is equal in terms of tool_id, user, state and input datasets, # but to be able to verify that the parameters match we need to modify all instances of # dataset_ids (HDA, LDDA, HDCA) in the incoming param_dump to point to those used by the @@ -629,6 +450,199 @@ def replace_dataset_ids(path, key, value): log.info("No equivalent jobs found %s", search_timer) return None + def _build_job_subquery(self, tool_id, user_id, tool_version, job_state, wildcard_param_dump): + """Build subquery that selects a job with correct job parameters.""" + stmt = select(model.Job.id).where( + and_( + model.Job.tool_id == tool_id, + model.Job.user_id == user_id, + model.Job.copied_from_job_id.is_(None), # Always pick original job + 
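`_build_job_subquery` accumulates its filters by reassigning `stmt = stmt.where(...)`; this works because 2.0's `Select` is generative and immutable, with each `.where()` returning a new statement whose criteria are ANDed together. A tiny illustration against the stand-in `job` table sketched earlier (a `tool_id` column is assumed here):

    from sqlalchemy import select

    stmt = select(job.c.id).where(job.c.tool_id == "cat1")
    # .where() does not mutate in place; capture the returned statement each time.
    stmt = stmt.where(job.c.state.in_(["new", "queued", "running", "ok"]))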
) + ) + if tool_version: + stmt = stmt.where(Job.tool_version == str(tool_version)) + + if job_state is None: + stmt = stmt.where( + Job.state.in_( + [Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, Job.states.RUNNING, Job.states.OK] + ) + ) + else: + if isinstance(job_state, str): + stmt = stmt.where(Job.state == job_state) + elif isinstance(job_state, list): + stmt = stmt.where(or_(*[Job.state == s for s in job_state])) + + # exclude jobs with deleted outputs + stmt = stmt.where( + and_( + model.Job.any_output_dataset_collection_instances_deleted == false(), + model.Job.any_output_dataset_deleted == false(), + ) + ) + + for k, v in wildcard_param_dump.items(): + if v == {"__class__": "RuntimeValue"}: + # TODO: verify this is always None. e.g. run with runtime input input + v = None + elif k.endswith("|__identifier__"): + # We've taken care of this while constructing the conditions based on ``input_data`` above + continue + elif k == "chromInfo" and "?.len" in v: + continue + value_dump = json.dumps(v, sort_keys=True) + wildcard_value = value_dump.replace('"id": "__id_wildcard__"', '"id": %') + a = aliased(JobParameter) + if value_dump == wildcard_value: + stmt = stmt.join(a).where( + and_( + Job.id == a.job_id, + a.name == k, + a.value == value_dump, + ) + ) + else: + stmt = stmt.join(a).where( + and_( + Job.id == a.job_id, + a.name == k, + a.value.like(wildcard_value), + ) + ) + + return stmt.subquery() + + def _build_stmt_for_hda(self, stmt, data_conditions, used_ids, k, v, identifier): + a = aliased(model.JobToInputDatasetAssociation) + b = aliased(model.HistoryDatasetAssociation) + c = aliased(model.HistoryDatasetAssociation) + d = aliased(model.JobParameter) + e = aliased(model.HistoryDatasetAssociationHistory) + stmt = stmt.add_columns(a.dataset_id) + used_ids.append(a.dataset_id) + stmt = stmt.join(a, a.job_id == model.Job.id) + hda_stmt = select(model.HistoryDatasetAssociation.id).where( + model.HistoryDatasetAssociation.id == e.history_dataset_association_id + ) + # b is the HDA used for the job + stmt = stmt.join(b, a.dataset_id == b.id).join(c, c.dataset_id == b.dataset_id) # type:ignore[attr-defined] + name_condition = [] + if identifier: + stmt = stmt.join(d) + data_conditions.append( + and_( + d.name.in_({f"{_}|__identifier__" for _ in k}), + d.value == json.dumps(identifier), + ) + ) + else: + hda_stmt = hda_stmt.where(e.name == c.name) + name_condition.append(b.name == c.name) + hda_stmt = ( + hda_stmt.where( + e.extension == c.extension, + ) + .where( + a.dataset_version == e.version, + ) + .where( + e._metadata == c._metadata, + ) + ) + data_conditions.append( + and_( + a.name.in_(k), + c.id == v, # c is the requested job input HDA + # We need to make sure that the job we are looking for has been run with identical inputs. + # Here we deal with 3 requirements: + # - the jobs' input dataset (=b) version is 0, meaning the job's input dataset is not yet ready + # - b's update_time is older than the job create time, meaning no changes occurred + # - the job has a dataset_version recorded, and that versions' metadata matches c's metadata. 
+ or_( + and_( + or_(a.dataset_version.in_([0, b.version]), b.update_time < model.Job.create_time), + b.extension == c.extension, + b.metadata == c.metadata, + *name_condition, + ), + b.id.in_(hda_stmt), + ), + or_(b.deleted == false(), c.deleted == false()), + ) + ) + return stmt + + def _build_stmt_for_ldda(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputLibraryDatasetAssociation) + stmt = stmt.add_columns(a.ldda_id) + stmt = stmt.join(a, a.job_id == model.Job.id) + data_conditions.append(and_(a.name.in_(k), a.ldda_id == v)) + used_ids.append(a.ldda_id) + return stmt + + def _build_stmt_for_hdca(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputDatasetCollectionAssociation) + b = aliased(model.HistoryDatasetCollectionAssociation) + c = aliased(model.HistoryDatasetCollectionAssociation) + stmt = stmt.add_columns(a.dataset_collection_id) + stmt = stmt.join(a, a.job_id == model.Job.id).join(b, b.id == a.dataset_collection_id).join(c, b.name == c.name) + data_conditions.append( + and_( + a.name.in_(k), + c.id == v, + or_( + and_(b.deleted == false(), b.id == v), + and_( + or_( + c.copied_from_history_dataset_collection_association_id == b.id, + b.copied_from_history_dataset_collection_association_id == c.id, + ), + c.deleted == false(), + ), + ), + ) + ) + used_ids.append(a.dataset_collection_id) + return stmt + + def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v): + a = aliased(model.JobToInputDatasetCollectionElementAssociation) + b = aliased(model.DatasetCollectionElement) + c = aliased(model.DatasetCollectionElement) + d = aliased(model.HistoryDatasetAssociation) + e = aliased(model.HistoryDatasetAssociation) + stmt = stmt.add_columns(a.dataset_collection_element_id) + stmt = ( + stmt.join(a, a.job_id == model.Job.id) + .join(b, b.id == a.dataset_collection_element_id) + .join( + c, + and_( + c.element_identifier == b.element_identifier, + or_(c.hda_id == b.hda_id, c.child_collection_id == b.child_collection_id), + ), + ) + .outerjoin(d, d.id == c.hda_id) + .outerjoin(e, e.dataset_id == d.dataset_id) # type:ignore[attr-defined] + ) + data_conditions.append( + and_( + a.name.in_(k), + or_( + c.child_collection_id == b.child_collection_id, + and_( + c.hda_id == b.hda_id, + d.id == c.hda_id, + e.dataset_id == d.dataset_id, # type:ignore[attr-defined] + ), + ), + c.id == v, + ) + ) + used_ids.append(a.dataset_collection_element_id) + return stmt + def view_show_job(trans, job: Job, full: bool) -> typing.Dict: is_admin = trans.user_is_admin @@ -833,7 +847,7 @@ def summarize_jobs_to_dict(sa_session, jobs_source): model.ImplicitCollectionJobsJobAssociation.table.join(model.Job) ) statement = ( - select(model.Job.state, func.count("*")) + select(model.Job.state, func.count()) .select_from(join) .where(model.ImplicitCollectionJobs.id == jobs_source.id) .group_by(model.Job.state) @@ -1068,7 +1082,7 @@ def summarize_job_outputs(job: model.Job, tool, params): return outputs -def get_jobs_to_check_at_startup(session: Session, track_jobs_in_database: bool, config): +def get_jobs_to_check_at_startup(session: galaxy_scoped_session, track_jobs_in_database: bool, config): if track_jobs_in_database: in_list = (Job.states.QUEUED, Job.states.RUNNING, Job.states.STOPPED) else: diff --git a/lib/galaxy/managers/libraries.py b/lib/galaxy/managers/libraries.py index 45c1c582a074..2e7ff0efa128 100644 --- a/lib/galaxy/managers/libraries.py +++ b/lib/galaxy/managers/libraries.py @@ -19,11 +19,11 @@ select, true, ) -from sqlalchemy.orm import 
Query -from sqlalchemy.orm.exc import ( +from sqlalchemy.exc import ( MultipleResultsFound, NoResultFound, ) +from sqlalchemy.orm import Query from galaxy import exceptions from galaxy.managers.folders import FolderManager diff --git a/lib/galaxy/managers/model_stores.py b/lib/galaxy/managers/model_stores.py index 48a70117604f..1e4a82c89363 100644 --- a/lib/galaxy/managers/model_stores.py +++ b/lib/galaxy/managers/model_stores.py @@ -96,10 +96,12 @@ def setup_history_export_job(self, request: SetupHistoryExportJob): store_directory = request.store_directory history = self._sa_session.get(model.History, history_id) + assert history # symlink files on export, on worker files will tarred up in a dereferenced manner. with DirectoryModelExportStore(store_directory, app=self._app, export_files="symlink") as export_store: export_store.export_history(history, include_hidden=include_hidden, include_deleted=include_deleted) job = self._sa_session.get(model.Job, job_id) + assert job job.state = model.Job.states.NEW with transaction(self._sa_session): self._sa_session.commit() diff --git a/lib/galaxy/managers/notification.py b/lib/galaxy/managers/notification.py index e2e78a793029..47b62142a43b 100644 --- a/lib/galaxy/managers/notification.py +++ b/lib/galaxy/managers/notification.py @@ -18,6 +18,7 @@ union, update, ) +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.sql import Select from typing_extensions import Protocol @@ -64,7 +65,7 @@ def __init__(self, sa_session: galaxy_scoped_session, config: GalaxyAppConfigura self.sa_session = sa_session self.config = config self.recipient_resolver = NotificationRecipientResolver(strategy=DefaultStrategy(sa_session)) - self.user_notification_columns = [ + self.user_notification_columns: List[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -77,7 +78,7 @@ def __init__(self, sa_session: galaxy_scoped_session, config: GalaxyAppConfigura UserNotificationAssociation.seen_time, UserNotificationAssociation.deleted, ] - self.broadcast_notification_columns = [ + self.broadcast_notification_columns: List[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -126,7 +127,7 @@ def send_notification_to_recipients(self, request: NotificationCreateRequest) -> def _send_to_users(self, notification: Notification, users: List[User]): # TODO: Move this potentially expensive operation to a task? 
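The `assert history` / `assert job` lines added in model_stores.py above, like the similar asserts throughout this diff, exist because `Session.get()` is typed as returning `Optional[T]` under SQLAlchemy 2.0, so callers must narrow the result before attribute access. A minimal sketch of the pattern:

    from typing import Optional

    from sqlalchemy.orm import Session

    from galaxy.model import Job

    def load_job(session: Session, job_id: int) -> Job:
        job: Optional[Job] = session.get(Job, job_id)
        # A bare assert narrows Optional[Job] to Job for the type checker;
        # a missing row here would indicate an internal bug, not bad input.
        assert job
        return job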
for user in users: - if self._user_is_subscribed_to_notification(user, notification.category): + if self._user_is_subscribed_to_notification(user, notification.category): # type:ignore[arg-type] user_notification_association = UserNotificationAssociation(user, notification) self.sa_session.add(user_notification_association) @@ -199,8 +200,7 @@ def get_user_total_unread_notification_count(self, user: User) -> int: ) ) ) - result = self.sa_session.execute(stmt).scalar() - return result + return self.sa_session.execute(stmt).scalar() or 0 def get_broadcasted_notification(self, notification_id: int, active_only: Optional[bool] = True): stmt = ( @@ -275,8 +275,8 @@ def update_broadcasted_notification(self, notification_id: int, request: Notific def get_user_notification_preferences(self, user: User) -> UserNotificationPreferences: """Gets the user's current notification preferences or the default ones if no preferences are set.""" current_notification_preferences = ( - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] - if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] # type:ignore[index] + if NOTIFICATION_PREFERENCES_SECTION_NAME in user.preferences # type:ignore[operator] else None ) try: @@ -291,7 +291,7 @@ def update_user_notification_preferences( """Updates the user's notification preferences with the requested changes.""" notification_preferences = self.get_user_notification_preferences(user) notification_preferences.update(request.preferences) - user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = notification_preferences.json() + user.preferences[NOTIFICATION_PREFERENCES_SECTION_NAME] = notification_preferences.json() # type:ignore[index] with transaction(self.sa_session): self.sa_session.commit() return notification_preferences @@ -304,7 +304,7 @@ def cleanup_expired_notifications(self) -> CleanupResultSummary: expired_notifications_stmt = select(Notification.id).where(notification_has_expired) delete_stmt = delete(UserNotificationAssociation).where( - UserNotificationAssociation.notification_id.in_(expired_notifications_stmt.subquery()) + UserNotificationAssociation.notification_id.in_(expired_notifications_stmt) ) result = self.sa_session.execute(delete_stmt, execution_options={"synchronize_session": False}) deleted_associations_count = result.rowcount @@ -413,7 +413,7 @@ def resolve_users(self, recipients: NotificationRecipients) -> List[User]: unique_user_ids.update(user_ids_from_groups_and_roles) stmt = select(User).where(User.id.in_(unique_user_ids)) - return self.sa_session.scalars(stmt).all() + return self.sa_session.scalars(stmt).all() # type:ignore[return-value] def _get_all_user_ids_from_roles_query(self, role_ids: Set[int]) -> Select: stmt = ( diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index da0d076305b0..581b5b93562b 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -12,6 +12,7 @@ from html.parser import HTMLParser from typing import ( Callable, + Optional, Tuple, ) @@ -24,10 +25,7 @@ select, true, ) -from sqlalchemy.orm import ( - aliased, - Session, -) +from sqlalchemy.orm import aliased from galaxy import ( exceptions, @@ -64,6 +62,7 @@ text_column_filter, ) from galaxy.model.item_attrs import UsesAnnotations +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.schema.schema import ( CreatePagePayload, PageContentFormat, @@ -240,7 +239,7 @@ def p_tag_filter(term_text: str, quoted: bool): stmt = 
stmt.limit(payload.limit) if payload.offset is not None: stmt = stmt.offset(payload.offset) - return trans.sa_session.scalars(stmt), total_matches + return trans.sa_session.scalars(stmt), total_matches # type:ignore[return-value] def create_page(self, trans, payload: CreatePagePayload): user = trans.get_user() @@ -612,23 +611,27 @@ def placeholderRenderForEdit(trans: ProvidesHistoryContext, item_class, item_id) def placeholderRenderForSave(trans: ProvidesHistoryContext, item_class, item_id, encode=False): encoded_item_id, decoded_item_id = get_page_identifiers(item_id, trans.app) - item_name = "" + item_name: Optional[str] = "" if item_class == "History": history = trans.sa_session.get(History, decoded_item_id) history = base.security_check(trans, history, False, True) + assert history item_name = history.name elif item_class == "HistoryDatasetAssociation": hda = trans.sa_session.get(HistoryDatasetAssociation, decoded_item_id) hda_manager = trans.app.hda_manager hda = hda_manager.get_accessible(decoded_item_id, trans.user) + assert hda item_name = hda.name elif item_class == "StoredWorkflow": wf = trans.sa_session.get(StoredWorkflow, decoded_item_id) wf = base.security_check(trans, wf, False, True) + assert wf item_name = wf.name elif item_class == "Visualization": visualization = trans.sa_session.get(Visualization, decoded_item_id) visualization = base.security_check(trans, visualization, False, True) + assert visualization item_name = visualization.title class_shorthand = PAGE_CLASS_MAPPING[item_class] if encode: @@ -644,12 +647,12 @@ def placeholderRenderForSave(trans: ProvidesHistoryContext, item_class, item_id, ) -def get_page_revision(session: Session, page_id: int): +def get_page_revision(session: galaxy_scoped_session, page_id: int): stmt = select(PageRevision).filter_by(page_id=page_id) return session.scalars(stmt) -def get_shared_pages(session: Session, user: User): +def get_shared_pages(session: galaxy_scoped_session, user: User): stmt = ( select(PageUserShareAssociation) .where(PageUserShareAssociation.user == user) @@ -660,12 +663,12 @@ def get_shared_pages(session: Session, user: User): return session.scalars(stmt) -def get_page(session: Session, user: User, slug: str): +def get_page(session: galaxy_scoped_session, user: User, slug: str): stmt = _build_page_query(select(Page), user, slug) return session.scalars(stmt).first() -def page_exists(session: Session, user: User, slug: str) -> bool: +def page_exists(session: galaxy_scoped_session, user: User, slug: str) -> bool: stmt = _build_page_query(select(Page.id), user, slug) return session.scalars(stmt).first() is not None diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py index 89bf69815e2c..1f8ef428b101 100644 --- a/lib/galaxy/managers/roles.py +++ b/lib/galaxy/managers/roles.py @@ -9,9 +9,9 @@ false, select, ) -from sqlalchemy.orm import ( - exc as sqlalchemy_exceptions, - Session, +from sqlalchemy.exc import ( + MultipleResultsFound, + NoResultFound, ) from galaxy import model @@ -26,6 +26,7 @@ from galaxy.managers.context import ProvidesUserContext from galaxy.model import Role from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.schema.schema import RoleDefinitionModel from galaxy.util import unicodify @@ -58,9 +59,9 @@ def get(self, trans: ProvidesUserContext, role_id: int) -> model.Role: try: stmt = select(self.model_class).where(self.model_class.id == role_id) role = self.session().execute(stmt).scalar_one() - except 
sqlalchemy_exceptions.MultipleResultsFound: + except MultipleResultsFound: raise InconsistentDatabase("Multiple roles found with the same id.") - except sqlalchemy_exceptions.NoResultFound: + except NoResultFound: raise ObjectNotFound("No accessible role found with the id provided.") except Exception as e: raise InternalServerError(f"Error loading from the database.{unicodify(e)}") @@ -127,7 +128,8 @@ def purge(self, trans: ProvidesUserContext, role: model.Role) -> model.Role: raise RequestParameterInvalidException(f"Role '{role.name}' has not been deleted, so it cannot be purged.") # Delete UserRoleAssociations for ura in role.users: - user = sa_session.query(trans.app.model.User).get(ura.user_id) + user = sa_session.get(trans.app.model.User, ura.user_id) + assert user # Delete DefaultUserPermissions for associated users for dup in user.default_permissions: if role == dup.role: @@ -162,6 +164,6 @@ def undelete(self, trans: ProvidesUserContext, role: model.Role) -> model.Role: return role -def get_roles_by_ids(session: Session, role_ids): +def get_roles_by_ids(session: galaxy_scoped_session, role_ids): stmt = select(Role).where(Role.id.in_(role_ids)) return session.scalars(stmt).all() diff --git a/lib/galaxy/managers/secured.py b/lib/galaxy/managers/secured.py index 30958ceb8c92..64721d8b137e 100644 --- a/lib/galaxy/managers/secured.py +++ b/lib/galaxy/managers/secured.py @@ -33,14 +33,14 @@ class AccessibleManagerMixin: def by_id(self, id: int): ... # don't want to override by_id since consumers will also want to fetch w/o any security checks - def is_accessible(self, item: "Query", user: model.User, **kwargs: Any) -> bool: + def is_accessible(self, item, user: model.User, **kwargs: Any) -> bool: """ Return True if the item accessible to user. """ # override in subclasses raise exceptions.NotImplemented("Abstract interface Method") - def get_accessible(self, id: int, user: model.User, **kwargs: Any) -> "Query": + def get_accessible(self, id: int, user: model.User, **kwargs: Any): """ Return the item with the given id if it's accessible to user, otherwise raise an error. diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py index 0d106a9ae1b7..35e1afec570d 100644 --- a/lib/galaxy/managers/sharable.py +++ b/lib/galaxy/managers/sharable.py @@ -18,7 +18,6 @@ Optional, Set, Type, - TYPE_CHECKING, ) from sqlalchemy import ( @@ -28,7 +27,10 @@ true, ) -from galaxy import exceptions +from galaxy import ( + exceptions, + model, +) from galaxy.managers import ( annotatable, base, @@ -52,9 +54,6 @@ from galaxy.util import ready_name_for_url from galaxy.util.hash_util import md5_hash_str -if TYPE_CHECKING: - from sqlalchemy.orm import Query - log = logging.getLogger(__name__) @@ -91,16 +90,16 @@ def by_user(self, user: User, **kwargs: Any) -> List[Any]: return self.list(filters=filters, **kwargs) # .... owned/accessible interfaces - def is_owner(self, item: "Query", user: Optional[User], **kwargs: Any) -> bool: + def is_owner(self, item: model.Base, user: Optional[User], **kwargs: Any) -> bool: """ Return true if this sharable belongs to `user` (or `user` is an admin). """ # ... 
effectively a good fit to have this here, but not semantically if self.user_manager.is_admin(user, trans=kwargs.get("trans", None)): return True - return item.user == user + return item.user == user # type:ignore[attr-defined] - def is_accessible(self, item: "Query", user: Optional[User], **kwargs: Any) -> bool: + def is_accessible(self, item, user: Optional[User], **kwargs: Any) -> bool: """ If the item is importable, is owned by `user`, or (the valid) `user` is in 'users shared with' list for the item: return True. @@ -180,7 +179,7 @@ def share_with(self, item, user: User, flush: bool = True): """ # precondition: user has been validated # get or create - existing = self.get_share_assocs(item, user=user) + existing = self.get_share_assocs(item, user=user) # type:ignore[dict-item] if existing: return existing.pop(0) return self._create_user_share_assoc(item, user, flush=flush) diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 8f4b9c428e82..edf557e35a90 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -24,8 +24,7 @@ select, true, ) -from sqlalchemy.orm import Session -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound from galaxy import ( exceptions, @@ -46,6 +45,7 @@ UserQuotaUsage, ) from galaxy.model.base import transaction +from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.security.validate_user_input import ( VALID_EMAIL_RE, validate_email, @@ -176,7 +176,7 @@ def _get_all_active_jobs_from_user(self, user: User) -> List[Job]: """Get all jobs that are not ready yet and belong to the given user.""" stmt = select(Job).where(and_(Job.user_id == user.id, Job.state.in_(Job.non_ready_states))) jobs = self.session().scalars(stmt) - return jobs + return jobs # type:ignore[return-value] def undelete(self, user, flush=True): """Remove the deleted flag for the given user.""" @@ -873,7 +873,7 @@ def _add_parsers(self): self.fn_filter_parsers.update({}) -def get_users_by_ids(session: Session, user_ids): +def get_users_by_ids(session: galaxy_scoped_session, user_ids): stmt = select(User).where(User.id.in_(user_ids)) return session.scalars(stmt).all() diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py index 1f03e3900ff1..80fbf13348c8 100644 --- a/lib/galaxy/managers/visualizations.py +++ b/lib/galaxy/managers/visualizations.py @@ -14,7 +14,9 @@ from sqlalchemy import ( false, + func, or_, + select, true, ) from sqlalchemy.orm import aliased @@ -86,7 +88,7 @@ def index_query( message = "Requires user to log in." 
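Several files in this diff (managers/base.py, folders.py, forms.py, libraries.py, roles.py, users.py, export_tracker.py) retarget `NoResultFound` and `MultipleResultsFound` from `sqlalchemy.orm.exc` to `sqlalchemy.exc`, their canonical location in 2.0. The catch sites keep working unchanged; a sketch of the usual shape, raising plain `ValueError` here where Galaxy raises its own exception types:

    from sqlalchemy import select
    from sqlalchemy.exc import MultipleResultsFound, NoResultFound
    from sqlalchemy.orm import Session

    from galaxy.model import User

    def one_user_by_email(session: Session, email: str) -> User:
        stmt = select(User).where(User.email == email)
        try:
            return session.scalars(stmt).one()  # raises on zero or many rows
        except NoResultFound:
            raise ValueError(f"no user with email {email!r}")
        except MultipleResultsFound:
            raise ValueError(f"email {email!r} is not unique")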
diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py
index 8f4b9c428e82..edf557e35a90 100644
--- a/lib/galaxy/managers/users.py
+++ b/lib/galaxy/managers/users.py
@@ -24,8 +24,7 @@
     select,
     true,
 )
-from sqlalchemy.orm import Session
-from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.exc import NoResultFound

 from galaxy import (
     exceptions,
@@ -46,6 +45,7 @@
     UserQuotaUsage,
 )
 from galaxy.model.base import transaction
+from galaxy.model.scoped_session import galaxy_scoped_session
 from galaxy.security.validate_user_input import (
     VALID_EMAIL_RE,
     validate_email,
@@ -176,7 +176,7 @@ def _get_all_active_jobs_from_user(self, user: User) -> List[Job]:
         """Get all jobs that are not ready yet and belong to the given user."""
         stmt = select(Job).where(and_(Job.user_id == user.id, Job.state.in_(Job.non_ready_states)))
         jobs = self.session().scalars(stmt)
-        return jobs
+        return jobs  # type:ignore[return-value]

     def undelete(self, user, flush=True):
         """Remove the deleted flag for the given user."""
@@ -873,7 +873,7 @@ def _add_parsers(self):
         self.fn_filter_parsers.update({})


-def get_users_by_ids(session: Session, user_ids):
+def get_users_by_ids(session: galaxy_scoped_session, user_ids):
     stmt = select(User).where(User.id.in_(user_ids))
     return session.scalars(stmt).all()
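The import move above (`sqlalchemy.orm.exc.NoResultFound` → `sqlalchemy.exc.NoResultFound`) works because SQLAlchemy 1.4+ re-homes the ORM result exceptions in `sqlalchemy.exc`, keeping the old location as an alias. A small sketch of where these exceptions actually surface in 2.0-style code (illustrative model, not Galaxy's):

```python
from sqlalchemy import create_engine, select
from sqlalchemy.exc import MultipleResultsFound, NoResultFound
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "user"
    id: Mapped[int] = mapped_column(primary_key=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    try:
        # .one() raises NoResultFound / MultipleResultsFound from sqlalchemy.exc
        session.scalars(select(User).where(User.id == 42)).one()
    except NoResultFound:
        print("no such user")
    except MultipleResultsFound:
        print("ambiguous id")
```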
diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py
index 1f03e3900ff1..80fbf13348c8 100644
--- a/lib/galaxy/managers/visualizations.py
+++ b/lib/galaxy/managers/visualizations.py
@@ -14,7 +14,9 @@
 from sqlalchemy import (
     false,
+    func,
     or_,
+    select,
     true,
 )
 from sqlalchemy.orm import aliased
@@ -86,7 +88,7 @@ def index_query(
             message = "Requires user to log in."
             raise exceptions.RequestParameterInvalidException(message)

-        query = trans.sa_session.query(self.model_class)
+        stmt = select(self.model_class)

         filters = []
         if show_own or (not show_published and not show_shared and not is_admin):
@@ -95,20 +97,20 @@ def index_query(
             filters.append(self.model_class.published == true())
         if user and show_shared:
             filters.append(self.user_share_model.user == user)
-            query = query.outerjoin(self.model_class.users_shared_with)
-        query = query.filter(or_(*filters))
+            stmt = stmt.outerjoin(self.model_class.users_shared_with)
+        stmt = stmt.where(or_(*filters))

         if payload.user_id:
-            query = query.filter(self.model_class.user_id == payload.user_id)
+            stmt = stmt.where(self.model_class.user_id == payload.user_id)

         if payload.search:
             search_query = payload.search
             parsed_search = parse_filters_structured(search_query, INDEX_SEARCH_FILTERS)

             def p_tag_filter(term_text: str, quoted: bool):
-                nonlocal query
+                nonlocal stmt
                 alias = aliased(model.VisualizationTagAssociation)
-                query = query.outerjoin(self.model_class.tags.of_type(alias))
+                stmt = stmt.outerjoin(self.model_class.tags.of_type(alias))
                 return tag_filter(alias, term_text, quoted)

             for term in parsed_search.terms:
@@ -117,30 +119,30 @@ def p_tag_filter(term_text: str, quoted: bool):
                     q = term.text
                     if key == "tag":
                         pg = p_tag_filter(term.text, term.quoted)
-                        query = query.filter(pg)
+                        stmt = stmt.where(pg)
                     elif key == "title":
-                        query = query.filter(text_column_filter(self.model_class.title, term))
+                        stmt = stmt.where(text_column_filter(self.model_class.title, term))
                     elif key == "slug":
-                        query = query.filter(text_column_filter(self.model_class.slug, term))
+                        stmt = stmt.where(text_column_filter(self.model_class.slug, term))
                     elif key == "user":
-                        query = append_user_filter(query, self.model_class, term)
+                        stmt = append_user_filter(stmt, self.model_class, term)
                     elif key == "is":
                         if q == "deleted":
                             show_deleted = True
                         if q == "published":
-                            query = query.filter(self.model_class.published == true())
+                            stmt = stmt.where(self.model_class.published == true())
                         if q == "importable":
-                            query = query.filter(self.model_class.importable == true())
+                            stmt = stmt.where(self.model_class.importable == true())
                         elif q == "shared_with_me":
                             if not show_shared:
                                 message = "Can only use tag is:shared_with_me if show_shared parameter also true."
                                 raise exceptions.RequestParameterInvalidException(message)
-                            query = query.filter(self.user_share_model.user == user)
+                            stmt = stmt.where(self.user_share_model.user == user)
                 elif isinstance(term, RawTextTerm):
                     tf = p_tag_filter(term.text, False)
                     alias = aliased(model.User)
-                    query = query.outerjoin(self.model_class.user.of_type(alias))
-                    query = query.filter(
+                    stmt = stmt.outerjoin(self.model_class.user.of_type(alias))
+                    stmt = stmt.where(
                         raw_text_column_filter(
                             [
                                 self.model_class.title,
@@ -155,21 +157,21 @@ def p_tag_filter(term_text: str, quoted: bool):
         if (show_published or show_shared) and not is_admin:
             show_deleted = False

-        query = query.filter(self.model_class.deleted == (true() if show_deleted else false())).distinct()
+        stmt = stmt.where(self.model_class.deleted == (true() if show_deleted else false())).distinct()

         if include_total_count:
-            total_matches = query.count()
+            total_matches = get_count(trans.sa_session, stmt)
         else:
             total_matches = None

         sort_column = getattr(model.Visualization, payload.sort_by)
         if payload.sort_desc:
             sort_column = sort_column.desc()
-        query = query.order_by(sort_column)
+        stmt = stmt.order_by(sort_column)

         if payload.limit is not None:
-            query = query.limit(payload.limit)
+            stmt = stmt.limit(payload.limit)
         if payload.offset is not None:
-            query = query.offset(payload.offset)
-        return query, total_matches
+            stmt = stmt.offset(payload.offset)
+        return trans.sa_session.scalars(stmt), total_matches  # type:ignore[return-value]


 class VisualizationSerializer(sharable.SharableModelSerializer):
@@ -210,3 +212,8 @@ def add_deserializers(self):
         super().add_deserializers()
         self.deserializers.update({})
         self.deserializable_keyset.update(self.deserializers.keys())
+
+
+def get_count(session, statement):
+    stmt = select(func.count()).select_from(statement)
+    return session.scalar(stmt)
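The `index_query` rewrite above is the standard 1.x → 2.0 translation: `session.query(Model)` becomes `select(Model)`, `.filter()` becomes `.where()`, and the statement is only executed at the end via `session.scalars(stmt)`. `Query.count()` has no direct `select()` equivalent, hence the new `get_count()` helper. A sketch of both halves with a stand-in model; here I call `.subquery()` explicitly, which is one common way to make the SELECT usable as a FROM clause:

```python
from sqlalchemy import create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Visualization(Base):  # stand-in, not Galaxy's model
    __tablename__ = "visualization"
    id: Mapped[int] = mapped_column(primary_key=True)
    deleted: Mapped[bool] = mapped_column(default=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Visualization(), Visualization(deleted=True)])
    session.commit()

    stmt = select(Visualization).where(Visualization.deleted.is_(False))

    # Query.count() equivalent: COUNT over the statement wrapped as a subquery
    total_matches = session.scalar(select(func.count()).select_from(stmt.subquery()))

    results = session.scalars(stmt).all()  # materialize the ORM objects
    print(total_matches, len(results))  # 1 1
```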
diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py
index 054f5f1e78a9..3d454b457f82 100644
--- a/lib/galaxy/managers/workflows.py
+++ b/lib/galaxy/managers/workflows.py
@@ -39,7 +39,6 @@
 from sqlalchemy.orm import (
     aliased,
     joinedload,
-    Query,
     subqueryload,
 )
 from typing_extensions import Annotated
@@ -192,7 +191,7 @@ def index_query(
         latest_workflow_load = joinedload(StoredWorkflow.latest_workflow)
         if not payload.skip_step_counts:
-            latest_workflow_load = latest_workflow_load.undefer(Workflow.step_count)
+            latest_workflow_load = latest_workflow_load.undefer(Workflow.step_count)  # type:ignore[arg-type]
         latest_workflow_load = latest_workflow_load.lazyload(Workflow.steps)

         stmt = stmt.options(joinedload(StoredWorkflow.annotations))
@@ -269,7 +268,7 @@ def name_filter(term):
         if payload.offset is not None:
             stmt = stmt.offset(payload.offset)
         result = trans.sa_session.scalars(stmt).unique()
-        return result, total_matches
+        return result, total_matches  # type:ignore[return-value]

     def get_stored_workflow(self, trans, workflow_id, by_stored_id=True) -> StoredWorkflow:
         """Use a supplied ID (UUID or encoded stored workflow ID) to find
@@ -488,7 +487,7 @@ def build_invocations_query(
         sort_by=None,
         sort_desc=None,
         include_nested_invocations=True,
-    ) -> Tuple[Query, int]:
+    ) -> Tuple[List, int]:
         """Get invocations owned by the current user."""
         stmt = select(WorkflowInvocation)
@@ -536,7 +535,7 @@ def build_invocations_query(
             for inv in trans.sa_session.scalars(stmt)
             if self.check_security(trans, inv, check_ownership=True, check_accessible=False)
         ]
-        return invocations, total_matches
+        return invocations, total_matches  # type:ignore[return-value]


 MissingToolsT = List[Tuple[str, str, Optional[str], str]]
@@ -792,7 +791,7 @@ def _workflow_from_raw_description(
         elif not workflow_state_resolution_options.archive_source.startswith("file://"):  # URL import
             source_metadata["url"] = workflow_state_resolution_options.archive_source
             workflow_state_resolution_options.archive_source = None  # so trs_id is not set for subworkflows
-        workflow.source_metadata = source_metadata
+        workflow.source_metadata = source_metadata  # type:ignore[assignment]

         # Assume no errors until we find a step that has some
         workflow.has_errors = False
@@ -1785,7 +1784,7 @@ def __module_from_dict(
         temp_input_connections: Dict[str, Union[List[DictConnection], DictConnection]] = step_dict.get(
             "input_connections", {}
         )
-        step.temp_input_connections = temp_input_connections
+        step.temp_input_connections = temp_input_connections  # type: ignore[assignment]

         # Create the model class for the step
         steps.append(step)
@@ -1883,7 +1882,7 @@ def __connect_workflow_steps(self, steps: List[model.WorkflowStep], steps_by_ext
         for step in steps:
             # Input connections
             if step.temp_input_connections:  # populated by __module_from_dict
-                for input_name, conn_list in step.temp_input_connections.items():
+                for input_name, conn_list in step.temp_input_connections.items():  # type:ignore[unreachable]
                     if not conn_list:
                         continue
                     if not isinstance(conn_list, list):  # Older style singleton connection
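On the `undefer(Workflow.step_count)` line above: in SQLAlchemy 1.4/2.0, loader options chain off a relationship load, so a `joinedload()` can be extended with `.undefer()` to eagerly load a deferred column of the joined entity in the same query. A hedged sketch with stand-in models; `deferred=True` on `mapped_column` is the 2.0 spelling of a deferred column:

```python
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    Session,
    joinedload,
    mapped_column,
    relationship,
)


class Base(DeclarativeBase):
    pass


class Workflow(Base):  # illustrative stand-in
    __tablename__ = "workflow"
    id: Mapped[int] = mapped_column(primary_key=True)
    step_count: Mapped[int] = mapped_column(default=0, deferred=True)


class StoredWorkflow(Base):
    __tablename__ = "stored_workflow"
    id: Mapped[int] = mapped_column(primary_key=True)
    latest_workflow_id: Mapped[int] = mapped_column(ForeignKey("workflow.id"))
    latest_workflow: Mapped[Workflow] = relationship()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    # eager-join the workflow, then un-defer its step_count in the same load
    loader = joinedload(StoredWorkflow.latest_workflow).undefer(Workflow.step_count)
    stmt = select(StoredWorkflow).options(loader)
    session.scalars(stmt).unique().all()
```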
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 97c41e7ee2e1..a2f860ff26e5 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -18,12 +18,17 @@
 import string
 from collections import defaultdict
 from collections.abc import Callable
-from datetime import timedelta
+from datetime import (
+    datetime,
+    timedelta,
+)
+from decimal import Decimal
 from enum import Enum
 from secrets import token_hex
 from string import Template
 from typing import (
     Any,
+    cast,
     Dict,
     Iterable,
     List,
@@ -100,6 +105,8 @@
     column_property,
     deferred,
     joinedload,
+    Mapped,
+    mapped_column,
     object_session,
     Query,
     reconstructor,
@@ -107,7 +114,7 @@
     relationship,
 )
 from sqlalchemy.orm.attributes import flag_modified
-from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.orm.collections import attribute_keyed_dict
 from sqlalchemy.sql import exists
 from typing_extensions import (
     Literal,
@@ -140,7 +147,6 @@
 )
 from galaxy.model.orm.now import now
 from galaxy.model.orm.util import add_object_to_object_session
-from galaxy.objectstore import ObjectStore
 from galaxy.schema.invocation import (
     InvocationCancellationUserRequest,
     InvocationState,
@@ -216,7 +222,7 @@

 if TYPE_CHECKING:
     # Workaround for https://github.com/python/mypy/issues/14182
-    from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta

     class DeclarativeMeta(_DeclarativeMeta, type):
         pass
@@ -230,7 +236,7 @@
     class _HasTable:
         __table__: Table

 else:
-    from sqlalchemy.orm.decl_api import DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta

     _HasTable = object
@@ -256,7 +262,7 @@ def __declare_last__(cls):


 class RepresentById:
-    id: int
+    id: Mapped[int]

     def __repr__(self):
         try:
@@ -302,7 +308,6 @@ def set_datatypes_registry(d_registry):
 class HasTags:
     dict_collection_visible_keys = ["tags"]
     dict_element_visible_keys = ["tags"]
-    tags: List["ItemTagAssociation"]

     def to_dict(self, *args, **kwargs):
         rval = super().to_dict(*args, **kwargs)
@@ -415,7 +420,7 @@ def get_display_name(self):


 class UsesCreateAndUpdateTime:
-    update_time: DateTime
+    update_time: Mapped[Optional[datetime]]

     @property
     def seconds_since_updated(self):
@@ -435,11 +440,11 @@ class WorkerProcess(Base, UsesCreateAndUpdateTime):
     __tablename__ = "worker_process"
     __table_args__ = (UniqueConstraint("server_name", "hostname"),)

-    id = Column(Integer, primary_key=True)
-    server_name = Column(String(255), index=True)
-    hostname = Column(String(255))
-    pid = Column(Integer)
-    update_time = Column(DateTime, default=now, onupdate=now)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    server_name: Mapped[Optional[str]] = mapped_column(String(255), index=True)
+    hostname: Mapped[Optional[str]] = mapped_column(String(255))
+    pid: Mapped[Optional[int]] = mapped_column(Integer)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)


 def cached_id(galaxy_model_object):
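The `WorkerProcess` hunk above is the template for every model conversion that follows: each `Column` becomes a `mapped_column()` with a `Mapped[...]` annotation, where `Mapped[Optional[X]]` implies a nullable column and `Mapped[X]` a NOT NULL one. Where the diff wants the attribute typed non-Optional while preserving the legacy column's nullability (e.g. the `create_time`/`update_time` columns below), it passes an explicit `nullable=True`, which overrides the annotation. A runnable sketch of the three variants (illustrative table, not Galaxy's):

```python
from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class WorkerProcessSketch(Base):
    __tablename__ = "worker_process_sketch"

    # Mapped[int] (non-Optional) + primary_key -> NOT NULL
    id: Mapped[int] = mapped_column(primary_key=True)
    # Mapped[Optional[...]] -> nullable column; type checkers see Optional[str]
    server_name: Mapped[Optional[str]] = mapped_column(String(255), index=True)
    # explicit nullable=True wins over the non-Optional annotation
    update_time: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, nullable=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
print(WorkerProcessSketch.__table__.c.update_time.nullable)  # True
```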
relationship("CustosAuthnzToken", back_populates="user") - default_permissions = relationship("DefaultUserPermissions", back_populates="user") - groups = relationship("UserGroupAssociation", back_populates="user") - histories = relationship( + default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship( + "DefaultUserPermissions", back_populates="user" + ) + groups: Mapped[List["UserGroupAssociation"]] = relationship("UserGroupAssociation", back_populates="user") + histories: Mapped[List["History"]] = relationship( "History", back_populates="user", order_by=lambda: desc(History.update_time), cascade_backrefs=False # type: ignore[has-type] ) - active_histories = relationship( + active_histories: Mapped[List["History"]] = relationship( "History", primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), # type: ignore[has-type] viewonly=True, order_by=lambda: desc(History.update_time), # type: ignore[has-type] ) - galaxy_sessions = relationship( + galaxy_sessions: Mapped[List["GalaxySession"]] = relationship( "GalaxySession", back_populates="user", order_by=lambda: desc(GalaxySession.update_time), cascade_backrefs=False # type: ignore[has-type] ) - quotas = relationship("UserQuotaAssociation", back_populates="user") - quota_source_usages = relationship("UserQuotaSourceUsage", back_populates="user") + quotas: Mapped[List["UserQuotaAssociation"]] = relationship("UserQuotaAssociation", back_populates="user") + quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship( + "UserQuotaSourceUsage", back_populates="user" + ) social_auth = relationship("UserAuthnzToken", back_populates="user") - stored_workflow_menu_entries = relationship( + stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship( "StoredWorkflowMenuEntry", primaryjoin=( lambda: (StoredWorkflowMenuEntry.user_id == User.id) @@ -732,12 +741,14 @@ class User(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", collection_class=ordering_list("order_index"), ) - _preferences = relationship("UserPreference", collection_class=attribute_mapped_collection("name")) - values = relationship( + _preferences: Mapped[List["UserPreference"]] = relationship( + "UserPreference", collection_class=attribute_keyed_dict("name") + ) + values: Mapped[List["FormValues"]] = relationship( "FormValues", primaryjoin=(lambda: User.form_values_id == FormValues.id) # type: ignore[has-type] ) # Add type hint (will this work w/SA?) 
@@ -748,16 +759,20 @@ class User(Base, Dictifiable, RepresentById):
             )
         ),
     )
-    data_manager_histories = relationship("DataManagerHistoryAssociation", back_populates="user")
-    roles = relationship("UserRoleAssociation", back_populates="user")
-    stored_workflows = relationship(
+    data_manager_histories: Mapped[List["DataManagerHistoryAssociation"]] = relationship(
+        "DataManagerHistoryAssociation", back_populates="user"
+    )
+    roles: Mapped[List["UserRoleAssociation"]] = relationship("UserRoleAssociation", back_populates="user")
+    stored_workflows: Mapped[List["StoredWorkflow"]] = relationship(
         "StoredWorkflow",
         back_populates="user",
         primaryjoin=(lambda: User.id == StoredWorkflow.user_id),  # type: ignore[has-type]
         cascade_backrefs=False,
     )
-    all_notifications = relationship("UserNotificationAssociation", back_populates="user", cascade_backrefs=False)
-    non_private_roles = relationship(
+    all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship(
+        "UserNotificationAssociation", back_populates="user", cascade_backrefs=False
+    )
+    non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship(
         "UserRoleAssociation",
         viewonly=True,
         primaryjoin=(
@@ -767,7 +782,7 @@ class User(Base, Dictifiable, RepresentById):
         ),
     )

-    preferences: association_proxy  # defined at the end of this module
+    preferences = None

     # attributes that will be accessed and returned when calling to_dict( view='collection' )
     dict_collection_visible_keys = ["id", "email", "username", "deleted", "active", "last_password_change"]
@@ -928,7 +943,7 @@ def get_disk_usage(self, nice_size=False, quota_source_label=None):
                 "user_id": self.id,
                 "label": quota_source_label,
             }
-            row = sa_session.execute(statement, params).fetchone()
+            row = sa_session.execute(text(statement), params).fetchone()
         if row is not None:
             rval = row[0]
         else:
@@ -1202,19 +1217,20 @@ def quota_source_usage_for(self, quota_source_label: Optional[str]) -> Optional[
         return None

     def count_stored_workflow_user_assocs(self, stored_workflow) -> int:
-        stmt = select(StoredWorkflowUserShareAssociation).filter_by(user=self, stored_workflow=stored_workflow)
-        stmt = select(func.count()).select_from(stmt)
+        sq = select(StoredWorkflowUserShareAssociation).filter_by(user=self, stored_workflow=stored_workflow).subquery()
+        stmt = select(func.count()).select_from(sq)
         session = object_session(self)
-        return session.scalar(stmt)
+        assert session
+        return session.scalar(stmt) or 0


 class PasswordResetToken(Base):
     __tablename__ = "password_reset_token"
-    token = Column(String(32), primary_key=True, unique=True, index=True)
-    expiration_time = Column(DateTime)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user = relationship("User")
+    token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True)
+    expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user: Mapped["User"] = relationship("User")

     def __init__(self, user, token=None):
         if token:
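The `get_disk_usage` hunk above reflects that SQLAlchemy 2.0 no longer accepts plain SQL strings in `Session.execute()`; raw SQL must be wrapped in `text()`, with named `:param` placeholders bound from a dictionary. A minimal sketch:

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite://")

with Session(engine) as session:
    # 1.x allowed session.execute("SELECT ...", params); 2.0 requires text()
    stmt = text("SELECT :a + :b")
    row = session.execute(stmt, {"a": 1, "b": 2}).fetchone()
    assert row is not None
    print(row[0])  # 3
```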
@@ -1228,18 +1244,18 @@ def __init__(self, user, token=None):

 class DynamicTool(Base, Dictifiable, RepresentById):
     __tablename__ = "dynamic_tool"

-    id = Column(Integer, primary_key=True)
-    uuid = Column(UUIDType())
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, index=True, default=now, onupdate=now)
-    tool_id = Column(Unicode(255))
-    tool_version = Column(Unicode(255))
-    tool_format = Column(Unicode(255))
-    tool_path = Column(Unicode(255))
-    tool_directory = Column(Unicode(255))
-    hidden = Column(Boolean, default=True)
-    active = Column(Boolean, default=True)
-    value = Column(MutableJSONType)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    uuid: Mapped[Optional[str]] = mapped_column(UUIDType())
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True)
+    tool_id: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    tool_version: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    tool_format: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    tool_path: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    tool_directory: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    hidden: Mapped[Optional[bool]] = mapped_column(Boolean, default=True)
+    active: Mapped[Optional[bool]] = mapped_column(Boolean, default=True)
+    value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)

     dict_collection_visible_keys = ("id", "tool_id", "tool_format", "tool_version", "uuid", "active", "hidden")
     dict_element_visible_keys = ("id", "tool_id", "tool_format", "tool_version", "uuid", "active", "hidden")
@@ -1265,41 +1281,41 @@ def __init__(self, plugin, metric_name, metric_value):

 class JobMetricText(BaseJobMetric, RepresentById):
     __tablename__ = "job_metric_text"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    plugin = Column(Unicode(255))
-    metric_name = Column(Unicode(255))
-    metric_value = Column(Unicode(JOB_METRIC_MAX_LENGTH))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    plugin: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_value: Mapped[Optional[str]] = mapped_column(Unicode(JOB_METRIC_MAX_LENGTH))


 class JobMetricNumeric(BaseJobMetric, RepresentById):
     __tablename__ = "job_metric_numeric"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    plugin = Column(Unicode(255))
-    metric_name = Column(Unicode(255))
-    metric_value = Column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    plugin: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_value: Mapped[Optional[Decimal]] = mapped_column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE))


 class TaskMetricText(BaseJobMetric, RepresentById):
     __tablename__ = "task_metric_text"

-    id = Column(Integer, primary_key=True)
-    task_id = Column(Integer, ForeignKey("task.id"), index=True)
-    plugin = Column(Unicode(255))
-    metric_name = Column(Unicode(255))
-    metric_value = Column(Unicode(JOB_METRIC_MAX_LENGTH))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    task_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("task.id"), index=True)
+    plugin: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_value: Mapped[Optional[str]] = mapped_column(Unicode(JOB_METRIC_MAX_LENGTH))


 class TaskMetricNumeric(BaseJobMetric, RepresentById):
     __tablename__ = "task_metric_numeric"

-    id = Column(Integer, primary_key=True)
-    task_id = Column(Integer, ForeignKey("task.id"), index=True)
-    plugin = Column(Unicode(255))
-    metric_name = Column(Unicode(255))
-    metric_value = Column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    task_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("task.id"), index=True)
+    plugin: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_name: Mapped[Optional[str]] = mapped_column(Unicode(255))
+    metric_value: Mapped[Optional[Decimal]] = mapped_column(Numeric(JOB_METRIC_PRECISION, JOB_METRIC_SCALE))


 class IoDicts(NamedTuple):
@@ -1316,41 +1332,45 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):

     __tablename__ = "job"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now, index=True)
-    history_id = Column(Integer, ForeignKey("history.id"), index=True)
-    library_folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True)
-    tool_id = Column(String(255))
-    tool_version = Column(TEXT, default="1.0.0")
-    galaxy_version = Column(String(64), default=None)
-    dynamic_tool_id = Column(Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True)
-    state = Column(String(64), index=True)
-    info = Column(TrimmedString(255))
-    copied_from_job_id = Column(Integer, nullable=True)
-    command_line = Column(TEXT)
-    dependencies = Column(MutableJSONType, nullable=True)
-    job_messages = Column(MutableJSONType, nullable=True)
-    param_filename = Column(String(1024))
-    runner_name = Column(String(255))
-    job_stdout = Column(TEXT)
-    job_stderr = Column(TEXT)
-    tool_stdout = Column(TEXT)
-    tool_stderr = Column(TEXT)
-    exit_code = Column(Integer, nullable=True)
-    traceback = Column(TEXT)
-    session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True)
-    job_runner_name = Column(String(255))
-    job_runner_external_id = Column(String(255), index=True)
-    destination_id = Column(String(255), nullable=True)
-    destination_params = Column(MutableJSONType, nullable=True)
-    object_store_id = Column(TrimmedString(255), index=True)
-    imported = Column(Boolean, default=False, index=True)
-    params = Column(TrimmedString(255), index=True)
-    handler = Column(TrimmedString(255), index=True)
-    preferred_object_store_id = Column(String(255), nullable=True)
-    object_store_id_overrides = Column(JSONType)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True)
+    history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True)
+    library_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True)
+    tool_id: Mapped[Optional[str]] = mapped_column(String(255))
+    tool_version: Mapped[Optional[str]] = mapped_column(TEXT, default="1.0.0")
+    galaxy_version: Mapped[Optional[str]] = mapped_column(String(64), default=None)
+    dynamic_tool_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True
+    )
+    state: Mapped[Optional[str]] = mapped_column(String(64), index=True)
+    info: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
+    copied_from_job_id: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
+    command_line: Mapped[Optional[str]] = mapped_column(TEXT)
+    dependencies: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    job_messages: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    param_filename: Mapped[Optional[str]] = mapped_column(String(1024))
+    runner_name: Mapped[Optional[str]] = mapped_column(String(255))
+    job_stdout: Mapped[Optional[str]] = mapped_column(TEXT)
+    job_stderr: Mapped[Optional[str]] = mapped_column(TEXT)
+    tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT)
+    tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT)
+    exit_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
+    traceback: Mapped[Optional[str]] = mapped_column(TEXT)
+    session_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True
+    )
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True)
+    job_runner_name: Mapped[Optional[str]] = mapped_column(String(255))
+    job_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255), index=True)
+    destination_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
+    destination_params: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    imported: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True)
+    params: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
+    object_store_id_overrides: Mapped[Optional[bytes]] = mapped_column(JSONType)

     user = relationship("User")
     galaxy_session = relationship("GalaxySession")
@@ -1386,8 +1406,8 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
         "WorkflowInvocationStep", back_populates="job", uselist=False, cascade_backrefs=False
     )

-    any_output_dataset_collection_instances_deleted: column_property  # defined at the end of this module
-    any_output_dataset_deleted: column_property  # defined at the end of this module
+    any_output_dataset_collection_instances_deleted = None
+    any_output_dataset_deleted = None

     dict_collection_visible_keys = ["id", "state", "exit_code", "update_time", "create_time", "galaxy_version"]
     dict_element_visible_keys = [
@@ -1682,7 +1702,7 @@ def set_state(self, state: JobState) -> bool:
         if session and self.id and state not in Job.finished_states:
             # generate statement that will not revert DELETING or DELETED back to anything non-terminal
             rval = session.execute(
-                update(Job.table)
+                update(Job)
                 .where(Job.id == self.id, ~Job.state.in_((Job.states.DELETING, Job.states.DELETED)))
                 .values(state=state)
             )
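On `update(Job.table)` → `update(Job)` above: passing the mapped class produces an ORM-enabled UPDATE; multiple positional `where()` criteria are AND-ed together, and the result's `rowcount` reports how many rows matched, which `set_state()` uses to detect that a terminal state already won the race. A sketch with a stand-in model:

```python
from sqlalchemy import String, create_engine, update
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Job(Base):  # stand-in, not Galaxy's Job model
    __tablename__ = "job"
    id: Mapped[int] = mapped_column(primary_key=True)
    state: Mapped[str] = mapped_column(String(64), default="new")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Job(id=1))
    session.commit()

    # ORM-enabled UPDATE: the two where() criteria are AND-ed
    result = session.execute(
        update(Job)
        .where(Job.id == 1, ~Job.state.in_(("deleting", "deleted")))
        .values(state="queued")
    )
    session.commit()
    print(result.rowcount)  # 1 -> the transition was applied
```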
@@ -1923,12 +1943,12 @@ def update_hdca_update_time_for_job(self, update_time, sa_session, supports_skip
             subq = subq.with_for_update(skip_locked=True).subquery()
         implicit_statement = (
             HistoryDatasetCollectionAssociation.table.update()
-            .where(HistoryDatasetCollectionAssociation.table.c.id.in_(select(subq)))
+            .where(HistoryDatasetCollectionAssociation.id.in_(select(subq)))
             .values(update_time=update_time)
         )
         explicit_statement = (
             HistoryDatasetCollectionAssociation.table.update()
-            .where(HistoryDatasetCollectionAssociation.table.c.job_id == self.id)
+            .where(HistoryDatasetCollectionAssociation.job_id == self.id)
             .values(update_time=update_time)
         )
         sa_session.execute(explicit_statement)
@@ -2084,27 +2104,29 @@ class Task(Base, JobLike, RepresentById):

     __tablename__ = "task"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    execution_time = Column(DateTime)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    state = Column(String(64), index=True)
-    command_line = Column(TEXT)
-    param_filename = Column(String(1024))
-    runner_name = Column(String(255))
-    job_stdout = Column(TEXT)  # job_stdout makes sense here because it is short for job script standard out
-    job_stderr = Column(TEXT)
-    tool_stdout = Column(TEXT)
-    tool_stderr = Column(TEXT)
-    exit_code = Column(Integer, nullable=True)
-    job_messages = Column(MutableJSONType, nullable=True)
-    info = Column(TrimmedString(255))
-    traceback = Column(TEXT)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=False)
-    working_directory = Column(String(1024))
-    task_runner_name = Column(String(255))
-    task_runner_external_id = Column(String(255))
-    prepare_input_files_cmd = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    execution_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    state: Mapped[Optional[str]] = mapped_column(String(64), index=True)
+    command_line: Mapped[Optional[str]] = mapped_column(TEXT)
+    param_filename: Mapped[Optional[str]] = mapped_column(String(1024))
+    runner_name: Mapped[Optional[str]] = mapped_column(String(255))
+    job_stdout: Mapped[Optional[str]] = mapped_column(
+        TEXT
+    )  # job_stdout makes sense here because it is short for job script standard out
+    job_stderr: Mapped[Optional[str]] = mapped_column(TEXT)
+    tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT)
+    tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT)
+    exit_code: Mapped[Optional[int]] = mapped_column(Integer, nullable=True)
+    job_messages: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    info: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
+    traceback: Mapped[Optional[str]] = mapped_column(TEXT)
+    job_id: Mapped[int] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=False)
+    working_directory: Mapped[Optional[str]] = mapped_column(String(1024))
+    task_runner_name: Mapped[Optional[str]] = mapped_column(String(255))
+    task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255))
+    prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT)
     job = relationship("Job", back_populates="tasks")
     text_metrics = relationship("TaskMetricText")
     numeric_metrics = relationship("TaskMetricNumeric")
@@ -2254,10 +2276,10 @@ def set_prepare_input_files_cmd(self, prepare_input_files_cmd):

 class JobParameter(Base, RepresentById):
     __tablename__ = "job_parameter"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    name = Column(String(255))
-    value = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    name: Mapped[Optional[str]] = mapped_column(String(255))
+    value: Mapped[Optional[str]] = mapped_column(TEXT)

     def __init__(self, name, value):
         self.name = name
@@ -2270,11 +2292,11 @@ def copy(self):

 class JobToInputDatasetAssociation(Base, RepresentById):
     __tablename__ = "job_to_input_dataset"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True)
-    dataset_version = Column(Integer)
-    name = Column(String(255))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True)
+    dataset_version: Mapped[Optional[int]] = mapped_column(Integer)
+    name: Mapped[Optional[str]] = mapped_column(String(255))
     dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="dependent_jobs")
     job = relationship("Job", back_populates="input_datasets")
@@ -2288,10 +2310,10 @@ def __init__(self, name, dataset):

 class JobToOutputDatasetAssociation(Base, RepresentById):
     __tablename__ = "job_to_output_dataset"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True)
-    name = Column(String(255))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True)
+    name: Mapped[Optional[str]] = mapped_column(String(255))
     dataset = relationship("HistoryDatasetAssociation", lazy="joined", back_populates="creating_job_associations")
     job = relationship("Job", back_populates="output_datasets")
@@ -2308,10 +2330,12 @@ def item(self):

 class JobToInputDatasetCollectionAssociation(Base, RepresentById):
     __tablename__ = "job_to_input_dataset_collection"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True)
-    name = Column(String(255))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    dataset_collection_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("history_dataset_collection_association.id"), index=True
+    )
+    name: Mapped[Optional[str]] = mapped_column(String(255))
     dataset_collection = relationship("HistoryDatasetCollectionAssociation", lazy="joined")
     job = relationship("Job", back_populates="input_dataset_collections")
@@ -2323,10 +2347,12 @@ def __init__(self, name, dataset_collection):

 class JobToInputDatasetCollectionElementAssociation(Base, RepresentById):
     __tablename__ = "job_to_input_dataset_collection_element"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    dataset_collection_element_id = Column(Integer, ForeignKey("dataset_collection_element.id"), index=True)
-    name = Column(Unicode(255))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    dataset_collection_element_id: Mapped[Optional[int]] = mapped_column(
+        Integer,
ForeignKey("dataset_collection_element.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection_element = relationship("DatasetCollectionElement", lazy="joined") job = relationship("Job", back_populates="input_dataset_collection_elements") @@ -2340,10 +2366,12 @@ def __init__(self, name, dataset_collection_element): class JobToOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_output_dataset_collection" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection_instance = relationship("HistoryDatasetCollectionAssociation", lazy="joined") job = relationship("Job", back_populates="output_dataset_collection_instances") @@ -2362,10 +2390,12 @@ def item(self): class JobToImplicitOutputDatasetCollectionAssociation(Base, RepresentById): __tablename__ = "job_to_implicit_output_dataset_collection" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) dataset_collection = relationship("DatasetCollection") job = relationship("Job", back_populates="output_dataset_collections") @@ -2377,10 +2407,12 @@ def __init__(self, name, dataset_collection): class JobToInputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_input_library_dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) job = relationship("Job", back_populates="input_library_datasets") dataset = relationship("LibraryDatasetDatasetAssociation", lazy="joined", back_populates="dependent_jobs") @@ -2393,10 +2425,12 @@ def __init__(self, name, dataset): class JobToOutputLibraryDatasetAssociation(Base, RepresentById): __tablename__ = "job_to_output_library_dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = 
mapped_column(Integer, ForeignKey("job.id"), index=True) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) job = relationship("Job", back_populates="output_library_datasets") dataset = relationship( "LibraryDatasetDatasetAssociation", lazy="joined", back_populates="creating_job_associations" @@ -2411,11 +2445,11 @@ def __init__(self, name, dataset): class JobStateHistory(Base, RepresentById): __tablename__ = "job_state_history" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - state = Column(String(64), index=True) - info = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + state: Mapped[Optional[str]] = mapped_column(String(64), index=True) + info: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) def __init__(self, job): self.job_id = job.id @@ -2426,10 +2460,14 @@ def __init__(self, job): class ImplicitlyCreatedDatasetCollectionInput(Base, RepresentById): __tablename__ = "implicitly_created_dataset_collection_inputs" - id = Column(Integer, primary_key=True) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - input_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - name = Column(Unicode(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + input_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) input_dataset_collection = relationship( "HistoryDatasetCollectionAssociation", @@ -2447,8 +2485,8 @@ def __init__(self, name, input_dataset_collection): class ImplicitCollectionJobs(Base, Serializable): __tablename__ = "implicit_collection_jobs" - id = Column(Integer, primary_key=True) - populated_state = Column(TrimmedString(64), default="new", nullable=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="new", nullable=False) jobs = relationship( "ImplicitCollectionJobsJobAssociation", back_populates="implicit_collection_jobs", cascade_backrefs=False ) @@ -2478,10 +2516,14 @@ def _serialize(self, id_encoder, serialization_options): class ImplicitCollectionJobsJobAssociation(Base, RepresentById): __tablename__ = "implicit_collection_jobs_job_association" - id = Column(Integer, primary_key=True) - implicit_collection_jobs_id = Column(Integer, ForeignKey("implicit_collection_jobs.id"), index=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) # Consider making this nullable... 
-    order_index = Column(Integer, nullable=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("implicit_collection_jobs.id"), index=True
+    )
+    job_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("job.id"), index=True
+    )  # Consider making this nullable...
+    order_index: Mapped[int] = mapped_column(Integer, nullable=False)
     implicit_collection_jobs = relationship("ImplicitCollectionJobs", back_populates="jobs")
     job = relationship("Job", back_populates="implicit_collection_jobs_association")
@@ -2489,11 +2531,13 @@ class ImplicitCollectionJobsJobAssociation(Base, RepresentById):

 class PostJobAction(Base, RepresentById):
     __tablename__ = "post_job_action"

-    id = Column(Integer, primary_key=True)
-    workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=True)
-    action_type = Column(String(255), nullable=False)
-    output_name = Column(String(255), nullable=True)
-    action_arguments = Column(MutableJSONType, nullable=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    workflow_step_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("workflow_step.id"), index=True, nullable=True
+    )
+    action_type: Mapped[str] = mapped_column(String(255), nullable=False)
+    output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True)
+    action_arguments: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
     workflow_step = relationship(
         "WorkflowStep",
         back_populates="post_job_actions",
@@ -2511,9 +2555,11 @@ def __init__(self, action_type, workflow_step=None, output_name=None, action_arg

 class PostJobActionAssociation(Base, RepresentById):
     __tablename__ = "post_job_action_association"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=False)
-    post_job_action_id = Column(Integer, ForeignKey("post_job_action.id"), index=True, nullable=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[int] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=False)
+    post_job_action_id: Mapped[int] = mapped_column(
+        Integer, ForeignKey("post_job_action.id"), index=True, nullable=False
+    )
     post_job_action = relationship("PostJobAction")
     job = relationship("Job", back_populates="post_job_actions")
@@ -2531,21 +2577,21 @@ def __init__(self, pja, job=None, job_id=None):

 class JobExternalOutputMetadata(Base, RepresentById):
     __tablename__ = "job_external_output_metadata"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    history_dataset_association_id = Column(
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    history_dataset_association_id: Mapped[Optional[int]] = mapped_column(
         Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True
     )
-    library_dataset_dataset_association_id = Column(
+    library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column(
         Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True
     )
-    is_valid = Column(Boolean, default=True)
-    filename_in = Column(String(255))
-    filename_out = Column(String(255))
-    filename_results_code = Column(String(255))
-    filename_kwds = Column(String(255))
-    filename_override_metadata = Column(String(255))
-    job_runner_external_pid = Column(String(255))
+    is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=True)
+    filename_in: Mapped[Optional[str]] = mapped_column(String(255))
+    filename_out: Mapped[Optional[str]] = mapped_column(String(255))
+    filename_results_code: Mapped[Optional[str]] = mapped_column(String(255))
+    filename_kwds: Mapped[Optional[str]] = mapped_column(String(255))
+    filename_override_metadata: Mapped[Optional[str]] = mapped_column(String(255))
+    job_runner_external_pid: Mapped[Optional[str]] = mapped_column(String(255))
     history_dataset_association = relationship("HistoryDatasetAssociation", lazy="joined")
     library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", lazy="joined")
     job = relationship("Job", back_populates="external_output_metadata")
@@ -2588,12 +2634,12 @@ def __eq__(self, other):

 class JobExportHistoryArchive(Base, RepresentById):
     __tablename__ = "job_export_history_archive"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    history_id = Column(Integer, ForeignKey("history.id"), index=True)
-    dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True)
-    compressed = Column(Boolean, index=True, default=False)
-    history_attrs_filename = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True)
+    dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True)
+    compressed: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    history_attrs_filename: Mapped[Optional[str]] = mapped_column(TEXT)
     job = relationship("Job")
     dataset = relationship("Dataset")
     history = relationship("History", back_populates="exports")
@@ -2676,10 +2722,10 @@ def to_dict(self):

 class JobImportHistoryArchive(Base, RepresentById):
     __tablename__ = "job_import_history_archive"

-    id = Column(Integer, primary_key=True)
-    job_id = Column(Integer, ForeignKey("job.id"), index=True)
-    history_id = Column(Integer, ForeignKey("history.id"), index=True)
-    archive_dir = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True)
+    history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True)
+    archive_dir: Mapped[Optional[str]] = mapped_column(TEXT)
     job = relationship("Job")
     history = relationship("History")
@@ -2688,24 +2734,24 @@ class StoreExportAssociation(Base, RepresentById):
     __tablename__ = "store_export_association"
     __table_args__ = (Index("ix_store_export_object", "object_id", "object_type"),)

-    id = Column(Integer, primary_key=True)
-    task_uuid = Column(UUIDType(), index=True, unique=True)
-    create_time = Column(DateTime, default=now)
-    object_type = Column(TrimmedString(32))
-    object_id = Column(Integer)
-    export_metadata = Column(JSONType)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    task_uuid: Mapped[Optional[str]] = mapped_column(UUIDType(), index=True, unique=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    object_type: Mapped[Optional[str]] = mapped_column(TrimmedString(32))
+    object_id: Mapped[Optional[int]] = mapped_column(Integer)
+    export_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType)


 class JobContainerAssociation(Base, RepresentById):
__tablename__ = "job_container_association" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - container_type = Column(TEXT) - container_name = Column(TEXT) - container_info = Column(MutableJSONType, nullable=True) - created_time = Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + container_type: Mapped[Optional[str]] = mapped_column(TEXT) + container_name: Mapped[Optional[str]] = mapped_column(TEXT) + container_info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) job = relationship("Job", back_populates="container") def __init__(self, **kwd): @@ -2718,24 +2764,24 @@ def __init__(self, **kwd): class InteractiveToolEntryPoint(Base, Dictifiable, RepresentById): __tablename__ = "interactivetool_entry_point" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - name = Column(TEXT) - token = Column(TEXT) - tool_port = Column(Integer) - host = Column(TEXT) - port = Column(Integer) - protocol = Column(TEXT) - entry_url = Column(TEXT) - requires_domain = Column(Boolean, default=True) - requires_path_in_url = Column(Boolean, default=False) - requires_path_in_header_named = Column(TEXT) - info = Column(MutableJSONType, nullable=True) - configured = Column(Boolean, default=False) - deleted = Column(Boolean, default=False) - created_time = Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) - label = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + token: Mapped[Optional[str]] = mapped_column(TEXT) + tool_port: Mapped[Optional[int]] = mapped_column(Integer) + host: Mapped[Optional[str]] = mapped_column(TEXT) + port: Mapped[Optional[int]] = mapped_column(Integer) + protocol: Mapped[Optional[str]] = mapped_column(TEXT) + entry_url: Mapped[Optional[str]] = mapped_column(TEXT) + requires_domain: Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + requires_path_in_url: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + requires_path_in_header_named: Mapped[Optional[str]] = mapped_column(TEXT) + info: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) + configured: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + label: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job", back_populates="interactivetool_entry_points", uselist=False) dict_collection_visible_keys = [ @@ -2794,14 +2840,14 @@ def output_datasets_ids(self): class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost __tablename__ = "genome_index_tool_data" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - dataset_id = Column(Integer, 
ForeignKey("dataset.id"), index=True) - fasta_path = Column(String(255)) - created_time = Column(DateTime, default=now) - modified_time = Column(DateTime, default=now, onupdate=now) - indexer = Column(String(64)) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + fasta_path: Mapped[Optional[str]] = mapped_column(String(255)) + created_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + modified_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + indexer: Mapped[Optional[str]] = mapped_column(String(64)) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) job = relationship("Job") dataset = relationship("Dataset") user = relationship("User") @@ -2810,11 +2856,11 @@ class GenomeIndexToolData(Base, RepresentById): # TODO: params arg is lost class Group(Base, Dictifiable, RepresentById): __tablename__ = "galaxy_group" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) quotas = relationship("GroupQuotaAssociation", back_populates="group") roles = relationship("GroupRoleAssociation", back_populates="group", cascade_backrefs=False) users = relationship("UserGroupAssociation", back_populates="group") @@ -2830,11 +2876,11 @@ def __init__(self, name=None): class UserGroupAssociation(Base, RepresentById): __tablename__ = "user_group_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="groups") group = relationship("Group", back_populates="users") @@ -2847,25 +2893,25 @@ def __init__(self, user, group): class Notification(Base, Dictifiable, RepresentById): __tablename__ = "notification" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - publication_time = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, 
nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + publication_time: Mapped[Optional[datetime]] = mapped_column( DateTime, default=now ) # The date of publication, can be a future date to allow scheduling - expiration_time = Column( + expiration_time: Mapped[Optional[datetime]] = mapped_column( DateTime, default=now() + timedelta(days=30 * 6) ) # The expiration date, expired notifications will be permanently removed from DB regularly - source = Column(String(32), index=True) # Who (or what) generated the notification - category = Column( + source: Mapped[Optional[str]] = mapped_column(String(32), index=True) # Who (or what) generated the notification + category: Mapped[Optional[str]] = mapped_column( String(64), index=True ) # Category of the notification, defines its contents. Used for filtering, un/subscribing, etc - variant = Column( + variant: Mapped[Optional[str]] = mapped_column( String(16), index=True ) # Defines the 'importance' of the notification ('info', 'warning', 'urgent', etc.). Used for filtering, highlight rendering, etc # A bug in early 23.1 led to values being stored as json string, so we use this special type to process the result value twice. # content should always be a dict - content = Column(DoubleEncodedJsonType) + content: Mapped[Optional[bytes]] = mapped_column(DoubleEncodedJsonType) user_notification_associations = relationship("UserNotificationAssociation", back_populates="notification") @@ -2879,12 +2925,12 @@ def __init__(self, source: str, category: str, variant: str, content): class UserNotificationAssociation(Base, RepresentById): __tablename__ = "user_notification_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - notification_id = Column(Integer, ForeignKey("notification.id"), index=True) - seen_time = Column(DateTime, nullable=True) - deleted = Column(Boolean, index=True, default=False) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + notification_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("notification.id"), index=True) + seen_time: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) user = relationship("User", back_populates="all_notifications") notification = relationship("Notification", back_populates="user_notification_associations") @@ -2899,17 +2945,25 @@ def is_hda(d): return isinstance(d, HistoryDatasetAssociation) -class HistoryAudit(Base, RepresentById): +class HistoryAudit(Base): __tablename__ = "history_audit" __table_args__ = (PrimaryKeyConstraint(sqlite_on_conflict="IGNORE"),) - history_id = Column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) - update_time = Column(DateTime, default=now, primary_key=True, nullable=False) + history_id = mapped_column(Integer, ForeignKey("history.id"), primary_key=True, nullable=False) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, primary_key=True, nullable=False) # This class should never be instantiated. # See https://github.com/galaxyproject/galaxy/pull/11914 for details. 
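# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: the hunks above all apply the
# same SQLAlchemy 2.0 typed-declarative conversion. mapped_column() infers
# nullability from the Mapped[...] annotation (Optional -> NULL allowed), so
# columns such as create_time/update_time that must stay nullable in the
# schema while reading as plain `datetime` to type checkers pass an explicit
# nullable=True. A minimal, runnable sketch of both spellings; the model and
# table names here are hypothetical, not from the patch:
from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class ExampleBase(DeclarativeBase):
    pass


class ExampleGroup(ExampleBase):
    __tablename__ = "example_group"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Optional[...] annotation: nullability is inferred, no explicit flag.
    name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True)
    # Non-Optional annotation would imply NOT NULL; nullable=True overrides
    # the inference so the emitted DDL matches the existing schema.
    create_time: Mapped[datetime] = mapped_column(DateTime, default=datetime.now, nullable=True)


# Emits: id INTEGER NOT NULL, name VARCHAR(255) NULL, create_time DATETIME NULL
ExampleBase.metadata.create_all(create_engine("sqlite://"))
# ---------------------------------------------------------------------------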
__init__ = None # type: ignore[assignment] + def __repr__(self): + try: + r = f"<galaxy.model.HistoryAudit({self.history_id},{self.update_time}) at {hex(id(self))}>" + except Exception: + r = object.__repr__(self) + log.exception("Caught exception attempting to generate repr for: %s", r) + return r + @classmethod def prune(cls, sa_session): latest_subq = ( @@ -2938,22 +2992,26 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable __tablename__ = "history" __table_args__ = (Index("ix_history_slug", "slug", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - _update_time = Column("update_time", DateTime, index=True, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - name = Column(TrimmedString(255)) - hid_counter = Column(Integer, default=1) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - importing = Column(Boolean, index=True, default=False) - genome_build = Column(TrimmedString(40)) - importable = Column(Boolean, default=False) - slug = Column(TEXT) - published = Column(Boolean, index=True, default=False) - preferred_object_store_id = Column(String(255), nullable=True) - archived = Column(Boolean, index=True, default=False, server_default=false()) - archive_export_id = Column(Integer, ForeignKey("store_export_association.id"), nullable=True, default=None) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + _update_time: Mapped[datetime] = mapped_column( + "update_time", DateTime, index=True, default=now, onupdate=now, nullable=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + hid_counter: Mapped[Optional[int]] = mapped_column(Integer, default=1) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + importing: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + archived: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False, server_default=false()) + archive_export_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("store_export_association.id"), nullable=True, default=None + ) datasets = relationship( "HistoryDatasetAssociation", back_populates="history", cascade_backrefs=False, order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] @@ -2982,7 +3040,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable lambda: ( and_( HistoryDatasetCollectionAssociation.history_id == History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type] + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] ) ) ), @@ -3006,14 +3064,16 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable primaryjoin=( lambda: and_( HistoryDatasetCollectionAssociation.history_id ==
History.id, # type: ignore[has-type] - not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type] - HistoryDatasetCollectionAssociation.visible, # type: ignore[has-type] + not_(HistoryDatasetCollectionAssociation.deleted), # type: ignore[has-type, arg-type] + HistoryDatasetCollectionAssociation.visible, # type: ignore[has-type, arg-type] ) ), order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), # type: ignore[has-type] viewonly=True, ) - tags = relationship("HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history") + tags: Mapped[List["HistoryTagAssociation"]] = relationship( + "HistoryTagAssociation", order_by=lambda: HistoryTagAssociation.id, back_populates="history" + ) annotations = relationship( "HistoryAnnotationAssociation", order_by=lambda: HistoryAnnotationAssociation.id, back_populates="history" ) @@ -3032,8 +3092,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable update_time = column_property( select(func.max(HistoryAudit.update_time)).where(HistoryAudit.history_id == id).scalar_subquery(), ) - users_shared_with_count: column_property # defined at the end of this module - average_rating: column_property # defined at the end of this module + users_shared_with_count = None + average_rating = None # Set up proxy so that # History.users_shared_with @@ -3379,17 +3439,17 @@ def disk_size(cls): # .expression acts as a column_property and should return a scalar # first, get the distinct datasets within a history that are not purged hda_to_dataset_join = join( - HistoryDatasetAssociation, Dataset, HistoryDatasetAssociation.table.c.dataset_id == Dataset.table.c.id + HistoryDatasetAssociation, Dataset, HistoryDatasetAssociation.dataset_id == Dataset.id ) distinct_datasets = ( select( # use labels here to better access from the query above - HistoryDatasetAssociation.table.c.history_id.label("history_id"), + HistoryDatasetAssociation.history_id.label("history_id"), Dataset.total_size.label("dataset_size"), Dataset.id.label("dataset_id"), ) - .where(HistoryDatasetAssociation.table.c.purged != true()) - .where(Dataset.table.c.purged != true()) + .where(HistoryDatasetAssociation.purged != true()) + .where(Dataset.purged != true()) .select_from(hda_to_dataset_join) # TODO: slow (in general) but most probably here - index total_size for easier sorting/distinct? 
.distinct() @@ -3511,27 +3571,27 @@ def __filter_contents(self, content_class, **kwds): class UserShareAssociation(RepresentById): - user: Optional[User] + user: Mapped[User] class HistoryUserShareAssociation(Base, UserShareAssociation): __tablename__ = "history_user_share_association" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user: Mapped[User] = relationship("User") history = relationship("History", back_populates="users_shared_with") class UserRoleAssociation(Base, RepresentById): __tablename__ = "user_role_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="roles") role = relationship("Role", back_populates="users") @@ -3545,11 +3605,11 @@ def __init__(self, user, role): class GroupRoleAssociation(Base, RepresentById): __tablename__ = "group_role_association" - id = Column(Integer, primary_key=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) group = relationship("Group", back_populates="roles") role = relationship("Role", back_populates="groups") @@ -3562,13 +3622,13 @@ def __init__(self, group, role): class Role(Base, Dictifiable, RepresentById): __tablename__ = "role" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - description = Column(TEXT) - type = Column(String(40), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + 
description: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(String(40), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) dataset_actions = relationship("DatasetPermissions", back_populates="role") groups = relationship("GroupRoleAssociation", back_populates="role") users = relationship("UserRoleAssociation", back_populates="role") @@ -3597,22 +3657,22 @@ class UserQuotaSourceUsage(Base, Dictifiable, RepresentById): dict_element_visible_keys = ["disk_usage", "quota_source_label"] - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - quota_source_label = Column(String(32), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), index=True) # user had an index on disk_usage - does that make any sense? -John - disk_usage = Column(Numeric(15, 0), default=0, nullable=False) + disk_usage: Mapped[Decimal] = mapped_column(Numeric(15, 0), default=0, nullable=False) user = relationship("User", back_populates="quota_source_usages") class UserQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "user_quota_association" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) user = relationship("User", back_populates="quotas") quota = relationship("Quota", back_populates="users") @@ -3627,11 +3687,11 @@ def __init__(self, user, quota): class GroupQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "group_quota_association" - id = Column(Integer, primary_key=True) - group_id = Column(Integer, ForeignKey("galaxy_group.id"), index=True) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + group_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_group.id"), index=True) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) group = relationship("Group", back_populates="quotas") quota = relationship("Quota", back_populates="groups") @@ -3647,15 +3707,15 @@ class Quota(Base, Dictifiable, RepresentById): __tablename__ = "quota" __table_args__ = (Index("ix_quota_quota_source_label", "quota_source_label"),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = 
Column(String(255), index=True, unique=True) - description = Column(TEXT) - bytes = Column(BigInteger) - operation = Column(String(8)) - deleted = Column(Boolean, index=True, default=False) - quota_source_label = Column(String(32), default=None) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + bytes: Mapped[Optional[int]] = mapped_column(BigInteger) + operation: Mapped[Optional[str]] = mapped_column(String(8)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) default = relationship("DefaultQuotaAssociation", back_populates="quota", cascade_backrefs=False) groups = relationship("GroupQuotaAssociation", back_populates="quota") users = relationship("UserQuotaAssociation", back_populates="quota") @@ -3709,11 +3769,11 @@ def display_amount(self): class DefaultQuotaAssociation(Base, Dictifiable, RepresentById): __tablename__ = "default_quota_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - type = Column(String(32)) - quota_id = Column(Integer, ForeignKey("quota.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + type: Mapped[Optional[str]] = mapped_column(String(32)) + quota_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("quota.id"), index=True) quota = relationship("Quota", back_populates="default") dict_element_visible_keys = ["type"] @@ -3732,12 +3792,12 @@ def __init__(self, type, quota): class DatasetPermissions(Base, RepresentById): __tablename__ = "dataset_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) dataset = relationship("Dataset", back_populates="actions") role = relationship("Role", back_populates="dataset_actions") @@ -3754,12 +3814,12 @@ def __init__(self, action, dataset, role=None, role_id=None): class LibraryPermissions(Base, RepresentById): __tablename__ = "library_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_id = Column(Integer, ForeignKey("library.id"), nullable=True, index=True) - role_id 
= Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), nullable=True, index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) library = relationship("Library", back_populates="actions") role = relationship("Role") @@ -3776,12 +3836,14 @@ def __init__(self, action, library_item, role): class LibraryFolderPermissions(Base, RepresentById): __tablename__ = "library_folder_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_folder_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) folder = relationship("LibraryFolder", back_populates="actions") role = relationship("Role") @@ -3798,12 +3860,14 @@ def __init__(self, action, library_item, role): class LibraryDatasetPermissions(Base, RepresentById): __tablename__ = "library_dataset_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_dataset_id = Column(Integer, ForeignKey("library_dataset.id"), nullable=True, index=True) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_dataset_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset.id"), nullable=True, index=True + ) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) library_dataset = relationship("LibraryDataset", back_populates="actions") role = relationship("Role") @@ -3820,14 +3884,14 @@ def __init__(self, action, library_item, role): class LibraryDatasetDatasetAssociationPermissions(Base, RepresentById): __tablename__ = "library_dataset_dataset_association_permissions" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - action = Column(TEXT) - library_dataset_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, 
default=now, onupdate=now, nullable=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), nullable=True, index=True ) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", back_populates="actions") role = relationship("Role") @@ -3844,10 +3908,10 @@ def __init__(self, action, library_item, role): class DefaultUserPermissions(Base, RepresentById): __tablename__ = "default_user_permissions" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - action = Column(TEXT) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) user = relationship("User", back_populates="default_permissions") role = relationship("Role") @@ -3861,10 +3925,10 @@ def __init__(self, user, action, role): class DefaultHistoryPermissions(Base, RepresentById): __tablename__ = "default_history_permissions" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - action = Column(TEXT) - role_id = Column(Integer, ForeignKey("role.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(TEXT) + role_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("role.id"), index=True) history = relationship("History", back_populates="default_permissions") role = relationship("Role") @@ -3885,21 +3949,21 @@ def flush(self): class Dataset(Base, StorableObject, Serializable): __tablename__ = "dataset" - id = Column(Integer, primary_key=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - state = Column(TrimmedString(64), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - purgable = Column(Boolean, default=True) - object_store_id = Column(TrimmedString(255), index=True) - external_filename = Column(TEXT) - _extra_files_path = Column(TEXT) - created_from_basename = Column(TEXT) - file_size = Column(Numeric(15, 0)) - total_size = Column(Numeric(15, 0)) - uuid = Column(UUIDType()) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purgable: 
Mapped[Optional[bool]] = mapped_column(Boolean, default=True) + object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + external_filename: Mapped[Optional[str]] = mapped_column(TEXT) + _extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) + created_from_basename: Mapped[Optional[str]] = mapped_column(TEXT) + file_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) + total_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType()) actions = relationship("DatasetPermissions", back_populates="dataset") job = relationship(Job, primaryjoin=(lambda: Dataset.job_id == Job.id)) @@ -3909,7 +3973,7 @@ class Dataset(Base, StorableObject, Serializable): lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] HistoryDatasetAssociation.deleted == false(), # type: ignore[has-type] - HistoryDatasetAssociation.purged == false(), # type: ignore[attr-defined] + HistoryDatasetAssociation.purged == false(), # type: ignore[attr-defined, arg-type] ) ), viewonly=True, @@ -3919,7 +3983,7 @@ class Dataset(Base, StorableObject, Serializable): primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, # type: ignore[attr-defined] - HistoryDatasetAssociation.purged == true(), # type: ignore[attr-defined] + HistoryDatasetAssociation.purged == true(), # type: ignore[attr-defined, arg-type] ) ), viewonly=True, @@ -3972,7 +4036,7 @@ class conversion_messages(str, Enum): permitted_actions = get_permitted_actions(filter="DATASET") file_path = "/tmp/" - object_store: Optional[ObjectStore] = None # This get initialized in mapping.py (method init) by app.py + object_store = None # This get initialized in mapping.py (method init) by app.py engine = None def __init__( @@ -4122,7 +4186,7 @@ def _calculate_size(self) -> int: except OSError: return 0 assert self.object_store - return self.object_store.size(self) + return self.object_store.size(self) # type:ignore[unreachable] @overload def get_size(self, nice_size: Literal[False], calculate_size: bool = True) -> int: ... @@ -4136,7 +4200,7 @@ def get_size(self, nice_size: bool = False, calculate_size: bool = True) -> Unio if nice_size: return galaxy.util.nice_size(self.file_size) else: - return self.file_size + return cast(int, self.file_size) elif calculate_size: # Hopefully we only reach this branch in sessionless mode if nice_size: @@ -4144,7 +4208,7 @@ def get_size(self, nice_size: bool = False, calculate_size: bool = True) -> Unio else: return self._calculate_size() else: - return self.file_size or 0 + return cast(int, self.file_size) or 0 def set_size(self, no_extra_files=False): """Sets the size of the data on disk. 
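# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: the get_size() hunk above keeps a
# single runtime implementation while typing.overload gives callers precise
# return types, and cast() tells the checker which type the Optional mapped
# column value has on a given branch. A standalone sketch of the same shape;
# the class and attribute names here are hypothetical:
from typing import Literal, Optional, Union, cast, overload


class SizedExample:
    file_size: Optional[int] = None

    @overload
    def get_size(self, nice_size: Literal[False] = False) -> int: ...

    @overload
    def get_size(self, nice_size: Literal[True]) -> str: ...

    def get_size(self, nice_size: bool = False) -> Union[int, str]:
        if self.file_size is not None:
            if nice_size:
                return f"{self.file_size} bytes"
            # In the patch the column is Numeric (Decimal at runtime), so the
            # checker needs cast(int, ...) to accept the declared int return.
            return cast(int, self.file_size)
        return 0


assert SizedExample().get_size() == 0
# ---------------------------------------------------------------------------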
@@ -4265,11 +4329,11 @@ def to_int(n) -> Optional[int]: class DatasetSource(Base, Dictifiable, Serializable): __tablename__ = "dataset_source" - id = Column(Integer, primary_key=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - source_uri = Column(TEXT) - extra_files_path = Column(TEXT) - transform = Column(MutableJSONType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + source_uri: Mapped[Optional[str]] = mapped_column(TEXT) + extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) + transform: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) dataset = relationship("Dataset", back_populates="sources") hashes = relationship("DatasetSourceHash", back_populates="source") dict_collection_visible_keys = ["id", "source_uri", "extra_files_path", "transform"] @@ -4303,10 +4367,10 @@ def copy(self) -> "DatasetSource": class DatasetSourceHash(Base, Serializable): __tablename__ = "dataset_source_hash" - id = Column(Integer, primary_key=True) - dataset_source_id = Column(Integer, ForeignKey("dataset_source.id"), index=True) - hash_function = Column(TEXT) - hash_value = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_source_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_source.id"), index=True) + hash_function: Mapped[Optional[str]] = mapped_column(TEXT) + hash_value: Mapped[Optional[str]] = mapped_column(TEXT) source = relationship("DatasetSource", back_populates="hashes") def _serialize(self, id_encoder, serialization_options): @@ -4328,11 +4392,11 @@ def copy(self) -> "DatasetSourceHash": class DatasetHash(Base, Dictifiable, Serializable): __tablename__ = "dataset_hash" - id = Column(Integer, primary_key=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) - hash_function = Column(TEXT) - hash_value = Column(TEXT) - extra_files_path = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) + hash_function: Mapped[Optional[str]] = mapped_column(TEXT) + hash_value: Mapped[Optional[str]] = mapped_column(TEXT) + extra_files_path: Mapped[Optional[str]] = mapped_column(TEXT) dataset = relationship("Dataset", back_populates="hashes") dict_collection_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] dict_element_visible_keys = ["id", "hash_function", "hash_value", "extra_files_path"] @@ -4792,7 +4856,9 @@ def find_conversion_destination( self, accepted_formats: List[str], **kwd ) -> Tuple[bool, Optional[str], Optional["DatasetInstance"]]: """Returns ( target_ext, existing converted dataset )""" - return self.datatype.find_conversion_destination(self, accepted_formats, _get_datatypes_registry(), **kwd) + return self.datatype.find_conversion_destination( + self, accepted_formats, _get_datatypes_registry(), **kwd # type:ignore[arg-type] + ) def add_validation_error(self, validation_error): self.validation_errors.append(validation_error) @@ -5355,17 +5421,19 @@ def type_id(cls): return (type_coerce(cls.content_type, Unicode) + "-" + type_coerce(cls.id, Unicode)).label("type_id") -class HistoryDatasetAssociationHistory(Base, Serializable): +class HistoryDatasetAssociationHistory(Base): __tablename__ = "history_dataset_association_history" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, 
ForeignKey("history_dataset_association.id"), index=True) - update_time = Column(DateTime, default=now) - version = Column(Integer) - name = Column(TrimmedString(255)) - extension = Column(TrimmedString(64)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + version: Mapped[Optional[int]] = mapped_column(Integer) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + extension: Mapped[Optional[str]] = mapped_column(TrimmedString(64)) _metadata = Column("metadata", MetadataType) - extended_metadata_id = Column(Integer, ForeignKey("extended_metadata.id"), index=True) + extended_metadata_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("extended_metadata.id"), index=True) def __init__( self, @@ -5392,12 +5460,14 @@ def __init__( class HistoryDatasetAssociationDisplayAtAuthorization(Base, RepresentById): __tablename__ = "history_dataset_association_display_at_authorization" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - site = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + site: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) history_dataset_association = relationship("HistoryDatasetAssociation") user = relationship("User") @@ -5410,10 +5480,14 @@ def __init__(self, hda=None, user=None, site=None): class HistoryDatasetAssociationSubset(Base, RepresentById): __tablename__ = "history_dataset_association_subset" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - history_dataset_association_subset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - location = Column(Unicode(255), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + history_dataset_association_subset_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + location: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) hda = relationship( "HistoryDatasetAssociation", @@ -5438,15 +5512,15 @@ def __init__(self, hda, subset, location): class Library(Base, Dictifiable, HasName, Serializable): __tablename__ = "library" - id = Column(Integer, primary_key=True) - root_folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - 
name = Column(String(255), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - description = Column(TEXT) - synopsis = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + root_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + description: Mapped[Optional[str]] = mapped_column(TEXT) + synopsis: Mapped[Optional[str]] = mapped_column(TEXT) root_folder = relationship("LibraryFolder", back_populates="library_root") actions = relationship("LibraryPermissions", back_populates="library", cascade_backrefs=False) @@ -5516,17 +5590,19 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): __tablename__ = "library_folder" __table_args__ = (Index("ix_library_folder_name", "name", mysql_length=200),) - id = Column(Integer, primary_key=True) - parent_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TEXT) - description = Column(TEXT) - order_id = Column(Integer) # not currently being used, but for possible future use - item_count = Column(Integer) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) - genome_build = Column(TrimmedString(40)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + description: Mapped[Optional[str]] = mapped_column(TEXT) + order_id: Mapped[Optional[int]] = mapped_column(Integer) # not currently being used, but for possible future use + item_count: Mapped[Optional[int]] = mapped_column(Integer) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) folders = relationship( "LibraryFolder", @@ -5657,9 +5733,9 @@ def parent_library(self): class LibraryDataset(Base, Serializable): __tablename__ = "library_dataset" - id = Column(Integer, primary_key=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) # current version of dataset, if null, there is not a current version selected - library_dataset_dataset_association_id = Column( + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey( "library_dataset_dataset_association.id", use_alter=True, name="library_dataset_dataset_association_id_fk" @@ -5667,17 +5743,17 @@ class LibraryDataset(Base, Serializable): nullable=True, index=True, ) - folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) + folder_id: 
Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) # not currently being used, but for possible future use - order_id = Column(Integer) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + order_id: Mapped[Optional[int]] = mapped_column(Integer) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) # when not None/null this will supercede display in library (but not when imported into user's history?) - _name = Column("name", TrimmedString(255), index=True) + _name: Mapped[Optional[str]] = mapped_column("name", TrimmedString(255), index=True) # when not None/null this will supercede display in library (but not when imported into user's history?) - _info = Column("info", TrimmedString(255)) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + _info: Mapped[Optional[str]] = mapped_column("info", TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) folder = relationship("LibraryFolder") library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True @@ -5963,8 +6039,8 @@ def update_parent_folder_update_times(self): class ExtendedMetadata(Base, RepresentById): __tablename__ = "extended_metadata" - id = Column(Integer, primary_key=True) - data = Column(MutableJSONType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) children = relationship("ExtendedMetadataIndex", back_populates="extended_metadata") def __init__(self, data): @@ -5974,12 +6050,12 @@ def __init__(self, data): class ExtendedMetadataIndex(Base, RepresentById): __tablename__ = "extended_metadata_index" - id = Column(Integer, primary_key=True) - extended_metadata_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + extended_metadata_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("extended_metadata.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - path = Column(String(255)) - value = Column(TEXT) + path: Mapped[Optional[str]] = mapped_column(String(255)) + value: Mapped[Optional[str]] = mapped_column(TEXT) extended_metadata = relationship("ExtendedMetadata", back_populates="children") def __init__(self, extended_metadata, path, value): @@ -5991,17 +6067,20 @@ def __init__(self, extended_metadata, path, value): class LibraryInfoAssociation(Base, RepresentById): __tablename__ = "library_info_association" - id = Column(Integer, primary_key=True) - library_id = Column(Integer, ForeignKey("library.id"), index=True) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - inheritable = Column(Boolean, index=True, default=False) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), index=True) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = 
mapped_column(Integer, ForeignKey("form_values.id"), index=True) + inheritable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) library = relationship( "Library", primaryjoin=( - lambda: and_(LibraryInfoAssociation.library_id == Library.id, not_(LibraryInfoAssociation.deleted)) + lambda: and_( + LibraryInfoAssociation.library_id == Library.id, + not_(LibraryInfoAssociation.deleted), # type:ignore[arg-type] + ) ), ) template = relationship( @@ -6021,12 +6100,14 @@ def __init__(self, library, form_definition, info, inheritable=False): class LibraryFolderInfoAssociation(Base, RepresentById): __tablename__ = "library_folder_info_association" - id = Column(Integer, primary_key=True) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), nullable=True, index=True) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - inheritable = Column(Boolean, index=True, default=False) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_folder_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_folder.id"), nullable=True, index=True + ) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + inheritable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) folder = relationship( "LibraryFolder", @@ -6052,13 +6133,13 @@ def __init__(self, folder, form_definition, info, inheritable=False): class LibraryDatasetDatasetInfoAssociation(Base, RepresentById): __tablename__ = "library_dataset_dataset_info_association" - id = Column(Integer, primary_key=True) - library_dataset_dataset_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("library_dataset_dataset_association.id"), nullable=True, index=True ) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - form_values_id = Column(Integer, ForeignKey("form_values.id"), index=True) - deleted = Column(Boolean, index=True, default=False) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + form_values_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_values.id"), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", @@ -6092,16 +6173,24 @@ def inheritable(self): class ImplicitlyConvertedDatasetAssociation(Base, Serializable): __tablename__ = "implicitly_converted_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - hda_parent_id = Column(Integer, 
ForeignKey("history_dataset_association.id"), index=True) - ldda_parent_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) - deleted = Column(Boolean, index=True, default=False) - metadata_safe = Column(Boolean, index=True, default=True) - type = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + hda_parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + ldda_parent_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + metadata_safe: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=True) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) parent_hda = relationship( "HistoryDatasetAssociation", @@ -6202,13 +6291,13 @@ def produce_filter(self, table): class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): __tablename__ = "dataset_collection" - id = Column(Integer, primary_key=True) - collection_type = Column(Unicode(255), nullable=False) - populated_state = Column(TrimmedString(64), default="ok", nullable=False) - populated_state_message = Column(TEXT) - element_count = Column(Integer, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_type: Mapped[str] = mapped_column(Unicode(255), nullable=False) + populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="ok", nullable=False) + populated_state_message: Mapped[Optional[str]] = mapped_column(TEXT) + element_count: Mapped[Optional[int]] = mapped_column(Integer, nullable=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) elements = relationship( "DatasetCollectionElement", @@ -6256,8 +6345,8 @@ def _build_nested_collection_attributes_stmt( dataset_permission_attributes = dataset_permission_attributes or () return_entities = return_entities or () dataset_collection = self - dc = alias(DatasetCollection) - dce = alias(DatasetCollectionElement) + dc = alias(DatasetCollection.__table__) + dce = alias(DatasetCollectionElement.__table__) depth_collection_type = dataset_collection.collection_type order_by_columns = [dce.c.element_index] nesting_level = 0 @@ -6278,8 +6367,8 @@ def attribute_columns(column_collection, attributes, nesting_level=None): while ":" in depth_collection_type: nesting_level += 1 - inner_dc = alias(DatasetCollection) - inner_dce = alias(DatasetCollectionElement) + inner_dc = alias(DatasetCollection.__table__) + inner_dce = alias(DatasetCollectionElement.__table__) order_by_columns.append(inner_dce.c.element_index) q = q.join( inner_dc, and_(inner_dc.c.id == dce.c.child_collection_id, dce.c.dataset_collection_id == dc.c.id) @@ 
-6312,7 +6401,7 @@ def attribute_columns(column_collection, attributes, nesting_level=None): for entity in return_entities: q = q.add_columns(entity) if entity == DatasetCollectionElement: - q = q.filter(entity.id == dce.c.id) + q = q.filter(entity.id == dce.c.id) # type:ignore[arg-type] q = q.order_by(*order_by_columns) return q @@ -6409,7 +6498,7 @@ def element_identifiers_extensions_paths_and_metadata_files( hda_attributes=("extension",), return_entities=(HistoryDatasetAssociation, Dataset), ) - tuples = object_session(self).execute(stmt) + tuples = object_session(self).execute(stmt) # type:ignore[union-attr] # element_identifiers, extension, path for row in tuples: result = [row[:-3], row.extension, row.Dataset.get_file_name()] @@ -6660,21 +6749,23 @@ class HistoryDatasetCollectionAssociation( __tablename__ = "history_dataset_collection_association" - id = Column(Integer, primary_key=True) - collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - name = Column(TrimmedString(255)) - hid = Column(Integer) - visible = Column(Boolean) - deleted = Column(Boolean, default=False) - copied_from_history_dataset_collection_association_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_collection.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + hid: Mapped[Optional[int]] = mapped_column(Integer) + visible: Mapped[Optional[bool]] = mapped_column(Boolean) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + copied_from_history_dataset_collection_association_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True ) - implicit_output_name = Column(Unicode(255), nullable=True) - job_id = Column(ForeignKey("job.id"), index=True, nullable=True) - implicit_collection_jobs_id = Column(ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) + implicit_output_name: Mapped[Optional[str]] = mapped_column(Unicode(255), nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) + implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( + ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) collection = relationship("DatasetCollection") history = relationship("History", back_populates="dataset_collections") @@ -6703,7 +6794,7 @@ class HistoryDatasetCollectionAssociation( back_populates="history_dataset_collection_associations", uselist=False, ) - tags = relationship( + tags: Mapped[List["HistoryDatasetCollectionTagAssociation"]] = relationship( "HistoryDatasetCollectionTagAssociation", order_by=lambda: HistoryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", @@ -6809,7 +6900,7 @@ def build_statement(): col = func.sum(case((column(state_label) == state, 1), else_=0)).label(state) stm = stm.add_columns(col) # Add aggregate column for all jobs - col = 
func.count("*").label("all_jobs") + col = func.count().label("all_jobs") stm = stm.add_columns(col) return stm @@ -7040,16 +7131,16 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre __tablename__ = "library_dataset_collection_association" - id = Column(Integer, primary_key=True) - collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True) - folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) - name = Column(TrimmedString(255)) - deleted = Column(Boolean, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + collection_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset_collection.id"), index=True) + folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) collection = relationship("DatasetCollection") folder = relationship("LibraryFolder") - tags = relationship( + tags: Mapped[List["LibraryDatasetCollectionTagAssociation"]] = relationship( "LibraryDatasetCollectionTagAssociation", order_by=lambda: LibraryDatasetCollectionTagAssociation.id, back_populates="dataset_collection", @@ -7084,16 +7175,24 @@ class DatasetCollectionElement(Base, Dictifiable, Serializable): __tablename__ = "dataset_collection_element" - id = Column(Integer, primary_key=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) # Parent collection id describing what collection this element belongs to. - dataset_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True, nullable=False) + dataset_collection_id: Mapped[int] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True, nullable=False + ) # Child defined by this association - HDA, LDDA, or another dataset association... - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - child_collection_id = Column(Integer, ForeignKey("dataset_collection.id"), index=True, nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + child_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dataset_collection.id"), index=True, nullable=True + ) # Element index and identifier to define this parent-child relationship. 
- element_index = Column(Integer) - element_identifier = Column(Unicode(255)) + element_index: Mapped[Optional[int]] = mapped_column(Integer) + element_identifier: Mapped[Optional[str]] = mapped_column(Unicode(255)) hda = relationship( "HistoryDatasetAssociation", @@ -7272,14 +7371,16 @@ def _serialize(self, id_encoder, serialization_options): class Event(Base, RepresentById): __tablename__ = "event" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - history_id = Column(Integer, ForeignKey("history.id"), index=True, nullable=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) - message = Column(TrimmedString(1024)) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True) - tool_id = Column(String(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) + message: Mapped[Optional[str]] = mapped_column(TrimmedString(1024)) + session_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("galaxy_session.id"), index=True, nullable=True + ) + tool_id: Mapped[Optional[str]] = mapped_column(String(255)) history = relationship("History") user = relationship("User") @@ -7289,21 +7390,21 @@ class Event(Base, RepresentById): class GalaxySession(Base, RepresentById): __tablename__ = "galaxy_session" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) - remote_host = Column(String(255)) - remote_addr = Column(String(255)) - referer = Column(TEXT) - current_history_id = Column(Integer, ForeignKey("history.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=True) + remote_host: Mapped[Optional[str]] = mapped_column(String(255)) + remote_addr: Mapped[Optional[str]] = mapped_column(String(255)) + referer: Mapped[Optional[str]] = mapped_column(TEXT) + current_history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), nullable=True) # unique 128 bit random number coerced to a string - session_key = Column(TrimmedString(255), index=True, unique=True) - is_valid = Column(Boolean, default=False) + session_key: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) + is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) # saves a reference to the previous session so we have a way to chain them together - prev_session_id = Column(Integer) - disk_usage = Column(Numeric(15, 0), index=True) - last_action = Column(DateTime) + prev_session_id: Mapped[Optional[int]] = mapped_column(Integer) + disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 
0), index=True) + last_action: Mapped[Optional[datetime]] = mapped_column(DateTime) current_history = relationship("History") histories = relationship( "GalaxySessionToHistoryAssociation", back_populates="galaxy_session", cascade_backrefs=False @@ -7335,10 +7436,10 @@ def set_disk_usage(self, bytes): class GalaxySessionToHistoryAssociation(Base, RepresentById): __tablename__ = "galaxy_session_to_history" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + session_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_session.id"), index=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) galaxy_session = relationship("GalaxySession", back_populates="histories") history = relationship("History", back_populates="galaxy_sessions") @@ -7367,20 +7468,20 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): __tablename__ = "stored_workflow" __table_args__ = (Index("ix_stored_workflow_slug", "slug", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) - latest_workflow_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) + user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False) + latest_workflow_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow.id", use_alter=True, name="stored_workflow_latest_workflow_id_fk"), index=True ) - name = Column(TEXT) - deleted = Column(Boolean, default=False) - hidden = Column(Boolean, default=False) - importable = Column(Boolean, default=False) - slug = Column(TEXT) - from_path = Column(TEXT) - published = Column(Boolean, index=True, default=False) + name: Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + hidden: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) + slug: Mapped[Optional[str]] = mapped_column(TEXT) + from_path: Mapped[Optional[str]] = mapped_column(TEXT) + published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) user = relationship( "User", primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows" @@ -7399,12 +7500,12 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): primaryjoin=(lambda: StoredWorkflow.latest_workflow_id == Workflow.id), # type: ignore[has-type] lazy=False, ) - tags = relationship( + tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( "StoredWorkflowTagAssociation", order_by=lambda: StoredWorkflowTagAssociation.id, back_populates="stored_workflow", ) - owner_tags = relationship( + owner_tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( "StoredWorkflowTagAssociation", 
primaryjoin=( lambda: and_( @@ -7427,7 +7528,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById): ) users_shared_with = relationship("StoredWorkflowUserShareAssociation", back_populates="stored_workflow") - average_rating: column_property + average_rating = None # Set up proxy so that # StoredWorkflow.users_shared_with @@ -7510,8 +7611,9 @@ def copy_tags_from(self, target_user, source_workflow): def invocation_counts(self) -> InvocationsStateCounts: sa_session = object_session(self) + assert sa_session stmt = ( - select([WorkflowInvocation.state, func.count(WorkflowInvocation.state)]) + select(WorkflowInvocation.state, func.count(WorkflowInvocation.state)) .select_from(StoredWorkflow) .join(Workflow, Workflow.stored_workflow_id == StoredWorkflow.id) .join(WorkflowInvocation, WorkflowInvocation.workflow_id == Workflow.id) @@ -7519,7 +7621,7 @@ def invocation_counts(self) -> InvocationsStateCounts: .where(StoredWorkflow.id == self.id) ) rows = sa_session.execute(stmt).all() - rows_as_dict = dict(r for r in rows if r[0] is not None) + rows_as_dict = dict(r for r in rows if r[0] is not None) # type:ignore[arg-type, var-annotated] return InvocationsStateCounts(rows_as_dict) def to_dict(self, view="collection", value_mapper=None): @@ -7540,22 +7642,26 @@ class Workflow(Base, Dictifiable, RepresentById): __tablename__ = "workflow" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) # workflows will belong to either a stored workflow or a parent/nesting workflow. 
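The `invocation_counts` change above is the standard 1.x-to-2.0 `select()` conversion: the single list argument becomes positional column expressions, and `session.execute()` returns `Row` tuples. The same release also lets `update()` and `delete()` take the mapped class directly instead of `Model.table`, which is how the `update(WorkflowInvocation.table)` and `update(Job.table)` calls are rewritten further down. A runnable sketch of both idioms against a throwaway in-memory table (all names invented for illustration):

from typing import Optional

from sqlalchemy import Integer, String, create_engine, func, select, update
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ExampleInvocation(Base):
    __tablename__ = "example_invocation"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    state: Mapped[Optional[str]] = mapped_column(String(64), index=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([ExampleInvocation(state=s) for s in ("scheduled", "scheduled", "failed", None)])
    session.commit()

    # 1.x: select([ExampleInvocation.state, func.count(...)]) -- list argument.
    # 2.0: columns are passed positionally; rows come back as tuples.
    stmt = select(ExampleInvocation.state, func.count()).group_by(ExampleInvocation.state)
    rows = session.execute(stmt).all()
    # Drop the NULL-state group, mirroring the rows_as_dict filter above.
    counts = dict(r for r in rows if r[0] is not None)
    print(counts)  # -> {'failed': 1, 'scheduled': 2}

    # 2.0 also accepts the ORM entity in update(), no .table indirection:
    session.execute(
        update(ExampleInvocation).where(ExampleInvocation.state == "failed").values(state="requeued")
    )
    session.commit()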
- stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True, nullable=True) - parent_workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) - name = Column(TEXT) - has_cycles = Column(Boolean) - has_errors = Column(Boolean) - reports_config = Column(JSONType) - creator_metadata = Column(JSONType) - license = Column(TEXT) - source_metadata = Column(JSONType) - uuid = Column(UUIDType, nullable=True) - - steps: List["WorkflowStep"] = relationship( + stored_workflow_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("stored_workflow.id"), index=True, nullable=True + ) + parent_workflow_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow.id"), index=True, nullable=True + ) + name: Mapped[Optional[str]] = mapped_column(TEXT) + has_cycles: Mapped[Optional[bool]] = mapped_column(Boolean) + has_errors: Mapped[Optional[bool]] = mapped_column(Boolean) + reports_config: Mapped[Optional[bytes]] = mapped_column(JSONType) + creator_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType) + license: Mapped[Optional[str]] = mapped_column(TEXT) + source_metadata: Mapped[Optional[bytes]] = mapped_column(JSONType) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType, nullable=True) + + steps = relationship( "WorkflowStep", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), # type: ignore[has-type] @@ -7563,7 +7669,7 @@ class Workflow(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", lazy=False, ) - comments: List["WorkflowComment"] = relationship( + comments = relationship( "WorkflowComment", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), # type: ignore[has-type] @@ -7582,7 +7688,7 @@ class Workflow(Base, Dictifiable, RepresentById): back_populates="workflows", ) - step_count: column_property + step_count = None dict_collection_visible_keys = ["name", "has_cycles", "has_errors"] dict_element_visible_keys = ["name", "has_cycles", "has_errors"] @@ -7723,25 +7829,27 @@ class WorkflowStep(Base, RepresentById): __tablename__ = "workflow_step" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - subworkflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) - dynamic_tool_id = Column(Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True) - type: str = Column(String(64)) - tool_id = Column(TEXT) - tool_version = Column(TEXT) - tool_inputs = Column(JSONType) - tool_errors = Column(JSONType) - position = Column(MutableJSONType) - config = Column(JSONType) - order_index: int = Column(Integer) - when_expression = Column(JSONType) - uuid = Column(UUIDType) - label = Column(Unicode(255)) - temp_input_connections: Optional[InputConnDictType] - parent_comment_id = Column(Integer, ForeignKey("workflow_comment.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) + subworkflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=True) + dynamic_tool_id: 
Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("dynamic_tool.id"), index=True, nullable=True + ) + type: Mapped[Optional[str]] = mapped_column(String(64)) + tool_id: Mapped[Optional[str]] = mapped_column(TEXT) + tool_version: Mapped[Optional[str]] = mapped_column(TEXT) + tool_inputs: Mapped[Optional[bytes]] = mapped_column(JSONType) + tool_errors: Mapped[Optional[bytes]] = mapped_column(JSONType) + position: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + config: Mapped[Optional[bytes]] = mapped_column(JSONType) + order_index: Mapped[Optional[int]] = mapped_column(Integer) + when_expression: Mapped[Optional[bytes]] = mapped_column(JSONType) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType) + label: Mapped[Optional[str]] = mapped_column(Unicode(255)) + temp_input_connections = None + parent_comment_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_comment.id"), nullable=True) parent_comment = relationship( "WorkflowComment", @@ -7749,13 +7857,13 @@ class WorkflowStep(Base, RepresentById): back_populates="child_steps", ) - subworkflow: Optional[Workflow] = relationship( + subworkflow = relationship( "Workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.subworkflow_id), back_populates="parent_workflow_steps", ) dynamic_tool = relationship("DynamicTool", primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id)) - tags = relationship( + tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( "WorkflowStepTagAssociation", order_by=lambda: WorkflowStepTagAssociation.id, back_populates="workflow_step" ) annotations = relationship( @@ -7776,12 +7884,6 @@ class WorkflowStep(Base, RepresentById): cascade_backrefs=False, ) - # Injected attributes - # TODO: code using these should be refactored to not depend on these non-persistent fields - module: Optional["WorkflowModule"] - state: Optional["DefaultToolState"] - upgrade_messages: Optional[Dict] - STEP_TYPE_TO_INPUT_TYPE = { "data_input": "dataset", "data_collection_input": "dataset_collection", @@ -7793,6 +7895,11 @@ def __init__(self): self.uuid = uuid4() self._input_connections_by_name = None self._inputs_by_name = None + # Injected attributes + # TODO: code using these should be refactored to not depend on these non-persistent fields + self.module: Optional["WorkflowModule"] + self.state: Optional["DefaultToolState"] + self.upgrade_messages: Optional[Dict] @reconstructor def init_on_load(self): @@ -8039,16 +8146,16 @@ class WorkflowStepInput(Base, RepresentById): ), ) - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - name = Column(TEXT) - merge_type = Column(TEXT) - scatter_type = Column(TEXT) - value_from = Column(MutableJSONType) - value_from_type = Column(TEXT) - default_value = Column(MutableJSONType) - default_value_set = Column(Boolean, default=False) - runtime_value = Column(Boolean, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + merge_type: Mapped[Optional[str]] = mapped_column(TEXT) + scatter_type: Mapped[Optional[str]] = mapped_column(TEXT) + value_from: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + value_from_type: Mapped[Optional[str]] = mapped_column(TEXT) + default_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + default_value_set: Mapped[Optional[bool]] = 
mapped_column(Boolean, default=False) + runtime_value: Mapped[Optional[bool]] = mapped_column(Boolean, default=False) workflow_step = relationship( "WorkflowStep", @@ -8083,11 +8190,15 @@ def copy(self, copied_step): class WorkflowStepConnection(Base, RepresentById): __tablename__ = "workflow_step_connection" - id = Column(Integer, primary_key=True) - output_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - input_step_input_id = Column(Integer, ForeignKey("workflow_step_input.id"), index=True) - output_name = Column(TEXT) - input_subworkflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + output_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + input_step_input_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step_input.id"), index=True + ) + output_name: Mapped[Optional[str]] = mapped_column(TEXT) + input_subworkflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id"), index=True + ) input_step_input = relationship( "WorkflowStepInput", @@ -8139,11 +8250,11 @@ def copy(self): class WorkflowOutput(Base, Serializable): __tablename__ = "workflow_output" - id = Column(Integer, primary_key=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) - output_name = Column(String(255), nullable=True) - label = Column(Unicode(255)) - uuid = Column(UUIDType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_step_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + label: Mapped[Optional[str]] = mapped_column(Unicode(255)) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType) workflow_step = relationship( "WorkflowStep", back_populates="workflow_outputs", @@ -8180,15 +8291,15 @@ class WorkflowComment(Base, RepresentById): __tablename__ = "workflow_comment" - id = Column(Integer, primary_key=True) - order_index: int = Column(Integer) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - position = Column(MutableJSONType) - size = Column(JSONType) - type = Column(String(16)) - color = Column(String(16)) - data = Column(JSONType) - parent_comment_id = Column(Integer, ForeignKey("workflow_comment.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + order_index: Mapped[Optional[int]] = mapped_column(Integer) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) + position: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + size: Mapped[Optional[bytes]] = mapped_column(JSONType) + type: Mapped[Optional[str]] = mapped_column(String(16)) + color: Mapped[Optional[str]] = mapped_column(String(16)) + data: Mapped[Optional[bytes]] = mapped_column(JSONType) + parent_comment_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_comment.id"), nullable=True) workflow = relationship( "Workflow", @@ -8196,20 +8307,20 @@ class WorkflowComment(Base, RepresentById): back_populates="comments", ) - child_steps: List["WorkflowStep"] = relationship( + child_steps = relationship( "WorkflowStep", primaryjoin=(lambda: WorkflowStep.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", ) - parent_comment: "WorkflowComment" = relationship( + 
parent_comment = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.id == WorkflowComment.parent_comment_id), back_populates="child_comments", remote_side=[id], ) - child_comments: List["WorkflowComment"] = relationship( + child_comments = relationship( "WorkflowComment", primaryjoin=(lambda: WorkflowComment.parent_comment_id == WorkflowComment.id), back_populates="parent_comment", @@ -8251,20 +8362,20 @@ def from_dict(dict): class StoredWorkflowUserShareAssociation(Base, UserShareAssociation): __tablename__ = "stored_workflow_user_share_connection" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - user = relationship("User") + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + user: Mapped[User] = relationship("User") stored_workflow = relationship("StoredWorkflow", back_populates="users_shared_with") class StoredWorkflowMenuEntry(Base, RepresentById): __tablename__ = "stored_workflow_menu_entry" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - order_index = Column(Integer) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + order_index: Mapped[Optional[int]] = mapped_column(Integer) stored_workflow = relationship("StoredWorkflow") user = relationship( @@ -8281,15 +8392,15 @@ class StoredWorkflowMenuEntry(Base, RepresentById): class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializable): __tablename__ = "workflow_invocation" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now, index=True) - workflow_id = Column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) - state = Column(TrimmedString(64), index=True) - scheduler = Column(TrimmedString(255), index=True) - handler = Column(TrimmedString(255), index=True) - uuid = Column(UUIDType()) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, index=True, nullable=True) + workflow_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow.id"), index=True, nullable=False) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + scheduler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType()) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) history = relationship("History", back_populates="workflow_invocations") input_parameters = relationship( @@ -8321,7 +8432,7 @@ class 
WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializabl order_by=lambda: WorkflowInvocationStep.order_index, cascade_backrefs=False, ) - workflow: Workflow = relationship("Workflow") + workflow = relationship("Workflow") output_dataset_collections = relationship( "WorkflowInvocationOutputDatasetCollectionAssociation", back_populates="workflow_invocation", @@ -8403,7 +8514,7 @@ def set_state(self, state: InvocationState): if session and self.id and state not in priority_states: # generate statement that will not revert CANCELLING or CANCELLED back to anything non-terminal session.execute( - update(WorkflowInvocation.table) + update(WorkflowInvocation) .where( WorkflowInvocation.id == self.id, or_(~WorkflowInvocation.state.in_(priority_states), WorkflowInvocation.state.is_(None)), @@ -8432,7 +8543,7 @@ def cancel_invocation_steps(self): .filter(~Job.state.in_(Job.finished_states)) .with_for_update() ) - sa_session.execute(update(Job.table).where(Job.id.in_(job_subq)).values({"state": Job.states.DELETING})) + sa_session.execute(update(Job).where(Job.id.in_(job_subq)).values({"state": Job.states.DELETING})) job_collection_subq = ( select(Job.id) @@ -8448,9 +8559,7 @@ def cancel_invocation_steps(self): ) sa_session.execute( - update(Job.table) - .where(Job.table.c.id.in_(job_collection_subq.element)) - .values({"state": Job.states.DELETING}) + update(Job).where(Job.id.in_(job_collection_subq.element)).values({"state": Job.states.DELETING}) ) for invocation in self.subworkflow_invocations: @@ -8827,10 +8936,16 @@ def log_str(self): class WorkflowInvocationToSubworkflowInvocationAssociation(Base, Dictifiable, RepresentById): __tablename__ = "workflow_invocation_to_subworkflow_invocation_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_wfi"), index=True) - subworkflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_swi"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wfi_swi_ws")) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_wfi"), index=True + ) + subworkflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wfi_swi_swi"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wfi_swi_ws") + ) subworkflow_invocation = relationship( "WorkflowInvocation", @@ -8855,16 +8970,22 @@ class WorkflowInvocationToSubworkflowInvocationAssociation(Base, Dictifiable, Re class WorkflowInvocationMessage(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_message" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False) - reason = Column(String(32)) - details = Column(TrimmedString(255), nullable=True) - output_name = Column(String(255), nullable=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), nullable=True) - dependent_workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), nullable=True) - job_id = Column(Integer, ForeignKey("job.id"), nullable=True) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), nullable=True) - hdca_id = Column(Integer, 
ForeignKey("history_dataset_collection_association.id"), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[int] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False + ) + reason: Mapped[Optional[str]] = mapped_column(String(32)) + details: Mapped[Optional[str]] = mapped_column(TrimmedString(255), nullable=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), nullable=True) + dependent_workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id"), nullable=True + ) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), nullable=True) + hda_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), nullable=True) + hdca_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), nullable=True + ) workflow_invocation = relationship("WorkflowInvocation", back_populates="messages", lazy=True) workflow_step = relationship("WorkflowStep", foreign_keys=workflow_step_id, lazy=True) @@ -8928,15 +9049,19 @@ def is_split_configuration(self): class WorkflowInvocationStep(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_step" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) - state = Column(TrimmedString(64), index=True) - job_id = Column(Integer, ForeignKey("job.id"), index=True, nullable=True) - implicit_collection_jobs_id = Column(Integer, ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True) - action = Column(MutableJSONType, nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + workflow_invocation_id: Mapped[int] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True, nullable=False + ) + workflow_step_id: Mapped[int] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True, nullable=False) + state: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True, nullable=True) + implicit_collection_jobs_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("implicit_collection_jobs.id"), index=True, nullable=True + ) + action: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True) workflow_step = relationship("WorkflowStep") job = relationship("Job", back_populates="workflow_invocation_step", uselist=False) @@ -8968,7 +9093,7 @@ class WorkflowInvocationStep(Base, Dictifiable, Serializable): select(WorkflowStep.order_index).where(WorkflowStep.id == workflow_step_id).scalar_subquery() ) - subworkflow_invocation_id: column_property + subworkflow_invocation_id = None dict_collection_visible_keys = [ "id", @@ -9136,13 +9261,13 @@ class WorkflowRequestInputParameter(Base, Dictifiable, Serializable): __tablename__ = 
"workflow_request_input_parameters" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - name = Column(Unicode(255)) - value = Column(TEXT) - type = Column(Unicode(255)) + name: Mapped[Optional[str]] = mapped_column(Unicode(255)) + value: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(Unicode(255)) workflow_invocation = relationship("WorkflowInvocation", back_populates="input_parameters") dict_collection_visible_keys = ["id", "name", "value", "type"] @@ -9166,12 +9291,12 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_step_states" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True ) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - value = Column(MutableJSONType) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") workflow_invocation = relationship("WorkflowInvocation", back_populates="step_states") @@ -9189,11 +9314,13 @@ class WorkflowRequestToInputDatasetAssociation(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_to_input_dataset" - id = Column(Integer, primary_key=True) - name = Column(String(255)) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) workflow_step = relationship("WorkflowStep") dataset = relationship("HistoryDatasetAssociation") @@ -9217,11 +9344,15 @@ class WorkflowRequestToInputDatasetCollectionAssociation(Base, Dictifiable, Seri __tablename__ = "workflow_request_to_input_collection_dataset" - id = Column(Integer, primary_key=True) - name = Column(String(255)) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(String(255)) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( + 
Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) workflow_step = relationship("WorkflowStep") dataset_collection = relationship("HistoryDatasetCollectionAssociation") workflow_invocation = relationship("WorkflowInvocation", back_populates="input_dataset_collections") @@ -9244,10 +9375,12 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): __tablename__ = "workflow_request_input_step_parameter" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - parameter_value = Column(MutableJSONType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + parameter_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_step = relationship("WorkflowStep") workflow_invocation = relationship("WorkflowInvocation", back_populates="input_step_parameters") @@ -9266,11 +9399,13 @@ class WorkflowInvocationOutputDatasetAssociation(Base, Dictifiable, Serializable __tablename__ = "workflow_invocation_output_dataset_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id"), index=True) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_datasets") workflow_step = relationship("WorkflowStep") @@ -9293,13 +9428,17 @@ class WorkflowInvocationOutputDatasetCollectionAssociation(Base, Dictifiable, Se __tablename__ = "workflow_invocation_output_dataset_collection_association" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id", name="fk_wiodca_wii"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wiodca_wsi"), index=True) - dataset_collection_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id", name="fk_wiodca_wii"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wiodca_wsi"), index=True + ) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id", name="fk_wiodca_dci"), index=True ) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) + 
workflow_output_id = mapped_column(Integer, ForeignKey("workflow_output.id", name="fk_wiodca_woi"), index=True) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_dataset_collections") workflow_step = relationship("WorkflowStep") @@ -9324,11 +9463,13 @@ class WorkflowInvocationOutputValue(Base, Dictifiable, Serializable): __tablename__ = "workflow_invocation_output_value" - id = Column(Integer, primary_key=True) - workflow_invocation_id = Column(Integer, ForeignKey("workflow_invocation.id"), index=True) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id")) - workflow_output_id = Column(Integer, ForeignKey("workflow_output.id"), index=True) - value = Column(MutableJSONType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation.id"), index=True + ) + workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id")) + workflow_output_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_output.id"), index=True) + value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) workflow_invocation = relationship("WorkflowInvocation", back_populates="output_values") @@ -9363,10 +9504,12 @@ class WorkflowInvocationStepOutputDatasetAssociation(Base, Dictifiable, Represen __tablename__ = "workflow_invocation_step_output_dataset_association" - id = Column(Integer, primary_key=True) - workflow_invocation_step_id = Column(Integer, ForeignKey("workflow_invocation_step.id"), index=True) - dataset_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - output_name = Column(String(255), nullable=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_invocation_step.id"), index=True + ) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_datasets") dataset = relationship("HistoryDatasetAssociation") @@ -9378,15 +9521,17 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable __tablename__ = "workflow_invocation_step_output_dataset_collection_association" - id = Column(Integer, primary_key=True) - workflow_invocation_step_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + workflow_invocation_step_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("workflow_invocation_step.id", name="fk_wisodca_wisi"), index=True ) - workflow_step_id = Column(Integer, ForeignKey("workflow_step.id", name="fk_wisodca_wsi"), index=True) - dataset_collection_id = Column( + workflow_step_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("workflow_step.id", name="fk_wisodca_wsi"), index=True + ) + dataset_collection_id: Mapped[Optional[int]] = mapped_column( Integer, ForeignKey("history_dataset_collection_association.id", name="fk_wisodca_dci"), index=True ) - output_name = Column(String(255), nullable=True) + output_name: Mapped[Optional[str]] = mapped_column(String(255), nullable=True) workflow_invocation_step = relationship("WorkflowInvocationStep", back_populates="output_dataset_collections") dataset_collection = relationship("HistoryDatasetCollectionAssociation") @@ -9397,16 
+9542,20 @@ class WorkflowInvocationStepOutputDatasetCollectionAssociation(Base, Dictifiable class MetadataFile(Base, StorableObject, Serializable): __tablename__ = "metadata_file" - id = Column(Integer, primary_key=True) - name = Column(TEXT) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True) - lda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - object_store_id = Column(TrimmedString(255), index=True) - uuid = Column(UUIDType(), index=True) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[Optional[str]] = mapped_column(TEXT) + hda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True, nullable=True + ) + lda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True + ) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + object_store_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + uuid: Mapped[Optional[str]] = mapped_column(UUIDType(), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) history_dataset = relationship("HistoryDatasetAssociation") library_dataset = relationship("LibraryDatasetDatasetAssociation") @@ -9479,17 +9628,17 @@ def _serialize(self, id_encoder, serialization_options): class FormDefinition(Base, Dictifiable, RepresentById): __tablename__ = "form_definition" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TrimmedString(255), nullable=False) - desc = Column(TEXT) - form_definition_current_id = Column( + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + desc: Mapped[Optional[str]] = mapped_column(TEXT) + form_definition_current_id: Mapped[int] = mapped_column( Integer, ForeignKey("form_definition_current.id", use_alter=True), index=True, nullable=False ) - fields = Column(MutableJSONType) - type = Column(TrimmedString(255), index=True) - layout = Column(MutableJSONType) + fields: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) + type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True) + layout: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition_current = relationship( "FormDefinitionCurrent", back_populates="forms", @@ -9551,11 +9700,11 @@ def grid_fields(self, grid_index): class FormDefinitionCurrent(Base, RepresentById): __tablename__ = "form_definition_current" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - latest_form_id = Column(Integer, 
ForeignKey("form_definition.id"), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + latest_form_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) forms = relationship( "FormDefinition", back_populates="form_definition_current", @@ -9575,11 +9724,11 @@ def __init__(self, form_definition=None): class FormValues(Base, RepresentById): __tablename__ = "form_values" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - form_definition_id = Column(Integer, ForeignKey("form_definition.id"), index=True) - content = Column(MutableJSONType) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + form_definition_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("form_definition.id"), index=True) + content: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) form_definition = relationship( "FormDefinition", primaryjoin=(lambda: FormValues.form_definition_id == FormDefinition.id) ) @@ -9592,21 +9741,21 @@ def __init__(self, form_def=None, content=None): class UserAddress(Base, RepresentById): __tablename__ = "user_address" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - desc = Column(TrimmedString(255)) - name = Column(TrimmedString(255), nullable=False) - institution = Column(TrimmedString(255)) - address = Column(TrimmedString(255), nullable=False) - city = Column(TrimmedString(255), nullable=False) - state = Column(TrimmedString(255), nullable=False) - postal_code = Column(TrimmedString(255), nullable=False) - country = Column(TrimmedString(255), nullable=False) - phone = Column(TrimmedString(255)) - deleted = Column(Boolean, index=True, default=False) - purged = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + desc: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + name: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + institution: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + address: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + city: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + state: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + postal_code: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + country: Mapped[str] = mapped_column(TrimmedString(255), nullable=False) + phone: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, 
index=True, default=False) + purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) # `desc` needs to be fully qualified because it is shadowed by `desc` Column defined above # TODO: db migration to rename column, then use `desc` user = relationship("User", back_populates="addresses", order_by=sqlalchemy.desc("update_time")) @@ -9629,13 +9778,13 @@ def to_dict(self, trans): class PSAAssociation(Base, AssociationMixin, RepresentById): __tablename__ = "psa_association" - id = Column(Integer, primary_key=True) - server_url = Column(VARCHAR(255)) - handle = Column(VARCHAR(255)) - secret = Column(VARCHAR(255)) - issued = Column(Integer) - lifetime = Column(Integer) - assoc_type = Column(VARCHAR(64)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + server_url: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + handle: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + secret: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + issued: Mapped[Optional[int]] = mapped_column(Integer) + lifetime: Mapped[Optional[int]] = mapped_column(Integer) + assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9693,9 +9842,9 @@ class PSACode(Base, CodeMixin, RepresentById): __tablename__ = "psa_code" __table_args__ = (UniqueConstraint("code", "email"),) - id = Column(Integer, primary_key=True) - email = Column(VARCHAR(200)) - code = Column(VARCHAR(32)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + email: Mapped[Optional[str]] = mapped_column(VARCHAR(200)) + code: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9721,10 +9870,10 @@ def get_code(cls, code): class PSANonce(Base, NonceMixin, RepresentById): __tablename__ = "psa_nonce" - id = Column(Integer, primary_key=True) - server_url = Column(VARCHAR(255)) - timestamp = Column(Integer) - salt = Column(VARCHAR(40)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + server_url: Mapped[Optional[str]] = mapped_column(VARCHAR(255)) + timestamp: Mapped[Optional[int]] = mapped_column(Integer) + salt: Mapped[Optional[str]] = mapped_column(VARCHAR(40)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9759,11 +9908,11 @@ def use(cls, server_url, timestamp, salt): class PSAPartial(Base, PartialMixin, RepresentById): __tablename__ = "psa_partial" - id = Column(Integer, primary_key=True) - token = Column(VARCHAR(32)) - data = Column(TEXT) - next_step = Column(Integer) - backend = Column(VARCHAR(32)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + token: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) + data: Mapped[Optional[str]] = mapped_column(TEXT) + next_step: Mapped[Optional[int]] = mapped_column(Integer) + backend: Mapped[Optional[str]] = mapped_column(VARCHAR(32)) # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz sa_session = None @@ -9803,13 +9952,13 @@ class UserAuthnzToken(Base, UserMixin, RepresentById): __tablename__ = "oidc_user_authnz_tokens" __table_args__ = (UniqueConstraint("provider", "uid"),) - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - uid = Column(VARCHAR(255)) - provider = Column(VARCHAR(32)) - extra_data = Column(MutableJSONType, nullable=True) - lifetime = Column(Integer) - assoc_type = Column(VARCHAR(64)) + id: Mapped[int] = 
mapped_column(Integer, primary_key=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    uid: Mapped[Optional[str]] = mapped_column(VARCHAR(255))
+    provider: Mapped[Optional[str]] = mapped_column(VARCHAR(32))
+    extra_data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    lifetime: Mapped[Optional[int]] = mapped_column(Integer)
+    assoc_type: Mapped[Optional[str]] = mapped_column(VARCHAR(64))
     user = relationship("User", back_populates="social_auth")

     # This static property is set at: galaxy.authnz.psa_authnz.PSAAuthnz
@@ -9966,31 +10115,31 @@ class CustosAuthnzToken(Base, RepresentById):
         UniqueConstraint("external_user_id", "provider"),
     )

-    id = Column(Integer, primary_key=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"))
-    external_user_id = Column(String(255))
-    provider = Column(String(255))
-    access_token = Column(Text)
-    id_token = Column(Text)
-    refresh_token = Column(Text)
-    expiration_time = Column(DateTime)
-    refresh_expiration_time = Column(DateTime)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"))
+    external_user_id: Mapped[Optional[str]] = mapped_column(String(255))
+    provider: Mapped[Optional[str]] = mapped_column(String(255))
+    access_token: Mapped[Optional[str]] = mapped_column(Text)
+    id_token: Mapped[Optional[str]] = mapped_column(Text)
+    refresh_token: Mapped[Optional[str]] = mapped_column(Text)
+    expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    refresh_expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
     user = relationship("User", back_populates="custos_auth")


 class CloudAuthz(Base):
     __tablename__ = "cloudauthz"

-    id = Column(Integer, primary_key=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    provider = Column(String(255))
-    config = Column(MutableJSONType)
-    authn_id = Column(Integer, ForeignKey("oidc_user_authnz_tokens.id"), index=True)
-    tokens = Column(MutableJSONType)
-    last_update = Column(DateTime)
-    last_activity = Column(DateTime)
-    description = Column(TEXT)
-    create_time = Column(DateTime, default=now)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    provider: Mapped[Optional[str]] = mapped_column(String(255))
+    config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
+    authn_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("oidc_user_authnz_tokens.id"), index=True)
+    tokens: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
+    last_update: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    last_activity: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    description: Mapped[Optional[str]] = mapped_column(TEXT)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
     user = relationship("User", back_populates="cloudauthz")
     authn = relationship("UserAuthnzToken")
@@ -10018,18 +10167,18 @@ class Page(Base, HasTags, Dictifiable, RepresentById):
     __tablename__ = "page"
     __table_args__ = (Index("ix_page_slug", "slug", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False)
-    latest_revision_id = Column(
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False)
+    latest_revision_id: Mapped[Optional[int]] = mapped_column(
         Integer, ForeignKey("page_revision.id", use_alter=True, name="page_latest_revision_id_fk"), index=True
     )
-    title = Column(TEXT)
-    deleted = Column(Boolean, index=True, default=False)
-    importable = Column(Boolean, index=True, default=False)
-    slug = Column(TEXT)
-    published = Column(Boolean, index=True, default=False)
+    title: Mapped[Optional[str]] = mapped_column(TEXT)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    importable: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    slug: Mapped[Optional[str]] = mapped_column(TEXT)
+    published: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
     user = relationship("User")
     revisions = relationship(
         "PageRevision",
@@ -10043,7 +10192,9 @@ class Page(Base, HasTags, Dictifiable, RepresentById):
         primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id),  # type: ignore[has-type]
         lazy=False,
     )
-    tags = relationship("PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page")
+    tags: Mapped[List["PageTagAssociation"]] = relationship(
+        "PageTagAssociation", order_by=lambda: PageTagAssociation.id, back_populates="page"
+    )
     annotations = relationship(
         "PageAnnotationAssociation", order_by=lambda: PageAnnotationAssociation.id, back_populates="page"
     )
@@ -10054,7 +10205,7 @@ class Page(Base, HasTags, Dictifiable, RepresentById):
     )
     users_shared_with = relationship("PageUserShareAssociation", back_populates="page")

-    average_rating: column_property  # defined at the end of this module
+    average_rating = None

     # Set up proxy so that
     #   Page.users_shared_with
@@ -10097,13 +10248,13 @@ def email_hash(self):
 class PageRevision(Base, Dictifiable, RepresentById):
     __tablename__ = "page_revision"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    page_id = Column(Integer, ForeignKey("page.id"), index=True, nullable=False)
-    title = Column(TEXT)
-    content = Column(TEXT)
-    content_format = Column(TrimmedString(32))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    page_id: Mapped[int] = mapped_column(Integer, ForeignKey("page.id"), index=True, nullable=False)
+    title: Mapped[Optional[str]] = mapped_column(TEXT)
+    content: Mapped[Optional[str]] = mapped_column(TEXT)
+    content_format: Mapped[Optional[str]] = mapped_column(TrimmedString(32))
     page = relationship("Page", primaryjoin=(lambda: Page.id == PageRevision.page_id))
     DEFAULT_CONTENT_FORMAT = "html"
     dict_element_visible_keys = ["id", "page_id", "title", "content", "content_format"]
@@ -10121,10 +10272,10 @@ def to_dict(self, view="element"):
 class PageUserShareAssociation(Base, UserShareAssociation):
     __tablename__ = "page_user_share_association"

-    id = Column(Integer, primary_key=True)
-    page_id = Column(Integer, ForeignKey("page.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user = relationship("User")
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user: Mapped[User] = relationship("User")
     page = relationship("Page", back_populates="users_shared_with")
@@ -10135,22 +10286,22 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById):
         Index("ix_visualization_slug", "slug", mysql_length=200),
     )

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False)
-    latest_revision_id = Column(
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True, nullable=False)
+    latest_revision_id: Mapped[Optional[int]] = mapped_column(
         Integer,
         ForeignKey("visualization_revision.id", use_alter=True, name="visualization_latest_revision_id_fk"),
         index=True,
     )
-    title = Column(TEXT)
-    type = Column(TEXT)
-    dbkey = Column(TEXT)
-    deleted = Column(Boolean, default=False, index=True)
-    importable = Column(Boolean, default=False, index=True)
-    slug = Column(TEXT)
-    published = Column(Boolean, default=False, index=True)
+    title: Mapped[Optional[str]] = mapped_column(TEXT)
+    type: Mapped[Optional[str]] = mapped_column(TEXT)
+    dbkey: Mapped[Optional[str]] = mapped_column(TEXT)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True)
+    importable: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True)
+    slug: Mapped[Optional[str]] = mapped_column(TEXT)
+    published: Mapped[Optional[bool]] = mapped_column(Boolean, default=False, index=True)

     user = relationship("User")
     revisions = relationship(
@@ -10166,7 +10317,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById):
         primaryjoin=(lambda: Visualization.latest_revision_id == VisualizationRevision.id),
         lazy=False,
     )
-    tags = relationship(
+    tags: Mapped[List["VisualizationTagAssociation"]] = relationship(
         "VisualizationTagAssociation", order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization"
     )
     annotations = relationship(
@@ -10181,7 +10332,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById):
     )
     users_shared_with = relationship("VisualizationUserShareAssociation", back_populates="visualization")

-    average_rating: column_property  # defined at the end of this module
+    average_rating = None

     # Set up proxy so that
     #   Visualization.users_shared_with
@@ -10249,13 +10400,13 @@ class VisualizationRevision(Base, RepresentById):
     __tablename__ = "visualization_revision"
     __table_args__ = (Index("ix_visualization_revision_dbkey", "dbkey", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True, nullable=False)
-    title = Column(TEXT)
-    dbkey = Column(TEXT)
-    config = Column(MutableJSONType)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    visualization_id: Mapped[int] = mapped_column(Integer, ForeignKey("visualization.id"), index=True, nullable=False)
+    title: Mapped[Optional[str]] = mapped_column(TEXT)
+    dbkey: Mapped[Optional[str]] = mapped_column(TEXT)
+    config: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
     visualization = relationship(
         "Visualization",
         back_populates="revisions",
@@ -10277,10 +10428,10 @@ def copy(self, visualization=None):
 class VisualizationUserShareAssociation(Base, UserShareAssociation):
     __tablename__ = "visualization_user_share_association"

-    id = Column(Integer, primary_key=True)
-    visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user = relationship("User")
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user: Mapped[User] = relationship("User")
     visualization = relationship("Visualization", back_populates="users_shared_with")
@@ -10288,10 +10439,10 @@ class Tag(Base, RepresentById):
     __tablename__ = "tag"
     __table_args__ = (UniqueConstraint("name"),)

-    id = Column(Integer, primary_key=True)
-    type = Column(Integer)
-    parent_id = Column(Integer, ForeignKey("tag.id"))
-    name = Column(TrimmedString(255))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    type: Mapped[Optional[int]] = mapped_column(Integer)
+    parent_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"))
+    name: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
     children = relationship("Tag", back_populates="parent")
     parent = relationship("Tag", back_populates="children", remote_side=[id])
@@ -10302,8 +10453,8 @@ def __str__(self):
 class ItemTagAssociation(Dictifiable):
     dict_collection_visible_keys = ["id", "user_tname", "user_value"]
     dict_element_visible_keys = dict_collection_visible_keys
-    user_tname: Column
-    user_value = Column(TrimmedString(255), index=True)
+    user_tname: Mapped[Optional[str]]
+    user_value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)

     def __init_subclass__(cls, **kwargs):
         super().__init_subclass__(**kwargs)
@@ -10323,12 +10474,12 @@ def copy(self, cls=None):
 class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "history_tag_association"

-    id = Column(Integer, primary_key=True)
-    history_id = Column(Integer, ForeignKey("history.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     history = relationship("History", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10337,12 +10488,14 @@ class HistoryTagAssociation(Base, ItemTagAssociation, RepresentById):
 class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "history_dataset_association_tag_association"

-    id = Column(Integer, primary_key=True)
-    history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_dataset_association_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("history_dataset_association.id"), index=True
+    )
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10351,14 +10504,14 @@ class HistoryDatasetAssociationTagAssociation(Base, ItemTagAssociation, Represen
 class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "library_dataset_dataset_association_tag_association"

-    id = Column(Integer, primary_key=True)
-    library_dataset_dataset_association_id = Column(
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    library_dataset_dataset_association_id: Mapped[Optional[int]] = mapped_column(
         Integer, ForeignKey("library_dataset_dataset_association.id"), index=True
     )
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     library_dataset_dataset_association = relationship("LibraryDatasetDatasetAssociation", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10367,12 +10520,12 @@ class LibraryDatasetDatasetAssociationTagAssociation(Base, ItemTagAssociation, R
 class PageTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "page_tag_association"

-    id = Column(Integer, primary_key=True)
-    page_id = Column(Integer, ForeignKey("page.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     page = relationship("Page", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10381,12 +10534,12 @@ class PageTagAssociation(Base, ItemTagAssociation, RepresentById):
 class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "workflow_step_tag_association"

-    id = Column(Integer, primary_key=True)
-    workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     workflow_step = relationship("WorkflowStep", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10395,12 +10548,12 @@ class WorkflowStepTagAssociation(Base, ItemTagAssociation, RepresentById):
 class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "stored_workflow_tag_association"

-    id = Column(Integer, primary_key=True)
-    stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     stored_workflow = relationship("StoredWorkflow", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10409,12 +10562,12 @@ class StoredWorkflowTagAssociation(Base, ItemTagAssociation, RepresentById):
 class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "visualization_tag_association"

-    id = Column(Integer, primary_key=True)
-    visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     visualization = relationship("Visualization", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10423,12 +10576,14 @@ class VisualizationTagAssociation(Base, ItemTagAssociation, RepresentById):
 class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "history_dataset_collection_tag_association"

-    id = Column(Integer, primary_key=True)
-    history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_dataset_collection_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("history_dataset_collection_association.id"), index=True
+    )
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10437,12 +10592,14 @@ class HistoryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent
 class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "library_dataset_collection_tag_association"

-    id = Column(Integer, primary_key=True)
-    library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    library_dataset_collection_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("library_dataset_collection_association.id"), index=True
+    )
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="tags")
     tag = relationship("Tag")
     user = relationship("User")
@@ -10451,12 +10608,12 @@ class LibraryDatasetCollectionTagAssociation(Base, ItemTagAssociation, Represent
 class ToolTagAssociation(Base, ItemTagAssociation, RepresentById):
     __tablename__ = "tool_tag_association"

-    id = Column(Integer, primary_key=True)
-    tool_id = Column(TrimmedString(255), index=True)
-    tag_id = Column(Integer, ForeignKey("tag.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    user_tname = Column(TrimmedString(255), index=True)
-    value = Column(TrimmedString(255), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    tool_id: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    tag_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("tag.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    user_tname: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    value: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     tag = relationship("Tag")
     user = relationship("User")
@@ -10466,10 +10623,10 @@ class HistoryAnnotationAssociation(Base, RepresentById):
     __tablename__ = "history_annotation_association"
     __table_args__ = (Index("ix_history_anno_assoc_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    history_id = Column(Integer, ForeignKey("history.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     history = relationship("History", back_populates="annotations")
     user = relationship("User")
@@ -10478,10 +10635,12 @@ class HistoryDatasetAssociationAnnotationAssociation(Base, RepresentById):
     __tablename__ = "history_dataset_association_annotation_association"
     __table_args__ = (Index("ix_history_dataset_anno_assoc_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_dataset_association_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("history_dataset_association.id"), index=True
+    )
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     hda = relationship("HistoryDatasetAssociation", back_populates="annotations")
     user = relationship("User")
@@ -10490,10 +10649,10 @@ class StoredWorkflowAnnotationAssociation(Base, RepresentById):
     __tablename__ = "stored_workflow_annotation_association"
     __table_args__ = (Index("ix_stored_workflow_ann_assoc_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     stored_workflow = relationship("StoredWorkflow", back_populates="annotations")
     user = relationship("User")
@@ -10502,10 +10661,10 @@ class WorkflowStepAnnotationAssociation(Base, RepresentById):
     __tablename__ = "workflow_step_annotation_association"
     __table_args__ = (Index("ix_workflow_step_ann_assoc_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    workflow_step_id = Column(Integer, ForeignKey("workflow_step.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    workflow_step_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("workflow_step.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     workflow_step = relationship("WorkflowStep", back_populates="annotations")
     user = relationship("User")
@@ -10514,10 +10673,10 @@ class PageAnnotationAssociation(Base, RepresentById):
     __tablename__ = "page_annotation_association"
     __table_args__ = (Index("ix_page_annotation_association_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    page_id = Column(Integer, ForeignKey("page.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     page = relationship("Page", back_populates="annotations")
     user = relationship("User")
@@ -10526,10 +10685,10 @@ class VisualizationAnnotationAssociation(Base, RepresentById):
     __tablename__ = "visualization_annotation_association"
     __table_args__ = (Index("ix_visualization_annotation_association_annotation", "annotation", mysql_length=200),)

-    id = Column(Integer, primary_key=True)
-    visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     visualization = relationship("Visualization", back_populates="annotations")
     user = relationship("User")
@@ -10537,10 +10696,12 @@ class VisualizationAnnotationAssociation(Base, RepresentById):
 class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentById):
     __tablename__ = "history_dataset_collection_annotation_association"

-    id = Column(Integer, primary_key=True)
-    history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    history_dataset_collection_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("history_dataset_collection_association.id"), index=True
+    )
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     history_dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="annotations")
     user = relationship("User")
@@ -10548,10 +10709,12 @@ class HistoryDatasetCollectionAssociationAnnotationAssociation(Base, RepresentBy
 class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById):
     __tablename__ = "library_dataset_collection_annotation_association"

-    id = Column(Integer, primary_key=True)
-    library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True)
-    user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True)
-    annotation = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    library_dataset_collection_id: Mapped[Optional[int]] = mapped_column(
+        Integer, ForeignKey("library_dataset_collection_association.id"), index=True
+    )
+    user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True)
+    annotation: Mapped[Optional[str]] = mapped_column(TEXT)
     dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="annotations")
     user = relationship("User")
@@ -10559,13 +10722,13 @@ class LibraryDatasetCollectionAnnotationAssociation(Base, RepresentById):
 class Vault(Base):
     __tablename__ = "vault"

-    key = Column(Text, primary_key=True)
-    parent_key = Column(Text, ForeignKey(key), index=True, nullable=True)
+    key: Mapped[str] = mapped_column(Text, primary_key=True)
+    parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True, nullable=True)
     children = relationship("Vault", back_populates="parent")
     parent = relationship("Vault", back_populates="children", remote_side=[key])
-    value = Column(Text, nullable=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
+    value: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)


 # Item rating classes.
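[Editor's note: the hunks above and below all apply the same mechanical recipe, so a compact illustration may help review. The following is a minimal sketch, not part of the diff; the `Example` table is hypothetical. Under SQLAlchemy 2.0 declarative typing, the `Mapped[...]` annotation drives both the Python type and nullability: `Mapped[Optional[X]]` implies `nullable=True`, while an explicit `nullable=True` keyword overrides a non-Optional annotation, which is why the `create_time: Mapped[datetime]` columns in this diff carry `nullable=True` to preserve the legacy DDL.]

from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, Integer, Text
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.sql import func


class Base(DeclarativeBase):
    pass


class Example(Base):  # hypothetical table, for illustration only
    __tablename__ = "example"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Optional[...] annotation => nullable=True is inferred; no keyword needed.
    annotation: Mapped[Optional[str]] = mapped_column(Text)
    # Non-Optional annotation, but the explicit keyword wins, so the column
    # stays nullable in the schema, matching the pre-migration DDL.
    create_time: Mapped[datetime] = mapped_column(DateTime, default=func.now(), nullable=True)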
@@ -10585,10 +10748,10 @@ def _set_item(self, item): class HistoryRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_rating_association" - id = Column(Integer, primary_key=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) history = relationship("History", back_populates="ratings") user = relationship("User") @@ -10600,10 +10763,12 @@ def _set_item(self, history): class HistoryDatasetAssociationRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_dataset_association_rating_association" - id = Column(Integer, primary_key=True) - history_dataset_association_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_association_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) history_dataset_association = relationship("HistoryDatasetAssociation", back_populates="ratings") user = relationship("User") @@ -10615,10 +10780,10 @@ def _set_item(self, history_dataset_association): class StoredWorkflowRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "stored_workflow_rating_association" - id = Column(Integer, primary_key=True) - stored_workflow_id = Column(Integer, ForeignKey("stored_workflow.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + stored_workflow_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("stored_workflow.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) stored_workflow = relationship("StoredWorkflow", back_populates="ratings") user = relationship("User") @@ -10630,10 +10795,10 @@ def _set_item(self, stored_workflow): class PageRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "page_rating_association" - id = Column(Integer, primary_key=True) - page_id = Column(Integer, ForeignKey("page.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + page_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("page.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) page = relationship("Page", back_populates="ratings") user = relationship("User") @@ -10645,10 +10810,10 @@ def _set_item(self, page): class 
VisualizationRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "visualization_rating_association" - id = Column(Integer, primary_key=True) - visualization_id = Column(Integer, ForeignKey("visualization.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + visualization_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("visualization.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) visualization = relationship("Visualization", back_populates="ratings") user = relationship("User") @@ -10660,10 +10825,12 @@ def _set_item(self, visualization): class HistoryDatasetCollectionRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "history_dataset_collection_rating_association" - id = Column(Integer, primary_key=True) - history_dataset_collection_id = Column(Integer, ForeignKey("history_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + history_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("history_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) dataset_collection = relationship("HistoryDatasetCollectionAssociation", back_populates="ratings") user = relationship("User") @@ -10675,10 +10842,12 @@ def _set_item(self, dataset_collection): class LibraryDatasetCollectionRatingAssociation(ItemRatingAssociation, RepresentById): __tablename__ = "library_dataset_collection_rating_association" - id = Column(Integer, primary_key=True) - library_dataset_collection_id = Column(Integer, ForeignKey("library_dataset_collection_association.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - rating = Column(Integer, index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + library_dataset_collection_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_collection_association.id"), index=True + ) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + rating: Mapped[Optional[int]] = mapped_column(Integer, index=True) dataset_collection = relationship("LibraryDatasetCollectionAssociation", back_populates="ratings") user = relationship("User") @@ -10691,11 +10860,11 @@ def _set_item(self, dataset_collection): class DataManagerHistoryAssociation(Base, RepresentById): __tablename__ = "data_manager_history_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - history_id = Column(Integer, ForeignKey("history.id"), index=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + history_id: Mapped[Optional[int]] 
= mapped_column(Integer, ForeignKey("history.id"), index=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) history = relationship("History") user = relationship("User", back_populates="data_manager_histories") @@ -10704,21 +10873,21 @@ class DataManagerJobAssociation(Base, RepresentById): __tablename__ = "data_manager_job_association" __table_args__ = (Index("ix_data_manager_job_association_data_manager_id", "data_manager_id", mysql_length=200),) - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, index=True, default=now, onupdate=now) - job_id = Column(Integer, ForeignKey("job.id"), index=True) - data_manager_id = Column(TEXT) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + update_time: Mapped[datetime] = mapped_column(DateTime, index=True, default=now, onupdate=now, nullable=True) + job_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("job.id"), index=True) + data_manager_id: Mapped[Optional[str]] = mapped_column(TEXT) job = relationship("Job", back_populates="data_manager_association", uselist=False) class UserPreference(Base, RepresentById): __tablename__ = "user_preference" - id = Column(Integer, primary_key=True) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - name = Column(Unicode(255), index=True) - value = Column(Text) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + name: Mapped[Optional[str]] = mapped_column(Unicode(255), index=True) + value: Mapped[Optional[str]] = mapped_column(Text) def __init__(self, name=None, value=None): # Do not remove this constructor: it is set as the creator for the User.preferences @@ -10730,25 +10899,25 @@ def __init__(self, name=None, value=None): class UserAction(Base, RepresentById): __tablename__ = "user_action" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - session_id = Column(Integer, ForeignKey("galaxy_session.id"), index=True) - action = Column(Unicode(255)) - context = Column(Unicode(512)) - params = Column(Unicode(1024)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + session_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_session.id"), index=True) + action: Mapped[Optional[str]] = mapped_column(Unicode(255)) + context: Mapped[Optional[str]] = mapped_column(Unicode(512)) + params: Mapped[Optional[str]] = mapped_column(Unicode(1024)) user = relationship("User") class APIKeys(Base, RepresentById): __tablename__ = "api_keys" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - user_id = Column(Integer, ForeignKey("galaxy_user.id"), index=True) - key = Column(TrimmedString(32), index=True, unique=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + user_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("galaxy_user.id"), index=True) + key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), 
index=True, unique=True) user = relationship("User", back_populates="api_keys") - deleted = Column(Boolean, index=True, server_default=false(), nullable=False) + deleted: Mapped[bool] = mapped_column(Boolean, index=True, server_default=false(), nullable=False) def copy_list(lst, *args, **kwds): @@ -10781,90 +10950,112 @@ def _prepare_metadata_for_serialization(id_encoder, serialization_options, metad class CleanupEvent(Base): __tablename__ = "cleanup_event" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - message = Column(TrimmedString(1024)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + message: Mapped[Optional[str]] = mapped_column(TrimmedString(1024)) class CleanupEventDatasetAssociation(Base): __tablename__ = "cleanup_event_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - dataset_id = Column(Integer, ForeignKey("dataset.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("dataset.id"), index=True) class CleanupEventMetadataFileAssociation(Base): __tablename__ = "cleanup_event_metadata_file_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - metadata_file_id = Column(Integer, ForeignKey("metadata_file.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + metadata_file_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("metadata_file.id"), index=True) class CleanupEventHistoryAssociation(Base): __tablename__ = "cleanup_event_history_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - history_id = Column(Integer, ForeignKey("history.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + history_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history.id"), index=True) class CleanupEventHistoryDatasetAssociationAssociation(Base): __tablename__ = "cleanup_event_hda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - hda_id = Column(Integer, ForeignKey("history_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + 
cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + hda_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("history_dataset_association.id"), index=True) class CleanupEventLibraryAssociation(Base): __tablename__ = "cleanup_event_library_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_id = Column(Integer, ForeignKey("library.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library.id"), index=True) class CleanupEventLibraryFolderAssociation(Base): __tablename__ = "cleanup_event_library_folder_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_folder_id = Column(Integer, ForeignKey("library_folder.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_folder_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_folder.id"), index=True) class CleanupEventLibraryDatasetAssociation(Base): __tablename__ = "cleanup_event_library_dataset_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - library_dataset_id = Column(Integer, ForeignKey("library_dataset.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + library_dataset_id: Mapped[Optional[int]] = mapped_column(Integer, ForeignKey("library_dataset.id"), index=True) class CleanupEventLibraryDatasetDatasetAssociationAssociation(Base): __tablename__ = "cleanup_event_ldda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - ldda_id = Column(Integer, ForeignKey("library_dataset_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + ldda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("library_dataset_dataset_association.id"), index=True + ) class CleanupEventImplicitlyConvertedDatasetAssociationAssociation(Base): __tablename__ = "cleanup_event_icda_association" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - cleanup_event_id = 
Column(Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True) - icda_id = Column(Integer, ForeignKey("implicitly_converted_dataset_association.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True) + cleanup_event_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("cleanup_event.id"), index=True, nullable=True + ) + icda_id: Mapped[Optional[int]] = mapped_column( + Integer, ForeignKey("implicitly_converted_dataset_association.id"), index=True + ) class CeleryUserRateLimit(Base): @@ -10875,8 +11066,8 @@ class CeleryUserRateLimit(Base): __tablename__ = "celery_user_rate_limit" - user_id = Column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) - last_scheduled_time = Column(DateTime, nullable=False) + user_id = mapped_column(Integer, ForeignKey("galaxy_user.id", ondelete="CASCADE"), primary_key=True) + last_scheduled_time: Mapped[datetime] = mapped_column(DateTime, nullable=False) def __repr__(self): return ( @@ -11148,7 +11339,7 @@ def __repr__(self): # ---------------------------------------------------------------------------------------- # The following statements must not precede the mapped models defined above. -Job.any_output_dataset_collection_instances_deleted = column_property( +Job.any_output_dataset_collection_instances_deleted = column_property( # type:ignore[assignment] exists(HistoryDatasetCollectionAssociation.id).where( and_( Job.id == JobToOutputDatasetCollectionAssociation.job_id, @@ -11158,7 +11349,7 @@ def __repr__(self): ) ) -Job.any_output_dataset_deleted = column_property( +Job.any_output_dataset_deleted = column_property( # type:ignore[assignment] exists(HistoryDatasetAssociation.id).where( and_( Job.id == JobToOutputDatasetAssociation.job_id, @@ -11168,44 +11359,44 @@ def __repr__(self): ) ) -History.average_rating = column_property( +History.average_rating = column_property( # type:ignore[assignment] select(func.avg(HistoryRatingAssociation.rating)) .where(HistoryRatingAssociation.history_id == History.id) .scalar_subquery(), deferred=True, ) -History.users_shared_with_count = column_property( +History.users_shared_with_count = column_property( # type:ignore[assignment] select(func.count(HistoryUserShareAssociation.id)) .where(History.id == HistoryUserShareAssociation.history_id) .scalar_subquery(), deferred=True, ) -Page.average_rating = column_property( +Page.average_rating = column_property( # type:ignore[assignment] select(func.avg(PageRatingAssociation.rating)).where(PageRatingAssociation.page_id == Page.id).scalar_subquery(), deferred=True, ) -StoredWorkflow.average_rating = column_property( +StoredWorkflow.average_rating = column_property( # type:ignore[assignment] select(func.avg(StoredWorkflowRatingAssociation.rating)) .where(StoredWorkflowRatingAssociation.stored_workflow_id == StoredWorkflow.id) .scalar_subquery(), deferred=True, ) -Visualization.average_rating = column_property( +Visualization.average_rating = column_property( # type:ignore[assignment] select(func.avg(VisualizationRatingAssociation.rating)) .where(VisualizationRatingAssociation.visualization_id == Visualization.id) .scalar_subquery(), deferred=True, ) -Workflow.step_count = column_property( +Workflow.step_count = column_property( # type:ignore[assignment] select(func.count(WorkflowStep.id)).where(Workflow.id == WorkflowStep.workflow_id).scalar_subquery(), deferred=True ) -WorkflowInvocationStep.subworkflow_invocation_id = 
column_property( +WorkflowInvocationStep.subworkflow_invocation_id = column_property( # type:ignore[assignment] select(WorkflowInvocationToSubworkflowInvocationAssociation.subworkflow_invocation_id) .where( and_( @@ -11220,7 +11411,7 @@ def __repr__(self): # Set up proxy so that this syntax is possible: # .preferences[pref_name] = pref_value -User.preferences = association_proxy("_preferences", "value", creator=UserPreference) +User.preferences = association_proxy("_preferences", "value", creator=UserPreference) # type:ignore[assignment] # Optimized version of getting the current Galaxy session. # See https://github.com/sqlalchemy/sqlalchemy/discussions/7638 for approach diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py index 707dc486a734..8cfe259a4dc0 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -53,8 +53,8 @@ def transaction(session: Union[scoped_session, Session, "SessionlessContext"]): yield return # exit: can't use as a Session - if not session.in_transaction(): - with session.begin(): + if not session.in_transaction(): # type:ignore[union-attr] + with session.begin(): # type:ignore[union-attr] yield else: yield @@ -197,7 +197,9 @@ def ensure_object_added_to_session(object_to_add, *, object_in_session=None, ses if session: session.add(object_to_add) return True - if object_in_session and object_session(object_in_session): - object_session(object_in_session).add(object_to_add) - return True + if object_in_session: + session = object_session(object_in_session) + if session: + session.add(object_to_add) + return True return False diff --git a/lib/galaxy/model/database_utils.py b/lib/galaxy/model/database_utils.py index 123bd22a12e6..403be8d1242f 100644 --- a/lib/galaxy/model/database_utils.py +++ b/lib/galaxy/model/database_utils.py @@ -15,7 +15,7 @@ from sqlalchemy.orm import object_session from sqlalchemy.sql.compiler import IdentifierPreparer from sqlalchemy.sql.expression import ( - ClauseElement, + Executable, text, ) @@ -163,7 +163,7 @@ def supports_skip_locked(engine: Engine) -> bool: return _statement_executed_without_error(stmt, engine) -def _statement_executed_without_error(statement: ClauseElement, engine: Engine) -> bool: +def _statement_executed_without_error(statement: Executable, engine: Engine) -> bool: # Execute statement against database, then issue a rollback. 
try: with engine.connect() as conn, conn.begin() as trans: @@ -192,6 +192,6 @@ def ensure_object_added_to_session(object_to_add, *, object_in_session=None, ses session.add(object_to_add) return True if object_in_session and object_session(object_in_session): - object_session(object_in_session).add(object_to_add) + object_session(object_in_session).add(object_to_add) # type:ignore[union-attr] return True return False diff --git a/lib/galaxy/model/deferred.py b/lib/galaxy/model/deferred.py index e72d4e126aca..d57622f700ba 100644 --- a/lib/galaxy/model/deferred.py +++ b/lib/galaxy/model/deferred.py @@ -9,9 +9,11 @@ Union, ) -from sqlalchemy.orm import object_session +from sqlalchemy.orm import ( + object_session, + Session, +) from sqlalchemy.orm.exc import DetachedInstanceError -from sqlalchemy.orm.scoping import scoped_session from galaxy.datatypes.sniff import ( convert_function, @@ -75,7 +77,7 @@ def __init__( object_store_populator: Optional[ObjectStorePopulator] = None, transient_path_mapper: Optional[TransientPathMapper] = None, file_sources: Optional[ConfiguredFileSources] = None, - sa_session: Optional[scoped_session] = None, + sa_session: Optional[Session] = None, ): """Constructor for DatasetInstanceMaterializer. @@ -123,6 +125,7 @@ def ensure_materialized( sa_session = self._sa_session if sa_session is None: sa_session = object_session(dataset_instance) + assert sa_session sa_session.add(materialized_dataset) with transaction(sa_session): sa_session.commit() @@ -153,6 +156,7 @@ def ensure_materialized( sa_session = self._sa_session if sa_session is None: sa_session = object_session(dataset_instance) + assert sa_session sa_session.add(materialized_dataset_instance) materialized_dataset_instance.copy_from( dataset_instance, new_dataset=materialized_dataset, include_tags=attached, include_metadata=True @@ -174,12 +178,12 @@ def ensure_materialized( def _stream_source(self, target_source: DatasetSource, datatype) -> str: path = stream_url_to_file(target_source.source_uri, file_sources=self._file_sources) - transform = target_source.transform or [] + transform = target_source.transform or [] # type:ignore[var-annotated] to_posix_lines = False spaces_to_tabs = False datatype_groom = False for transform_action in transform: - action = transform_action["action"] + action = transform_action["action"] # type:ignore[index] if action == "to_posix_lines": to_posix_lines = True elif action == "spaces_to_tabs": @@ -278,7 +282,7 @@ def materializer_factory( transient_path_mapper: Optional[TransientPathMapper] = None, transient_directory: Optional[str] = None, file_sources: Optional[ConfiguredFileSources] = None, - sa_session: Optional[scoped_session] = None, + sa_session: Optional[Session] = None, ) -> DatasetInstanceMaterializer: if object_store_populator is None and object_store is not None: object_store_populator = ObjectStorePopulator(object_store, None) diff --git a/lib/galaxy/model/item_attrs.py b/lib/galaxy/model/item_attrs.py index e361d5458441..37a467f5dde7 100644 --- a/lib/galaxy/model/item_attrs.py +++ b/lib/galaxy/model/item_attrs.py @@ -180,12 +180,12 @@ def _get_annotation_assoc_class(item): def get_foreign_key(source_class, target_class): """Returns foreign key in source class that references target class.""" target_fk = None - for fk in source_class.table.foreign_keys: - if fk.references(target_class.table): + for fk in source_class.__table__.foreign_keys: + if fk.references(target_class.__table__): target_fk = fk break if not target_fk: - raise Exception(f"No foreign key found 
between objects: {source_class.table}, {target_class.table}") + raise Exception(f"No foreign key found between objects: {source_class.__table__}, {target_class.__table__}") return target_fk diff --git a/lib/galaxy/model/migrations/base.py b/lib/galaxy/model/migrations/base.py index 973f12224936..e03257d3f522 100644 --- a/lib/galaxy/model/migrations/base.py +++ b/lib/galaxy/model/migrations/base.py @@ -33,7 +33,6 @@ ) from sqlalchemy.engine import ( Connection, - CursorResult, Engine, ) @@ -398,10 +397,11 @@ def _load_db_metadata(self, conn: Connection) -> MetaData: metadata.reflect(bind=conn) return metadata - def _load_sqlalchemymigrate_version(self, conn: Connection) -> CursorResult: + def _load_sqlalchemymigrate_version(self, conn: Connection) -> Optional[int]: if self.has_sqlalchemymigrate_version_table(): sql = text(f"select version from {SQLALCHEMYMIGRATE_TABLE}") return conn.execute(sql).scalar() + return None def pop_arg_from_args(args: List[str], arg_name) -> Optional[str]: diff --git a/lib/galaxy/model/orm/engine_factory.py b/lib/galaxy/model/orm/engine_factory.py index 886a4e3462ab..374c53219c73 100644 --- a/lib/galaxy/model/orm/engine_factory.py +++ b/lib/galaxy/model/orm/engine_factory.py @@ -12,6 +12,7 @@ exc, ) from sqlalchemy.engine import Engine +from sqlalchemy.pool import NullPool log = logging.getLogger(__name__) @@ -101,8 +102,13 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema pass engine_options = engine_options or {} - engine_options = set_sqlite_connect_args(engine_options, url) - engine = create_engine(url, **engine_options, future=True) + if url.startswith("sqlite://"): + set_sqlite_connect_args(engine_options, url) + + if url.startswith("sqlite://") and url not in ("sqlite:///:memory:", "sqlite://"): + engine = create_engine(url, **engine_options, poolclass=NullPool, future=True) + else: + engine = create_engine(url, **engine_options, future=True) # Prevent sharing connection across fork: https://docs.sqlalchemy.org/en/14/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork register_after_fork(engine, lambda e: e.dispose()) @@ -123,13 +129,11 @@ def checkout(dbapi_connection, connection_record, connection_proxy): return engine -def set_sqlite_connect_args(engine_options: Dict, url: str): +def set_sqlite_connect_args(engine_options: Dict, url: str) -> None: """ Add or update `connect_args` in `engine_options` if db is sqlite. Set check_same_thread to False for sqlite, handled by request-specific session. See https://fastapi.tiangolo.com/tutorial/sql-databases/#note """ - if url.startswith("sqlite://"): - connect_args = engine_options.setdefault("connect_args", {}) - connect_args["check_same_thread"] = False - return engine_options + connect_args = engine_options.setdefault("connect_args", {}) + connect_args["check_same_thread"] = False diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 38564174ab34..58a18dea7d13 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -704,7 +704,7 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs): # Try to set metadata directly. 
diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py
index 38564174ab34..58a18dea7d13 100644
--- a/lib/galaxy/model/store/__init__.py
+++ b/lib/galaxy/model/store/__init__.py
@@ -704,7 +704,7 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs):
                         # Try to set metadata directly. @mvdbeek thinks we should only record the datasets
                         try:
                             if dataset_instance.has_metadata_files:
-                                dataset_instance.datatype.set_meta(dataset_instance)
+                                dataset_instance.datatype.set_meta(dataset_instance)  # type:ignore[arg-type]
                         except Exception:
                             log.debug(f"Metadata setting failed on {dataset_instance}", exc_info=True)
                             dataset_instance.state = dataset_instance.dataset.states.FAILED_METADATA
@@ -1232,7 +1232,7 @@ def _import_jobs(self, object_import_tracker: "ObjectImportTracker", history: Op
                 continue

             imported_job = model.Job()
-            imported_job.id = job_attrs.get("id")
+            imported_job.id = cast(int, job_attrs.get("id"))
             imported_job.user = self.user
             add_object_to_session(imported_job, history_sa_session)
             imported_job.history = history
@@ -2159,12 +2159,12 @@ def export_history(
     sa_session = app.model.session

     # Write collections' attributes (including datasets list) to file.
-    stmt = (
+    stmt_hdca = (
         select(model.HistoryDatasetCollectionAssociation)
-        .where(model.HistoryDatasetCollectionAssociation.history == history)
+        .where(model.HistoryDatasetCollectionAssociation.history == history)  # type:ignore[arg-type]
         .where(model.HistoryDatasetCollectionAssociation.deleted == expression.false())
     )
-    collections = sa_session.scalars(stmt)
+    collections = sa_session.scalars(stmt_hdca)
     for collection in collections:
         # filter this ?
@@ -2178,7 +2178,7 @@ def export_history(

     # Write datasets' attributes to file.
     actions_backref = model.Dataset.actions  # type: ignore[attr-defined]
-    stmt = (
+    stmt_hda = (
         select(model.HistoryDatasetAssociation)
         .where(model.HistoryDatasetAssociation.history == history)
         .join(model.Dataset)
@@ -2186,7 +2186,7 @@ def export_history(
         .order_by(model.HistoryDatasetAssociation.hid)
         .where(model.Dataset.purged == expression.false())
     )
-    datasets = sa_session.scalars(stmt).unique()
+    datasets = sa_session.scalars(stmt_hda).unique()
     for dataset in datasets:
         dataset.annotation = get_item_annotation_str(sa_session, history.user, dataset)
         should_include_file = (dataset.visible or include_hidden) and (not dataset.deleted or include_deleted)
diff --git a/lib/galaxy/model/store/_bco_convert_utils.py b/lib/galaxy/model/store/_bco_convert_utils.py
index 82bb952b47f5..1763b2851665 100644
--- a/lib/galaxy/model/store/_bco_convert_utils.py
+++ b/lib/galaxy/model/store/_bco_convert_utils.py
@@ -27,6 +27,7 @@ def register_step(self, step: WorkflowStep) -> None:
             return

         tool_version = step.tool_version
+        assert tool_id
         self._recorded_tools.add(tool_id)
         uri_safe_tool_id = urllib.parse.quote(tool_id)
         if "repos/" in tool_id:
diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py
index 4ec5d103158d..8664e09adca0 100644
--- a/lib/galaxy/model/store/discover.py
+++ b/lib/galaxy/model/store/discover.py
@@ -462,8 +462,8 @@ def override_object_store_id(self, output_name: Optional[str] = None) -> Optiona
         if not job:
             return None
         default_object_store_id = job.object_store_id
-        object_store_id_overrides = job.object_store_id_overrides or {}
-        return object_store_id_overrides.get(output_name, default_object_store_id)
+        object_store_id_overrides = job.object_store_id_overrides or {}  # type:ignore[var-annotated]
+        return object_store_id_overrides.get(output_name, default_object_store_id)  # type:ignore[union-attr]

     @property
     @abc.abstractmethod
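Two typing idioms recur above. The `stmt` → `stmt_hdca`/`stmt_hda` renames avoid rebinding one name to two differently-typed `Select` statements, which mypy rejects as an incompatible reassignment. And `cast(int, job_attrs.get("id"))` records an invariant without a runtime check; a self-contained sketch of that one:

```python
from typing import Any, Dict, cast

def job_id_from(job_attrs: Dict[str, Any]) -> int:
    # .get() is typed as returning an Optional value; cast() tells mypy
    # the exported attributes always carry an id. Unlike an assert,
    # cast() performs no runtime validation.
    return cast(int, job_attrs.get("id"))
```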
diff --git a/lib/galaxy/model/tags.py b/lib/galaxy/model/tags.py
index a62051b79eef..f62b3cbb3b67 100644
--- a/lib/galaxy/model/tags.py
+++ b/lib/galaxy/model/tags.py
@@ -194,6 +194,7 @@ def _ensure_user_owns_item(self, user: Optional["User"], item):
     def item_has_tag(self, user, item, tag):
         """Returns true if item is has a given tag."""
         # Get tag name.
+        tag_name = None
         if isinstance(tag, str):
             tag_name = tag
         elif isinstance(tag, galaxy.model.Tag):
diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py
index ab5a7d9a41b8..edd922600b1d 100644
--- a/lib/galaxy/model/tool_shed_install/__init__.py
+++ b/lib/galaxy/model/tool_shed_install/__init__.py
@@ -1,5 +1,6 @@
 import logging
 import os
+from datetime import datetime
 from enum import Enum
 from typing import (
     Any,
@@ -19,6 +20,8 @@
     TEXT,
 )
 from sqlalchemy.orm import (
+    Mapped,
+    mapped_column,
     registry,
     relationship,
 )
@@ -44,13 +47,13 @@

 if TYPE_CHECKING:
     # Workaround for https://github.com/python/mypy/issues/14182
-    from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta

     class DeclarativeMeta(_DeclarativeMeta, type):
         pass

 else:
-    from sqlalchemy.orm.decl_api import DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta


 class HasToolBox(common_util.HasToolShedRegistry, Protocol):
@@ -75,24 +78,24 @@ def __declare_last__(cls):
 class ToolShedRepository(Base):
     __tablename__ = "tool_shed_repository"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    tool_shed = Column(TrimmedString(255), index=True)
-    name = Column(TrimmedString(255), index=True)
-    description = Column(TEXT)
-    owner = Column(TrimmedString(255), index=True)
-    installed_changeset_revision = Column(TrimmedString(255))
-    changeset_revision = Column(TrimmedString(255), index=True)
-    ctx_rev = Column(TrimmedString(10))
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[datetime] = mapped_column(DateTime, default=now, nullable=True)
+    update_time: Mapped[datetime] = mapped_column(DateTime, default=now, onupdate=now, nullable=True)
+    tool_shed: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True)
+    name: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True)
+    description: Mapped[Optional[str]] = mapped_column(TEXT)
+    owner: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True)
+    installed_changeset_revision: Mapped[str] = mapped_column(TrimmedString(255), nullable=True)
+    changeset_revision: Mapped[str] = mapped_column(TrimmedString(255), index=True, nullable=True)
+    ctx_rev: Mapped[Optional[str]] = mapped_column(TrimmedString(10))
     metadata_ = Column("metadata", MutableJSONType, nullable=True)
-    includes_datatypes = Column(Boolean, index=True, default=False)
+    includes_datatypes: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
     tool_shed_status = Column(MutableJSONType, nullable=True)
-    deleted = Column(Boolean, index=True, default=False)
-    uninstalled = Column(Boolean, default=False)
-    dist_to_shed = Column(Boolean, default=False)
-    status = Column(TrimmedString(255))
-    error_message = Column(TEXT)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    uninstalled: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
+    dist_to_shed: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
+    status: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
+    error_message: Mapped[Optional[str]] = mapped_column(TEXT)
     tool_versions = relationship("ToolVersion", back_populates="tool_shed_repository")
     tool_dependencies = relationship(
         "ToolDependency", order_by="ToolDependency.name", back_populates="tool_shed_repository"
@@ -654,11 +657,11 @@ def upgrade_available(self):
 class RepositoryRepositoryDependencyAssociation(Base):
     __tablename__ = "repository_repository_dependency_association"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True)
-    repository_dependency_id = Column(ForeignKey("repository_dependency.id"), index=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    tool_shed_repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tool_shed_repository.id"), index=True)
+    repository_dependency_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository_dependency.id"), index=True)
     repository = relationship("ToolShedRepository", back_populates="required_repositories")
     repository_dependency = relationship("RepositoryDependency")

@@ -670,10 +673,12 @@ def __init__(self, tool_shed_repository_id=None, repository_dependency_id=None):
 class RepositoryDependency(Base):
     __tablename__ = "repository_dependency"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    tool_shed_repository_id: Mapped[int] = mapped_column(
+        ForeignKey("tool_shed_repository.id"), index=True, nullable=False
+    )
     repository = relationship("ToolShedRepository")

     def __init__(self, tool_shed_repository_id=None):
@@ -683,15 +688,17 @@ def __init__(self, tool_shed_repository_id=None):
 class ToolDependency(Base):
     __tablename__ = "tool_dependency"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=False)
-    name = Column(TrimmedString(255))
-    version = Column(TEXT)
-    type = Column(TrimmedString(40))
-    status = Column(TrimmedString(255), nullable=False)
-    error_message = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    tool_shed_repository_id: Mapped[int] = mapped_column(
+        ForeignKey("tool_shed_repository.id"), index=True, nullable=False
+    )
+    name: Mapped[str] = mapped_column(TrimmedString(255), nullable=True)
+    version: Mapped[str] = mapped_column(TEXT, nullable=True)
+    type: Mapped[Optional[str]] = mapped_column(TrimmedString(40))
+    status: Mapped[str] = mapped_column(TrimmedString(255), nullable=False)
+    error_message: Mapped[Optional[str]] = mapped_column(TEXT)
     tool_shed_repository = relationship("ToolShedRepository", back_populates="tool_dependencies")

     # converting this one to Enum breaks the tool shed tests,
@@ -773,11 +780,13 @@ def is_installed(self):
 class ToolVersion(Base, Dictifiable):
     __tablename__ = "tool_version"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    tool_id = Column(String(255))
-    tool_shed_repository_id = Column(ForeignKey("tool_shed_repository.id"), index=True, nullable=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    tool_id: Mapped[Optional[str]] = mapped_column(String(255))
+    tool_shed_repository_id: Mapped[Optional[int]] = mapped_column(
+        ForeignKey("tool_shed_repository.id"), index=True, nullable=True
+    )
     parent_tool_association = relationship(
         "ToolVersionAssociation", primaryjoin=(lambda: ToolVersion.id == ToolVersionAssociation.tool_id)
     )
@@ -801,6 +810,6 @@ def to_dict(self, view="element"):
 class ToolVersionAssociation(Base):
     __tablename__ = "tool_version_association"

-    id = Column(Integer, primary_key=True)
-    tool_id = Column(ForeignKey("tool_version.id"), index=True, nullable=False)
-    parent_id = Column(ForeignKey("tool_version.id"), index=True, nullable=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    tool_id: Mapped[int] = mapped_column(ForeignKey("tool_version.id"), index=True, nullable=False)
+    parent_id: Mapped[int] = mapped_column(ForeignKey("tool_version.id"), index=True, nullable=False)
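These model rewrites follow SQLAlchemy 2.0's annotated declarative style: `Mapped[...]` carries the Python type mypy sees, `mapped_column(...)` keeps the column-level details, and `nullable=True` is stated explicitly wherever a non-Optional annotation would otherwise imply NOT NULL. A minimal standalone sketch of the pattern (a toy `Widget` table, not a Galaxy model):

```python
from datetime import datetime
from typing import Optional

from sqlalchemy import DateTime, Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class Widget(Base):
    __tablename__ = "widget"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Mapped[str] alone would imply NOT NULL; nullable=True overrides that
    # at the schema level while mypy still treats the attribute as str.
    name: Mapped[str] = mapped_column(String(255), index=True, nullable=True)
    # Optional[...] in the annotation is the idiomatic way to get both a
    # nullable column and Optional typing.
    retired_at: Mapped[Optional[datetime]] = mapped_column(DateTime)
```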
diff --git a/lib/galaxy/model/unittest_utils/data_app.py b/lib/galaxy/model/unittest_utils/data_app.py
index 8942af46a4d7..75307163ae3d 100644
--- a/lib/galaxy/model/unittest_utils/data_app.py
+++ b/lib/galaxy/model/unittest_utils/data_app.py
@@ -98,7 +98,7 @@ def __init__(self, config: Optional[GalaxyDataTestConfig] = None, **kwd):
         self.object_store = objectstore.build_object_store_from_config(self.config)
         self.model = init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
         self.security_agent = self.model.security_agent
-        self.tag_handler = GalaxyTagHandler(self.model.context)
+        self.tag_handler = GalaxyTagHandler(self.model.session)
         self.init_datatypes()

     def init_datatypes(self):
diff --git a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py
index 7aad9d474081..0fbfaf947de3 100644
--- a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py
+++ b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py
@@ -75,6 +75,7 @@ def run_command(cmd: str) -> subprocess.CompletedProcess:
 def get_db_heads(config: Config) -> Tuple[str, ...]:
     """Return revision ids (version heads) stored in the database."""
     dburl = config.get_main_option("sqlalchemy.url")
+    assert dburl
     engine = create_engine(dburl, future=True)
     with engine.connect() as conn:
         context = MigrationContext.configure(conn)
diff --git a/lib/galaxy/model/unittest_utils/model_testing_utils.py b/lib/galaxy/model/unittest_utils/model_testing_utils.py
index 3f13dd79f731..960045510abb 100644
--- a/lib/galaxy/model/unittest_utils/model_testing_utils.py
+++ b/lib/galaxy/model/unittest_utils/model_testing_utils.py
@@ -145,7 +145,7 @@ def drop_database(db_url, database):
         _drop_database(db_url, database)
     else:
         url = make_url(db_url)
-        os.remove(url.database)
+        os.remove(url.database)  # type:ignore[arg-type]


 def dbcleanup_wrapper(session, obj, where_clause=None):
@@ -263,4 +263,4 @@ def _make_sqlite_db_url(tmpdir: str, database: str) -> DbUrl:
 def _make_postgres_db_url(connection_url: DbUrl, database: str) -> DbUrl:
     url = make_url(connection_url)
     url = url.set(database=database)
-    return DbUrl(str(url))
+    return DbUrl(url.render_as_string(hide_password=False))
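The last hunk matters beyond typing: in SQLAlchemy 1.4+, `str(url)` masks the password as `***`, so round-tripping a URL through `str()` yields a connection string that no longer authenticates. `render_as_string(hide_password=False)` preserves the secret:

```python
from sqlalchemy.engine import make_url

url = make_url("postgresql://alice:s3cret@localhost/galaxy")
print(str(url))                                   # postgresql://alice:***@localhost/galaxy
print(url.render_as_string(hide_password=False))  # postgresql://alice:s3cret@localhost/galaxy
```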
diff --git a/lib/galaxy/quota/__init__.py b/lib/galaxy/quota/__init__.py
index 0c81d75f9466..3f0b55cda645 100644
--- a/lib/galaxy/quota/__init__.py
+++ b/lib/galaxy/quota/__init__.py
@@ -4,7 +4,6 @@
 from typing import Optional

 from sqlalchemy import select
-from sqlalchemy.orm import object_session
 from sqlalchemy.sql import text

 import galaxy.util
@@ -198,7 +197,7 @@ def relabel_quota_for_dataset(self, dataset, from_label: Optional[str], to_label
             WHERE dataset_id = :dataset_id
         )"""

-        engine = object_session(dataset).bind
+        engine = self.sa_session.get_bind()

         # Hack for older sqlite, would work on newer sqlite - 3.24.0
         for_sqlite = "sqlite" in engine.dialect.name
diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py
index d97a586c08f0..9c45ea79e7fc 100644
--- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py
@@ -328,7 +328,7 @@ def initiate_repository_installation(self, installation_dict):
             tool_shed_repositories.append(tsr)
         clause_list = []
         for tsr_id in tsr_ids:
-            clause_list.append(self.install_model.ToolShedRepository.table.c.id == tsr_id)
+            clause_list.append(self.install_model.ToolShedRepository.id == tsr_id)
         query = self.install_model.context.query(self.install_model.ToolShedRepository).filter(or_(*clause_list))
         return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
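A pattern repeated through the rest of the diff starts here: filtering on the mapped class attribute (`ToolShedRepository.id == tsr_id`) instead of reaching into `Model.table.c`. Both render the same SQL, but the instrumented attribute is typed, survives the 2.0-style mapping, and works in both `select()` and legacy `query()`. A self-contained sketch with a toy model:

```python
from sqlalchemy import Integer, or_, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class Repo(Base):
    __tablename__ = "repo"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)

# Legacy style reached into the Table: Repo.__table__.c.id == 42.
# The mapped attribute carries type information and the owning entity:
stmt = select(Repo).where(or_(*[Repo.id == rid for rid in (1, 2, 3)]))
```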
diff --git a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py
index aff38254d183..792b07abc386 100644
--- a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py
@@ -117,7 +117,7 @@ def activate_repository(self, repository: ToolShedRepository) -> None:
                 tpm=tpm,
                 repository=repository,
                 changeset_revision=repository.changeset_revision,
-                metadata_dict=repository.metadata_,
+                metadata_dict=repository.metadata_,  # type:ignore[arg-type]
             )
             repository_tools_tups = irmm.get_repository_tools_tups()
             # Reload tools into the appropriate tool panel section.
@@ -139,7 +139,7 @@ def activate_repository(self, repository: ToolShedRepository) -> None:
             dmh = data_manager.DataManagerHandler(self.app)
             dmh.install_data_managers(
                 self.app.config.shed_data_manager_config_file,
-                repository.metadata_,
+                repository.metadata_,  # type:ignore[arg-type]
                 repository.get_shed_config_dict(self.app),
                 data_manager_relative_install_dir,
                 repository,
@@ -206,7 +206,9 @@ def add_entry_to_repository_dependencies_of_installed_repositories(self, reposit

     def get_containing_repository_for_tool_dependency(self, tool_dependency_tup: tuple) -> ToolShedRepository:
         tool_shed_repository_id, name, version, type = tool_dependency_tup
-        return self.context.query(ToolShedRepository).get(tool_shed_repository_id)
+        repository = self.context.query(ToolShedRepository).get(tool_shed_repository_id)
+        assert repository
+        return repository

     def get_dependencies_for_repository(
         self,
diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
index aea14cacdd93..e2ed787bdba2 100644
--- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py
@@ -86,15 +86,15 @@ def get_query_for_setting_metadata_on_repositories(self, order=True):
         if order:
             return (
                 self.app.install_model.context.query(self.app.install_model.ToolShedRepository)
-                .filter(self.app.install_model.ToolShedRepository.table.c.uninstalled == false())
+                .filter(self.app.install_model.ToolShedRepository.uninstalled == false())
                 .order_by(
-                    self.app.install_model.ToolShedRepository.table.c.name,
-                    self.app.install_model.ToolShedRepository.table.c.owner,
+                    self.app.install_model.ToolShedRepository.name,
+                    self.app.install_model.ToolShedRepository.owner,
                 )
             )
         else:
             return self.app.install_model.context.query(self.app.install_model.ToolShedRepository).filter(
-                self.app.install_model.ToolShedRepository.table.c.uninstalled == false()
+                self.app.install_model.ToolShedRepository.uninstalled == false()
             )

     def get_repository_tools_tups(self):
@@ -131,7 +131,7 @@ def reset_all_metadata_on_installed_repository(self):
             original_metadata_dict = self.repository.metadata_
             self.generate_metadata_for_changeset_revision()
             if self.metadata_dict != original_metadata_dict:
-                self.repository.metadata_ = self.metadata_dict
+                self.repository.metadata_ = self.metadata_dict  # type:ignore[assignment]
                 self.update_in_shed_tool_config()

                 session = self.app.install_model.context
diff --git a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py
index 42f86250f563..dfa8bec8b769 100644
--- a/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/update_repository_manager.py
@@ -75,7 +75,7 @@ def __restarter(self) -> None:
             # the repository revision is the latest installable revision, and whether the repository
             # has been deprecated in the Tool Shed.
             for repository in self.context.query(self.app.install_model.ToolShedRepository).filter(
-                self.app.install_model.ToolShedRepository.table.c.deleted == false()
+                self.app.install_model.ToolShedRepository.deleted == false()
             ):
                 tool_shed_status_dict = get_tool_shed_status_for_installed_repository(self.app, repository)
                 if tool_shed_status_dict:
@@ -116,7 +116,7 @@ def update_repository_record(
         if tool_shed_status_dict:
             repository.tool_shed_status = tool_shed_status_dict
         else:
-            repository.tool_shed_status = None
+            repository.tool_shed_status = None  # type:ignore[assignment]
         session = self.app.install_model.context
         session.add(repository)
         with transaction(session):
diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py
index e040ec0bdf16..b0827bdbf8b0 100644
--- a/lib/galaxy/tool_shed/metadata/metadata_generator.py
+++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py
@@ -854,7 +854,7 @@ class GalaxyMetadataGenerator(BaseMetadataGenerator):
     """A MetadataGenerator building on Galaxy's app and repository constructs."""

     app: InstallationTarget
-    repository: Optional[ToolShedRepository]
+    repository: Optional[ToolShedRepository]  # type:ignore[assignment]

     def __init__(
         self,
diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py
index 11e9a675039d..dae1c02ccd6c 100644
--- a/lib/galaxy/tool_shed/util/repository_util.py
+++ b/lib/galaxy/tool_shed/util/repository_util.py
@@ -48,9 +48,7 @@ def check_for_updates(
     success_count = 0
     repository_names_not_updated = []
     updated_count = 0
-    for repository in install_model_context.query(ToolShedRepository).filter(
-        ToolShedRepository.table.c.deleted == false()
-    ):
+    for repository in install_model_context.query(ToolShedRepository).filter(ToolShedRepository.deleted == false()):
         ok, updated = _check_or_update_tool_shed_status_for_installed_repository(
             tool_shed_registry, install_model_context, repository
         )
@@ -66,7 +64,8 @@ def check_for_updates(
             message += "Unable to retrieve status from the tool shed for the following repositories:\n"
             message += ", ".join(repository_names_not_updated)
     else:
-        repository = install_model_context.get(ToolShedRepository, repository_id)
+        repository = install_model_context.get(ToolShedRepository, repository_id)  # type:ignore[assignment]
+        assert repository
         ok, updated = _check_or_update_tool_shed_status_for_installed_repository(
             tool_shed_registry, install_model_context, repository
         )
@@ -298,18 +297,18 @@ def get_installed_repository(
     )
     query = app.install_model.context.query(app.install_model.ToolShedRepository)
     if repository_id:
-        clause_list = [app.install_model.ToolShedRepository.table.c.id == repository_id]
+        clause_list = [app.install_model.ToolShedRepository.id == repository_id]
     else:
         clause_list = [
-            app.install_model.ToolShedRepository.table.c.tool_shed == tool_shed,
-            app.install_model.ToolShedRepository.table.c.name == name,
-            app.install_model.ToolShedRepository.table.c.owner == owner,
+            app.install_model.ToolShedRepository.tool_shed == tool_shed,
+            app.install_model.ToolShedRepository.name == name,
+            app.install_model.ToolShedRepository.owner == owner,
         ]
     if changeset_revision is not None:
-        clause_list.append(app.install_model.ToolShedRepository.table.c.changeset_revision == changeset_revision)
+        clause_list.append(app.install_model.ToolShedRepository.changeset_revision == changeset_revision)
     if installed_changeset_revision is not None:
         clause_list.append(
-            app.install_model.ToolShedRepository.table.c.installed_changeset_revision == installed_changeset_revision
+            app.install_model.ToolShedRepository.installed_changeset_revision == installed_changeset_revision
         )
     return query.filter(and_(*clause_list)).first()
@@ -443,16 +442,16 @@ def get_repository_by_name_and_owner(app, name, owner, eagerload_columns=None):
     if is_tool_shed_client(app):
         return repository_query.filter(
             and_(
-                app.install_model.ToolShedRepository.table.c.name == name,
-                app.install_model.ToolShedRepository.table.c.owner == owner,
+                app.install_model.ToolShedRepository.name == name,
+                app.install_model.ToolShedRepository.owner == owner,
             )
         ).first()
     # We're in the tool shed.
     q = repository_query.filter(
         and_(
-            app.model.Repository.table.c.name == name,
-            app.model.User.table.c.username == owner,
-            app.model.Repository.table.c.user_id == app.model.User.table.c.id,
+            app.model.Repository.name == name,
+            app.model.User.username == owner,
+            app.model.Repository.user_id == app.model.User.id,
         )
     )
     if eagerload_columns:
@@ -637,7 +636,9 @@ def get_tool_shed_repository_by_id(app, repository_id) -> ToolShedRepository:
 def get_tool_shed_status_for(tool_shed_registry: Registry, repository: ToolShedRepository):
     tool_shed_url = tool_shed_registry.get_tool_shed_url(str(repository.tool_shed))
     assert tool_shed_url
-    params = dict(name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision)
+    params: Dict[str, Any] = dict(
+        name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision
+    )
     pathspec = ["repository", "status_for_installed_repository"]
     try:
         encoded_tool_shed_status_dict = util.url_get(
diff --git a/lib/galaxy/tool_util/toolbox/watcher.py b/lib/galaxy/tool_util/toolbox/watcher.py
index e570bb58198d..b885ea964e27 100644
--- a/lib/galaxy/tool_util/toolbox/watcher.py
+++ b/lib/galaxy/tool_util/toolbox/watcher.py
@@ -9,9 +9,9 @@

     can_watch = True
 except ImportError:
-    Observer = None
-    FileSystemEventHandler = object
-    PollingObserver = None
+    Observer = None  # type:ignore[assignment, misc]
+    FileSystemEventHandler = object  # type:ignore[assignment, misc]
+    PollingObserver = None  # type:ignore[assignment, misc]
     can_watch = False

 from galaxy.util.hash_util import md5_hash_file
diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py
index a345abde954a..b1f9a8f22a44 100644
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -102,12 +102,12 @@ def handle_library_params(
     template: Optional[FormDefinition] = None
     if template_id not in [None, "None"]:
         template = session.get(FormDefinition, template_id)
-        assert template
-        for field in template.fields:
-            field_name = field["name"]
-            if params.get(field_name, False):
-                field_value = util.restore_text(params.get(field_name, ""))
-                template_field_contents[field_name] = field_value
+        if template and template.fields:
+            for field in template.fields:
+                field_name = field["name"]  # type:ignore[index]
+                if params.get(field_name, False):
+                    field_value = util.restore_text(params.get(field_name, ""))
+                    template_field_contents[field_name] = field_value
     roles: List[Role] = []
     for role_id in util.listify(params.get("roles", [])):
         role = session.get(Role, role_id)
@@ -441,7 +441,6 @@ def active_folders(trans, folder):
         select(LibraryFolder)
         .filter_by(parent=folder, deleted=False)
         .options(joinedload(LibraryFolder.actions))
-        .unique()
         .order_by(LibraryFolder.name)
     )
-    return trans.sa_session.scalars(stmt).all()
+    return trans.sa_session.scalars(stmt).unique().all()
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
index b5a8c30a8754..57084e3c5b4b 100644
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2044,6 +2044,7 @@ def src_id_to_item(
         item = sa_session.get(src_to_class[value["src"]], decoded_id)
     except KeyError:
         raise ValueError(f"Unknown input source {value['src']} passed to job submission API.")
+    assert item
     item.extra_params = {k: v for k, v in value.items() if k not in ("src", "id")}
     return item
diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py
index b8591ea749d5..3946baf593a2 100644
--- a/lib/galaxy/tools/wrappers.py
+++ b/lib/galaxy/tools/wrappers.py
@@ -387,7 +387,11 @@ def __init__(
         self.dataset = wrap_with_safe_string(dataset_instance, no_wrap_classes=ToolParameterValueWrapper)
         self.metadata = self.MetadataWrapper(dataset_instance, compute_environment)
         if isinstance(dataset_instance, HasTags):
-            self.groups = {tag.user_value.lower() for tag in dataset_instance.tags if tag.user_tname == "group"}
+            self.groups = {
+                tag.user_value.lower()
+                for tag in dataset_instance.tags  # type:ignore[unused-ignore, attr-defined]
+                if tag.user_tname == "group"
+            }
         else:
             # May be a 'FakeDatasetAssociation'
             self.groups = set()
diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py
index 4769a6842ba3..53532c6a57ed 100644
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -29,6 +29,7 @@
     datetime,
     timezone,
 )
+from decimal import Decimal
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
 from hashlib import md5
@@ -1519,7 +1520,7 @@ def shorten_with_metric_prefix(amount: int) -> str:
     return str(amount)


-def nice_size(size: Union[float, int, str]) -> str:
+def nice_size(size: Union[float, int, str, Decimal]) -> str:
     """
     Returns a readably formatted string with the size
diff --git a/lib/galaxy/util/watcher.py b/lib/galaxy/util/watcher.py
index dd4f0110299a..501a2212201c 100644
--- a/lib/galaxy/util/watcher.py
+++ b/lib/galaxy/util/watcher.py
@@ -13,9 +13,9 @@

     can_watch = True
 except ImportError:
-    Observer = None
-    FileSystemEventHandler = object
-    PollingObserver = None
+    Observer = None  # type:ignore[assignment]
+    FileSystemEventHandler = object  # type:ignore[assignment, misc]
+    PollingObserver = None  # type:ignore[assignment, misc]
     can_watch = False

 from galaxy.util.hash_util import md5_hash_file
diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py
index 98019adec70f..50672c8227b4 100644
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -269,8 +269,8 @@ def get_dbkeys(self, user: Optional[User], chrom_info=False):
         dbkeys = []

         # Add user's custom keys to dbkeys.
-        if user and "dbkeys" in user.preferences:
-            user_keys_dict = loads(user.preferences["dbkeys"])
+        if user and user.preferences and "dbkeys" in user.preferences:  # type:ignore[unreachable]
+            user_keys_dict = loads(user.preferences["dbkeys"])  # type:ignore[unreachable]
             dbkeys.extend([(attributes["name"], key) for key, attributes in user_keys_dict.items()])

         # Add app keys to dbkeys.
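The `active_folders` hunk moves `.unique()` off the statement and onto the result: with a `joinedload` of a collection, SQLAlchemy 1.4+ requires deduplicating the returned entities, and `Select` has no `.unique()` method in the first place. A sketch of the call shape (`Folder`, its `actions` relationship, and `session` are hypothetical stand-ins):

```python
from sqlalchemy import select
from sqlalchemy.orm import joinedload

stmt = (
    select(Folder)
    .filter_by(deleted=False)
    .options(joinedload(Folder.actions))  # joined eager load of a collection
    .order_by(Folder.name)
)
# .unique() belongs on the result: joined rows are de-duplicated
# client-side before .all() materializes the entities.
folders = session.scalars(stmt).unique().all()
```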
diff --git a/lib/galaxy/web/framework/helpers/grids.py b/lib/galaxy/web/framework/helpers/grids.py
index 98795511186d..ea01f1efd098 100644
--- a/lib/galaxy/web/framework/helpers/grids.py
+++ b/lib/galaxy/web/framework/helpers/grids.py
@@ -52,7 +52,7 @@ def sort(self, trans, query, ascending, column_name=None):
             column_name = self.key
         column = getattr(self.model_class, column_name)
         if column is None:
-            column = self.model_class.table.c.get(column_name)
+            column = self.model_class.__table__.c.get(column_name)
         if ascending:
             query = query.order_by(column.asc())
         else:
diff --git a/lib/galaxy/web/legacy_framework/grids.py b/lib/galaxy/web/legacy_framework/grids.py
index 3b62e8665012..dcaff6fa12e6 100644
--- a/lib/galaxy/web/legacy_framework/grids.py
+++ b/lib/galaxy/web/legacy_framework/grids.py
@@ -112,9 +112,9 @@ def sort(self, trans, query, ascending, column_name=None):
         if column_name is None:
             column_name = self.key
         if ascending:
-            query = query.order_by(self.model_class.table.c.get(column_name).asc())
+            query = query.order_by(self.model_class.__table__.c.get(column_name).asc())
         else:
-            query = query.order_by(self.model_class.table.c.get(column_name).desc())
+            query = query.order_by(self.model_class.__table__.c.get(column_name).desc())
         return query

@@ -165,9 +165,9 @@ def sort(self, trans, query, ascending, column_name=None):
         if column_name is None:
             column_name = self.key
         if ascending:
-            query = query.order_by(func.lower(self.model_class.table.c.get(column_name)).asc())
+            query = query.order_by(func.lower(self.model_class.__table__.c.get(column_name)).asc())
         else:
-            query = query.order_by(func.lower(self.model_class.table.c.get(column_name)).desc())
+            query = query.order_by(func.lower(self.model_class.__table__.c.get(column_name)).desc())
         return query

@@ -236,7 +236,7 @@ def sort(self, trans, query, ascending, column_name=None):
         item_rating_assoc_class = getattr(trans.model, f"{self.model_class.__name__}RatingAssociation")
         foreign_key = get_foreign_key(item_rating_assoc_class, self.model_class)
         fk_col = foreign_key.parent
-        referent_col = foreign_key.get_referent(self.model_class.table)
+        referent_col = foreign_key.get_referent(self.model_class.__table__)
         # Do sorting using a subquery.
         # Subquery to get average rating for each item.
         ave_rating_subquery = (
diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py
index 155511afb585..9191d6a65913 100644
--- a/lib/galaxy/webapps/base/webapp.py
+++ b/lib/galaxy/webapps/base/webapp.py
@@ -25,7 +25,7 @@
     select,
     true,
 )
-from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.exc import NoResultFound

 from galaxy import util
 from galaxy.exceptions import (
diff --git a/lib/galaxy/webapps/galaxy/api/tool_entry_points.py b/lib/galaxy/webapps/galaxy/api/tool_entry_points.py
index fcaffd3049de..f173a8b46ecf 100644
--- a/lib/galaxy/webapps/galaxy/api/tool_entry_points.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_entry_points.py
@@ -54,6 +54,7 @@ def index(self, trans: ProvidesUserContext, running=False, job_id=None, **kwd):

         if job_id is not None:
             job = trans.sa_session.get(Job, self.decode_id(job_id))
+            assert job
             if not self.interactivetool_manager.can_access_job(trans, job):
                 raise exceptions.ItemAccessibilityException()
             entry_points = job.interactivetool_entry_points
diff --git a/lib/galaxy/webapps/galaxy/api/users.py b/lib/galaxy/webapps/galaxy/api/users.py
index 03e1511c732d..bf74edf20f58 100644
--- a/lib/galaxy/webapps/galaxy/api/users.py
+++ b/lib/galaxy/webapps/galaxy/api/users.py
@@ -515,6 +515,7 @@ def add_custom_builds(
             else:
                 build_dict["fasta"] = trans.security.decode_id(len_value)
                 dataset = trans.sa_session.get(HistoryDatasetAssociation, int(build_dict["fasta"]))
+                assert dataset
                 try:
                     new_len = dataset.get_converted_dataset(trans, "len")
                     new_linecount = new_len.get_converted_dataset(trans, "linecount")
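`NoResultFound` (and `MultipleResultsFound`, further down in the diff) moved in SQLAlchemy 1.4: the canonical home is now `sqlalchemy.exc`, with `sqlalchemy.orm.exc` kept only as a legacy alias. A minimal sketch of the updated import and its typical use:

```python
from sqlalchemy.exc import MultipleResultsFound, NoResultFound  # 1.4+ location
# Legacy location, avoided in new code:
#   from sqlalchemy.orm.exc import NoResultFound

def one_or_error(session, stmt):
    try:
        return session.scalars(stmt).one()
    except (NoResultFound, MultipleResultsFound) as e:
        raise ValueError(f"expected exactly one row: {e}")
```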
diff --git a/lib/galaxy/webapps/galaxy/controllers/tag.py b/lib/galaxy/webapps/galaxy/controllers/tag.py
index 04f315202da9..05dda97f1ebe 100644
--- a/lib/galaxy/webapps/galaxy/controllers/tag.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tag.py
@@ -140,7 +140,7 @@ def _get_tag_autocomplete_values(self, trans, q, limit, timestamp, user=None, it
         # Do query and get result set.
         query = (
             select(item_tag_assoc_class.table.c.value, func.count())
-            .select_from_obj(from_obj)
+            .select_from(from_obj)
             .where(where_clause)
             .group_by(item_tag_assoc_class.table.c.value)
             .order_by(func.count().desc(), item_tag_assoc_class.table.c.value)
diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py
index e661f3c40afe..f0a82c7f7519 100644
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -10,7 +10,7 @@
 from urllib.parse import unquote

 from markupsafe import escape
-from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.exc import NoResultFound

 from galaxy import (
     util,
diff --git a/lib/galaxy/webapps/galaxy/services/histories.py b/lib/galaxy/webapps/galaxy/services/histories.py
index ab3fe0e69dc0..dd192b860f21 100644
--- a/lib/galaxy/webapps/galaxy/services/histories.py
+++ b/lib/galaxy/webapps/galaxy/services/histories.py
@@ -20,7 +20,6 @@
     select,
     true,
 )
-from sqlalchemy.orm import Session

 from galaxy import (
     exceptions as glx_exceptions,
@@ -45,6 +44,7 @@
 from galaxy.managers.users import UserManager
 from galaxy.model import HistoryDatasetAssociation
 from galaxy.model.base import transaction
+from galaxy.model.scoped_session import galaxy_scoped_session
 from galaxy.model.store import payload_to_source_uri
 from galaxy.schema import (
     FilterQueryParams,
@@ -820,7 +820,7 @@ def _get_export_record_data(self, history: model.History) -> Optional[WriteStore
         return None


-def get_fasta_hdas_by_history(session: Session, history_id: int):
+def get_fasta_hdas_by_history(session: galaxy_scoped_session, history_id: int):
     stmt = (
         select(HistoryDatasetAssociation)
         .filter_by(history_id=history_id, extension="fasta", deleted=False)
diff --git a/lib/galaxy/webapps/galaxy/services/quotas.py b/lib/galaxy/webapps/galaxy/services/quotas.py
index 290ee8056668..38b6a69fe849 100644
--- a/lib/galaxy/webapps/galaxy/services/quotas.py
+++ b/lib/galaxy/webapps/galaxy/services/quotas.py
@@ -6,7 +6,6 @@
     select,
     true,
 )
-from sqlalchemy.orm import Session

 from galaxy import util
 from galaxy.managers.context import ProvidesUserContext
@@ -14,6 +13,7 @@
 from galaxy.managers.quotas import QuotaManager
 from galaxy.managers.users import get_user_by_email
 from galaxy.model import Quota
+from galaxy.model.scoped_session import galaxy_scoped_session
 from galaxy.quota._schema import (
     CreateQuotaParams,
     CreateQuotaResult,
@@ -161,9 +161,7 @@ def get_group_id(item):
         payload["in_groups"] = list(map(str, new_in_groups))


-def get_quotas(session: Session, deleted: bool = False):
-    is_deleted = true()
-    if not deleted:
-        is_deleted = false()
+def get_quotas(session: galaxy_scoped_session, deleted: bool = False):
+    is_deleted = true() if deleted else false()
     stmt = select(Quota).where(Quota.deleted == is_deleted)
     return session.scalars(stmt)
diff --git a/lib/galaxy/webapps/galaxy/services/sharable.py b/lib/galaxy/webapps/galaxy/services/sharable.py
index f940ed4e94ea..73724ba6e28e 100644
--- a/lib/galaxy/webapps/galaxy/services/sharable.py
+++ b/lib/galaxy/webapps/galaxy/services/sharable.py
@@ -182,7 +182,7 @@ def _send_notification_to_users(self, users_to_notify: Set[User], item: Sharable
 class SharedItemNotificationFactory:
     source = "galaxy_sharing_system"

-    type_map: Dict[SharableItem, SharableItemType] = {
+    type_map: Dict[Type[SharableItem], SharableItemType] = {
         History: "history",
         StoredWorkflow: "workflow",
         Visualization: "visualization",
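The tag controller fix replaces `select_from_obj`, which is not a SQLAlchemy API, with the real `Select.select_from()`. A self-contained sketch of the aggregate-over-FROM shape used there:

```python
from sqlalchemy import Column, Integer, MetaData, String, Table, func, select

metadata = MetaData()
tag_assoc = Table(
    "item_tag_association",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("value", String),
)

stmt = (
    select(tag_assoc.c.value, func.count())
    .select_from(tag_assoc)  # explicit FROM clause
    .group_by(tag_assoc.c.value)
    .order_by(func.count().desc(), tag_assoc.c.value)
)
```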
diff --git a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py
index 630ad5325bd9..594015ca4528 100644
--- a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py
@@ -57,6 +57,7 @@ def index(self, request: InstalledToolShedRepositoryIndexRequest) -> List[Instal

     def show(self, repository_id: DecodedDatabaseIdField) -> InstalledToolShedRepository:
         tool_shed_repository = self._install_model_context.get(ToolShedRepository, repository_id)
+        assert tool_shed_repository
         return self._show(tool_shed_repository)

     def check_for_updates(self, repository_id: Optional[int]) -> CheckForUpdatesResponse:
@@ -74,7 +75,7 @@ def _get_tool_shed_repositories(self, **kwd):
         stmt = select(ToolShedRepository)
         for key, value in kwd.items():
             if value is not None:
-                column = ToolShedRepository.table.c[key]
+                column = ToolShedRepository.__table__.c[key]  # type:ignore[attr-defined]
                 stmt = stmt.filter(column == value)
         stmt = stmt.order_by(ToolShedRepository.name).order_by(cast(ToolShedRepository.ctx_rev, Integer).desc())
         session = self._install_model_context
diff --git a/lib/galaxy/webapps/galaxy/services/tools.py b/lib/galaxy/webapps/galaxy/services/tools.py
index 2266c6c58597..3b2e3ff9f6dc 100644
--- a/lib/galaxy/webapps/galaxy/services/tools.py
+++ b/lib/galaxy/webapps/galaxy/services/tools.py
@@ -280,6 +280,7 @@ def _patch_library_inputs(self, trans: ProvidesHistoryContext, inputs, target_hi
     def _patch_library_dataset(self, trans: ProvidesHistoryContext, v, target_history):
         if isinstance(v, dict) and "id" in v and v.get("src") == "ldda":
             ldda = trans.sa_session.get(LibraryDatasetDatasetAssociation, self.decode_id(v["id"]))
+            assert ldda
             if trans.user_is_admin or trans.app.security_agent.can_access_dataset(
                 trans.get_current_user_roles(), ldda.dataset
             ):
diff --git a/lib/galaxy/workflow/extract.py b/lib/galaxy/workflow/extract.py
index 54899414413c..5d10aa8a8a19 100644
--- a/lib/galaxy/workflow/extract.py
+++ b/lib/galaxy/workflow/extract.py
@@ -116,7 +116,7 @@ def extract_steps(
             if name not in step_labels:
                 step.label = name
                 step_labels.add(name)
-            step.tool_inputs = dict(name=name)
+            step.tool_inputs = dict(name=name)  # type:ignore[assignment]
             hid_to_output_pair[hid] = (step, "output")
             steps.append(step)
     for i, hid in enumerate(dataset_collection_ids):
@@ -132,7 +132,7 @@ def extract_steps(
             if name not in step_labels:
                 step.label = name
                 step_labels.add(name)
-            step.tool_inputs = dict(name=name, collection_type=collection_type)
+            step.tool_inputs = dict(name=name, collection_type=collection_type)  # type:ignore[assignment]
             hid_to_output_pair[hid] = (step, "output")
             steps.append(step)
     # Tool steps
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index da53d9de6159..6dfcbc500a68 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -1376,14 +1376,14 @@ def get_inputs(self):

     def restrict_options(self, step, connections: Iterable[WorkflowStepConnection], default_value):
         try:
-            static_options = []
+            static_options = []  # type:ignore[var-annotated]
             # Retrieve possible runtime options for 'select' type inputs
             for connection in connections:
                 # Well this isn't a great assumption...
                 assert connection.input_step
                 module = connection.input_step.module
                 assert isinstance(module, (ToolModule, SubWorkflowModule))
-                if isinstance(module, ToolModule):
+                if isinstance(module, ToolModule):  # type:ignore[unreachable]
                     assert module.tool
                     tool_inputs = module.tool.inputs
                     # may not be set, but we're catching the Exception below.
@@ -2421,6 +2421,7 @@ def from_workflow_step(self, trans, step: WorkflowStep, **kwargs) -> WorkflowMod
         Return module initialized from the WorkflowStep object `step`.
         """
         type = step.type
+        assert type
         return self.module_types[type].from_workflow_step(trans, step, **kwargs)

@@ -2509,13 +2510,13 @@ def inject(self, step: WorkflowStep, step_args=None, steps=None, **kwargs):
         If step_args is provided from a web form this is applied to generate 'state'
         else it is just obtained from the database.
         """
-        step.upgrade_messages = {}
+        step.upgrade_messages = {}  # type: ignore[assignment]

         # Make connection information available on each step by input name.
         step.setup_input_connections_by_name()

         # Populate module.
-        module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs)
+        module = step.module = module_factory.from_workflow_step(self.trans, step, **kwargs)  # type: ignore[assignment]

         # Any connected input needs to have value DummyDataset (these
         # are not persisted so we need to do it every time)
@@ -2544,7 +2545,7 @@ def inject_all(self, workflow: Workflow, param_map=None, ignore_tool_missing_exc

     def compute_runtime_state(self, step: WorkflowStep, step_args=None):
         assert step.module, "module must be injected before computing runtime state"
-        state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args)
+        state, step_errors = step.module.compute_runtime_state(self.trans, step, step_args)  # type:ignore[unreachable]
         step.state = state

         # Fix any missing parameters
diff --git a/lib/galaxy/workflow/run.py b/lib/galaxy/workflow/run.py
index c7eab00a7a1d..02b7a4c8f0c7 100644
--- a/lib/galaxy/workflow/run.py
+++ b/lib/galaxy/workflow/run.py
@@ -600,6 +600,7 @@ def set_step_outputs(
                 workflow_output = model.WorkflowOutput(step, output_name=output_name)
                 step.workflow_outputs.append(workflow_output)
         for workflow_output in step.workflow_outputs:
+            assert workflow_output.output_name
            output_name = workflow_output.output_name
             if output_name not in outputs:
                 invocation_step.workflow_invocation.add_message(
@@ -632,6 +633,7 @@ def _subworkflow_invocation(self, step: "WorkflowStep") -> WorkflowInvocation:
         workflow_invocation = self.workflow_invocation
         subworkflow_invocation = workflow_invocation.get_subworkflow_invocation_for_step(step)
         if subworkflow_invocation is None:
+            assert step.order_index
             raise MessageException(f"Failed to find persisted subworkflow invocation for step [{step.order_index + 1}]")
         return subworkflow_invocation
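One caveat with bare `assert value` narrowing, as in the `step.order_index` hunk above: it also rejects legitimate falsy values such as `0`. Where `0` is valid, an identity check against `None` narrows the type without that trap; a small sketch:

```python
from typing import Optional

def step_label(order_index: Optional[int]) -> str:
    # `assert order_index` would fail for the first step (index 0);
    # the identity check narrows Optional[int] -> int without rejecting 0.
    assert order_index is not None
    return f"step [{order_index + 1}]"
```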
diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py
index 029b1e7630c6..516c85d07eb1 100644
--- a/lib/galaxy/workflow/run_request.py
+++ b/lib/galaxy/workflow/run_request.py
@@ -113,7 +113,7 @@ def _normalize_inputs(
             elif inputs_by_el == "step_uuid":
                 possible_input_keys.append(str(step.uuid))
             elif inputs_by_el == "name":
-                possible_input_keys.append(step.label or step.tool_inputs.get("name"))
+                possible_input_keys.append(step.label or step.tool_inputs.get("name"))  # type:ignore[union-attr]
             else:
                 raise exceptions.MessageException(
                     "Workflow cannot be run because unexpected inputs_by value specified."
@@ -379,14 +379,16 @@ def build_workflow_run_configs(
         try:
             if input_source == "ldda":
                 ldda = trans.sa_session.get(LibraryDatasetDatasetAssociation, trans.security.decode_id(input_id))
+                assert ldda
                 assert trans.user_is_admin or trans.app.security_agent.can_access_dataset(
                     trans.get_current_user_roles(), ldda.dataset
                 )
                 content = ldda.to_history_dataset_association(history, add_to_history=add_to_history)
             elif input_source == "ld":
-                ldda = trans.sa_session.get(
-                    LibraryDataset, trans.security.decode_id(input_id)
-                ).library_dataset_dataset_association
+                library_dataset = trans.sa_session.get(LibraryDataset, trans.security.decode_id(input_id))
+                assert library_dataset
+                ldda = library_dataset.library_dataset_dataset_association
+                assert ldda
                 assert trans.user_is_admin or trans.app.security_agent.can_access_dataset(
                     trans.get_current_user_roles(), ldda.dataset
                 )
diff --git a/lib/galaxy_test/selenium/framework.py b/lib/galaxy_test/selenium/framework.py
index 5b919286dbc3..a3b298d54c19 100644
--- a/lib/galaxy_test/selenium/framework.py
+++ b/lib/galaxy_test/selenium/framework.py
@@ -536,7 +536,7 @@ def setup_shared_state(self):
     NavigatesGalaxyMixin = object


-class UsesLibraryAssertions(NavigatesGalaxyMixin):
+class UsesLibraryAssertions(NavigatesGalaxyMixin):  # type:ignore[valid-type, misc]
     @retry_assertion_during_transitions
     def assert_num_displayed_items_is(self, n):
         num_displayed = self.num_displayed_items()
@@ -546,7 +546,7 @@ def num_displayed_items(self) -> int:
         return len(self.libraries_table_elements())


-class UsesHistoryItemAssertions(NavigatesGalaxyMixin):
+class UsesHistoryItemAssertions(NavigatesGalaxyMixin):  # type:ignore[valid-type, misc]
     def assert_item_peek_includes(self, hid, expected):
         item_body = self.history_panel_item_component(hid=hid)
         peek_text = item_body.peek.wait_for_text()
@@ -584,7 +584,7 @@ def assert_item_hid_text(self, hid):
         )


-class UsesWorkflowAssertions(NavigatesGalaxyMixin):
+class UsesWorkflowAssertions(NavigatesGalaxyMixin):  # type:ignore[valid-type, misc]
     @retry_assertion_during_transitions
     def _assert_showing_n_workflows(self, n):
         actual_count = len(self.workflow_card_elements())
diff --git a/lib/tool_shed/grids/repository_grids.py b/lib/tool_shed/grids/repository_grids.py
index 5c80c185ff38..691959f32916 100644
--- a/lib/tool_shed/grids/repository_grids.py
+++ b/lib/tool_shed/grids/repository_grids.py
@@ -237,7 +237,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
@@ -247,7 +247,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         else:
@@ -258,7 +258,7 @@ def build_initial_query(self, trans, **kwd):
                 and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.deprecated == false())
             )
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -458,7 +458,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
@@ -473,7 +473,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         else:
@@ -488,7 +488,7 @@ def build_initial_query(self, trans, **kwd):
                 )
             )
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -538,7 +538,7 @@ def build_initial_query(self, trans, **kwd):
                     .join(model.RepositoryMetadata.table)
                     .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                     .join(model.User.table)
-                    .outerjoin(model.RepositoryCategoryAssociation.table)
+                    .outerjoin(model.RepositoryCategoryAssociation)
                     .outerjoin(model.Category.table)
                     .filter(model.Category.table.c.name == category.name)
                 )
@@ -547,7 +547,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         if filter == trans.app.repository_grid_filter_manager.filters.CERTIFIED_LEVEL_ONE_SUITES:
@@ -560,7 +560,7 @@ def build_initial_query(self, trans, **kwd):
                     .join(model.RepositoryMetadata.table)
                     .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                     .join(model.User.table)
-                    .outerjoin(model.RepositoryCategoryAssociation.table)
+                    .outerjoin(model.RepositoryCategoryAssociation)
                     .outerjoin(model.Category.table)
                     .filter(model.Category.table.c.name == category.name)
                 )
@@ -570,7 +570,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
             )
         else:
@@ -587,7 +587,7 @@ def build_initial_query(self, trans, **kwd):
                     )
                 )
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
                 .filter(model.Category.table.c.name == category.name)
             )
@@ -597,7 +597,7 @@ def build_initial_query(self, trans, **kwd):
                 and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.deprecated == false())
             )
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -632,7 +632,7 @@ def build_initial_query(self, trans, **kwd):
                 and_(model.Repository.table.c.deleted == false(), model.Repository.table.c.user_id == trans.user.id)
             )
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -680,11 +680,11 @@ def build_initial_query(self, trans, **kwd):
         return (
             trans.sa_session.query(model.Repository)
             .filter(model.Repository.table.c.deleted == false())
-            .outerjoin(model.RepositoryRoleAssociation.table)
+            .outerjoin(model.RepositoryRoleAssociation)
             .outerjoin(model.Role.table)
             .filter(or_(*clause_list))
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -835,7 +835,7 @@ def build_initial_query(self, trans, **kwd):
                 )
             )
             .join(model.User.table)
-            .outerjoin(model.RepositoryCategoryAssociation.table)
+            .outerjoin(model.RepositoryCategoryAssociation)
             .outerjoin(model.Category.table)
         )
@@ -1477,11 +1477,11 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .join(model.RepositoryCategoryAssociation.table)
+                .join(model.RepositoryCategoryAssociation)
                 .join(model.Category.table)
                 .filter(
                     and_(
-                        model.Category.table.c.id == trans.security.decode_id(kwd["id"]),
+                        model.Category.__table__.c.id == trans.security.decode_id(kwd["id"]),
                         model.RepositoryMetadata.table.c.downloadable == true(),
                     )
                 )
@@ -1493,7 +1493,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .join(model.RepositoryCategoryAssociation.table)
+                .join(model.RepositoryCategoryAssociation)
                 .join(model.Category.table)
                 .filter(
                     and_(
@@ -1513,7 +1513,7 @@ def build_initial_query(self, trans, **kwd):
                 )
                 .join(model.RepositoryMetadata.table)
                 .join(model.User.table)
-                .join(model.RepositoryCategoryAssociation.table)
+                .join(model.RepositoryCategoryAssociation)
                 .join(model.Category.table)
                 .filter(
                     and_(
@@ -1529,7 +1529,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
                 .filter(model.RepositoryMetadata.table.c.downloadable == true())
             )
@@ -1540,7 +1540,7 @@ def build_initial_query(self, trans, **kwd):
                 .join(model.RepositoryMetadata.table)
                 .filter(or_(*trans.app.repository_registry.certified_level_one_clause_list))
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
                 .filter(model.RepositoryMetadata.table.c.downloadable == true())
             )
@@ -1553,7 +1553,7 @@ def build_initial_query(self, trans, **kwd):
                 )
                 .join(model.RepositoryMetadata.table)
                 .join(model.User.table)
-                .outerjoin(model.RepositoryCategoryAssociation.table)
+                .outerjoin(model.RepositoryCategoryAssociation)
                 .outerjoin(model.Category.table)
                 .filter(model.RepositoryMetadata.table.c.downloadable == true())
             )
diff --git a/lib/tool_shed/managers/groups.py b/lib/tool_shed/managers/groups.py
index 280f1e685995..21f2a6ce0e62 100644
--- a/lib/tool_shed/managers/groups.py
+++ b/lib/tool_shed/managers/groups.py
@@ -9,7 +9,7 @@
     select,
     true,
 )
-from sqlalchemy.orm.exc import (
+from sqlalchemy.exc import (
     MultipleResultsFound,
     NoResultFound,
 )
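The grid changes swap `outerjoin(model.X.table)` for `outerjoin(model.X)`: joining the mapped entity lets SQLAlchemy derive the ON clause from the models' configured foreign keys and relationships, which keeps working once models stop exposing a `.table` attribute. A sketch of the shape (hypothetical `Repo`/`RepoCategory` mapping with a foreign key between them):

```python
# Joining the mapped class lets SQLAlchemy infer the ON clause from
# the models' metadata; joining a bare Table object bypasses the ORM layer:
q = session.query(Repo).outerjoin(RepoCategory)             # preferred
# q = session.query(Repo).outerjoin(RepoCategory.__table__) # legacy shape
```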
diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py
index 79fbb68cc0d5..5159c915cd87 100644
--- a/lib/tool_shed/managers/repositories.py
+++ b/lib/tool_shed/managers/repositories.py
@@ -210,7 +210,8 @@ def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]:
         repository = guid_to_repository(app, tool_id)
         owner = repository.user.username
         name = repository.name
-        repository = _get_repository_by_name_and_owner(app.model.context.current, name, owner, app.model.User)
+        assert name
+        repository = _get_repository_by_name_and_owner(app.model.session().current, name, owner, app.model.User)
         if not repository:
             log.warning(f"Repository {owner}/{name} does not exist, skipping")
             continue
@@ -527,6 +528,7 @@ def upload_tar_and_set_metadata(
     app = trans.app
     user = trans.user
     assert user
+    assert user.username
     repo_dir = repository.repo_path(app)
     tip = repository.tip()
     tar_response = upload_tar(
diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py
index a5df2eddcf91..bff8d0a21788 100644
--- a/lib/tool_shed/metadata/repository_metadata_manager.py
+++ b/lib/tool_shed/metadata/repository_metadata_manager.py
@@ -48,7 +48,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator):
     """A MetadataGenerator building on ToolShed's app and repository constructs."""

     app: ToolShedApp
-    repository: Optional[Repository]
+    repository: Optional[Repository]  # type:ignore[assignment]

     # why is mypy making me re-annotate these things from the base class, it didn't
     # when they were in the same file
diff --git a/lib/tool_shed/repository_registry.py b/lib/tool_shed/repository_registry.py
index 6927b4e68e74..521ae1a06c6f 100644
--- a/lib/tool_shed/repository_registry.py
+++ b/lib/tool_shed/repository_registry.py
@@ -44,8 +44,12 @@ def __init__(self, app):
         self.viewable_suites_by_category = {}
         self.viewable_valid_repositories_and_suites_by_category = {}
         self.viewable_valid_suites_by_category = {}
-        self.load_viewable_repositories_and_suites_by_category()
-        self.load_repository_and_suite_tuples()
+        self.load()
+
+    def load(self):
+        with self.sa_session.begin():
+            self.load_viewable_repositories_and_suites_by_category()
+            self.load_repository_and_suite_tuples()

     def add_category_entry(self, category):
         category_name = str(category.name)
diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py
index d493e97aa990..4c82a0d7183c 100644
--- a/lib/tool_shed/test/base/playwrightbrowser.py
+++ b/lib/tool_shed/test/base/playwrightbrowser.py
@@ -132,7 +132,7 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp
         input_s = form.locator(f"select[name='{control_name}']")
         if input_i.count():
             if control_name in ["redirect"]:
-                input_i.input_value = value
+                input_i.input_value = value  # type:ignore[method-assign, assignment]
             else:
                 if isinstance(value, bool):
                     if value and not input_i.is_checked():
@@ -142,9 +142,9 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp
                 else:
                     input_i.fill(value)
         if input_t.count():
-            input_t.fill(value)
+            input_t.fill(value)  # type:ignore[arg-type]
         if input_s.count():
-            input_s.select_option(value)
+            input_s.select_option(value)  # type:ignore[arg-type]

     def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None:
         multi_select = "form[name='categories'] select[name='category_id']"
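The `repository_registry` change wraps both load steps in a single `with self.sa_session.begin():` block, so they run in one explicit transaction that commits on success and rolls back on error, rather than relying on implicit autobegin behavior. A generic, self-contained sketch of the idiom:

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite://", future=True)
Session = sessionmaker(engine, future=True)

with Session() as session:
    # begin() opens one explicit transaction: both statements commit
    # together on exit, or roll back together on an exception.
    with session.begin():
        session.execute(text("create table t (x integer)"))
        session.execute(text("insert into t values (1)"))
```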
diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py
index 1f4af8791678..a73cdf85b299 100644
--- a/lib/tool_shed/test/base/twillbrowser.py
+++ b/lib/tool_shed/test/base/twillbrowser.py
@@ -5,7 +5,7 @@
 )

 import twill.commands as tc
-from twill.browser import FormElement
+from twill.browser import FormElement  # type:ignore[attr-defined]

 from galaxy.util import smart_str

 from .browser import (
@@ -19,7 +19,7 @@


 def visit_url(url: str, allowed_codes: List[int]) -> str:
-    new_url = tc.go(url)
+    new_url = tc.go(url)  # type:ignore[func-returns-value]
     return_code = tc.browser.code
     assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format(
         return_code,
diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py
index 968109ac0235..4579ca1c72f1 100644
--- a/lib/tool_shed/test/base/twilltestcase.py
+++ b/lib/tool_shed/test/base/twilltestcase.py
@@ -1300,8 +1300,11 @@ def edit_repository_information(self, repository: Repository, revert=True, **kwd
         self.check_for_strings(strings_displayed)
         if revert:
             strings_displayed = []
+            # assert original_information[input_elem_name]
             for input_elem_name in ["repo_name", "description", "long_description"]:
-                self._browser.fill_form_value("edit_repository", input_elem_name, original_information[input_elem_name])
+                self._browser.fill_form_value(
+                    "edit_repository", input_elem_name, original_information[input_elem_name]  # type:ignore[arg-type]
+                )
                 strings_displayed.append(self.escape_html(original_information[input_elem_name]))
             self._browser.submit_form_with_name("edit_repository", "edit_repository_button")
         if self._browser.is_twill:
@@ -2050,7 +2053,7 @@ def _assert_repo_has_tool_with_id(
         )
         tools = installed_repository.metadata_["tools"]
         found_it = False
-        for tool in tools:
+        for tool in tools:  # type:ignore[attr-defined]
             if "id" not in tool:
                 continue
             if tool["id"] == tool_id:
diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py
index 99027ea17fc8..a6cab6cc954d 100644
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -287,7 +287,7 @@ def handle_email_alerts(
                 email_alerts.append(user.email)
     else:
         subject = f"Galaxy tool shed update alert for repository named {str(repository.name)}"
-        email_alerts = json.loads(repository.email_alerts)
+        email_alerts = json.loads(repository.email_alerts)  # type:ignore[arg-type]
     for email in email_alerts:
         to = email.strip()
         # Send it
diff --git a/lib/tool_shed/webapp/controllers/admin.py b/lib/tool_shed/webapp/controllers/admin.py
index 73ddef546528..9b1681bafe24 100644
--- a/lib/tool_shed/webapp/controllers/admin.py
+++ b/lib/tool_shed/webapp/controllers/admin.py
@@ -324,7 +324,7 @@ def regenerate_statistics(self, trans, **kwd):
         message = escape(kwd.get("message", ""))
         status = kwd.get("status", "done")
         if "regenerate_statistics_button" in kwd:
-            trans.app.shed_counter.generate_statistics()
+            trans.app.shed_counter.generate_statistics(trans.sa_session)
            message = "Successfully regenerated statistics"
         return trans.fill_template("/webapps/tool_shed/admin/statistics.mako", message=message, status=status)
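`regenerate_statistics` now hands `trans.sa_session` to `generate_statistics()`; together with the `shed_statistics.py` hunk further below (which drops the `sa_session` property that read `self.model.context`), the counter no longer reaches into global model state. A sketch of that dependency-injection shape, with a simplified counter standing in for `ShedCounter`:

```python
from sqlalchemy import select
from sqlalchemy.orm import Session

from tool_shed.webapp.model import Repository


class Counter:
    """Simplified stand-in for ShedCounter: the caller supplies the session."""

    def generate_statistics(self, sa_session: Session) -> None:
        # No self.sa_session property; all reads go through the argument.
        self.repositories = 0
        for _repository in sa_session.scalars(select(Repository)):
            self.repositories += 1
```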
diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py
index 2f2be9dccfbe..84dee54ccf93 100644
--- a/lib/tool_shed/webapp/model/__init__.py
+++ b/lib/tool_shed/webapp/model/__init__.py
@@ -10,6 +10,7 @@
 from typing import (
     Any,
     Mapping,
+    Optional,
     TYPE_CHECKING,
 )
@@ -32,6 +33,8 @@
     UniqueConstraint,
 )
 from sqlalchemy.orm import (
+    Mapped,
+    mapped_column,
     registry,
     relationship,
 )
@@ -62,13 +65,13 @@

 if TYPE_CHECKING:
     # Workaround for https://github.com/python/mypy/issues/14182
-    from sqlalchemy.orm.decl_api import DeclarativeMeta as _DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta as _DeclarativeMeta

     class DeclarativeMeta(_DeclarativeMeta, type):
         pass

 else:
-    from sqlalchemy.orm.decl_api import DeclarativeMeta
+    from sqlalchemy.orm import DeclarativeMeta

 mapper_registry = registry()
@@ -78,6 +81,8 @@ class Base(metaclass=DeclarativeMeta):
     registry = mapper_registry
     metadata = mapper_registry.metadata
     __init__ = mapper_registry.constructor
+    table: Table
+    __table__: Table

     @classmethod
     def __declare_last__(cls):
@@ -87,27 +92,27 @@ def __declare_last__(cls):
 class APIKeys(Base):
     __tablename__ = "api_keys"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    user_id = Column(ForeignKey("galaxy_user.id"), index=True)
-    key = Column(TrimmedString(32), index=True, unique=True)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True)
+    key: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True, unique=True)
     user = relationship("User", back_populates="api_keys")
-    deleted = Column(Boolean, index=True, default=False)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)


 class User(Base, Dictifiable):
     __tablename__ = "galaxy_user"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    email = Column(TrimmedString(255), nullable=False)
-    username = Column(String(255), index=True)
-    password = Column(TrimmedString(40), nullable=False)
-    external = Column(Boolean, default=False)
-    new_repo_alert = Column(Boolean, default=False)
-    deleted = Column(Boolean, index=True, default=False)
-    purged = Column(Boolean, index=True, default=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    email: Mapped[str] = mapped_column(TrimmedString(255), nullable=False)
+    username: Mapped[Optional[str]] = mapped_column(String(255), index=True)
+    password: Mapped[str] = mapped_column(TrimmedString(40), nullable=False)
+    external: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
+    new_repo_alert: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    purged: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
     active_repositories = relationship(
         "Repository",
         primaryjoin=(lambda: (Repository.user_id == User.id) & (not_(Repository.deleted))),  # type: ignore[has-type]
@@ -186,9 +191,9 @@ def set_random_password(self, length=16):
 class PasswordResetToken(Base):
     __tablename__ = "password_reset_token"

-    token = Column(String(32), primary_key=True, unique=True, index=True)
-    expiration_time = Column(DateTime)
-    user_id = Column(ForeignKey("galaxy_user.id"), index=True)
+    token: Mapped[str] = mapped_column(String(32), primary_key=True, unique=True, index=True)
+    expiration_time: Mapped[Optional[datetime]] = mapped_column(DateTime)
+    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True)
     user = relationship("User", back_populates="reset_tokens")

     def __init__(self, user, token=None):
"galaxy_group" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) roles = relationship("GroupRoleAssociation", back_populates="group") users = relationship("UserGroupAssociation", back_populates="group") @@ -223,13 +228,13 @@ def __init__(self, name=None): class Role(Base, Dictifiable): __tablename__ = "role" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(String(255), index=True, unique=True) - description = Column(TEXT) - type = Column(String(40), index=True) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + type: Mapped[Optional[str]] = mapped_column(String(40), index=True) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) repositories = relationship("RepositoryRoleAssociation", back_populates="role") groups = relationship("GroupRoleAssociation", back_populates="role") users = relationship("UserRoleAssociation", back_populates="role") @@ -258,11 +263,11 @@ def is_repository_admin_role(self): class UserGroupAssociation(Base): __tablename__ = "user_group_association" - id = Column(Integer, primary_key=True) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - group_id = Column(ForeignKey("galaxy_group.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) user = relationship("User", back_populates="groups") group = relationship("Group", back_populates="users") @@ -275,11 +280,11 @@ def __init__(self, user, group): class UserRoleAssociation(Base): __tablename__ = "user_role_association" - id = Column(Integer, primary_key=True) - user_id = Column(ForeignKey("galaxy_user.id"), index=True) - role_id = Column(ForeignKey("role.id"), index=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), 
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
     user = relationship("User", back_populates="roles")
     role = relationship("Role", back_populates="users")
@@ -293,11 +298,11 @@ def __init__(self, user, role):
 class GroupRoleAssociation(Base):
     __tablename__ = "group_role_association"

-    id = Column(Integer, primary_key=True)
-    group_id = Column(ForeignKey("galaxy_group.id"), index=True)
-    role_id = Column(ForeignKey("role.id"), index=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    group_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_group.id"), index=True)
+    role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
     group = relationship("Group", back_populates="roles")
     role = relationship("Role", back_populates="groups")
@@ -309,11 +314,11 @@ def __init__(self, group, role):
 class RepositoryRoleAssociation(Base):
     __tablename__ = "repository_role_association"

-    id = Column(Integer, primary_key=True)
-    repository_id = Column(ForeignKey("repository.id"), index=True)
-    role_id = Column(ForeignKey("role.id"), index=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True)
+    role_id: Mapped[Optional[int]] = mapped_column(ForeignKey("role.id"), index=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
     repository = relationship("Repository", back_populates="roles")
     role = relationship("Role", back_populates="repositories")
@@ -326,19 +331,19 @@ def __init__(self, repository, role):
 class GalaxySession(Base):
     __tablename__ = "galaxy_session"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    user_id = Column(ForeignKey("galaxy_user.id"), index=True, nullable=True)
-    remote_host = Column(String(255))
-    remote_addr = Column(String(255))
-    referer = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True)
+    remote_host: Mapped[Optional[str]] = mapped_column(String(255))
+    remote_addr: Mapped[Optional[str]] = mapped_column(String(255))
+    referer: Mapped[Optional[str]] = mapped_column(TEXT)
     # unique 128 bit random number coerced to a string
-    session_key = Column(TrimmedString(255), index=True, unique=True)
-    is_valid = Column(Boolean, default=False)
+    session_key: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True)
+    is_valid: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
     # saves a reference to the previous session so we have a way to chain them together
-    prev_session_id = Column(Integer)
-    last_action = Column(DateTime)
+    prev_session_id: Mapped[Optional[int]] = mapped_column(Integer)
+    last_action: Mapped[Optional[datetime]] = mapped_column(DateTime)
     user = relationship("User", back_populates="galaxy_sessions")

     def __init__(self, is_valid=False, **kwd):
@@ -350,21 +355,21 @@ def __init__(self, is_valid=False, **kwd):
 class Repository(Base, Dictifiable):
     __tablename__ = "repository"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    name = Column(TrimmedString(255), index=True)
-    type = Column(TrimmedString(255), index=True)
-    remote_repository_url = Column(TrimmedString(255))
-    homepage_url = Column(TrimmedString(255))
-    description = Column(TEXT)
-    long_description = Column(TEXT)
-    user_id = Column(ForeignKey("galaxy_user.id"), index=True)
-    private = Column(Boolean, default=False)
-    deleted = Column(Boolean, index=True, default=False)
-    email_alerts = Column(MutableJSONType, nullable=True)
-    times_downloaded = Column(Integer)
-    deprecated = Column(Boolean, default=False)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    name: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    type: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
+    remote_repository_url: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
+    homepage_url: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
+    description: Mapped[Optional[str]] = mapped_column(TEXT)
+    long_description: Mapped[Optional[str]] = mapped_column(TEXT)
+    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True)
+    private: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
+    deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False)
+    email_alerts: Mapped[Optional[bytes]] = mapped_column(MutableJSONType, nullable=True)
+    times_downloaded: Mapped[Optional[int]] = mapped_column(Integer)
+    deprecated: Mapped[Optional[bool]] = mapped_column(Boolean, default=False)
     categories = relationship("RepositoryCategoryAssociation", back_populates="repository")
     ratings = relationship(
         "RepositoryRatingAssociation",
@@ -564,13 +569,13 @@ def set_item(self, item):
 class RepositoryRatingAssociation(Base, ItemRatingAssociation):
     __tablename__ = "repository_rating_association"

-    id = Column(Integer, primary_key=True)
-    create_time = Column(DateTime, default=now)
-    update_time = Column(DateTime, default=now, onupdate=now)
-    repository_id = Column(ForeignKey("repository.id"), index=True)
-    user_id = Column(ForeignKey("galaxy_user.id"), index=True)
-    rating = Column(Integer, index=True)
-    comment = Column(TEXT)
+    id: Mapped[int] = mapped_column(Integer, primary_key=True)
+    create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now)
+    update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now)
+    repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True)
+    user_id: Mapped[Optional[int]] = mapped_column(ForeignKey("galaxy_user.id"), index=True)
+    rating: Mapped[Optional[int]] = mapped_column(Integer, index=True)
+    comment: Mapped[Optional[str]] = mapped_column(TEXT)
     repository = relationship("Repository", back_populates="ratings")
= relationship("User") @@ -581,12 +586,12 @@ def set_item(self, repository): class Category(Base, Dictifiable): __tablename__ = "category" - id = Column(Integer, primary_key=True) - create_time = Column(DateTime, default=now) - update_time = Column(DateTime, default=now, onupdate=now) - name = Column(TrimmedString(255), index=True, unique=True) - description = Column(TEXT) - deleted = Column(Boolean, index=True, default=False) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + create_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now) + update_time: Mapped[Optional[datetime]] = mapped_column(DateTime, default=now, onupdate=now) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) + description: Mapped[Optional[str]] = mapped_column(TEXT) + deleted: Mapped[Optional[bool]] = mapped_column(Boolean, index=True, default=False) repositories = relationship("RepositoryCategoryAssociation", back_populates="category") dict_collection_visible_keys = ["id", "name", "description", "deleted"] @@ -600,9 +605,9 @@ def __init__(self, deleted=False, **kwd): class RepositoryCategoryAssociation(Base): __tablename__ = "repository_category_association" - id = Column(Integer, primary_key=True) - repository_id = Column(ForeignKey("repository.id"), index=True) - category_id = Column(ForeignKey("category.id"), index=True) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + repository_id: Mapped[Optional[int]] = mapped_column(ForeignKey("repository.id"), index=True) + category_id: Mapped[Optional[int]] = mapped_column(ForeignKey("category.id"), index=True) category = relationship("Category", back_populates="repositories") repository = relationship("Repository", back_populates="categories") @@ -615,10 +620,10 @@ class Tag(Base): __tablename__ = "tag" __table_args__ = (UniqueConstraint("name"),) - id = Column(Integer, primary_key=True) - type = Column(Integer) - parent_id = Column(ForeignKey("tag.id")) - name = Column(TrimmedString(255)) + id: Mapped[int] = mapped_column(Integer, primary_key=True) + type: Mapped[Optional[int]] = mapped_column(Integer) + parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) + name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) children = relationship("Tag", back_populates="parent") parent = relationship("Tag", back_populates="children", remote_side=[id]) diff --git a/lib/tool_shed/webapp/model/mapping.py b/lib/tool_shed/webapp/model/mapping.py index 0da07ef195c1..94ff0317f3c4 100644 --- a/lib/tool_shed/webapp/model/mapping.py +++ b/lib/tool_shed/webapp/model/mapping.py @@ -35,16 +35,19 @@ def init( engine_options = engine_options or {} # Create the database engine engine = build_engine(url, engine_options) - # Connect the metadata to the database. 
diff --git a/lib/tool_shed/webapp/model/mapping.py b/lib/tool_shed/webapp/model/mapping.py
index 0da07ef195c1..94ff0317f3c4 100644
--- a/lib/tool_shed/webapp/model/mapping.py
+++ b/lib/tool_shed/webapp/model/mapping.py
@@ -35,16 +35,19 @@ def init(
     engine_options = engine_options or {}
     # Create the database engine
     engine = build_engine(url, engine_options)
-    # Connect the metadata to the database.
-    metadata.bind = engine
     result = ToolShedModelMapping([tool_shed.webapp.model], engine=engine)
     if create_tables:
-        metadata.create_all()
+        metadata.create_all(bind=engine)
         result.create_tables = create_tables
     result.security_agent = CommunityRBACAgent(result)
     result.shed_counter = shed_statistics.ShedCounter(result)
+
+    session = result.session()
+    with session.begin():
+        result.shed_counter.generate_statistics(session)
+
     return result
diff --git a/lib/tool_shed/webapp/util/shed_statistics.py b/lib/tool_shed/webapp/util/shed_statistics.py
index d864420ecb2d..7e21673de19d 100644
--- a/lib/tool_shed/webapp/util/shed_statistics.py
+++ b/lib/tool_shed/webapp/util/shed_statistics.py
@@ -23,14 +23,8 @@ def __init__(self, model):
         self.unique_owners = 0
         self.unique_valid_tools = 0
         self.workflows = 0
-        self.generate_statistics()

-    @property
-    def sa_session(self):
-        """Returns a SQLAlchemy session"""
-        return self.model.context
-
-    def generate_statistics(self):
+    def generate_statistics(self, sa_session):
         self.custom_datatypes = 0
         self.deleted_repositories = 0
         self.deprecated_repositories = 0
@@ -42,7 +36,7 @@ def generate_statistics(self):
         self.unique_valid_tools = 0
         self.workflows = 0
         unique_user_ids = []
-        for repository in self.sa_session.scalars(select(Repository)):
+        for repository in sa_session.scalars(select(Repository)):
             self.repositories += 1
             self.total_clones += repository.times_downloaded
             is_deleted = repository.deleted
diff --git a/packages/app/setup.cfg b/packages/app/setup.cfg
index 4360d7181811..1879c3437ee9 100644
--- a/packages/app/setup.cfg
+++ b/packages/app/setup.cfg
@@ -68,7 +68,7 @@ install_requires =
     refgenconf>=0.12.0
    regex
    requests
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1
    sqlitedict
    starlette
    svgwrite
diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg
index 157324e102a8..3e3e0e085e5c 100644
--- a/packages/data/setup.cfg
+++ b/packages/data/setup.cfg
@@ -58,7 +58,7 @@ install_requires =
    pysam>=0.21
    rocrate
    social-auth-core[openidconnect]==4.0.3
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1
    tifffile
    typing-extensions
    WebOb
diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg
index 40c3be56f524..05394dca8e27 100644
--- a/packages/job_execution/setup.cfg
+++ b/packages/job_execution/setup.cfg
@@ -38,7 +38,7 @@ install_requires =
    galaxy-tool-util
    galaxy-util
    MarkupSafe
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1

packages = find:
python_requires = >=3.8
diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg
index 20ac5878220b..53ef00625581 100644
--- a/packages/web_apps/setup.cfg
+++ b/packages/web_apps/setup.cfg
@@ -58,7 +58,7 @@ install_requires =
    PyYAML
    requests
    Routes
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1
    sqlalchemy-migrate
    starlette
    starlette-context
diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg
index e04d3f101f32..55d3421dade9 100644
--- a/packages/web_framework/setup.cfg
+++ b/packages/web_framework/setup.cfg
@@ -40,7 +40,7 @@ install_requires =
    pydantic>=2,!=2.6.0,!=2.6.1
    requests
    Routes
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1
    WebOb

packages = find:
diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg
index f7bc6adc5d54..16e6d08f1c15 100644
--- a/packages/web_stack/setup.cfg
+++ b/packages/web_stack/setup.cfg
@@ -34,7 +34,7 @@ include_package_data = True
install_requires =
    galaxy-data
    galaxy-util
-    SQLAlchemy>=1.4.25,<2
+    SQLAlchemy>=2.0,<2.1

packages = find:
python_requires = >=3.8
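Two 2.0 removals drive the `mapping.py` hunk above: bound metadata is gone, so `metadata.bind = engine` is deleted and DDL calls name their engine explicitly, and the statistics pass now runs inside a `session.begin()` block at mapping time. A sketch of the explicit-bind form (the in-memory SQLite URL is just for illustration; the real code builds its engine via `build_engine(url, engine_options)`):

```python
from sqlalchemy import MetaData, create_engine

metadata = MetaData()
engine = create_engine("sqlite://")  # illustrative URL only

# 1.x allowed: metadata.bind = engine; metadata.create_all()
# 2.0 has no bound metadata, so every DDL call names the engine:
metadata.create_all(bind=engine)
```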
diff --git a/pyproject.toml b/pyproject.toml
index a1b0be2fda3d..86a696525145 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -108,7 +108,7 @@ Routes = "*"
 schema-salad = "!=8.3.20220721194857"  # https://github.com/common-workflow-language/schema_salad/issues/575
 social-auth-core = {version = "==4.0.3", extras = ["openidconnect"]}
 sortedcontainers = "*"
-SQLAlchemy = ">=1.4.25,<2"
+SQLAlchemy = "^2.0"
 sqlitedict = "*"
 sqlparse = "*"
 starlette = "*"
diff --git a/scripts/cleanup_datasets/cleanup_datasets.py b/scripts/cleanup_datasets/cleanup_datasets.py
index a373e551ff14..81fd7801f5da 100755
--- a/scripts/cleanup_datasets/cleanup_datasets.py
+++ b/scripts/cleanup_datasets/cleanup_datasets.py
@@ -261,7 +261,7 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_r
         histories = (
             app.sa_session.query(app.model.History)
             .filter(and_(app.model.History.__table__.c.deleted == true(), app.model.History.update_time < cutoff_time))
-            .options(joinedload("datasets"))
+            .options(joinedload(app.model.History.datasets))
         )
     else:
         histories = (
@@ -273,7 +273,7 @@ def purge_histories(app, cutoff_time, remove_from_disk, info_only=False, force_r
                     app.model.History.update_time < cutoff_time,
                 )
             )
-            .options(joinedload("datasets"))
+            .options(joinedload(app.model.History.datasets))
         )
     for history in histories:
         log.info("### Processing history id %d (%s)", history.id, unicodify(history.name))
diff --git a/test/unit/app/managers/test_HistoryManager.py b/test/unit/app/managers/test_HistoryManager.py
index ba1710740540..c3627c87d166 100644
--- a/test/unit/app/managers/test_HistoryManager.py
+++ b/test/unit/app/managers/test_HistoryManager.py
@@ -85,7 +85,10 @@ def test_base(self):
         self.log("should be able to order")
         history3 = self.history_manager.create(name="history3", user=user2)
-        name_first_then_time = (model.History.name, sqlalchemy.desc(model.History.create_time))
+        name_first_then_time = (
+            model.History.name,
+            sqlalchemy.desc(model.History.create_time),
+        )  # type:ignore[var-annotated]
         assert self.history_manager.list(order_by=name_first_then_time) == [history2, history1, history3]

     def test_copy(self):
diff --git a/test/unit/app/managers/test_NotificationManager.py b/test/unit/app/managers/test_NotificationManager.py
index 82b339f05792..c8c011160a83 100644
--- a/test/unit/app/managers/test_NotificationManager.py
+++ b/test/unit/app/managers/test_NotificationManager.py
@@ -264,8 +264,8 @@ def test_get_user_notification(self):
         actual_user_notification = self.notification_manager.get_user_notification(user, notification.id)

         self._assert_notification_expected(actual_user_notification, expected_user_notification)
-        assert actual_user_notification["seen_time"] is None
-        assert actual_user_notification["deleted"] is False
+        assert actual_user_notification._mapping["seen_time"] is None
+        assert actual_user_notification._mapping["deleted"] is False

     def test_update_user_notifications(self):
         user = self._create_test_user()
diff --git a/test/unit/app/test_galaxy_install.py b/test/unit/app/test_galaxy_install.py
index 1695e8faebc2..f7c44908ff58 100644
--- a/test/unit/app/test_galaxy_install.py
+++ b/test/unit/app/test_galaxy_install.py
@@ -57,7 +57,7 @@ def test_against_production_shed(tmp_path: Path):
     assert tsr
     message, status = check_for_updates(
         install_target.tool_shed_registry,
-        install_model_context,
+        install_model_context,  # type:ignore[arg-type]
         tsr.id,
     )
     assert status
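The test fixes above hit two more 2.0 behavior changes: `joinedload()` no longer accepts string attribute names (hence `joinedload(app.model.History.datasets)` in `cleanup_datasets.py`), and `Row` objects are no longer dict-like, so keyed access goes through `Row._mapping` (hence the `test_NotificationManager.py` change). A compact illustration, with Galaxy's `History` model standing in:

```python
from sqlalchemy import select
from sqlalchemy.orm import Session, joinedload

from galaxy.model import History


def example(session: Session) -> None:
    # 1.x accepted joinedload("datasets"); 2.0 requires the mapped attribute.
    # unique() is required when joined-eager-loading a collection in 2.0.
    histories = session.scalars(
        select(History).options(joinedload(History.datasets))
    ).unique().all()

    row = session.execute(select(History.id, History.deleted)).first()
    if row is not None:
        # row["deleted"] raises in 2.0; use the _mapping view for keyed access.
        assert row._mapping["deleted"] in (True, False)
```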
diff --git a/test/unit/app/tools/test_metadata.py b/test/unit/app/tools/test_metadata.py
index 8f83c6b90ef6..7e1784a84caa 100644
--- a/test/unit/app/tools/test_metadata.py
+++ b/test/unit/app/tools/test_metadata.py
@@ -18,7 +18,7 @@ class TestMetadata(TestCase, tools_support.UsesTools):
     def setUp(self):
         super().setUp()
         self.setup_app()
-        model.Dataset.object_store = self.app.object_store
+        model.Dataset.object_store = self.app.object_store  # type: ignore[assignment]
         job = model.Job()
         sa_session = self.app.model.session
         sa_session.add(job)
diff --git a/test/unit/data/model/test_engine_factory.py b/test/unit/data/model/test_engine_factory.py
index 947664865f3e..361fc1f608d3 100644
--- a/test/unit/data/model/test_engine_factory.py
+++ b/test/unit/data/model/test_engine_factory.py
@@ -7,28 +7,28 @@ class TestSetSqliteConnectArgs:
     def test_engine_options_empty(self):
         engine_options = {}  # type: ignore[var-annotated]
-        updated = set_sqlite_connect_args(engine_options, SQLITE_URL)
-        assert updated == {"connect_args": {"check_same_thread": False}}
+        set_sqlite_connect_args(engine_options, SQLITE_URL)
+        assert engine_options == {"connect_args": {"check_same_thread": False}}

     def test_update_nonempty_engine_options(self):
         engine_options = {"foo": "some foo"}
-        updated = set_sqlite_connect_args(engine_options, SQLITE_URL)
-        assert len(updated) == 2
-        assert updated["foo"] == "some foo"
-        assert updated["connect_args"] == {"check_same_thread": False}
+        set_sqlite_connect_args(engine_options, SQLITE_URL)
+        assert len(engine_options) == 2
+        assert engine_options["foo"] == "some foo"
+        assert engine_options["connect_args"] == {"check_same_thread": False}  # type:ignore[comparison-overlap]

     def test_overwrite_connect_args(self):
         engine_options = {"foo": "some foo", "connect_args": {"check_same_thread": True}}
-        updated = set_sqlite_connect_args(engine_options, SQLITE_URL)
-        assert len(updated) == 2
-        assert updated["foo"] == "some foo"
-        assert updated["connect_args"] == {"check_same_thread": False}
+        set_sqlite_connect_args(engine_options, SQLITE_URL)
+        assert len(engine_options) == 2
+        assert engine_options["foo"] == "some foo"
+        assert engine_options["connect_args"] == {"check_same_thread": False}

     def test_update_nonempty_connect_args(self):
         engine_options = {"foo": "some foo", "connect_args": {"bar": "some bar"}}
-        updated = set_sqlite_connect_args(engine_options, SQLITE_URL)
-        assert len(updated) == 2
-        assert updated["foo"] == "some foo"
-        assert len(updated["connect_args"]) == 2
-        assert updated["connect_args"]["check_same_thread"] is False
-        assert updated["connect_args"]["bar"] == "some bar"
+        set_sqlite_connect_args(engine_options, SQLITE_URL)
+        assert len(engine_options) == 2
+        assert engine_options["foo"] == "some foo"
+        assert len(engine_options["connect_args"]) == 2
+        assert engine_options["connect_args"]["check_same_thread"] is False  # type:ignore[index]
+        assert engine_options["connect_args"]["bar"] == "some bar"  # type:ignore[index]
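The `test_engine_factory.py` rewrite reflects `set_sqlite_connect_args()` now mutating the passed-in `engine_options` dict in place instead of returning an updated copy, so the assertions inspect the original dict. Roughly, under that assumption about the helper's current shape (this is a hypothetical reimplementation, not Galaxy's actual function body):

```python
from typing import Any, Dict


def set_sqlite_connect_args(engine_options: Dict[str, Any], url: str) -> None:
    """In-place variant (no return value), as the updated tests assume."""
    if url.startswith("sqlite"):
        # sqlite connections may be shared across threads, so disable the check.
        engine_options.setdefault("connect_args", {})["check_same_thread"] = False


engine_options: Dict[str, Any] = {"foo": "some foo"}
set_sqlite_connect_args(engine_options, "sqlite:///test.sqlite")
assert engine_options == {"foo": "some foo", "connect_args": {"check_same_thread": False}}
```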
diff --git a/test/unit/data/model/test_mapping_testing_utils.py b/test/unit/data/model/test_mapping_testing_utils.py
index 6424632422ab..346ec2421403 100644
--- a/test/unit/data/model/test_mapping_testing_utils.py
+++ b/test/unit/data/model/test_mapping_testing_utils.py
@@ -65,7 +65,7 @@ def test_collection_consists_of_objects(session):
     # contains wrong number of objects
     assert not collection_consists_of_objects([stored_foo1, stored_foo1, stored_foo2], foo1, foo2)
     # if an object's primary key is not set, it cannot be equal to another object
-    foo1.id, stored_foo1.id = None, None
+    foo1.id, stored_foo1.id = None, None  # type:ignore[assignment]
     assert not collection_consists_of_objects([stored_foo1], foo1)
diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py
index 81c3d9f68b5f..e31511e7c272 100644
--- a/test/unit/data/model/test_model_store.py
+++ b/test/unit/data/model/test_model_store.py
@@ -988,7 +988,7 @@ def _setup_collection_invocation(app):
     workflow_step_1 = model.WorkflowStep()
     workflow_step_1.order_index = 0
     workflow_step_1.type = "data_collection_input"
-    workflow_step_1.tool_inputs = {}
+    workflow_step_1.tool_inputs = {}  # type:ignore[assignment]
     sa_session.add(workflow_step_1)
     workflow_1 = _workflow_from_steps(u, [workflow_step_1])
     workflow_1.license = "MIT"
@@ -1014,7 +1014,7 @@ def _setup_simple_invocation(app):
     workflow_step_1 = model.WorkflowStep()
     workflow_step_1.order_index = 0
     workflow_step_1.type = "data_input"
-    workflow_step_1.tool_inputs = {}
+    workflow_step_1.tool_inputs = {}  # type:ignore[assignment]
     sa_session.add(workflow_step_1)
     workflow = _workflow_from_steps(u, [workflow_step_1])
     workflow.license = "MIT"
diff --git a/test/unit/data/test_dataset_materialization.py b/test/unit/data/test_dataset_materialization.py
index c107f2eb6bc8..9015b107539f 100644
--- a/test/unit/data/test_dataset_materialization.py
+++ b/test/unit/data/test_dataset_materialization.py
@@ -134,7 +134,7 @@ def test_deferred_hdas_basic_attached_from_detached_hda():
     assert deferred_hda.dataset.state == "deferred"
     materializer = materializer_factory(
-        True, object_store=fixture_context.app.object_store, sa_session=fixture_context.sa_session
+        True, object_store=fixture_context.app.object_store, sa_session=fixture_context.sa_session()
     )
     materialized_hda = materializer.ensure_materialized(deferred_hda)
     materialized_dataset = materialized_hda.dataset
diff --git a/test/unit/data/test_mutable_json_column.py b/test/unit/data/test_mutable_json_column.py
index 895438e74b40..9bc67fbc68e0 100644
--- a/test/unit/data/test_mutable_json_column.py
+++ b/test/unit/data/test_mutable_json_column.py
@@ -20,7 +20,7 @@ def test_metadata_mutable_column(self):
         session = self.model.session
         with transaction(session):
             session.commit()
-        w.value = {"x": "z"}
+        w.value = {"x": "z"}  # type:ignore[assignment]
         persisted = self.persist_and_reload(w)
         assert persisted.value == {"x": "z"}
         persisted.value["x"] = "1"