diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index 1b10fb204817..e3fd7c3741f6 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -3,7 +3,7 @@
 import os
 import re
 from json import dumps
-
+from typing import Any, cast, Dict, List, Set, Union
 from galaxy import model
 from galaxy.exceptions import ItemAccessibilityException
@@ -69,7 +69,7 @@ def _collect_input_datasets(self, tool, param_values, trans, history, current_us
         if current_user_roles is None:
             current_user_roles = trans.get_current_user_roles()
         input_datasets = {}
-        all_permissions = {}
+        all_permissions: Dict[str, Set[str]] = {}
 
         def record_permission(action, role_id):
             if action not in all_permissions:
@@ -211,7 +211,7 @@ def append_to_key(the_dict, key, value):
                 the_dict[key] = []
             the_dict[key].append(value)
 
-        input_dataset_collections = dict()
+        input_dataset_collections: Dict[str, str] = {}
 
         def visitor(input, value, prefix, parent=None, prefixed_name=None, **kwargs):
            if isinstance(input, DataToolParameter):
@@ -228,7 +228,7 @@ def visitor(input, value, prefix, parent=None, prefixed_name=None, **kwargs):
                 # collection with individual datasets. Database will still
                 # record collection which should be enought for workflow
                 # extraction and tool rerun.
-                if hasattr(value, 'child_collection'):
+                if isinstance(value, model.DatasetCollectionElement):
                     # if we are mapping a collection over a tool, we only require the child_collection
                     dataset_instances = value.child_collection.dataset_instances
                 else:
@@ -473,7 +473,10 @@ def handle_output(name, output, hidden=None):
                 # Output collection is mapped over and has already been copied from original job
                 continue
             collections_manager = app.dataset_collection_manager
-            element_identifiers = []
+            element_identifiers: List[
+                Dict[str, Union[str, List[Dict[str, Union[str, List[Any]]]]]]
+            ] = []
+            # mypy doesn't yet support recursive type definitions
             known_outputs = output.known_outputs(input_collections, collections_manager.type_registry)
             # Just to echo TODO elsewhere - this should be restructured to allow
             # nested collections.
@@ -497,7 +500,19 @@ def handle_output(name, output, hidden=None):
                     ))
                 else:
                     index = name_to_index[parent_id]
-                current_element_identifiers = current_element_identifiers[index]["element_identifiers"]
+                current_element_identifiers = cast(
+                    List[
+                        Dict[
+                            str,
+                            Union[
+                                str, List[Dict[str, Union[str, List[Any]]]]
+                            ],
+                        ]
+                    ],
+                    current_element_identifiers[index][
+                        "element_identifiers"
+                    ],
+                )
 
             effective_output_name = output_part_def.effective_output_name
             element = handle_output(effective_output_name, output_part_def.output_def, hidden=True)
@@ -722,7 +737,7 @@ def _record_inputs(self, trans, tool, job, incoming, inp_data, inp_dataset_colle
         # FIXME: Don't need all of incoming here, just the defined parameters
         # from the tool. We need to deal with tools that pass all post
         # parameters to the command as a special case.
-        reductions = {}
+        reductions: Dict[str, List[str]] = {}
         for name, dataset_collection_info_pairs in inp_dataset_collections.items():
             for (dataset_collection, reduced) in dataset_collection_info_pairs:
                 if reduced:
diff --git a/setup.cfg b/setup.cfg
index 79254b7f979a..ef0494971047 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -553,8 +553,6 @@ check_untyped_defs = False
 check_untyped_defs = False
 [mypy-galaxy.tools.cache]
 check_untyped_defs = False
-[mypy-galaxy.tools.actions]
-check_untyped_defs = False
 [mypy-galaxy.tool_util.deps.containers]
 check_untyped_defs = False
 [mypy-galaxy.managers.users]
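
Note on the cast() added above: per the inline comment in the diff, mypy could not express the recursively nested "element_identifiers" structure at the time, so one level of nesting is spelled out and cast() re-asserts the type after descending a level. Below is a minimal, self-contained sketch of that pattern; the ElementIdentifiers alias and the sample data are illustrative only and are not names from the Galaxy code.

# Sketch of the cast() workaround for a recursively nested dict/list structure.
from typing import Any, Dict, List, Union, cast

# Illustrative alias: one explicit level of nesting, with Any beyond that.
ElementIdentifiers = List[Dict[str, Union[str, List[Dict[str, Union[str, List[Any]]]]]]]

element_identifiers: ElementIdentifiers = [
    {"name": "outer", "src": "new_collection", "element_identifiers": []},
]

# Indexing yields Union[str, List[...]]; cast() tells mypy which branch we know it is.
inner = cast(ElementIdentifiers, element_identifiers[0]["element_identifiers"])
inner.append({"name": "inner", "src": "hda"})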