diff --git a/.ci/flake8_ignorelist.txt b/.ci/flake8_ignorelist.txt index f1cffc4f5952..d4ced27b5b8d 100644 --- a/.ci/flake8_ignorelist.txt +++ b/.ci/flake8_ignorelist.txt @@ -11,6 +11,7 @@ database doc/build eggs lib/galaxy/web/proxy/js/node_modules +lib/tool_shed/test/test_data/repos static/maps static/scripts test/functional/tools/cwl_tools/v1.?/ diff --git a/.flake8 b/.flake8 index fe463fb975e1..3e086e9d1287 100644 --- a/.flake8 +++ b/.flake8 @@ -7,3 +7,4 @@ # W503 is line breaks before binary operators, which has been reversed in PEP 8. # D** are docstring linting - which we mostly ignore except D302. (Hopefully we will solve more over time). ignore = B008,E203,E402,E501,W503,D100,D101,D102,D103,D104,D105,D106,D107,D200,D201,D202,D204,D205,D206,D207,D208,D209,D210,D211,D300,D301,D400,D401,D402,D403,D412,D413 +exclude = lib/tool_shed/test/test_data/repos diff --git a/.github/workflows/lint_openapi_schema.yml b/.github/workflows/lint_openapi_schema.yml index c41e1489d933..02cfb35e640d 100644 --- a/.github/workflows/lint_openapi_schema.yml +++ b/.github/workflows/lint_openapi_schema.yml @@ -51,6 +51,9 @@ jobs: - name: Build typescript schema run: make update-client-api-schema working-directory: 'galaxy root' + - name: Diff... + run: git diff + working-directory: 'galaxy root' - name: Check for changes run: | if [[ `git status --porcelain` ]]; then diff --git a/.github/workflows/toolshed.yaml b/.github/workflows/toolshed.yaml index 64f148c0f346..f6c06663da3c 100644 --- a/.github/workflows/toolshed.yaml +++ b/.github/workflows/toolshed.yaml @@ -20,7 +20,23 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.7'] + include: + - test-install-client: 'galaxy_api' + python-version: '3.7' + shed-api: 'v1' + shed-browser: 'twill' + - test-install-client: 'standalone' + python-version: '3.8' + shed-api: 'v1' + shed-browser: 'twill' + - test-install-client: 'galaxy_api' + python-version: '3.9' + shed-api: 'v2' + shed-browser: 'playwright' + - test-install-client: 'standalone' + python-version: '3.10' + shed-api: 'v2' + shed-browser: 'playwright' services: postgres: image: postgres:13 @@ -52,11 +68,25 @@ jobs: with: path: 'galaxy root/.venv' key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-toolshed + key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy_root/requirements.txt') }}-toolshed + - name: Install dependencies + run: ./scripts/common_startup.sh --skip-client-build + working-directory: 'galaxy root' + - name: Build Frontend + run: '. .venv/bin/activate && cd lib/tool_shed/webapp/frontend && yarn && make client' + working-directory: 'galaxy root' + - name: Install playwright + run: '. 
.venv/bin/activate && playwright install' + working-directory: 'galaxy root' - name: Run tests run: './run_tests.sh -toolshed' + env: + TOOL_SHED_TEST_INSTALL_CLIENT: ${{ matrix.test-install-client }} + TOOL_SHED_API_VERSION: ${{ matrix.shed-api }} + TOOL_SHED_TEST_BROWSER: ${{ matrix.shed-browser }} working-directory: 'galaxy root' - uses: actions/upload-artifact@v3 if: failure() with: - name: Toolshed test results (${{ matrix.python-version }}) + name: Toolshed test results (${{ matrix.python-version }}, ${{ matrix.test-install-client }}) path: 'galaxy root/run_toolshed_tests.html' diff --git a/.isort.cfg b/.isort.cfg index c99585f7f278..ca490cd2930a 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -10,5 +10,5 @@ profile=black reverse_relative=true skip_gitignore=true # Make isort run faster by skipping database -skip_glob=database/* +skip_glob=database/*,lib/tool_shed/test/test_data/repos/* src_paths=lib diff --git a/.redocly.lint-ignore.yaml b/.redocly.lint-ignore.yaml index 01997597cebb..a09ad94d3248 100644 --- a/.redocly.lint-ignore.yaml +++ b/.redocly.lint-ignore.yaml @@ -14,3 +14,6 @@ _schema.yaml: #/paths/~1api~1histories~1{history_id}~1contents~1{history_content_id}~1metadata_file - '#/paths/~1api~1histories~1{history_id}~1contents~1{id}~1validate' - '#/paths/~1api~1histories~1{history_id}~1contents~1{type}s~1{id}' +_shed_schema.yaml: + no-empty-servers: + - '#/openapi' diff --git a/.vscode/shed.code-snippets b/.vscode/shed.code-snippets new file mode 100644 index 000000000000..f90a37c71313 --- /dev/null +++ b/.vscode/shed.code-snippets @@ -0,0 +1,41 @@ +{ + "shedcomp": { + "prefix": "shed_component", + "body": [ + "", + "" + ], + "description": "outline of a tool shed component" + }, + "shedpage": { + "prefix": "shed_page", + "body": [ + "", + "" + ], + "description": "outline of a tool shed page" + }, + "shedfetcher": { + "prefix": "shed_fetcher", + "body": [ + "import { fetcher } from \"@/schema\"", + "const fetcher = fetcher.path(\"$1\").method(\"get\").create()" + ], + "description": "Import shed fetcher and instantiate with a path" + }, + "shedrouter": { + "prefix": "shed_router", + "body": [ + "import router from \"@/router\"" + ] + } +} \ No newline at end of file diff --git a/Makefile b/Makefile index 1e5a171796c5..f20548df435e 100644 --- a/Makefile +++ b/Makefile @@ -182,17 +182,22 @@ endif build-api-schema: $(IN_VENV) python scripts/dump_openapi_schema.py _schema.yaml + $(IN_VENV) python scripts/dump_openapi_schema.py --app shed _shed_schema.yaml remove-api-schema: rm _schema.yaml + rm _shed_schema.yaml update-client-api-schema: client-node-deps build-api-schema $(IN_VENV) cd client && node openapi_to_schema.mjs ../_schema.yaml > src/schema/schema.ts && npx prettier --write src/schema/schema.ts + $(IN_VENV) cd client && node openapi_to_schema.mjs ../_shed_schema.yaml > ../lib/tool_shed/webapp/frontend/src/schema/schema.ts && npx prettier --write ../lib/tool_shed/webapp/frontend/src/schema/schema.ts $(MAKE) remove-api-schema lint-api-schema: build-api-schema $(IN_VENV) npx --yes @redocly/cli lint _schema.yaml + $(IN_VENV) npx --yes @redocly/cli lint _shed_schema.yaml $(IN_VENV) codespell -I .ci/ignore-spelling.txt _schema.yaml + $(IN_VENV) codespell -I .ci/ignore-spelling.txt _shed_schema.yaml $(MAKE) remove-api-schema update-navigation-schema: client-node-deps diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index 7767f32b2a48..8743d155a0d8 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -84,7 +84,10 @@ install_model_scoped_session, ) from galaxy.model.tags 
import GalaxyTagHandler -from galaxy.model.tool_shed_install import mapping as install_mapping +from galaxy.model.tool_shed_install import ( + HasToolBox, + mapping as install_mapping, +) from galaxy.objectstore import ( BaseObjectStore, build_object_store_from_config, @@ -105,8 +108,10 @@ VaultFactory, ) from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from galaxy.tool_shed.galaxy_install.update_repository_manager import UpdateRepositoryManager +from galaxy.tool_util.data import ToolDataTableManager as BaseToolDataTableManager from galaxy.tool_util.deps import containers from galaxy.tool_util.deps.dependencies import AppInfo from galaxy.tool_util.deps.views import DependencyResolversView @@ -214,14 +219,13 @@ def configure_sentry_client(self): ) -class MinimalGalaxyApplication(BasicSharedApp, HaltableContainer, SentryClientMixin): +class MinimalGalaxyApplication(BasicSharedApp, HaltableContainer, SentryClientMixin, HasToolBox): """Encapsulates the state of a minimal Galaxy application""" model: GalaxyModelMapping config: config.GalaxyAppConfiguration tool_cache: ToolCache job_config: jobs.JobConfiguration - toolbox: tools.ToolBox toolbox_search: ToolBoxSearch container_finder: containers.ContainerFinder install_model: ModelMapping @@ -297,15 +301,12 @@ def _configure_tool_config_files(self): self.config.tool_configs.append(self.config.migrated_tools_config) def _configure_toolbox(self): - if not isinstance(self, BasicSharedApp): - raise Exception("Must inherit from BasicSharedApp") - self.citations_manager = CitationsManager(self) self.biotools_metadata_source = get_galaxy_biotools_metadata_source(self.config) self.dynamic_tools_manager = DynamicToolManager(self) self._toolbox_lock = threading.RLock() - self.toolbox = tools.ToolBox(self.config.tool_configs, self.config.tool_path, self) + self._toolbox = tools.ToolBox(self.config.tool_configs, self.config.tool_path, self) galaxy_root_dir = os.path.abspath(self.config.root) file_path = os.path.abspath(self.config.file_path) app_info = AppInfo( @@ -345,6 +346,10 @@ def _configure_toolbox(self): ToolBoxSearch(self.toolbox, index_dir=self.config.tool_search_index_dir, index_help=index_help), ) + @property + def toolbox(self) -> tools.ToolBox: + return self._toolbox + def reindex_tool_search(self) -> None: # Call this when tools are added or removed. self.toolbox_search.build_index(tool_cache=self.tool_cache, toolbox=self.toolbox) @@ -366,7 +371,7 @@ def _set_enabled_container_types(self): def _configure_tool_data_tables(self, from_shed_config): # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path. 
- self.tool_data_tables = ToolDataTableManager( + self.tool_data_tables: BaseToolDataTableManager = ToolDataTableManager( tool_data_path=self.config.tool_data_path, config_filename=self.config.tool_data_table_config_path, other_config_dict=self.config, @@ -488,7 +493,7 @@ def _wait_for_database(self, url): time.sleep(pause) @property - def tool_dependency_dir(self): + def tool_dependency_dir(self) -> Optional[str]: return self.toolbox.dependency_manager.default_base_path def _shutdown_object_store(self): @@ -498,7 +503,7 @@ def _shutdown_model(self): self.model.engine.dispose() -class GalaxyManagerApplication(MinimalManagerApp, MinimalGalaxyApplication): +class GalaxyManagerApplication(MinimalManagerApp, MinimalGalaxyApplication, InstallationTarget[tools.ToolBox]): """Extends the MinimalGalaxyApplication with most managers that are not tied to a web or job handling context.""" model: GalaxyModelMapping @@ -685,7 +690,7 @@ def __init__(self, **kwargs) -> None: self.watchers = self._register_singleton(ConfigWatchers) self._configure_toolbox() # Load Data Manager - self.data_managers = self._register_singleton(DataManagers) + self.data_managers = self._register_singleton(DataManagers) # type: ignore[type-abstract] # Load the update repository manager. self.update_repository_manager = self._register_singleton( UpdateRepositoryManager, UpdateRepositoryManager(self) diff --git a/lib/galaxy/app_unittest_utils/galaxy_mock.py b/lib/galaxy/app_unittest_utils/galaxy_mock.py index e1870b023974..65d559a851a2 100644 --- a/lib/galaxy/app_unittest_utils/galaxy_mock.py +++ b/lib/galaxy/app_unittest_utils/galaxy_mock.py @@ -94,7 +94,7 @@ class MockApp(di.Container, GalaxyDataTestApp): config: "MockAppConfig" amqp_type: str job_search: Optional[JobSearch] = None - toolbox: ToolBox + _toolbox: ToolBox tool_cache: ToolCache install_model: ModelMapping watchers: ConfigWatchers @@ -110,6 +110,7 @@ def __init__(self, config=None, **kwargs) -> None: super().__init__() config = config or MockAppConfig(**kwargs) GalaxyDataTestApp.__init__(self, config=config, **kwargs) + self.install_model = self.model self[BasicSharedApp] = cast(BasicSharedApp, self) self[MinimalManagerApp] = cast(MinimalManagerApp, self) # type: ignore[type-abstract] self[StructuredApp] = cast(StructuredApp, self) # type: ignore[type-abstract] @@ -153,6 +154,10 @@ def url_for(*args, **kwds): self.url_for = url_for + @property + def toolbox(self) -> ToolBox: + return self._toolbox + def wait_for_toolbox_reload(self, toolbox): # TODO: If the tpm test case passes, does the operation really # need to wait. 
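Note: several classes in these hunks (MinimalGalaxyApplication above, MockApp here) replace a plain `toolbox` attribute with a private `_toolbox` field exposed through a read-only property, so that base classes such as BasicSharedApp can declare `toolbox` abstractly. A minimal sketch of the pattern with stand-in names (illustrative only, not the actual Galaxy classes):

    class ToolBox:  # stand-in for galaxy.tools.ToolBox
        pass


    class BasicApp:
        @property
        def toolbox(self) -> ToolBox:
            # subclasses must provide a concrete toolbox
            raise NotImplementedError()


    class App(BasicApp):
        def _configure_toolbox(self) -> None:
            # writers assign the private attribute ...
            self._toolbox = ToolBox()

        @property
        def toolbox(self) -> ToolBox:
            # ... while readers keep using `app.toolbox` unchanged
            return self._toolbox

This keeps every call site (`app.toolbox.dynamic_confs(...)` and the like) source-compatible while letting reload code swap the underlying `_toolbox` instance, as the queue_worker.py hunk later in this diff does.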
diff --git a/lib/galaxy/app_unittest_utils/toolbox_support.py b/lib/galaxy/app_unittest_utils/toolbox_support.py new file mode 100644 index 000000000000..6308fb14244f --- /dev/null +++ b/lib/galaxy/app_unittest_utils/toolbox_support.py @@ -0,0 +1,193 @@ +import collections +import json +import logging +import os +import string +from typing import Optional + +from galaxy.app_unittest_utils.tools_support import UsesTools +from galaxy.config_watchers import ConfigWatchers +from galaxy.model import tool_shed_install +from galaxy.model.base import transaction +from galaxy.model.tool_shed_install import mapping +from galaxy.tools import ToolBox +from galaxy.tools.cache import ToolCache +from galaxy.util.unittest import TestCase + +log = logging.getLogger(__name__) + + +CONFIG_TEST_TOOL_VERSION_TEMPLATE = string.Template( + """ + github.com + example + galaxyproject + ${version} + github.com/galaxyproject/example/test_tool/0.${version} + 0.${version} + + """ +) +CONFIG_TEST_TOOL_VERSION_1 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="1")) +CONFIG_TEST_TOOL_VERSION_2 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="2")) + +REPO_TYPE = collections.namedtuple( + "REPO_TYPE", + "tool_shed owner name changeset_revision installed_changeset_revision description status", +) +DEFAULT_TEST_REPO = REPO_TYPE("github.com", "galaxyproject", "example", "1", "1", "description", "OK") + + +class SimplifiedToolBox(ToolBox): + def __init__(self, test_case: "BaseToolBoxTestCase"): + app = test_case.app + app.watchers.tool_config_watcher.reload_callback = lambda: reload_callback(test_case) + # Handle app/config stuff needed by toolbox but not by tools. + app.tool_cache = ToolCache() if not hasattr(app, "tool_cache") else app.tool_cache + config_files = test_case.config_files + tool_root_dir = test_case.test_directory + super().__init__( + config_files, + tool_root_dir, + app, + ) + # Need to start thread now for new reload callback to take effect + self.app.watchers.start() + + +class BaseToolBoxTestCase(TestCase, UsesTools): + _toolbox: Optional[SimplifiedToolBox] = None + + @property + def integrated_tool_panel_path(self): + return os.path.join(self.test_directory, "integrated_tool_panel.xml") + + def assert_integerated_tool_panel(self, exists=True): + does_exist = os.path.exists(self.integrated_tool_panel_path) + if exists: + assert does_exist + else: + assert not does_exist + + @property + def toolbox(self): + if self._toolbox is None: + self.app._toolbox = self._toolbox = SimplifiedToolBox(self) + return self._toolbox + + def setUp(self): + self.reindexed = False + self.setup_app() + install_model = mapping.init("sqlite:///:memory:", create_tables=True) + self.app.tool_cache = ToolCache() + self.app.install_model = install_model + self.app.reindex_tool_search = self.__reindex # type: ignore[assignment] + itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml") + self.app.config.integrated_tool_panel_config = itp_config + self.app.watchers = ConfigWatchers(self.app) + self._toolbox = None + self.config_files = [] + + def tearDown(self): + self.app.watchers.shutdown() + + def _repo_install(self, changeset, config_filename=None): + metadata = { + "tools": [ + { + "add_to_tool_panel": False, # to have repository.includes_tools_for_display_in_tool_panel=False in InstalledRepositoryManager.activate_repository() + "guid": f"github.com/galaxyproject/example/test_tool/0.{changeset}", + "tool_config": "tool.xml", + } + ], + } + if config_filename: + 
metadata["shed_config_filename"] = config_filename + repository = tool_shed_install.ToolShedRepository(metadata_=metadata) + repository.tool_shed = DEFAULT_TEST_REPO.tool_shed + repository.owner = DEFAULT_TEST_REPO.owner + repository.name = DEFAULT_TEST_REPO.name + repository.changeset_revision = changeset + repository.installed_changeset_revision = changeset + repository.deleted = False + repository.uninstalled = False + self.app.install_model.context.add(repository) + session = self.app.install_model.context + with transaction(session): + session.commit() + return repository + + def _setup_two_versions(self): + self._repo_install(changeset="1") + version1 = tool_shed_install.ToolVersion() + version1.tool_id = "github.com/galaxyproject/example/test_tool/0.1" + self.app.install_model.context.add(version1) + session = self.app.install_model.context + with transaction(session): + session.commit() + + self._repo_install(changeset="2") + version2 = tool_shed_install.ToolVersion() + version2.tool_id = "github.com/galaxyproject/example/test_tool/0.2" + self.app.install_model.context.add(version2) + session = self.app.install_model.context + with transaction(session): + session.commit() + + version_association = tool_shed_install.ToolVersionAssociation() + version_association.parent_id = version1.id + version_association.tool_id = version2.id + + self.app.install_model.context.add(version_association) + session = self.app.install_model.context + with transaction(session): + session.commit() + + def _setup_two_versions_in_config(self, section=False): + if section: + template = """ +
+<toolbox tool_path="%s">
+<section id="tid" name="TID" version="">
+    %s
+</section>
+<section id="tid2" name="TID2" version="">
+    %s
+</section>
+</toolbox>
""" + else: + template = """ + %s + %s +""" + self._add_config(template % (self.test_directory, CONFIG_TEST_TOOL_VERSION_1, CONFIG_TEST_TOOL_VERSION_2)) + + def _add_config(self, content, name="tool_conf.xml"): + is_json = name.endswith(".json") + path = self._tool_conf_path(name=name) + with open(path, "w") as f: + if not is_json or isinstance(content, str): + f.write(content) + else: + json.dump(content, f) + self.config_files.append(path) + + def _init_dynamic_tool_conf(self): + # Add a dynamic tool conf (such as a ToolShed managed one) to list of configs. + self._add_config(f"""""") + + def _tool_conf_path(self, name="tool_conf.xml"): + path = os.path.join(self.test_directory, name) + return path + + def _tool_path(self, name="tool.xml"): + path = os.path.join(self.test_directory, name) + return path + + def __reindex(self): + self.reindexed = True + + +def reload_callback(test_case): + test_case.app.tool_cache.cleanup() + log.debug("Reload callback called, toolbox contains %s", test_case._toolbox._tool_versions_by_id) + test_case._toolbox = test_case.app.toolbox = SimplifiedToolBox(test_case) + log.debug("After callback toolbox contains %s", test_case._toolbox._tool_versions_by_id) diff --git a/lib/galaxy/config/schemas/tool_shed_config_schema.yml b/lib/galaxy/config/schemas/tool_shed_config_schema.yml index 116f0521f483..8e52afa04d9b 100644 --- a/lib/galaxy/config/schemas/tool_shed_config_schema.yml +++ b/lib/galaxy/config/schemas/tool_shed_config_schema.yml @@ -31,6 +31,13 @@ mapping: Where the hgweb.config file is stored. The default is the Galaxy installation directory. + config_hg_for_dev: + type: str + required: false + desc: | + Allow pushing directly to mercurial repositories directly + and without authentication. + file_path: type: str default: database/community_files @@ -342,6 +349,58 @@ mapping: desc: | Address to join mailing list + ga4gh_service_id: + type: str + required: false + desc: | + Service ID for GA4GH services (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using the URL the target API requests are made against. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + This value should likely reflect your service's URL. For instance for usegalaxy.org + this value should be org.usegalaxy. Particular Galaxy implementations will treat this + value as a prefix and append the service type to this ID. For instance for the DRS + service "id" (available via the DRS API) for the above configuration value would be + org.usegalaxy.drs. + + ga4gh_service_organization_name: + type: str + required: false + desc: | + Service name for host organization (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using ga4gh_service_id. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + ga4gh_service_organization_url: + type: str + required: False + desc: | + Organization URL for host organization (exposed via the service-info endpoint for the Galaxy DRS API). + If unset, one will be generated using the URL the target API requests are made against. 
+ + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + + ga4gh_service_environment: + type: str + required: False + desc: | + Service environment (exposed via the service-info endpoint for the Galaxy DRS API) for + implemented GA4GH services. + + Suggested values are prod, test, dev, staging. + + For more information on GA4GH service definitions - check out + https://github.com/ga4gh-discovery/ga4gh-service-registry + and https://editor.swagger.io/?url=https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-registry/develop/service-registry.yaml + use_heartbeat: type: bool default: true diff --git a/lib/galaxy/dependencies/dev-requirements.txt b/lib/galaxy/dependencies/dev-requirements.txt index 2ef5e06b919b..6d417c6fe1fa 100644 --- a/lib/galaxy/dependencies/dev-requirements.txt +++ b/lib/galaxy/dependencies/dev-requirements.txt @@ -105,6 +105,7 @@ pytest-httpserver==1.0.6 ; python_version >= "3.7" and python_version < "3.12" pytest-json-report==1.5.0 ; python_version >= "3.7" and python_version < "3.12" pytest-metadata==3.0.0 ; python_version >= "3.7" and python_version < "3.12" pytest-mock==3.11.1 ; python_version >= "3.7" and python_version < "3.12" +pytest-playwright==0.3.0 ; python_version >= "3.7" and python_version < "3.12" pytest-postgresql==4.1.1 ; python_version >= "3.7" and python_version < "3.12" pytest-shard==0.1.2 ; python_version >= "3.7" and python_version < "3.12" pytest==7.4.2 ; python_version >= "3.7" and python_version < "3.12" diff --git a/lib/galaxy/dependencies/pinned-requirements.txt b/lib/galaxy/dependencies/pinned-requirements.txt index 4b851aed081a..dcc9ccb09e8c 100644 --- a/lib/galaxy/dependencies/pinned-requirements.txt +++ b/lib/galaxy/dependencies/pinned-requirements.txt @@ -73,6 +73,7 @@ fsspec==2023.1.0 ; python_version >= "3.7" and python_version < "3.12" future==0.18.3 ; python_version >= "3.7" and python_version < "3.12" galaxy-sequence-utils==1.1.5 ; python_version >= "3.7" and python_version < "3.12" galaxy2cwl==0.1.4 ; python_version >= "3.7" and python_version < "3.12" +graphene-sqlalchemy==3.0.0b3 ; python_version >= "3.7" and python_version < "3.12" gravity==1.0.3 ; python_version >= "3.7" and python_version < "3.12" greenlet==2.0.2 ; python_version >= "3.7" and (platform_machine == "aarch64" or platform_machine == "ppc64le" or platform_machine == "x86_64" or platform_machine == "amd64" or platform_machine == "AMD64" or platform_machine == "win32" or platform_machine == "WIN32") and python_version < "3.12" gunicorn==21.2.0 ; python_version >= "3.7" and python_version < "3.12" @@ -179,6 +180,7 @@ sqlalchemy==1.4.49 ; python_version >= "3.7" and python_version < "3.12" sqlitedict==2.1.0 ; python_version >= "3.7" and python_version < "3.12" sqlparse==0.4.4 ; python_version >= "3.7" and python_version < "3.12" starlette-context==0.3.5 ; python_version >= "3.7" and python_version < "3.12" +starlette_graphene3==0.6.0 ; python_version >= "3.7" and python_version < "3.12" starlette==0.27.0 ; python_version >= "3.7" and python_version < "3.12" supervisor==4.2.5 ; python_version >= "3.7" and python_version < "3.12" svgwrite==1.4.3 ; python_version >= "3.7" and python_version < "3.12" diff --git a/lib/galaxy/managers/api_keys.py b/lib/galaxy/managers/api_keys.py index dbc5121670a2..e306049259cb 100644 --- a/lib/galaxy/managers/api_keys.py +++ 
b/lib/galaxy/managers/api_keys.py @@ -3,22 +3,27 @@ select, update, ) +from typing_extensions import Protocol from galaxy.model.base import transaction from galaxy.structured_app import BasicSharedApp +class IsUserModel(Protocol): + id: int + + class ApiKeyManager: def __init__(self, app: BasicSharedApp): self.app = app self.session = self.app.model.context - def get_api_key(self, user): + def get_api_key(self, user: IsUserModel): APIKeys = self.app.model.APIKeys stmt = select(APIKeys).filter_by(user_id=user.id, deleted=False).order_by(APIKeys.create_time.desc()).limit(1) return self.session.scalars(stmt).first() - def create_api_key(self, user): + def create_api_key(self, user: IsUserModel): guid = self.app.security.get_new_guid() new_key = self.app.model.APIKeys() new_key.user_id = user.id @@ -28,7 +33,7 @@ def create_api_key(self, user): self.session.commit() return new_key - def get_or_create_api_key(self, user) -> str: + def get_or_create_api_key(self, user: IsUserModel) -> str: # Logic Galaxy has always used - but it would appear to have a race # condition. Worth fixing? Would kind of need a message queue to fix # in multiple process mode. @@ -36,7 +41,7 @@ def get_or_create_api_key(self, user) -> str: key = api_key.key if api_key else self.create_api_key(user).key return key - def delete_api_key(self, user) -> None: + def delete_api_key(self, user: IsUserModel) -> None: """Marks the current user API key as deleted.""" # Before it was possible to create multiple API keys for the same user although they were not considered valid # So all non-deleted keys are marked as deleted for backward compatibility diff --git a/lib/galaxy/managers/tool_data.py b/lib/galaxy/managers/tool_data.py index fe4f7f2a215b..b754e0296e60 100644 --- a/lib/galaxy/managers/tool_data.py +++ b/lib/galaxy/managers/tool_data.py @@ -13,7 +13,10 @@ MinimalManagerApp, StructuredApp, ) -from galaxy.tool_util.data import BundleProcessingOptions +from galaxy.tool_util.data import ( + BundleProcessingOptions, + ToolDataTableManager, +) from galaxy.tool_util.data._schema import ( ToolDataDetails, ToolDataEntryList, @@ -23,7 +26,6 @@ TabularToolDataField, TabularToolDataTable, ToolDataTable, - ToolDataTableManager, ) diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 69e74c3bae42..f90644a5d5e2 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -455,7 +455,9 @@ def change_password(self, trans, password=None, confirm=None, token=None, id=Non trans.sa_session.add(token_result) return user, "Password has been changed. Token has been invalidated." 
else: - user = self.by_id(self.app.security.decode_id(id)) + if not isinstance(id, int): + id = self.app.security.decode_id(id) + user = self.by_id(id) if user: message = self.app.auth_manager.check_change_password(user, current, trans.request) if message: diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py index 8bd5f9169065..d406935c79fb 100644 --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -22,12 +22,17 @@ registry, relationship, ) +from typing_extensions import Protocol from galaxy.model.custom_types import ( MutableJSONType, TrimmedString, ) from galaxy.model.orm.now import now +from galaxy.tool_util.toolbox.base import ( + AbstractToolBox, + DynamicToolConfDict, +) from galaxy.util import asbool from galaxy.util.bunch import Bunch from galaxy.util.dictifiable import Dictifiable @@ -48,6 +53,16 @@ class DeclarativeMeta(_DeclarativeMeta, type): from sqlalchemy.orm.decl_api import DeclarativeMeta +class HasToolBox(common_util.HasToolShedRegistry, Protocol): + @property + def tool_dependency_dir(self) -> Optional[str]: + ... + + @property + def toolbox(self) -> AbstractToolBox: + ... + + class Base(metaclass=DeclarativeMeta): __abstract__ = True registry = mapper_registry @@ -203,7 +218,7 @@ def can_deactivate(self): def can_reinstall_or_activate(self): return self.deleted - def get_sharable_url(self, app): + def get_sharable_url(self, app: HasToolBox): return common_util.get_tool_shed_repository_url(app, self.tool_shed, self.owner, self.name) @property @@ -214,7 +229,7 @@ def shed_config_filename(self): def shed_config_filename(self, value): self.metadata_["shed_config_filename"] = os.path.abspath(value) - def get_shed_config_dict(self, app): + def get_shed_config_dict(self, app: HasToolBox) -> DynamicToolConfDict: """ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry in the shed_tool_conf_dict. 
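Note: `HasToolBox` above is a typing Protocol — any application object whose `tool_dependency_dir` and `toolbox` properties match the declared signatures satisfies it structurally, with no inheritance required. A minimal sketch of structural typing with an invented protocol and app (not the Galaxy definitions):

    from typing import Optional

    from typing_extensions import Protocol


    class HasDependencyDir(Protocol):
        @property
        def tool_dependency_dir(self) -> Optional[str]:
            ...


    def installation_directory(app: HasDependencyDir, name: str) -> Optional[str]:
        # mirrors the assert-then-join logic used in the hunks below
        base = app.tool_dependency_dir
        return f"{base}/{name}" if base is not None else None


    class FakeApp:  # never imports or subclasses the protocol
        @property
        def tool_dependency_dir(self) -> Optional[str]:
            return "/tmp/deps"


    print(installation_directory(FakeApp(), "samtools"))  # -> /tmp/deps/samtools

This is why model-layer methods such as `repo_path` and `get_shed_config_dict` can be annotated with `HasToolBox` and still accept both the Galaxy application and a standalone installation client.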
@@ -225,7 +240,7 @@ def get_shed_config_dict(self, app): return shed_config_dict return self.guess_shed_config(app) - def get_tool_relative_path(self, app): + def get_tool_relative_path(self, app: HasToolBox): # This is a somewhat public function, used by data_manager_manual for instance shed_conf_dict = self.get_shed_config_dict(app) tool_path = None @@ -237,7 +252,7 @@ def get_tool_relative_path(self, app): ) return tool_path, relative_path - def guess_shed_config(self, app): + def guess_shed_config(self, app: HasToolBox): tool_ids = [] for tool in self.metadata_.get("tools", []): tool_ids.append(tool.get("guid")) @@ -395,13 +410,13 @@ def missing_tool_dependencies(self): missing_dependencies.append(tool_dependency) return missing_dependencies - def repo_files_directory(self, app): + def repo_files_directory(self, app: HasToolBox): repo_path = self.repo_path(app) if repo_path: return os.path.join(repo_path, self.name) return None - def repo_path(self, app): + def repo_path(self, app: HasToolBox): tool_shed = common_util.remove_protocol_and_port_from_tool_shed_url(self.tool_shed) for shed_tool_conf_dict in app.toolbox.dynamic_confs(include_migrated_tool_conf=True): tool_path = shed_tool_conf_dict["tool_path"] @@ -731,8 +746,9 @@ def can_update(self): def in_error_state(self): return self.status == self.installation_status.ERROR - def installation_directory(self, app): + def installation_directory(self, app: HasToolBox) -> Optional[str]: if self.type == "package": + assert app.tool_dependency_dir return os.path.join( app.tool_dependency_dir, self.name, @@ -742,6 +758,7 @@ def installation_directory(self, app): self.tool_shed_repository.installed_changeset_revision, ) if self.type == "set_environment": + assert app.tool_dependency_dir return os.path.join( app.tool_dependency_dir, "environment_settings", diff --git a/lib/galaxy/queue_worker.py b/lib/galaxy/queue_worker.py index 7de46f1b9a5c..e133a9ee399b 100644 --- a/lib/galaxy/queue_worker.py +++ b/lib/galaxy/queue_worker.py @@ -185,7 +185,7 @@ def _get_new_toolbox(app, save_integrated_tool_panel=True): app.datatypes_registry.load_external_metadata_tool(new_toolbox) load_lib_tools(new_toolbox) [new_toolbox.register_tool(tool) for tool in new_toolbox.data_manager_tools.values()] - app.toolbox = new_toolbox + app._toolbox = new_toolbox app.toolbox.persist_cache() @@ -195,9 +195,8 @@ def reload_data_managers(app, **kwargs): log.debug("Executing data managers reload on '%s'", app.config.server_name) app._configure_tool_data_tables(from_shed_config=False) reload_tool_data_tables(app) - reload_count = app.data_managers._reload_count - app.data_managers = DataManagers(app) - app.data_managers._reload_count = reload_count + 1 + reload_count = app.data_managers._reload_count + 1 + app.data_managers = DataManagers(app, None, reload_count) if hasattr(app, "tool_cache"): app.tool_cache.reset_status() if hasattr(app, "watchers"): diff --git a/lib/galaxy/schema/fields.py b/lib/galaxy/schema/fields.py index 40eec4f2b12f..687785e6eff8 100644 --- a/lib/galaxy/schema/fields.py +++ b/lib/galaxy/schema/fields.py @@ -3,7 +3,11 @@ from pydantic import Field from typing_extensions import get_args -from galaxy.security.idencoding import IdEncodingHelper +try: + from galaxy.security.idencoding import IdEncodingHelper +except ImportError: + IdEncodingHelper = object # type: ignore[assignment,misc] + ENCODED_DATABASE_ID_PATTERN = re.compile("f?[0-9a-f]+") ENCODED_ID_LENGTH_MULTIPLE = 16 diff --git a/lib/galaxy/structured_app.py b/lib/galaxy/structured_app.py index 
d18638c97c09..ba62268c35c6 100644 --- a/lib/galaxy/structured_app.py +++ b/lib/galaxy/structured_app.py @@ -30,6 +30,7 @@ from galaxy.security.idencoding import IdEncodingHelper from galaxy.security.vault import Vault from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_util.data import ToolDataTableManager from galaxy.tool_util.deps.containers import ContainerFinder from galaxy.tool_util.deps.views import DependencyResolversView from galaxy.tool_util.verify import test_data @@ -48,7 +49,6 @@ from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from galaxy.tools import ToolBox from galaxy.tools.cache import ToolCache - from galaxy.tools.data import ToolDataTableManager from galaxy.tools.error_reports import ErrorReports from galaxy.visualization.genomes import Genomes @@ -67,10 +67,13 @@ class BasicSharedApp(Container): model: SharedModelMapping security: IdEncodingHelper auth_manager: AuthManager - toolbox: "ToolBox" security_agent: Any quota_agent: QuotaAgent + @property + def toolbox(self) -> "ToolBox": + raise NotImplementedError() + class MinimalToolApp(Protocol): is_webapp: bool @@ -150,7 +153,7 @@ class StructuredApp(MinimalManagerApp): webhooks_registry: WebhooksRegistry queue_worker: Any # 'galaxy.queue_worker.GalaxyQueueWorker' data_provider_registry: Any # 'galaxy.visualization.data_providers.registry.DataProviderRegistry' - tool_data_tables: "ToolDataTableManager" + tool_data_tables: ToolDataTableManager tool_cache: "ToolCache" tool_shed_repository_cache: Optional[ToolShedRepositoryCache] watchers: "ConfigWatchers" diff --git a/lib/galaxy/tool_shed/galaxy_install/client.py b/lib/galaxy/tool_shed/galaxy_install/client.py new file mode 100644 index 000000000000..92125ba6d78d --- /dev/null +++ b/lib/galaxy/tool_shed/galaxy_install/client.py @@ -0,0 +1,73 @@ +import threading +from typing import ( + Any, + Dict, + Generic, + List, + Optional, + TYPE_CHECKING, + TypeVar, + Union, +) + +from typing_extensions import Protocol + +from galaxy.model.base import ModelMapping +from galaxy.model.tool_shed_install import HasToolBox +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_util.data import ( + OutputDataset, + ToolDataTableManager, +) +from galaxy.tool_util.toolbox.base import AbstractToolBox + +if TYPE_CHECKING: + import galaxy.tool_shed.metadata.installed_repository_manger + + +class DataManagerInterface(Protocol): + GUID_TYPE: str = "data_manager" + DEFAULT_VERSION: str = "0.0.1" + + def process_result(self, out_data): + ... + + def write_bundle(self, out: Dict[str, OutputDataset]) -> Dict[str, OutputDataset]: + ... + + +class DataManagersInterface(Protocol): + @property + def _reload_count(self) -> int: + ... + + def load_manager_from_elem( + self, data_manager_elem, tool_path=None, add_manager=True + ) -> Optional[DataManagerInterface]: + ... + + def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: + ... + + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + ... + + +ToolBoxType = TypeVar("ToolBoxType", bound="AbstractToolBox") + + +class InstallationTarget(HasToolBox, Generic[ToolBoxType]): + data_managers: DataManagersInterface + install_model: ModelMapping + model: ModelMapping + security: IdEncodingHelper + config: Any + installed_repository_manager: "galaxy.tool_shed.metadata.installed_repository_manger.InstalledRepositoryManager" + watchers: Any # TODO: interface... 
+ _toolbox_lock: threading.RLock + tool_shed_repository_cache: Optional[ToolShedRepositoryCache] + tool_data_tables: ToolDataTableManager + + def wait_for_toolbox_reload(self, old_toolbox: ToolBoxType) -> None: + ... diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py index eb81181d5055..d97a586c08f0 100644 --- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py @@ -16,7 +16,7 @@ util, ) from galaxy.model.base import transaction -from galaxy.structured_app import StructuredApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( InstalledRepositoryMetadataManager, ) @@ -37,6 +37,7 @@ common_util, encoding_util, ) +from galaxy.util.tool_shed.tool_shed_registry import Registry from tool_shed_client.schema import ( ExtraRepoInfo, RepositoryMetadataInstallInfoDict, @@ -45,11 +46,43 @@ log = logging.getLogger(__name__) +def get_install_info_from_tool_shed( + tool_shed_url: str, tool_shed_registry: Registry, name: str, owner: str, changeset_revision: str +) -> Tuple[RepositoryMetadataInstallInfoDict, ExtraRepoInfo]: + params = dict(name=name, owner=owner, changeset_revision=changeset_revision) + pathspec = ["api", "repositories", "get_repository_revision_install_info"] + try: + raw_text = util.url_get( + tool_shed_url, + auth=tool_shed_registry.url_auth(tool_shed_url), + pathspec=pathspec, + params=params, + ) + except Exception: + message = "Error attempting to retrieve installation information from tool shed " + message += f"{tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" + log.exception(message) + raise exceptions.InternalServerError(message) + if raw_text: + # If successful, the response from get_repository_revision_install_info will be 3 + # dictionaries, a dictionary defining the Repository, a dictionary defining the + # Repository revision (RepositoryMetadata), and a dictionary including the additional + # information required to install the repository. 
+ items = json.loads(util.unicodify(raw_text)) + repository_revision_dict: RepositoryMetadataInstallInfoDict = items[1] + repo_info_dict: ExtraRepoInfo = items[2] + else: + message = f"Unable to retrieve installation information from tool shed {tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" + log.warning(message) + raise exceptions.InternalServerError(message) + return repository_revision_dict, repo_info_dict + + class InstallRepositoryManager: - app: StructuredApp + app: InstallationTarget tpm: tool_panel_manager.ToolPanelManager - def __init__(self, app: StructuredApp, tpm: Optional[tool_panel_manager.ToolPanelManager] = None): + def __init__(self, app: InstallationTarget, tpm: Optional[tool_panel_manager.ToolPanelManager] = None): self.app = app self.install_model = self.app.install_model self._view = views.DependencyResolversView(app) @@ -79,32 +112,9 @@ def _get_repository_components_for_installation( def __get_install_info_from_tool_shed( self, tool_shed_url: str, name: str, owner: str, changeset_revision: str ) -> Tuple[RepositoryMetadataInstallInfoDict, List[ExtraRepoInfo]]: - params = dict(name=name, owner=owner, changeset_revision=changeset_revision) - pathspec = ["api", "repositories", "get_repository_revision_install_info"] - try: - raw_text = util.url_get( - tool_shed_url, - auth=self.app.tool_shed_registry.url_auth(tool_shed_url), - pathspec=pathspec, - params=params, - ) - except Exception: - message = "Error attempting to retrieve installation information from tool shed " - message += f"{tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" - log.exception(message) - raise exceptions.InternalServerError(message) - if raw_text: - # If successful, the response from get_repository_revision_install_info will be 3 - # dictionaries, a dictionary defining the Repository, a dictionary defining the - # Repository revision (RepositoryMetadata), and a dictionary including the additional - # information required to install the repository. - items = json.loads(util.unicodify(raw_text)) - repository_revision_dict: RepositoryMetadataInstallInfoDict = items[1] - repo_info_dict: ExtraRepoInfo = items[2] - else: - message = f"Unable to retrieve installation information from tool shed {tool_shed_url} for revision {changeset_revision} of repository {name} owned by {owner}" - log.warning(message) - raise exceptions.InternalServerError(message) + repository_revision_dict, repo_info_dict = get_install_info_from_tool_shed( + tool_shed_url, self.app.tool_shed_registry, name, owner, changeset_revision + ) # Make sure the tool shed returned everything we need for installing the repository. 
if not repository_revision_dict or not repo_info_dict: invalid_parameter_message = "No information is available for the requested repository revision.\n" diff --git a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py index 4474b8841e8a..76bf5b0b24c1 100644 --- a/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/installed_repository_manager.py @@ -20,7 +20,7 @@ ToolDependency, ToolShedRepository, ) -from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( InstalledRepositoryMetadataManager, ) @@ -44,14 +44,14 @@ class InstalledRepositoryManager: - app: MinimalManagerApp + app: InstallationTarget _tool_paths: List[str] installed_repository_dicts: List[Dict[str, Any]] repository_dependencies_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] installed_repository_dependencies_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] installed_dependent_repositories_of_installed_repositories: Dict[RepositoryTupleT, List[RepositoryTupleT]] - def __init__(self, app: MinimalManagerApp): + def __init__(self, app: InstallationTarget): """ Among other things, keep in in-memory sets of tuples defining installed repositories and tool dependencies along with the relationships between each of them. This will allow for quick discovery of those repositories or components that diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index 5c1d62886670..f8fa39bf80bb 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -1,14 +1,19 @@ import logging import os -from typing import Optional +from typing import ( + Any, + Dict, + Optional, +) from sqlalchemy import false from galaxy import util from galaxy.model.base import transaction -from galaxy.structured_app import MinimalManagerApp +from galaxy.model.tool_shed_install import ToolShedRepository +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager -from galaxy.tool_shed.metadata.metadata_generator import MetadataGenerator +from galaxy.tool_shed.metadata.metadata_generator import GalaxyMetadataGenerator from galaxy.tool_shed.util.repository_util import ( get_installed_tool_shed_repository, get_repository_owner, @@ -24,21 +29,23 @@ log = logging.getLogger(__name__) -class InstalledRepositoryMetadataManager(MetadataGenerator): +class InstalledRepositoryMetadataManager(GalaxyMetadataGenerator): + app: InstallationTarget + def __init__( self, - app: MinimalManagerApp, + app: InstallationTarget, tpm: Optional[tool_panel_manager.ToolPanelManager] = None, - repository=None, - changeset_revision=None, - repository_clone_url=None, - shed_config_dict=None, - relative_install_dir=None, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False, - metadata_dict=None, + repository: Optional[ToolShedRepository] = None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + 
shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir: Optional[str] = None, + repository_files_dir: Optional[str] = None, + resetting_all_metadata_on_repository: bool = False, + updating_installed_repository: bool = False, + persist: bool = False, + metadata_dict: Optional[Dict[str, Any]] = None, ): super().__init__( app, @@ -120,6 +127,7 @@ def get_repository_tools_tups(self): def reset_all_metadata_on_installed_repository(self): """Reset all metadata on a single tool shed repository installed into a Galaxy instance.""" if self.relative_install_dir: + assert self.repository original_metadata_dict = self.repository.metadata_ self.generate_metadata_for_changeset_revision() if self.metadata_dict != original_metadata_dict: @@ -135,7 +143,9 @@ def reset_all_metadata_on_installed_repository(self): else: log.debug(f"Metadata did not need to be reset on repository {self.repository.name}.") else: - log.debug(f"Error locating installation directory for repository {self.repository.name}.") + log.debug( + f"Error locating installation directory for repository {self.repository and self.repository.name}." + ) def reset_metadata_on_selected_repositories(self, user, **kwd): """ @@ -187,9 +197,11 @@ def set_repository(self, repository): super().set_repository(repository) self.repository_clone_url = common_util.generate_clone_url_for_installed_repository(self.app, repository) - def tool_shed_from_repository_clone_url(self): + def tool_shed_from_repository_clone_url(self) -> str: """Given a repository clone URL, return the tool shed that contains the repository.""" - cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url(self.repository_clone_url) + repository_clone_url = self.repository_clone_url + assert repository_clone_url + cleaned_repository_clone_url = common_util.remove_protocol_and_user_from_clone_url(repository_clone_url) return ( common_util.remove_protocol_and_user_from_clone_url(cleaned_repository_clone_url) .split("/repos/")[0] @@ -201,6 +213,7 @@ def update_in_shed_tool_config(self): A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list of config_elems instead of using the in-memory list. """ + assert self.repository shed_conf_dict = self.shed_config_dict or self.repository.get_shed_config_dict(self.app) shed_tool_conf = shed_conf_dict["config_filename"] tool_path = shed_conf_dict["tool_path"] diff --git a/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py b/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py index cc9a99b8ac6c..79f9c88dae46 100644 --- a/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py @@ -16,6 +16,7 @@ ) from galaxy.model.base import transaction +from galaxy.tool_shed.galaxy_install import installed_repository_manager from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager from galaxy.tool_shed.util import repository_util from galaxy.tool_shed.util.container_util import get_components_from_key @@ -268,7 +269,8 @@ def create_repository_dependency_objects( log.info( f"Reactivating deactivated tool_shed_repository '{str(repository_db_record.name)}'." 
) - self.app.installed_repository_manager.activate_repository(repository_db_record) + irm = installed_repository_manager.InstalledRepositoryManager(self.app) + irm.activate_repository(repository_db_record) # No additional updates to the database record are necessary. can_update_db_record = False elif repository_db_record.status not in [ diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py index 163822c290a7..d3cce73744b5 100644 --- a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py @@ -7,9 +7,12 @@ Dict, List, Optional, - TYPE_CHECKING, ) +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + InstallationTarget, +) from galaxy.util import ( Element, etree, @@ -21,9 +24,6 @@ from galaxy.util.tool_shed.xml_util import parse_xml from . import tool_panel_manager -if TYPE_CHECKING: - from galaxy.tools.data_manager.manager import DataManager - log = logging.getLogger(__name__) SHED_DATA_MANAGER_CONF_XML = """ @@ -33,9 +33,10 @@ class DataManagerHandler: + app: InstallationTarget root: Optional[Element] = None - def __init__(self, app): + def __init__(self, app: InstallationTarget): self.app = app @property @@ -73,8 +74,8 @@ def install_data_managers( relative_install_dir: StrPath, repository, repository_tools_tups, - ) -> List["DataManager"]: - rval: List[DataManager] = [] + ) -> List["DataManagerInterface"]: + rval: List["DataManagerInterface"] = [] if "data_manager" in metadata_dict: tpm = tool_panel_manager.ToolPanelManager(self.app) repository_tools_by_guid = {} diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py index 344f50fd6c6c..c68ba7fe4be3 100644 --- a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py @@ -8,7 +8,7 @@ from galaxy.exceptions import RequestParameterInvalidException from galaxy.model.base import transaction -from galaxy.structured_app import MinimalManagerApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.util.basic_util import strip_path from galaxy.tool_shed.util.repository_util import get_repository_owner from galaxy.tool_shed.util.shed_util_common import get_tool_panel_config_tool_path_install_dir @@ -25,9 +25,9 @@ class ToolPanelManager: - app: MinimalManagerApp + app: InstallationTarget - def __init__(self, app: MinimalManagerApp): + def __init__(self, app: InstallationTarget): self.app = app def add_to_shed_tool_config(self, shed_tool_conf_dict: Dict[str, Any], elem_list: list) -> None: @@ -135,7 +135,7 @@ def add_to_tool_panel( self.app.toolbox.update_shed_config(shed_tool_conf_dict) self.add_to_shed_tool_config(shed_tool_conf_dict, elem_list) - def config_elems_to_xml_file(self, config_elems, config_filename, tool_path, tool_cache_data_dir=None): + def config_elems_to_xml_file(self, config_elems, config_filename, tool_path, tool_cache_data_dir=None) -> None: """ Persist the current in-memory list of config_elems to a file named by the value of config_filename. 
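Note: the recurring annotation change in these hunks — `MinimalManagerApp`/`StructuredApp` replaced by `InstallationTarget` — narrows what the install managers may touch to the small protocol defined in the new client.py, so a standalone client (see the `test-install-client: 'standalone'` matrix entries in the workflow above) can drive installations without the full Galaxy app. A simplified sketch of depending on such a generic protocol, with stand-in names rather than the real definitions:

    from typing import TypeVar

    from typing_extensions import Protocol


    class AbstractToolBox:
        pass


    ToolBoxType = TypeVar("ToolBoxType", bound=AbstractToolBox)


    class InstallTarget(Protocol[ToolBoxType]):
        @property
        def toolbox(self) -> ToolBoxType:
            ...


    class PanelManager:
        def __init__(self, app: "InstallTarget[AbstractToolBox]") -> None:
            # only the protocol surface is available here, not the whole app
            self.app = app


    class StandaloneTarget:
        @property
        def toolbox(self) -> AbstractToolBox:
            return AbstractToolBox()


    PanelManager(StandaloneTarget())  # type-checks without importing the Galaxy app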
diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index a8025d8fd786..d797d1eba98c 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -1,10 +1,25 @@ import logging import os import tempfile +from typing import ( + Any, + cast, + Dict, + List, + Optional, + Tuple, + TYPE_CHECKING, + Union, +) -from sqlalchemy import and_ +from typing_extensions import Protocol from galaxy import util +from galaxy.model.tool_shed_install import ToolShedRepository +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + InstallationTarget, +) from galaxy.tool_shed.repository_type import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, TOOL_DEPENDENCY_DEFINITION_FILENAME, @@ -24,123 +39,80 @@ from galaxy.tool_shed.util.repository_util import get_repository_for_dependency_relationship from galaxy.tool_util.loader_directory import looks_like_a_tool from galaxy.tool_util.parser.interface import TestCollectionDef -from galaxy.tools.data_manager.manager import DataManager from galaxy.tools.repositories import ValidationContext from galaxy.util.tool_shed.common_util import ( generate_clone_url_for_installed_repository, - generate_clone_url_for_repository_in_tool_shed, remove_protocol_and_user_from_clone_url, remove_protocol_from_tool_shed_url, ) from galaxy.util.tool_shed.xml_util import parse_xml -from galaxy.web import url_for + +if TYPE_CHECKING: + from galaxy.structured_app import BasicSharedApp + log = logging.getLogger(__name__) +InvalidFileT = Tuple[str, str] +HandleResultT = Tuple[List, bool, str] -class MetadataGenerator: - def __init__( - self, - app, - repository=None, - changeset_revision=None, - repository_clone_url=None, - shed_config_dict=None, - relative_install_dir=None, - repository_files_dir=None, - resetting_all_metadata_on_repository=False, - updating_installed_repository=False, - persist=False, - metadata_dict=None, - user=None, - ): - self.app = app - self.user = user - self.repository = repository - if self.app.name == "galaxy": - if changeset_revision is None and self.repository is not None: - self.changeset_revision = self.repository.changeset_revision - else: - self.changeset_revision = changeset_revision +NOT_TOOL_CONFIGS = [ + suc.DATATYPES_CONFIG_FILENAME, + REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, + TOOL_DEPENDENCY_DEFINITION_FILENAME, + suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, +] - if repository_clone_url is None and self.repository is not None: - self.repository_clone_url = generate_clone_url_for_installed_repository(self.app, self.repository) - else: - self.repository_clone_url = repository_clone_url - if shed_config_dict is None: - if self.repository is not None: - self.shed_config_dict = self.repository.get_shed_config_dict(self.app) - else: - self.shed_config_dict = {} - else: - self.shed_config_dict = shed_config_dict - if relative_install_dir is None and self.repository is not None: - tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) - if repository_files_dir is None and self.repository is not None: - repository_files_dir = self.repository.repo_files_directory(self.app) - if metadata_dict is None: - # Shed related tool panel configs are only relevant to Galaxy. - self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)} - else: - self.metadata_dict = metadata_dict - else: - # We're in the Tool Shed. 
- if changeset_revision is None and self.repository is not None: - self.changeset_revision = self.repository.tip() - else: - self.changeset_revision = changeset_revision - if repository_clone_url is None and self.repository is not None: - self.repository_clone_url = generate_clone_url_for_repository_in_tool_shed(self.user, self.repository) - else: - self.repository_clone_url = repository_clone_url - if shed_config_dict is None: - self.shed_config_dict = {} - else: - self.shed_config_dict = shed_config_dict - if relative_install_dir is None and self.repository is not None: - relative_install_dir = self.repository.repo_path(self.app) - if repository_files_dir is None and self.repository is not None: - repository_files_dir = self.repository.repo_path(self.app) - if metadata_dict is None: - self.metadata_dict = {} - else: - self.metadata_dict = metadata_dict - self.relative_install_dir = relative_install_dir - self.repository_files_dir = repository_files_dir - self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository - self.updating_installed_repository = updating_installed_repository - self.persist = persist - self.invalid_file_tups = [] - self.sa_session = app.model.session - self.NOT_TOOL_CONFIGS = [ - suc.DATATYPES_CONFIG_FILENAME, - REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, - TOOL_DEPENDENCY_DEFINITION_FILENAME, - suc.REPOSITORY_DATA_MANAGER_CONFIG_FILENAME, - ] + +class RepositoryProtocol(Protocol): + name: str + id: str + + def repo_path(self, app) -> Optional[str]: + ... + + +class BaseMetadataGenerator: + app: Union["BasicSharedApp", InstallationTarget] + repository: Optional[RepositoryProtocol] + invalid_file_tups: List[InvalidFileT] + changeset_revision: Optional[str] + repository_clone_url: Optional[str] + shed_config_dict: Dict[str, Any] + metadata_dict: Dict[str, Any] + relative_install_dir: Optional[str] + repository_files_dir: Optional[str] + persist: bool + + def initial_metadata_dict(self) -> Dict[str, Any]: + raise NotImplementedError() + + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + raise NotImplementedError() def _generate_data_manager_metadata( - self, repo_dir, data_manager_config_filename, metadata_dict, shed_config_dict=None - ): + self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None + ) -> Dict[str, Any]: """ Update the received metadata_dict with information from the parsed data_manager_config_filename. """ if data_manager_config_filename is None: return metadata_dict + assert self.repository repo_path = self.repository.repo_path(self.app) - try: + if hasattr(self.repository, "repo_files_directory"): # Galaxy Side. - repo_files_directory = self.repository.repo_files_directory(self.app) + repo_files_directory = self.repository.repo_files_directory(self.app) # type: ignore[attr-defined] repo_dir = repo_files_directory - except AttributeError: + else: # Tool Shed side. 
repo_files_directory = repo_path relative_data_manager_dir = util.relpath(os.path.split(data_manager_config_filename)[0], repo_dir) rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split(data_manager_config_filename)[1] ) - data_managers = {} - invalid_data_managers = [] + data_managers: Dict[str, dict] = {} + invalid_data_managers: List[dict] = [] data_manager_metadata = { "config_filename": rel_data_manager_config_filename, "data_managers": data_managers, @@ -177,8 +149,8 @@ def _generate_data_manager_metadata( continue # FIXME: default behavior is to fall back to tool.name. data_manager_name = data_manager_elem.get("name", data_manager_id) - version = data_manager_elem.get("version", DataManager.DEFAULT_VERSION) - guid = self.generate_guid_for_object(DataManager.GUID_TYPE, data_manager_id, version) + version = data_manager_elem.get("version", DataManagerInterface.DEFAULT_VERSION) + guid = self._generate_guid_for_object(DataManagerInterface.GUID_TYPE, data_manager_id, version) data_tables = [] if tool_file is None: log.error(f'Data Manager entry is missing tool_file attribute in "{data_manager_config_filename}".') @@ -243,7 +215,8 @@ def generate_environment_dependency_metadata(self, elem, valid_tool_dependencies valid_tool_dependencies_dict["set_environment"] = [requirements_dict] return valid_tool_dependencies_dict - def generate_guid_for_object(self, guid_type, obj_id, version): + def _generate_guid_for_object(self, guid_type, obj_id, version) -> str: + assert self.repository_clone_url tmp_url = remove_protocol_and_user_from_clone_url(self.repository_clone_url) return f"{tmp_url}/{guid_type}/{obj_id}/{version}" @@ -263,18 +236,16 @@ def generate_metadata_for_changeset_revision(self): """ if self.shed_config_dict is None: self.shed_config_dict = {} + assert self.repository if self.updating_installed_repository: # Keep the original tool shed repository metadata if setting metadata on a repository # installed into a local Galaxy instance for which we have pulled updates. - original_repository_metadata = self.repository.metadata_ + gx_repository = cast(ToolShedRepository, self.repository) # definitely in Galaxy version + original_repository_metadata = gx_repository.metadata_ else: original_repository_metadata = None readme_file_names = _get_readme_file_names(str(self.repository.name)) - if self.app.name == "galaxy": - # Shed related tool panel configs are only relevant to Galaxy. - metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} - else: - metadata_dict = {} + metadata_dict = self.initial_metadata_dict() readme_files = [] invalid_tool_configs = [] if self.resetting_all_metadata_on_repository: @@ -294,9 +265,11 @@ def generate_metadata_for_changeset_revision(self): work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-gmfcr") # All other files are on disk in the repository's repo_path, which is the value of # self.relative_install_dir. + assert self.relative_install_dir files_dir = self.relative_install_dir if self.shed_config_dict.get("tool_path"): files_dir = os.path.join(self.shed_config_dict["tool_path"], files_dir) + assert files_dir # Create ValidationContext to load and validate tools, data tables and datatypes with ValidationContext.from_app(app=self.app, work_dir=work_dir) as validation_context: tv = ToolValidator(validation_context) @@ -351,7 +324,7 @@ def generate_metadata_for_changeset_revision(self): ) readme_files.append(relative_path_to_readme) # See if we have a tool config. 
- elif looks_like_a_tool(os.path.join(root, name), invalid_names=self.NOT_TOOL_CONFIGS): + elif looks_like_a_tool(os.path.join(root, name), invalid_names=NOT_TOOL_CONFIGS): full_path = str(os.path.abspath(os.path.join(root, name))) # why the str, seems very odd element_tree, error_message = parse_xml(full_path) if element_tree is None: @@ -416,7 +389,7 @@ def generate_package_dependency_metadata(self, elem, valid_tool_dependencies_dic """ # TODO: make this function a class. repository_dependency_is_valid = True - repository_dependency_tup = [] + repository_dependency_tup: list = [] requirements_dict = {} error_message = "" package_name = elem.get("name", None) @@ -540,7 +513,7 @@ def generate_repository_dependency_metadata(self, repository_dependencies_config prior_installation_required, only_if_compiling_contained_td, ) = repository_dependency_tup - repository_dependency_tup = ( + invalid_repository_dependency_tup = ( toolshed, name, owner, @@ -549,7 +522,7 @@ def generate_repository_dependency_metadata(self, repository_dependencies_config only_if_compiling_contained_td, err_msg, ) - invalid_repository_dependency_tups.append(repository_dependency_tup) + invalid_repository_dependency_tups.append(invalid_repository_dependency_tup) error_message += err_msg if invalid_repository_dependency_tups: invalid_repository_dependencies_dict["repository_dependencies"] = invalid_repository_dependency_tups @@ -667,11 +640,10 @@ def generate_tool_dependency_metadata( root = tree.getroot() class RecurserValueStore: - pass + valid_tool_dependencies_dict = {} + invalid_tool_dependencies_dict = {} rvs = RecurserValueStore() - rvs.valid_tool_dependencies_dict = {} - rvs.invalid_tool_dependencies_dict = {} valid_repository_dependency_tups = [] invalid_repository_dependency_tups = [] description = root.get("description") @@ -734,7 +706,8 @@ def _check_elem_for_dep(elems): if original_valid_tool_dependencies_dict: # We're generating metadata on an update pulled to a tool shed repository installed # into a Galaxy instance, so handle changes to tool dependencies appropriately. - irm = self.app.installed_repository_manager + installation_target = cast(InstallationTarget, self.app) + irm = installation_target.installed_repository_manager ( updated_tool_dependency_names, deleted_tool_dependency_names, @@ -745,14 +718,14 @@ def _check_elem_for_dep(elems): if rvs.invalid_tool_dependencies_dict: metadata_dict["invalid_tool_dependencies"] = rvs.invalid_tool_dependencies_dict if valid_repository_dependency_tups: - metadata_dict = self.update_repository_dependencies_metadata( + metadata_dict = self._update_repository_dependencies_metadata( metadata=metadata_dict, repository_dependency_tups=valid_repository_dependency_tups, is_valid=True, description=description, ) if invalid_repository_dependency_tups: - metadata_dict = self.update_repository_dependencies_metadata( + metadata_dict = self._update_repository_dependencies_metadata( metadata=metadata_dict, repository_dependency_tups=invalid_repository_dependency_tups, is_valid=False, @@ -820,180 +793,6 @@ def get_sample_files_from_disk(self, repository_files_dir, tool_path=None, relat sample_file_metadata_paths.append(relative_path_to_sample_file) return sample_file_metadata_paths, sample_file_copy_paths - def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False): - """ - Process the received repository_elem which is a tag either from a - repository_dependencies.xml file or a tool_dependencies.xml file. 
If the former, - we're generating repository dependencies metadata for a repository in the Tool Shed. - If the latter, we're generating package dependency metadata within Galaxy or the - Tool Shed. - """ - is_valid = True - error_message = "" - toolshed = repository_elem.get("toolshed", None) - name = repository_elem.get("name", None) - owner = repository_elem.get("owner", None) - changeset_revision = repository_elem.get("changeset_revision", None) - prior_installation_required = str(repository_elem.get("prior_installation_required", False)) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - if self.app.name == "galaxy": - if self.updating_installed_repository: - pass - else: - # We're installing a repository into Galaxy, so make sure its contained repository - # dependency definition is valid. - if toolshed is None or name is None or owner is None or changeset_revision is None: - # Several packages exist in the Tool Shed that contain invalid repository - # definitions, but will still install. We will report these errors to the - # installing user. Previously, we would: - # Raise an exception here instead of returning an error_message to keep the - # installation from proceeding. Reaching here implies a bug in the Tool Shed - # framework. - error_message = "Installation encountered an invalid repository dependency definition:\n" - error_message += util.xml_to_string(repository_elem, pretty=True) - log.error(error_message) - return repository_dependency_tup, False, error_message - if not toolshed: - # Default to the current tool shed. - toolshed = str(url_for("/", qualified=True)).rstrip("/") - repository_dependency_tup[0] = toolshed - toolshed = remove_protocol_from_tool_shed_url(toolshed) - if self.app.name == "galaxy": - # We're in Galaxy. We reach here when we're generating the metadata for a tool - # dependencies package defined for a repository or when we're generating metadata - # for an installed repository. See if we can locate the installed repository via - # the changeset_revision defined in the repository_elem (it may be outdated). If - # we're successful in locating an installed repository with the attributes defined - # in the repository_elem, we know it is valid. - repository = get_repository_for_dependency_relationship(self.app, toolshed, name, owner, changeset_revision) - if repository: - return repository_dependency_tup, is_valid, error_message - else: - # Send a request to the tool shed to retrieve appropriate additional changeset - # revisions with which the repository - # may have been installed. - text = get_updated_changeset_revisions_from_tool_shed( - self.app, toolshed, name, owner, changeset_revision - ) - if text: - updated_changeset_revisions = util.listify(text) - for updated_changeset_revision in updated_changeset_revisions: - repository = get_repository_for_dependency_relationship( - self.app, toolshed, name, owner, updated_changeset_revision - ) - if repository: - return repository_dependency_tup, is_valid, error_message - if self.updating_installed_repository: - # The repository dependency was included in an update to the installed - # repository, so it will not yet be installed. Return the tuple for later - # installation. - return repository_dependency_tup, is_valid, error_message - if self.updating_installed_repository: - # The repository dependency was included in an update to the installed repository, - # so it will not yet be installed. 
Return the tuple for later installation. - return repository_dependency_tup, is_valid, error_message - # Don't generate an error message for missing repository dependencies that are required - # only if compiling the dependent repository's tool dependency. - if not only_if_compiling_contained_td: - # We'll currently default to setting the repository dependency definition as invalid - # if an installed repository cannot be found. This may not be ideal because the tool - # shed may have simply been inaccessible when metadata was being generated for the - # installed tool shed repository. - error_message = ( - f"Ignoring invalid repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision}." - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - else: - # We're in the tool shed. - if suc.tool_shed_is_this_tool_shed(toolshed): - try: - user = ( - self.sa_session.query(self.app.model.User) - .filter(self.app.model.User.table.c.username == owner) - .one() - ) - except Exception: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the owner is invalid." - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - try: - repository = ( - self.sa_session.query(self.app.model.Repository) - .filter( - and_( - self.app.model.Repository.table.c.name == name, - self.app.model.Repository.table.c.user_id == user.id, - ) - ) - .one() - ) - except Exception: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the name is invalid. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - repo = repository.hg_repo - - # The received changeset_revision may be None since defining it in the dependency definition is optional. - # If this is the case, the default will be to set its value to the repository dependency tip revision. - # This probably occurs only when handling circular dependency definitions. - tip_ctx = repo[repo.changelog.tip()] - # Make sure the repo.changlog includes at least 1 revision. - if changeset_revision is None and tip_ctx.rev() >= 0: - changeset_revision = str(tip_ctx) - repository_dependency_tup = [ - toolshed, - name, - owner, - changeset_revision, - prior_installation_required, - str(only_if_compiling_contained_td), - ] - return repository_dependency_tup, is_valid, error_message - else: - # Find the specified changeset revision in the repository's changelog to see if it's valid. - found = False - for changeset in repo.changelog: - changeset_hash = str(repo[changeset]) - if changeset_hash == changeset_revision: - found = True - break - if not found: - error_message = ( - f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " - f"changeset revision {changeset_revision} because the changeset revision is invalid. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - else: - # Repository dependencies are currently supported within a single tool shed. - error_message = ( - "Repository dependencies are currently supported only within the same tool shed. 
Ignoring " - f"repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, changeset revision {changeset_revision}. " - ) - log.debug(error_message) - is_valid = False - return repository_dependency_tup, is_valid, error_message - return repository_dependency_tup, is_valid, error_message - def _set_add_to_tool_panel_attribute_for_tool(self, tool): """ Determine if a tool should be loaded into the Galaxy tool panel. Examples of valid tools that @@ -1005,33 +804,14 @@ def _set_add_to_tool_panel_attribute_for_tool(self, tool): return False return True - def set_changeset_revision(self, changeset_revision): + def set_changeset_revision(self, changeset_revision: Optional[str]): self.changeset_revision = changeset_revision - def set_relative_install_dir(self, relative_install_dir): + def set_relative_install_dir(self, relative_install_dir: Optional[str]): self.relative_install_dir = relative_install_dir - def set_repository(self, repository, relative_install_dir=None, changeset_revision=None): - self.repository = repository - # Shed related tool panel configs are only relevant to Galaxy. - if self.app.name == "galaxy": - if relative_install_dir is None and self.repository is not None: - tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) - if changeset_revision is None and self.repository is not None: - self.set_changeset_revision(self.repository.changeset_revision) - else: - self.set_changeset_revision(changeset_revision) - self.shed_config_dict = repository.get_shed_config_dict(self.app) - self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)} - else: - if relative_install_dir is None and self.repository is not None: - relative_install_dir = repository.repo_path(self.app) - if changeset_revision is None and self.repository is not None: - self.set_changeset_revision(self.repository.tip()) - else: - self.set_changeset_revision(changeset_revision) - self.shed_config_dict = {} - self.metadata_dict = {} + def _reset_attributes_after_repository_update(self, relative_install_dir: Optional[str]): + self.metadata_dict = self.initial_metadata_dict() self.set_relative_install_dir(relative_install_dir) self.set_repository_files_dir() self.resetting_all_metadata_on_repository = False @@ -1039,13 +819,16 @@ def set_repository(self, repository, relative_install_dir=None, changeset_revisi self.persist = False self.invalid_file_tups = [] - def set_repository_clone_url(self, repository_clone_url): - self.repository_clone_url = repository_clone_url - - def set_repository_files_dir(self, repository_files_dir=None): + def set_repository_files_dir(self, repository_files_dir: Optional[str] = None): self.repository_files_dir = repository_files_dir - def update_repository_dependencies_metadata(self, metadata, repository_dependency_tups, is_valid, description): + def _update_repository_dependencies_metadata( + self, + metadata: Dict[str, Any], + repository_dependency_tups: List[tuple], + is_valid: bool, + description: Optional[str], + ) -> Dict[str, Any]: if is_valid: repository_dependencies_dict = metadata.get("repository_dependencies", None) else: @@ -1069,7 +852,172 @@ def update_repository_dependencies_metadata(self, metadata, repository_dependenc return metadata -def _get_readme_file_names(repository_name): +class GalaxyMetadataGenerator(BaseMetadataGenerator): + """A MetadataGenerator building on Galaxy's app and repository constructs.""" + + app: InstallationTarget + repository: Optional[ToolShedRepository] + + 
def __init__( + self, + app: InstallationTarget, + repository=None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir=None, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False, + metadata_dict=None, + user=None, + ): + self.app = app + self.user = user + self.repository = repository + if changeset_revision is None and self.repository is not None: + self.changeset_revision = self.repository.changeset_revision + else: + self.changeset_revision = changeset_revision + + if repository_clone_url is None and self.repository is not None: + self.repository_clone_url = generate_clone_url_for_installed_repository(self.app, self.repository) + else: + self.repository_clone_url = repository_clone_url + if shed_config_dict is None: + if self.repository is not None: + self.shed_config_dict = self.repository.get_shed_config_dict(self.app) + else: + self.shed_config_dict = {} + else: + self.shed_config_dict = shed_config_dict + if relative_install_dir is None and self.repository is not None: + tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) + if repository_files_dir is None and self.repository is not None: + repository_files_dir = self.repository.repo_files_directory(self.app) + if metadata_dict is None: + # Shed related tool panel configs are only relevant to Galaxy. + self.metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename", None)} + else: + self.metadata_dict = metadata_dict + self.relative_install_dir = relative_install_dir + self.repository_files_dir = repository_files_dir + self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository + self.updating_installed_repository = updating_installed_repository + self.persist = persist + self.invalid_file_tups = [] + + def initial_metadata_dict(self) -> Dict[str, Any]: + # Shed related tool panel configs are only relevant to Galaxy. + metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} + return metadata_dict + + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): + self.repository = repository + if relative_install_dir is None and self.repository is not None: + tool_path, relative_install_dir = self.repository.get_tool_relative_path(self.app) + if changeset_revision is None and self.repository is not None: + self.set_changeset_revision(self.repository.changeset_revision) + else: + self.set_changeset_revision(changeset_revision) + self.shed_config_dict = repository.get_shed_config_dict(self.app) + self._reset_attributes_after_repository_update(relative_install_dir) + + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + """ + Process the received repository_elem which is a tag either from a + repository_dependencies.xml file or a tool_dependencies.xml file. If the former, + we're generating repository dependencies metadata for a repository in the Tool Shed. + If the latter, we're generating package dependency metadata within Galaxy or the + Tool Shed. 
+ """ + is_valid = True + error_message = "" + toolshed = repository_elem.get("toolshed", None) + name = repository_elem.get("name", None) + owner = repository_elem.get("owner", None) + changeset_revision = repository_elem.get("changeset_revision", None) + prior_installation_required = str(repository_elem.get("prior_installation_required", False)) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + if self.updating_installed_repository: + pass + else: + # We're installing a repository into Galaxy, so make sure its contained repository + # dependency definition is valid. + if toolshed is None or name is None or owner is None or changeset_revision is None: + # Several packages exist in the Tool Shed that contain invalid repository + # definitions, but will still install. We will report these errors to the + # installing user. Previously, we would: + # Raise an exception here instead of returning an error_message to keep the + # installation from proceeding. Reaching here implies a bug in the Tool Shed + # framework. + error_message = "Installation encountered an invalid repository dependency definition:\n" + error_message += util.xml_to_string(repository_elem, pretty=True) + log.error(error_message) + return repository_dependency_tup, False, error_message + + # Must be present in Galaxy side code I think. + assert toolshed + + toolshed = remove_protocol_from_tool_shed_url(toolshed) + + # We're in Galaxy. We reach here when we're generating the metadata for a tool + # dependencies package defined for a repository or when we're generating metadata + # for an installed repository. See if we can locate the installed repository via + # the changeset_revision defined in the repository_elem (it may be outdated). If + # we're successful in locating an installed repository with the attributes defined + # in the repository_elem, we know it is valid. + repository = get_repository_for_dependency_relationship(self.app, toolshed, name, owner, changeset_revision) + if repository: + return repository_dependency_tup, is_valid, error_message + else: + # Send a request to the tool shed to retrieve appropriate additional changeset + # revisions with which the repository + # may have been installed. + text = get_updated_changeset_revisions_from_tool_shed(self.app, toolshed, name, owner, changeset_revision) + if text: + updated_changeset_revisions = util.listify(text) + for updated_changeset_revision in updated_changeset_revisions: + repository = get_repository_for_dependency_relationship( + self.app, toolshed, name, owner, updated_changeset_revision + ) + if repository: + return repository_dependency_tup, is_valid, error_message + if self.updating_installed_repository: + # The repository dependency was included in an update to the installed + # repository, so it will not yet be installed. Return the tuple for later + # installation. + return repository_dependency_tup, is_valid, error_message + if self.updating_installed_repository: + # The repository dependency was included in an update to the installed repository, + # so it will not yet be installed. Return the tuple for later installation. + return repository_dependency_tup, is_valid, error_message + # Don't generate an error message for missing repository dependencies that are required + # only if compiling the dependent repository's tool dependency. 
+ if not only_if_compiling_contained_td: + # We'll currently default to setting the repository dependency definition as invalid + # if an installed repository cannot be found. This may not be ideal because the tool + # shed may have simply been inaccessible when metadata was being generated for the + # installed tool shed repository. + error_message = f"Ignoring invalid repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " + error_message += f"changeset revision {changeset_revision}." + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + return repository_dependency_tup, is_valid, error_message + + +def _get_readme_file_names(repository_name: str) -> List[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" readme_files = ["readme", "read_me", "install"] valid_filenames = [f"{f}.txt" for f in readme_files] diff --git a/lib/galaxy/tool_shed/tools/data_table_manager.py b/lib/galaxy/tool_shed/tools/data_table_manager.py index c47b034113c1..f2d02a4d96a6 100644 --- a/lib/galaxy/tool_shed/tools/data_table_manager.py +++ b/lib/galaxy/tool_shed/tools/data_table_manager.py @@ -1,18 +1,30 @@ import logging import os import shutil -from typing import List +from typing import ( + List, + TYPE_CHECKING, + Union, +) -from galaxy.structured_app import StructuredApp +from galaxy.tool_shed.galaxy_install.client import InstallationTarget from galaxy.tool_shed.util import hg_util from galaxy.util import etree from galaxy.util.tool_shed import xml_util +if TYPE_CHECKING: + from galaxy.structured_app import BasicSharedApp + log = logging.getLogger(__name__) +RequiredAppT = Union["BasicSharedApp", InstallationTarget] + + class ShedToolDataTableManager: - def __init__(self, app: StructuredApp): + app: RequiredAppT + + def __init__(self, app: RequiredAppT): self.app = app def generate_repository_info_elem( diff --git a/lib/galaxy/tool_shed/tools/tool_validator.py b/lib/galaxy/tool_shed/tools/tool_validator.py index 9b94813584a2..1d2780c81d3d 100644 --- a/lib/galaxy/tool_shed/tools/tool_validator.py +++ b/lib/galaxy/tool_shed/tools/tool_validator.py @@ -1,6 +1,9 @@ import logging -from galaxy.tool_shed.tools.data_table_manager import ShedToolDataTableManager +from galaxy.tool_shed.tools.data_table_manager import ( + RequiredAppT, + ShedToolDataTableManager, +) from galaxy.tool_shed.util import ( basic_util, hg_util, @@ -17,7 +20,7 @@ class ToolValidator: - def __init__(self, app): + def __init__(self, app: RequiredAppT): self.app = app self.stdtm = ShedToolDataTableManager(self.app) diff --git a/lib/galaxy/tool_shed/unittest_utils/__init__.py b/lib/galaxy/tool_shed/unittest_utils/__init__.py new file mode 100644 index 000000000000..2785fbca8857 --- /dev/null +++ b/lib/galaxy/tool_shed/unittest_utils/__init__.py @@ -0,0 +1,237 @@ +import threading +from pathlib import Path +from typing import ( + Any, + Dict, + List, + NamedTuple, + Optional, + Union, +) + +from galaxy.model.migrations import ( + DatabaseStateVerifier, + TSI, +) +from galaxy.model.orm.engine_factory import build_engine +from galaxy.model.tool_shed_install import mapping as install_mapping +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.tool_shed.cache import ToolShedRepositoryCache +from galaxy.tool_shed.galaxy_install.client import ( + DataManagerInterface, + DataManagersInterface, + InstallationTarget, +) +from galaxy.tool_shed.util.repository_util import 
get_installed_repository +from galaxy.tool_util.data import ( + OutputDataset, + ToolDataTableManager, +) +from galaxy.tool_util.loader_directory import looks_like_a_tool +from galaxy.tool_util.toolbox.base import ( + AbstractToolBox, + NullToolTagManager, +) +from galaxy.tool_util.toolbox.watcher import ( + get_tool_conf_watcher, + get_tool_watcher, +) +from galaxy.util.tool_shed.tool_shed_registry import Registry + + +class ToolShedTarget(NamedTuple): + url: str + name: str + + @property + def as_str(self) -> str: + return f""" +<tool_sheds> + <tool_shed name="{self.name}" url="{self.url}"/> +</tool_sheds> +""" + + +EMPTY_TOOL_DATA_TABLE_CONFIG = """ +<tables> +</tables> +""" + + +class Config: + tool_data_path: str + install_database_connection: str + install_database_engine_options: Dict[str, Any] = {} + update_integrated_tool_panel: bool = True + integrated_tool_panel_config: str + shed_tool_config_file: str + shed_tool_data_path: str + migrated_tools_config: Optional[str] = None + shed_tools_dir: str + edam_panel_views: list = [] + tool_configs: list = [] + shed_tool_data_table_config: str + shed_data_manager_config_file: str + + def get(self, key, default): + return getattr(self, key, default) + + +class TestTool: + _macro_paths: List[str] = [] + params_with_missing_data_table_entry: list = [] + params_with_missing_index_file: list = [] + + def __init__(self, config_file, tool_shed_repository, guid): + self.config_file = config_file + self.tool_shed_repository = tool_shed_repository + self.guid = guid + self.id = guid + self.version = "1.0.0" + self.hidden = False + self._lineage = None + self.name = "test_tool" + + @property + def lineage(self): + return self._lineage + + +class TestToolBox(AbstractToolBox): + def create_tool(self, config_file, tool_cache_data_dir=None, **kwds): + tool = TestTool(config_file, kwds["tool_shed_repository"], kwds["guid"]) + tool._lineage = self._lineage_map.register(tool) # cleanup?
+ return tool + + def _get_tool_shed_repository(self, tool_shed, name, owner, installed_changeset_revision): + return get_installed_repository( + self.app, + tool_shed=tool_shed, + name=name, + owner=owner, + installed_changeset_revision=installed_changeset_revision, + from_cache=True, + ) + + def _looks_like_a_tool(self, path): + return looks_like_a_tool(path, enable_beta_formats=False) + + def tool_tag_manager(self): + return NullToolTagManager() + + +class Watchers: + def __init__(self, app): + self.app = app + self.tool_config_watcher = get_tool_conf_watcher( + reload_callback=self.app.reload_toolbox, + tool_cache=None, + ) + self.tool_watcher = get_tool_watcher(self, app.config) + + +class DummyDataManager(DataManagerInterface): + GUID_TYPE: str = "data_manager" + DEFAULT_VERSION: str = "0.0.1" + + def process_result(self, out_data): + return None + + def write_bundle(self, out) -> Dict[str, OutputDataset]: + return {} + + +class StandaloneDataManagers(DataManagersInterface): + __reload_count = 0 + + def load_manager_from_elem( + self, data_manager_elem, tool_path=None, add_manager=True + ) -> Optional[DataManagerInterface]: + return DummyDataManager() + + def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: + return None + + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + return None + + @property + def _reload_count(self) -> int: + self.__reload_count += 1 + return self.__reload_count + + +class StandaloneInstallationTarget(InstallationTarget): + name: str = "galaxy" + tool_shed_registry: Registry + security: IdEncodingHelper + _toolbox: TestToolBox + _toolbox_lock: threading.RLock = threading.RLock() + tool_shed_repository_cache: Optional[ToolShedRepositoryCache] = None + data_managers = StandaloneDataManagers() + + def __init__( + self, + target_directory: Path, + tool_shed_target: Optional[ToolShedTarget] = None, + ): + tool_root_dir = target_directory / "tools" + config: Config = Config() + install_db_path = str(target_directory / "install.sqlite") + config.tool_data_path = str(target_directory / "tool_data") + config.shed_tool_data_path = config.tool_data_path + config.install_database_connection = f"sqlite:///{install_db_path}?isolation_level=IMMEDIATE" + config.integrated_tool_panel_config = str(target_directory / "integrated.xml") + config.shed_tool_data_table_config = str(target_directory / "shed_tool_data_table_conf.xml") + shed_conf = target_directory / "shed_conf.xml" + shed_data_manager_config_file = target_directory / "shed_data_manager_conf.xml" + config.shed_data_manager_config_file = str(shed_data_manager_config_file) + config.shed_tool_config_file = str(shed_conf) + shed_conf.write_text(f'<toolbox tool_path="{tool_root_dir}"/>\n') + (target_directory / "shed_tool_data_table_conf.xml").write_text(EMPTY_TOOL_DATA_TABLE_CONFIG) + self.config = config + install_engine = build_engine(config.install_database_connection, config.install_database_engine_options) + self.security = IdEncodingHelper(id_secret="notasecretfortests") + DatabaseStateVerifier( + install_engine, + TSI, + None, + None, + True, + False, + ).run() + self.install_model = install_mapping.configure_model_mapping(install_engine) + registry_config: Optional[Path] = None + if tool_shed_target: + registry_config = target_directory / "tool_sheds_conf.xml" + with registry_config.open("w") as f: + f.write(tool_shed_target.as_str) + + self.tool_shed_registry = Registry(registry_config) + self.tool_root_dir = tool_root_dir + self.tool_root_dir.mkdir() + config.shed_tools_dir = str(tool_root_dir) +
self.watchers = Watchers(self) + self.reload_toolbox() + self.tool_data_tables = ToolDataTableManager( + tool_data_path=self.config.tool_data_path, + config_filename=self.config.shed_tool_data_table_config, + other_config_dict=self.config, + ) + dependency_dir = target_directory / "_dependencies" + dependency_dir.mkdir() + + @property + def tool_dependency_dir(self) -> Optional[str]: + return None + + def reload_toolbox(self): + self._toolbox = TestToolBox( + config_filenames=[self.config.shed_tool_config_file], + tool_root_dir=self.tool_root_dir, + app=self, + ) + + @property + def toolbox(self) -> TestToolBox: + return self._toolbox diff --git a/lib/galaxy/tool_shed/util/dependency_display.py b/lib/galaxy/tool_shed/util/dependency_display.py index 99910d03add4..f4c2ef30a60a 100644 --- a/lib/galaxy/tool_shed/util/dependency_display.py +++ b/lib/galaxy/tool_shed/util/dependency_display.py @@ -2,6 +2,7 @@ import os from galaxy import util +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager from galaxy.tool_shed.util import utility_container_manager from galaxy.util import UNKNOWN from galaxy.util.tool_shed.common_util import parse_repository_dependency_tuple @@ -198,11 +199,12 @@ def populate_containers_dict_from_repository_metadata(self, repository): """ metadata = repository.metadata_ if metadata: + irm = InstalledRepositoryManager(self.app) # Handle repository dependencies. ( installed_repository_dependencies, missing_repository_dependencies, - ) = self.app.installed_repository_manager.get_installed_and_missing_repository_dependencies(repository) + ) = irm.get_installed_and_missing_repository_dependencies(repository) # Handle the current repository's tool dependencies. repository_tool_dependencies = metadata.get("tool_dependencies", None) # Make sure to display missing tool dependencies as well. 
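The displayer now builds an InstalledRepositoryManager on demand instead of reaching for an installed_repository_manager attribute on the app, which keeps dependency display usable from any installation target. A minimal sketch of the same lookup, assuming only names visible in this diff; missing_dependencies_for is a hypothetical helper:

from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager


def missing_dependencies_for(app, repository):
    # construct the manager directly rather than requiring app.installed_repository_manager
    irm = InstalledRepositoryManager(app)
    installed, missing = irm.get_installed_and_missing_repository_dependencies(repository)
    return missing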
@@ -291,3 +293,20 @@ def build_repository_containers( except Exception as e: log.debug(f"Exception in build_repository_containers: {str(e)}") return containers_dict + + +def build_manage_repository_dict(app, status, repository): + dd = DependencyDisplayer(app) + containers_dict = dd.populate_containers_dict_from_repository_metadata( + repository=repository, + ) + management_dict = { + "status": status, + } + missing_repo_dependencies = containers_dict.get("missing_repository_dependencies", None) + if missing_repo_dependencies: + management_dict["missing_repository_dependencies"] = missing_repo_dependencies.to_dict() + repository_dependencies = containers_dict.get("repository_dependencies", None) + if repository_dependencies: + management_dict["repository_dependencies"] = repository_dependencies.to_dict() + return management_dict diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index 674048937359..11e9a675039d 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -21,10 +21,7 @@ ) from sqlalchemy.orm import joinedload -from galaxy import ( - util, - web, -) +from galaxy import util from galaxy.model.base import transaction from galaxy.model.scoped_session import install_model_scoped_session from galaxy.model.tool_shed_install import ToolShedRepository @@ -229,6 +226,8 @@ def generate_tool_shed_repository_install_dir(repository_clone_url, changeset_re # Now tmp_url is something like: bx.psu.edu:9009/repos/some_username/column items = tmp_url.split("/repos/") tool_shed_url = items[0] + if len(items) == 1: + raise Exception(f"Processing an invalid tool shed clone URL {repository_clone_url} - tmp_url {tmp_url}") repo_path = items[1] tool_shed_url = common_util.remove_port_from_tool_shed_url(tool_shed_url) return "/".join((tool_shed_url, "repos", repo_path, changeset_revision)) @@ -724,7 +723,6 @@ def repository_was_previously_installed(app, tool_shed_url, repository_name, rep # Get all previous changeset revisions from the tool shed for the repository back to, but excluding, # the previous valid changeset revision to see if it was previously installed using one of them. 
params = dict( - galaxy_url=web.url_for("/", qualified=True), name=repository_name, owner=repository_owner, changeset_revision=changeset_revision, diff --git a/lib/galaxy/tool_shed/util/shed_util_common.py b/lib/galaxy/tool_shed/util/shed_util_common.py index 9e35745495de..76c757082b87 100644 --- a/lib/galaxy/tool_shed/util/shed_util_common.py +++ b/lib/galaxy/tool_shed/util/shed_util_common.py @@ -5,7 +5,6 @@ from galaxy.model.base import transaction from galaxy.tool_shed.util import repository_util from galaxy.util.tool_shed import common_util -from galaxy.web import url_for log = logging.getLogger(__name__) @@ -174,13 +173,6 @@ def set_image_paths(app, text, encoded_repository_id=None, tool_shed_repository= return text -def tool_shed_is_this_tool_shed(toolshed_base_url): - """Determine if a tool shed is the current tool shed.""" - cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) - cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(str(url_for("/", qualified=True))) - return cleaned_toolshed_base_url == cleaned_tool_shed - - __all__ = ( "can_eliminate_repository_dependency", "clean_dependency_relationships", @@ -192,5 +184,4 @@ def tool_shed_is_this_tool_shed(toolshed_base_url): "get_user", "have_shed_tool_conf_for_install", "set_image_paths", - "tool_shed_is_this_tool_shed", ) diff --git a/lib/galaxy/tool_util/toolbox/base.py b/lib/galaxy/tool_util/toolbox/base.py index 39fc18c68f01..24274e56cd8c 100644 --- a/lib/galaxy/tool_util/toolbox/base.py +++ b/lib/galaxy/tool_util/toolbox/base.py @@ -120,6 +120,14 @@ def handle_tags(self, tool_id, tool_definition_source): """Parse out tags and persist them.""" +class NullToolTagManager(AbstractToolTagManager): + def reset_tags(self) -> None: + return None + + def handle_tags(self, tool_id, tool_definition_source) -> None: + return None + + class AbstractToolBox(Dictifiable, ManagesIntegratedToolPanelMixin): """ Abstract container for managing a ToolPanel - containing tools and diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index fc9b86b07d24..3d03e0348d51 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -3097,7 +3097,7 @@ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, fina super().exec_after_process(app, inp_data, out_data, param_dict, job=job, **kwds) # process results of tool data_manager_id = job.data_manager_association.data_manager_id - data_manager = self.app.data_managers.get_manager(data_manager_id, None) + data_manager = self.app.data_managers.get_manager(data_manager_id) assert ( data_manager is not None ), f"Invalid data manager ({data_manager_id}) requested. It may have been removed before the job completed." 
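The call above now leans on the narrowed data manager contract: get_manager takes just an id and returns an Optional manager, so callers must handle the None case explicitly instead of passing a dict-style default. A hedged sketch of that contract using the DataManagerInterface and DataManagersInterface types introduced elsewhere in this change; lookup_manager is a hypothetical helper:

from typing import Optional

from galaxy.tool_shed.galaxy_install.client import (
    DataManagerInterface,
    DataManagersInterface,
)


def lookup_manager(data_managers: DataManagersInterface, data_manager_id: str) -> DataManagerInterface:
    # get_manager may return None, e.g. when the manager was removed before a job completed
    manager: Optional[DataManagerInterface] = data_managers.get_manager(data_manager_id)
    assert manager is not None, f"Invalid data manager ({data_manager_id}) requested."
    return manager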
@@ -3108,10 +3108,10 @@ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, fina pass elif data_manager_mode == "bundle": for bundle_path, dataset in data_manager.write_bundle(out_data).items(): - dataset = cast(model.HistoryDatasetAssociation, dataset) - dataset.dataset.object_store.update_from_file( - dataset.dataset, - extra_dir=dataset.dataset.extra_files_path_name, + hda = cast(model.HistoryDatasetAssociation, dataset) + hda.dataset.object_store.update_from_file( + hda.dataset, + extra_dir=hda.dataset.extra_files_path_name, file_name=bundle_path, alt_name=os.path.basename(bundle_path), create=True, diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index aaa7e4d23ee3..52f79ca54250 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -3,13 +3,16 @@ import os from typing import ( Dict, + List, Optional, + Union, ) from typing_extensions import Protocol from galaxy import util from galaxy.structured_app import StructuredApp +from galaxy.tool_shed.galaxy_install.client import DataManagersInterface from galaxy.tool_util.data import ( BundleProcessingOptions, OutputDataset, @@ -23,16 +26,17 @@ log = logging.getLogger(__name__) -class DataManagers: +class DataManagers(DataManagersInterface): data_managers: Dict[str, "DataManager"] managed_data_tables: Dict[str, "DataManager"] + __reload_count: int - def __init__(self, app: StructuredApp, xml_filename=None): + def __init__(self, app: StructuredApp, xml_filename=None, reload_count: Optional[int] = None): self.app = app self.data_managers = {} self.managed_data_tables = {} self.tool_path = None - self._reload_count = 0 + self.__reload_count = reload_count or 0 self.filename = xml_filename or self.app.config.data_manager_config_file for filename in util.listify(self.filename): if not filename: @@ -45,6 +49,10 @@ def __init__(self, app: StructuredApp, xml_filename=None): if exc.errno != errno.ENOENT or self.app.config.is_set("shed_data_manager_config_file"): raise + @property + def _reload_count(self) -> int: + return self.__reload_count + def load_from_xml(self, xml_filename, store_tool_path=True) -> None: try: tree = util.parse_xml(xml_filename) @@ -102,7 +110,7 @@ def add_manager(self, data_manager): def get_manager(self, *args, **kwds): return self.data_managers.get(*args, **kwds) - def remove_manager(self, manager_ids): + def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: if not isinstance(manager_ids, list): manager_ids = [manager_ids] for manager_id in manager_ids: @@ -236,7 +244,7 @@ def process_result(self, out_data: Dict[str, OutputDataset]) -> None: def write_bundle( self, out_data: Dict[str, OutputDataset], - ): + ) -> Dict[str, OutputDataset]: tool_data_tables = self.data_managers.app.tool_data_tables return tool_data_tables.write_bundle( out_data, diff --git a/lib/galaxy/tools/repositories.py b/lib/galaxy/tools/repositories.py index c5e17273d0a0..50e848dda7ae 100644 --- a/lib/galaxy/tools/repositories.py +++ b/lib/galaxy/tools/repositories.py @@ -3,22 +3,34 @@ import shutil import tempfile from contextlib import contextmanager +from typing import Optional from galaxy.managers.dbkeys import GenomeBuilds from galaxy.tools.data import ToolDataTableManager from galaxy.util.bunch import Bunch +class ValidationContextConfig: + tool_data_path: Optional[str] + shed_tool_data_path: Optional[str] + tool_data_table_config: str + shed_tool_data_table_config: str + interactivetools_enable: bool + 
len_file_path: str + builds_file_path: Optional[str] + + class ValidationContext: """Minimal App object for tool validation.""" is_webapp = True + config: ValidationContextConfig def __init__( self, - app_name, - security, + app_name: str, model, + security, tool_data_path, shed_tool_data_path, tool_data_tables=None, @@ -27,9 +39,9 @@ def __init__( biotools_metadata_source=None, ): self.name = app_name - self.security = security self.model = model - self.config = Bunch() + self.security = security + self.config = ValidationContextConfig() self.config.tool_data_path = tool_data_path self.config.shed_tool_data_path = shed_tool_data_path self.temporary_path = tempfile.mkdtemp(prefix="tool_validation_") @@ -67,11 +79,11 @@ def from_app(app, work_dir=None): with ValidationContext( app_name=app.name, security=app.security, - model=app.model, + model=getattr(app, "model", None), tool_data_path=work_dir, shed_tool_data_path=work_dir, tool_data_tables=tool_data_tables, - registry=app.datatypes_registry, + registry=getattr(app, "datatypes_registry", None), hgweb_config_manager=getattr(app, "hgweb_config_manager", None), biotools_metadata_source=getattr(app, "biotools_metadata_source", None), ) as app: diff --git a/lib/galaxy/util/tool_shed/common_util.py b/lib/galaxy/util/tool_shed/common_util.py index 6636897ceea8..b65eab240f8f 100644 --- a/lib/galaxy/util/tool_shed/common_util.py +++ b/lib/galaxy/util/tool_shed/common_util.py @@ -51,17 +51,6 @@ def generate_clone_url_for_installed_repository(app: HasToolShedRegistry, reposi return util.build_url(tool_shed_url, pathspec=["repos", str(repository.owner), str(repository.name)]) -def generate_clone_url_for_repository_in_tool_shed(user, repository) -> str: - """Generate the URL for cloning a repository that is in the tool shed.""" - base_url = url_for("/", qualified=True).rstrip("/") - if user: - protocol, base = base_url.split("://") - username = f"{user.username}@" - return f"{protocol}://{username}{base}/repos/{repository.user.username}/{repository.name}" - else: - return f"{base_url}/repos/{repository.user.username}/{repository.name}" - - def generate_clone_url_from_repo_info_tup(app: HasToolShedRegistry, repo_info_tup) -> str: """Generate the URL for cloning a repository given a tuple of toolshed, name, owner, changeset_revision.""" # Example tuple: ['http://localhost:9009', 'blast_datatypes', 'test', '461a4216e8ab', False] @@ -286,7 +275,6 @@ def remove_protocol_from_tool_shed_url(tool_shed_url: str) -> str: "accumulate_tool_dependencies", "check_tool_tag_set", "generate_clone_url_for_installed_repository", - "generate_clone_url_for_repository_in_tool_shed", "generate_clone_url_from_repo_info_tup", "get_repository_dependencies", "get_protocol_from_tool_shed_url", diff --git a/lib/galaxy/util/tool_shed/tool_shed_registry.py b/lib/galaxy/util/tool_shed/tool_shed_registry.py index b50393d005d9..c1a60aa56dc2 100644 --- a/lib/galaxy/util/tool_shed/tool_shed_registry.py +++ b/lib/galaxy/util/tool_shed/tool_shed_registry.py @@ -5,6 +5,8 @@ Optional, ) +from typing_extensions import Literal + from galaxy.util import parse_xml_string from galaxy.util.path import StrPath from galaxy.util.tool_shed import common_util @@ -20,6 +22,8 @@ """ +API_VERSION = Literal["v1", "v2"] + class AUTH_TUPLE(NamedTuple): username: str @@ -28,11 +32,13 @@ class AUTH_TUPLE(NamedTuple): class Registry: tool_sheds: Dict[str, str] + tool_shed_api_versions: Dict[str, API_VERSION] tool_sheds_auth: Dict[str, Optional[AUTH_TUPLE]] def __init__(self, config: Optional[StrPath] = None): 
self.tool_sheds = {} self.tool_sheds_auth = {} + self.tool_shed_api_versions = {} if config: # Parse tool_sheds_conf.xml tree, error_message = parse_xml(config) @@ -48,10 +54,17 @@ def __init__(self, config: Optional[StrPath] = None): try: name = elem.get("name", None) url = elem.get("url", None) + version_raw = elem.get("version", "1") + version: API_VERSION + if version_raw == "1": + version = "v1" + else: + version = "v2" username = elem.get("user", None) password = elem.get("pass", None) if name and url: self.tool_sheds[name] = url + self.tool_shed_api_versions[name] = version self.tool_sheds_auth[name] = None log.debug(f"Loaded reference to tool shed: {name}") if name and url and username and password: @@ -75,6 +88,13 @@ def url_auth(self, url: str) -> Optional[AUTH_TUPLE]: log.debug(f"Invalid url '{str(url)}' received by tool shed registry's url_auth method.") return None + def is_legacy(self, url: str) -> bool: + shed_name = self._shed_name_for_url(url) + if shed_name is None: + return True + else: + return self.tool_shed_api_versions[shed_name] == "v1" + def _shed_name_for_url(self, url: str) -> Optional[str]: url_sans_protocol = common_util.remove_protocol_from_tool_shed_url(url) for shed_name, shed_url in self.tool_sheds.items(): diff --git a/lib/galaxy/webapps/base/api.py b/lib/galaxy/webapps/base/api.py index 2835a2340faf..73056bc88a09 100644 --- a/lib/galaxy/webapps/base/api.py +++ b/lib/galaxy/webapps/base/api.py @@ -165,7 +165,14 @@ def get_error_response_for_request(request: Request, exc: MessageException) -> J if "ga4gh" in path: # When serving GA4GH APIs use limited exceptions to conform their expected # error schema. Tailored to DRS currently. - content = {"status_code": status_code, "msg": error_dict["err_msg"]} + message = error_dict["err_msg"] + if "drs" in path: + content = {"status_code": status_code, "msg": message} + elif "trs" in path: + content = {"code": status_code, "message": message} + else: + # unknown schema - just yield the most useful error message + content = error_dict else: content = error_dict diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py index 1c9774efa025..8fc17986afec 100644 --- a/lib/galaxy/webapps/base/webapp.py +++ b/lib/galaxy/webapps/base/webapp.py @@ -736,21 +736,9 @@ def __create_new_session(self, prev_galaxy_session=None, user_for_new_session=No Caller is responsible for flushing the returned session. """ - session_key = self.security.get_new_guid() - galaxy_session = self.app.model.GalaxySession( - session_key=session_key, - is_valid=True, - remote_host=self.request.remote_host, - remote_addr=self.request.remote_addr, - referer=self.request.headers.get("Referer", None), + return create_new_session( + self, prev_galaxy_session=prev_galaxy_session, user_for_new_session=user_for_new_session ) - if prev_galaxy_session: - # Invalidated an existing session for some reason, keep track - galaxy_session.prev_session_id = prev_galaxy_session.id - if user_for_new_session: - # The new session should be associated with the user - galaxy_session.user = user_for_new_session - return galaxy_session @property def cookie_path(self): @@ -1110,6 +1098,31 @@ def qualified_url_for_path(self, path): return url_for(path, qualified=True) +def create_new_session(trans, prev_galaxy_session=None, user_for_new_session=None): + """ + Create a new GalaxySession for this request, possibly with a connection + to a previous session (in `prev_galaxy_session`) and an existing user + (in `user_for_new_session`). 
+ + Caller is responsible for flushing the returned session. + """ + session_key = trans.security.get_new_guid() + galaxy_session = trans.app.model.GalaxySession( + session_key=session_key, + is_valid=True, + remote_host=trans.request.remote_host, + remote_addr=trans.request.remote_addr, + referer=trans.request.headers.get("Referer", None), + ) + if prev_galaxy_session: + # Invalidated an existing session for some reason, keep track + galaxy_session.prev_session_id = prev_galaxy_session.id + if user_for_new_session: + # The new session should be associated with the user + galaxy_session.user = user_for_new_session + return galaxy_session + + def default_url_path(path): return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py index 15fe9102b9a7..b6a44a1f43ac 100644 --- a/lib/galaxy/webapps/galaxy/api/__init__.py +++ b/lib/galaxy/webapps/galaxy/api/__init__.py @@ -47,6 +47,7 @@ NoMatchFound, ) from starlette.types import Scope +from typing_extensions import Literal try: from starlette_context import context as request_context @@ -109,7 +110,7 @@ def __init__(self, callable, dep_type): self.galaxy_type_depends = dep_type -def depends(dep_type: Type[T]) -> T: +def depends(dep_type: Type[T], get_app=get_app) -> T: def _do_resolve(request: Request): return get_app().resolve(dep_type) @@ -178,7 +179,10 @@ def __call__(self, name: str, **path_params): query_params = path_params.pop("query_params", None) try: if qualified: - url = str(self.request.url_for(name, **path_params)) + if name == "/": + url = str(self.request.base_url) + else: + url = str(self.request.url_for(name, **path_params)) else: url = self.request.app.url_path_for(name, **path_params) if query_params: @@ -197,6 +201,8 @@ class GalaxyASGIRequest(GalaxyAbstractRequest): Implements the GalaxyAbstractRequest interface to provide access to some properties of the request commonly used.""" + __request: Request + def __init__(self, request: Request): self.__request = request self.__environ: Optional[MutableMapping[str, Any]] = None @@ -229,6 +235,28 @@ def environ(self) -> MutableMapping[str, Any]: self.__environ = build_environ(self.__request.scope, None) # type: ignore[arg-type] return self.__environ + @property + def headers(self): + return self.__request.headers + + @property + def remote_host(self) -> str: + # was available in wsgi and is used by create_new_session + return self.host + + @property + def remote_addr(self) -> Optional[str]: + # was available in wsgi and is used by create_new_session + # not sure what to do here... + return None + + @property + def is_secure(self) -> bool: + return self.__request.url.scheme == "https" + + def get_cookie(self, name): + return self.__request.cookies.get(name) + class GalaxyASGIResponse(GalaxyAbstractResponse): """Wrapper around Starlette/FastAPI Response object.
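Together with the set_cookie method added to GalaxyASGIResponse below, these accessors give session code a framework-neutral cookie round trip. A minimal sketch against the abstract interfaces; refresh_session_cookie and the cookie name are illustrative, not part of this change:

from typing import Optional

from galaxy.work.context import (
    GalaxyAbstractRequest,
    GalaxyAbstractResponse,
)


def refresh_session_cookie(
    request: GalaxyAbstractRequest,
    response: GalaxyAbstractResponse,
    name: str = "galaxysession",
) -> Optional[str]:
    value = request.get_cookie(name)
    if value:
        # only mark the cookie secure when the request itself arrived over https
        response.set_cookie(name, value, httponly=True, secure=request.is_secure)
    return value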
@@ -243,6 +271,31 @@ def __init__(self, response: Response): def headers(self): return self.__response.headers + def set_cookie( + self, + key: str, + value: str = "", + max_age: Optional[int] = None, + expires: Optional[int] = None, + path: str = "/", + domain: Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: Optional[Literal["lax", "strict", "none"]] = "lax", + ) -> None: + """Set a cookie.""" + self.__response.set_cookie( + key, + value, + max_age=max_age, + expires=expires, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + DependsOnUser = cast(Optional[User], Depends(get_user)) @@ -301,9 +354,11 @@ class RestVerb(str, Enum): options = "OPTIONS" -class Router(InferringRouter): +class FrameworkRouter(InferringRouter): """A FastAPI Inferring Router tailored to Galaxy.""" + admin_user_dependency: Any + def wrap_with_alias(self, verb: RestVerb, *args, alias: Optional[str] = None, **kwd): """ Wraps FastAPI methods with additional alias keyword and require_admin handling. @@ -382,9 +437,9 @@ def _handle_galaxy_kwd(self, kwd): require_admin = kwd.pop("require_admin", False) if require_admin: if "dependencies" in kwd: - kwd["dependencies"].append(AdminUserRequired) + kwd["dependencies"].append(self.admin_user_dependency) else: - kwd["dependencies"] = [AdminUserRequired] + kwd["dependencies"] = [self.admin_user_dependency] return kwd @@ -398,6 +453,10 @@ def cbv(self): return cbv(self) +class Router(FrameworkRouter): + admin_user_dependency = AdminUserRequired + + class APIContentTypeRoute(APIRoute): """ Determines endpoint to match using content-type. diff --git a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py index b3dbfff241c6..81b6642e4ee4 100644 --- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py @@ -153,17 +153,4 @@ def _manage_repository_json(self, trans, **kwd): with transaction(trans.install_model.context): trans.install_model.context.commit() message = "The repository information has been updated." 
- dd = dependency_display.DependencyDisplayer(trans.app) - containers_dict = dd.populate_containers_dict_from_repository_metadata( - repository=repository, - ) - management_dict = { - "status": status, - } - missing_repo_dependencies = containers_dict.get("missing_repository_dependencies", None) - if missing_repo_dependencies: - management_dict["missing_repository_dependencies"] = missing_repo_dependencies.to_dict() - repository_dependencies = containers_dict.get("repository_dependencies", None) - if repository_dependencies: - management_dict["repository_dependencies"] = repository_dependencies.to_dict() - return management_dict + return dependency_display.build_manage_repository_dict(trans.app, status, repository) diff --git a/lib/galaxy/webapps/galaxy/controllers/user.py b/lib/galaxy/webapps/galaxy/controllers/user.py index 7fc9b8c1e017..52587ee2d66e 100644 --- a/lib/galaxy/webapps/galaxy/controllers/user.py +++ b/lib/galaxy/webapps/galaxy/controllers/user.py @@ -189,7 +189,7 @@ def __validate_login(self, trans, payload=None, **kwd): message, status = self.resend_activation_email(trans, user.email, user.username) return self.message_exception(trans, message, sanitize=False) else: # activation is OFF - pw_expires = trans.app.config.password_expiration_period + pw_expires = getattr(trans.app.config, "password_expiration_period", None) if pw_expires and user.last_password_change < datetime.today() - pw_expires: # Password is expired, we don't log them in. return { diff --git a/lib/galaxy/work/context.py b/lib/galaxy/work/context.py index 310779e504e3..8a1206018c0a 100644 --- a/lib/galaxy/work/context.py +++ b/lib/galaxy/work/context.py @@ -4,6 +4,8 @@ Optional, ) +from typing_extensions import Literal + from galaxy.managers.context import ProvidesHistoryContext from galaxy.model import ( GalaxySession, @@ -85,6 +87,14 @@ def base(self) -> str: def host(self) -> str: """The host address.""" + @abc.abstractproperty + def is_secure(self) -> bool: + """Was this a secure (https) request.""" + + @abc.abstractmethod + def get_cookie(self, name): + """Return cookie.""" + class GalaxyAbstractResponse: """Abstract interface to provide access to some response utilities.""" @@ -102,6 +112,21 @@ def set_content_type(self, content_type: str): def get_content_type(self): return self.headers.get("content-type", None) + @abc.abstractmethod + def set_cookie( + self, + key: str, + value: str = "", + max_age: Optional[int] = None, + expires: Optional[int] = None, + path: str = "/", + domain: Optional[str] = None, + secure: bool = False, + httponly: bool = False, + samesite: Optional[Literal["lax", "strict", "none"]] = "lax", + ) -> None: + """Set a cookie.""" + class SessionRequestContext(WorkRequestContext): """Like WorkRequestContext, but provides access to request.""" diff --git a/lib/tool_shed/context.py b/lib/tool_shed/context.py index df4b07a5ccdc..107be8c8ed48 100644 --- a/lib/tool_shed/context.py +++ b/lib/tool_shed/context.py @@ -2,6 +2,7 @@ from typing import Optional from sqlalchemy.orm import scoped_session +from typing_extensions import Protocol from galaxy.security.idencoding import IdEncodingHelper from galaxy.work.context import ( @@ -16,7 +17,7 @@ from tool_shed.webapp.model.mapping import ToolShedModelMapping -class ProvidesAppContext: +class ProvidesAppContext(Protocol): """For transaction-like objects to provide the shed convenience layer for database and event handling. 
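Since these context classes are now typing Protocols, any transaction-like object that exposes the declared attributes satisfies them structurally; no inheritance is required. A small hedged illustration; require_bootstrap_admin is a hypothetical consumer:

from tool_shed.context import ProvidesUserContext


def require_bootstrap_admin(trans: ProvidesUserContext) -> None:
    # any object with the attributes the Protocol declares type-checks here,
    # whether or not it subclasses ProvidesUserContext
    if not trans.user_is_bootstrap_admin:
        raise Exception("Only the bootstrap admin user may perform this action.")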
@@ -45,7 +46,7 @@ def model(self) -> ToolShedModelMapping: return self.app.model -class ProvidesUserContext(ProvidesAppContext): +class ProvidesUserContext(ProvidesAppContext, Protocol): """For transaction-like objects to provide Galaxy convenience layer for reasoning about users. @@ -72,11 +73,21 @@ def user_is_bootstrap_admin(self) -> bool: return not self.anonymous and user is not None and user.bootstrap_admin_user -class SessionRequestContext(ProvidesUserContext): +class ProvidesRepositoriesContext(ProvidesUserContext, Protocol): + @abc.abstractproperty + def repositories_hostname(self) -> str: + """Provide access to hostname used by target mercurial server.""" + + +class SessionRequestContext(ProvidesRepositoriesContext, Protocol): @abc.abstractmethod def get_galaxy_session(self) -> Optional[GalaxySession]: ... + @abc.abstractmethod + def set_galaxy_session(self, galaxy_session: GalaxySession): + ... + @abc.abstractproperty def request(self) -> GalaxyAbstractRequest: ... @@ -84,3 +95,84 @@ def request(self) -> GalaxyAbstractRequest: @abc.abstractproperty def response(self) -> GalaxyAbstractResponse: ... + + @abc.abstractmethod + def url_builder(self): + ... + + @abc.abstractproperty + def session_csrf_token(self) -> str: + ... + + +class SessionRequestContextImpl(SessionRequestContext): + _app: ToolShedApp + _user: Optional[User] + _galaxy_session: Optional[GalaxySession] + + def __init__( + self, + app: ToolShedApp, + request: GalaxyAbstractRequest, + response: GalaxyAbstractResponse, + user: Optional[User] = None, + galaxy_session: Optional[GalaxySession] = None, + url_builder=None, + ): + self._app = app + self._user = user + self._galaxy_session = galaxy_session + self._url_builder = url_builder + self.__request = request + self.__response = response + + @property + def app(self) -> ToolShedApp: + return self._app + + @property + def url_builder(self): + return self._url_builder + + @property + def user(self) -> Optional[User]: + return self._user + + def get_galaxy_session(self) -> Optional[GalaxySession]: + return self._galaxy_session + + def set_galaxy_session(self, galaxy_session: GalaxySession): + self._galaxy_session = galaxy_session + if galaxy_session.user: + self._user = galaxy_session.user + + @property + def repositories_hostname(self) -> str: + return str(self.request.base).rstrip("/") + + @property + def host(self): + return self.__request.host + + @property + def request(self) -> GalaxyAbstractRequest: + return self.__request + + @property + def response(self) -> GalaxyAbstractResponse: + return self.__response + + # The following three things were added for the v2.0 frontend + @property + def session_csrf_token(self): + token = "" + if self._galaxy_session: + token = self.security.encode_id(self._galaxy_session.id, kind="csrf") + return token + + @property + def galaxy_session(self) -> Optional[GalaxySession]: + return self._galaxy_session + + def log_event(self, message): + pass diff --git a/lib/tool_shed/dependencies/attribute_handlers.py b/lib/tool_shed/dependencies/attribute_handlers.py index 57d312a832e8..ea611b132ab8 100644 --- a/lib/tool_shed/dependencies/attribute_handlers.py +++ b/lib/tool_shed/dependencies/attribute_handlers.py @@ -5,13 +5,13 @@ List, Optional, Tuple, + TYPE_CHECKING, ) from galaxy.util import ( asbool, etree, ) -from galaxy.web import url_for from tool_shed.dependencies.tool import tag_attribute_handler from tool_shed.repository_types.util import ( REPOSITORY_DEPENDENCY_DEFINITION_FILENAME, @@ -24,12 +24,21 @@ xml_util, ) + +if TYPE_CHECKING: + from
tool_shed.context import ProvidesRepositoriesContext + from tool_shed.structured_app import ToolShedApp + + log = logging.getLogger(__name__) class RepositoryDependencyAttributeHandler: - def __init__(self, app, unpopulate): - self.app = app + trans: "ProvidesRepositoriesContext" + app: "ToolShedApp" + + def __init__(self, trans: "ProvidesRepositoriesContext", unpopulate): + self.trans = trans + self.app = trans.app self.file_name = REPOSITORY_DEPENDENCY_DEFINITION_FILENAME self.unpopulate = unpopulate @@ -111,7 +120,7 @@ def handle_elem(self, elem): # From here on we're populating the toolshed and changeset_revision attributes if necessary. if not toolshed: # Default the setting to the current tool shed. - toolshed = str(url_for("/", qualified=True)).rstrip("/") + toolshed = str(self.trans.url_builder("/", qualified=True)).rstrip("/") elem.attrib["toolshed"] = toolshed altered = True if not changeset_revision: @@ -188,8 +197,12 @@ def handle_tag_attributes(self, config): class ToolDependencyAttributeHandler: - def __init__(self, app, unpopulate): - self.app = app + trans: "ProvidesRepositoriesContext" + app: "ToolShedApp" + + def __init__(self, trans: "ProvidesRepositoriesContext", unpopulate): + self.trans = trans + self.app = trans.app self.file_name = TOOL_DEPENDENCY_DEFINITION_FILENAME self.unpopulate = unpopulate @@ -198,7 +211,7 @@ def handle_tag_attributes(self, tool_dependencies_config): """ Populate or unpopulate the toolshed and changeset_revision attributes of each tag defined within a tool_dependencies.xml file. """ - rdah = RepositoryDependencyAttributeHandler(self.app, self.unpopulate) + rdah = RepositoryDependencyAttributeHandler(self.trans, self.unpopulate) tah = tag_attribute_handler.TagAttributeHandler(self.app, rdah, self.unpopulate) altered = False error_message = "" diff --git a/lib/tool_shed/dependencies/repository/relation_builder.py b/lib/tool_shed/dependencies/repository/relation_builder.py index 1f3c175e5d7d..33811377d031 100644 --- a/lib/tool_shed/dependencies/repository/relation_builder.py +++ b/lib/tool_shed/dependencies/repository/relation_builder.py @@ -16,7 +16,7 @@ class RelationBuilder: - def __init__(self, app, repository, repository_metadata, tool_shed_url): + def __init__(self, app, repository, repository_metadata, tool_shed_url, trans=None): self.all_repository_dependencies = {} self.app = app self.circular_repository_dependencies = [] @@ -25,6 +25,7 @@ def __init__(self, app, repository, repository_metadata, tool_shed_url): self.handled_key_rd_dicts = [] self.key_rd_dicts_to_be_processed = [] self.tool_shed_url = tool_shed_url + self.trans = trans def can_add_to_key_rd_dicts(self, key_rd_dict, key_rd_dicts): """Handle the case where an update to the changeset revision was done.""" @@ -212,7 +213,8 @@ def get_updated_changeset_revisions_for_repository_dependencies(self, key_rd_dic rd_prior_installation_required, rd_only_if_compiling_contained_td, ) = common_util.parse_repository_dependency_tuple(repository_dependency) - if suc.tool_shed_is_this_tool_shed(rd_toolshed): + tool_shed_is_this_tool_shed = suc.tool_shed_is_this_tool_shed(rd_toolshed, trans=self.trans) + if tool_shed_is_this_tool_shed: repository = tool_shed.util.repository_util.get_repository_by_name_and_owner( self.app, rd_name, rd_owner ) @@ -318,7 +320,7 @@ def handle_key_rd_dicts_for_repository(self, current_repository_key, repository_ prior_installation_required, only_if_compiling_contained_td, ) = common_util.parse_repository_dependency_tuple(repository_dependency) - if
suc.tool_shed_is_this_tool_shed(toolshed): + if suc.tool_shed_is_this_tool_shed(toolshed, trans=self.trans): required_repository = tool_shed.util.repository_util.get_repository_by_name_and_owner(self.app, name, owner) self.repository = required_repository repository_id = self.app.security.encode_id(required_repository.id) diff --git a/lib/tool_shed/managers/categories.py b/lib/tool_shed/managers/categories.py new file mode 100644 index 000000000000..9176fef9e978 --- /dev/null +++ b/lib/tool_shed/managers/categories.py @@ -0,0 +1,82 @@ +from typing import ( + Any, + Callable, + Dict, + List, +) + +import tool_shed.util.shed_util_common as suc +from galaxy import ( + exceptions, + web, +) +from galaxy.model.base import transaction +from tool_shed.context import ProvidesUserContext +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import Category +from tool_shed_client.schema import ( + Category as CategoryResponse, + CreateCategoryRequest, +) + + +class CategoryManager: + def __init__(self, app: ToolShedApp): + self.app = app + + def create(self, trans: ProvidesUserContext, category_request: CreateCategoryRequest) -> Category: + name = category_request.name + description = category_request.description or name + if name: + if suc.get_category_by_name(self.app, name): + raise exceptions.Conflict("A category with that name already exists.") + else: + # Create the category + category = self.app.model.Category(name=name, description=description) + trans.sa_session.add(category) + with transaction(trans.sa_session): + trans.sa_session.commit() + return category + else: + raise exceptions.RequestParameterMissingException('Missing required parameter "name".') + + def index_db(self, trans: ProvidesUserContext, deleted: bool) -> List[Category]: + category_db_objects: List[Category] = [] + if deleted and not trans.user_is_admin: + raise exceptions.AdminRequiredException("Only administrators can query deleted categories.") + for category in ( + trans.sa_session.query(Category).filter(Category.table.c.deleted == deleted).order_by(Category.table.c.name) + ): + category_db_objects.append(category) + return category_db_objects + + def index(self, trans: ProvidesUserContext, deleted: bool) -> List[Dict[str, Any]]: + category_dicts: List[Dict[str, Any]] = [] + for category in self.index_db(trans, deleted): + category_dict = self.to_dict(category) + category_dicts.append(category_dict) + return category_dicts + + def to_dict(self, category: Category) -> Dict[str, Any]: + category_dict = category.to_dict(view="collection", value_mapper=get_value_mapper(self.app)) + category_dict["repositories"] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get( + category.name, 0 + ) + category_dict["url"] = web.url_for( + controller="categories", action="show", id=self.app.security.encode_id(category.id) + ) + return category_dict + + def to_model(self, category: Category) -> CategoryResponse: + as_dict = self.to_dict(category) + return CategoryResponse( + id=as_dict["id"], + name=as_dict["name"], + description=as_dict["description"], + repositories=as_dict["repositories"], + ) + + +def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: + value_mapper = {"id": app.security.encode_id} + return value_mapper diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 4b26be1ad274..805f716164c5 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -1,78 +1,618 @@ """ Manager and 
Serializer for TS repositories. """ +import json import logging +from collections import namedtuple +from time import strftime +from typing import ( + Any, + Callable, + cast, + Dict, + List, + Optional, + Union, +) -from sqlalchemy.orm.exc import ( - MultipleResultsFound, - NoResultFound, +from pydantic import BaseModel +from sqlalchemy import ( + and_, + false, ) +from galaxy import web from galaxy.exceptions import ( - InconsistentDatabase, + ConfigDoesNotAllowException, + InsufficientPermissionsException, InternalServerError, + MalformedContents, + ObjectNotFound, RequestParameterInvalidException, ) +from galaxy.tool_shed.util import dependency_display +from galaxy.util import listify +from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, +) +from tool_shed.metadata import repository_metadata_manager +from tool_shed.repository_types import util as rt_util +from tool_shed.structured_app import ToolShedApp +from tool_shed.util import ( + encoding_util, + hg_util, +) +from tool_shed.util.metadata_util import ( + get_all_dependencies, + get_current_repository_metadata_for_changeset_revision, + get_metadata_revisions, + get_next_downloadable_changeset_revision, + get_repository_metadata_by_changeset_revision, +) +from tool_shed.util.readme_util import build_readme_files_dict +from tool_shed.util.repository_content_util import upload_tar +from tool_shed.util.repository_util import ( + create_repository as low_level_create_repository, + get_repo_info_dict, + get_repositories_by_category, + get_repository_by_name_and_owner, + get_repository_in_tool_shed, + validate_repository_name, +) +from tool_shed.util.shed_util_common import ( + count_repositories_in_category, + get_category, +) +from tool_shed.util.tool_util import generate_message_for_invalid_tools +from tool_shed.webapp.model import ( + Repository, + RepositoryMetadata, +) +from tool_shed.webapp.search.repo_search import RepoSearch +from tool_shed_client.schema import ( + CreateRepositoryRequest, + DetailedRepository, + ExtraRepoInfo, + LegacyInstallInfoTuple, + Repository as SchemaRepository, + RepositoryMetadataInstallInfoDict, + ResetMetadataOnRepositoryResponse, +) +from .categories import get_value_mapper as category_value_mapper log = logging.getLogger(__name__) -# ============================================================================= -class RepoManager: +def search(trans: ProvidesUserContext, q: str, page: int = 1, page_size: int = 10): """ - Interface/service object for interacting with TS repositories. + Perform the search over TS repositories. + Note that search works over the Whoosh index which you have + to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. + Also TS config option toolshed_search_on has to be True and + whoosh_index_dir has to be specified. """ + app = trans.app + conf = app.config + if not conf.toolshed_search_on: + raise ConfigDoesNotAllowException("Searching the TS through the API is turned off for this instance.") + if not conf.whoosh_index_dir: + raise ConfigDoesNotAllowException( + "There is no directory for the search index specified. Please contact the administrator." + ) - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + search_term = q.strip() + if len(search_term) < 1: + raise RequestParameterInvalidException("The search term has to be at least one character long.") - def get(self, trans, decoded_repo_id): - """ - Get the repo from the DB. 
+ repo_search = RepoSearch() - :param decoded_repo_id: decoded repo id - :type decoded_repo_id: int + Boosts = namedtuple( + "Boosts", + [ + "repo_name_boost", + "repo_description_boost", + "repo_long_description_boost", + "repo_homepage_url_boost", + "repo_remote_repository_url_boost", + "categories_boost", + "repo_owner_username_boost", + ], + ) + boosts = Boosts( + float(conf.get("repo_name_boost", 0.9)), + float(conf.get("repo_description_boost", 0.6)), + float(conf.get("repo_long_description_boost", 0.5)), + float(conf.get("repo_homepage_url_boost", 0.3)), + float(conf.get("repo_remote_repository_url_boost", 0.2)), + float(conf.get("categories_boost", 0.5)), + float(conf.get("repo_owner_username_boost", 0.3)), + ) - :returns: the requested repo - :rtype: tool_shed.webapp.model.Repository - """ - try: - repo = ( - trans.sa_session.query(trans.app.model.Repository) - .filter(trans.app.model.Repository.table.c.id == decoded_repo_id) - .one() + results = repo_search.search(trans, search_term, page, page_size, boosts) + results["hostname"] = web.url_for("/", qualified=True) + return results + + +class UpdatesRequest(BaseModel): + name: Optional[str] = None + owner: Optional[str] = None + changeset_revision: str + hexlify: bool = True + + +def check_updates(app: ToolShedApp, request: UpdatesRequest) -> Union[str, Dict[str, Any]]: + name = request.name + owner = request.owner + changeset_revision = request.changeset_revision + hexlify_this = request.hexlify + repository = get_repository_by_name_and_owner( + app, name, owner, eagerload_columns=[Repository.downloadable_revisions] + ) + if repository and repository.downloadable_revisions: + repository_metadata = get_repository_metadata_by_changeset_revision( + app, app.security.encode_id(repository.id), changeset_revision + ) + tool_shed_status_dict = {} + # Handle repository deprecation. + tool_shed_status_dict["repository_deprecated"] = str(repository.deprecated) + tip_revision = repository.downloadable_revisions[0] + # Handle latest installable revision. + if changeset_revision == tip_revision: + tool_shed_status_dict["latest_installable_revision"] = "True" + else: + next_installable_revision = get_next_downloadable_changeset_revision(app, repository, changeset_revision) + if repository_metadata is None: + if next_installable_revision and next_installable_revision != changeset_revision: + tool_shed_status_dict["latest_installable_revision"] = "True" + else: + tool_shed_status_dict["latest_installable_revision"] = "False" + else: + if next_installable_revision and next_installable_revision != changeset_revision: + tool_shed_status_dict["latest_installable_revision"] = "False" + else: + tool_shed_status_dict["latest_installable_revision"] = "True" + # Handle revision updates. + if changeset_revision == tip_revision: + tool_shed_status_dict["revision_update"] = "False" + else: + if repository_metadata is None: + tool_shed_status_dict["revision_update"] = "True" + else: + tool_shed_status_dict["revision_update"] = "False" + # Handle revision upgrades. 
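A worked example of the upgrade scan that follows, with made-up revision hashes:

    # The repository exposes three metadata revisions, oldest first, and the
    # client is pinned to the middle one:
    metadata_revisions = ["aaa111", "bbb222", "ccc333"]
    changeset_revision = "bbb222"  # index 1 of 3
    # num_metadata_revisions - index == 2 > 1, so a newer metadata revision
    # exists and revision_upgrade is reported as "True"; pinned to the tip
    # ("ccc333", index 2), the same scan reports "False".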
+ metadata_revisions = [revision[1] for revision in get_metadata_revisions(app, repository)] + num_metadata_revisions = len(metadata_revisions) + for index, metadata_revision in enumerate(metadata_revisions): + if index == num_metadata_revisions: + tool_shed_status_dict["revision_upgrade"] = "False" + break + if metadata_revision == changeset_revision: + if num_metadata_revisions - index > 1: + tool_shed_status_dict["revision_upgrade"] = "True" + else: + tool_shed_status_dict["revision_upgrade"] = "False" + break + return ( + encoding_util.tool_shed_encode(tool_shed_status_dict) if hexlify_this else json.dumps(tool_shed_status_dict) + ) + return encoding_util.tool_shed_encode({}) if hexlify_this else json.dumps({}) + + +def guid_to_repository(app: ToolShedApp, tool_id: str) -> "Repository": + # tool_id = remove_protocol_and_user_from_clone_url(tool_id) + shed, _, owner, name, rest = tool_id.split("/", 4) + clause_list = [ + and_( + app.model.Repository.table.c.deprecated == false(), + app.model.Repository.table.c.deleted == false(), + app.model.Repository.table.c.name == name, + app.model.User.table.c.username == owner, + app.model.Repository.table.c.user_id == app.model.User.table.c.id, + ) + ] + repository = app.model.context.query(app.model.Repository).filter(*clause_list).first() + return repository + + +def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]: + repository_found = [] + all_metadata = dict() + for tool_id in tool_ids: + repository = guid_to_repository(app, tool_id) + owner = repository.user.username + name = repository.name + clause_list = [ + and_( + app.model.Repository.table.c.deprecated == false(), + app.model.Repository.table.c.deleted == false(), + app.model.Repository.table.c.name == name, + app.model.User.table.c.username == owner, + app.model.Repository.table.c.user_id == app.model.User.table.c.id, + ) + ] + repository = app.model.context.current.query(app.model.Repository).filter(*clause_list).first() + if not repository: + log.warning(f"Repository {owner}/{name} does not exist, skipping") + continue + for changeset, changehash in repository.installable_revisions(app): + metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash) + tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools") + if not tools: + log.warning(f"Repository {owner}/{name}/{changehash} does not contain valid tools, skipping") + continue + for tool_metadata in tools: + if tool_metadata["guid"] in tool_ids: + repository_found.append("%d:%s" % (int(changeset), changehash)) + metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash) + if metadata is None: + continue + metadata_dict = metadata.to_dict( + value_mapper={"id": app.security.encode_id, "repository_id": app.security.encode_id} + ) + metadata_dict["repository"] = repository.to_dict(value_mapper={"id": app.security.encode_id}) + if metadata.has_repository_dependencies: + metadata_dict["repository_dependencies"] = get_all_dependencies( + app, metadata, processed_dependency_links=[] + ) + else: + metadata_dict["repository_dependencies"] = [] + if metadata.includes_tool_dependencies: + metadata_dict["tool_dependencies"] = repository.get_tool_dependencies(app, changehash) + else: + metadata_dict["tool_dependencies"] = {} + if metadata.includes_tools: + metadata_dict["tools"] = metadata.metadata["tools"] + all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict + if repository_found: + 
all_metadata["current_changeset"] = repository_found[0] + # all_metadata[ 'found_changesets' ] = repository_found + return all_metadata + else: + return {} + + +def index_repositories(app: ToolShedApp, name: Optional[str], owner: Optional[str], deleted: bool): + clause_list = [ + and_( + app.model.Repository.table.c.deprecated == false(), + app.model.Repository.table.c.deleted == deleted, + ) + ] + if owner is not None: + clause_list.append( + and_( + app.model.User.table.c.username == owner, + app.model.Repository.table.c.user_id == app.model.User.table.c.id, + ) + ) + if name is not None: + clause_list.append(app.model.Repository.table.c.name == name) + repositories = [] + for repository in ( + app.model.context.query(app.model.Repository).filter(*clause_list).order_by(app.model.Repository.table.c.name) + ): + repositories.append(repository) + return repositories + + +def can_manage_repo(trans: ProvidesUserContext, repository: Repository) -> bool: + security_agent = trans.app.security_agent + return trans.user_is_admin or security_agent.user_can_administer_repository(trans.user, repository) + + +def can_update_repo(trans: ProvidesUserContext, repository: Repository) -> bool: + app = trans.app + security_agent = app.security_agent + return can_manage_repo(trans, repository) or security_agent.can_push(app, trans.user, repository) + + +def get_repository_metadata_for_management( + trans: ProvidesUserContext, encoded_repository_id: str, changeset_revision: str +) -> RepositoryMetadata: + repository = get_repository_in_tool_shed(trans.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("Cannot manage target repository") + revisions = [r for r in repository.metadata_revisions if r.changeset_revision == changeset_revision] + if len(revisions) != 1: + raise ObjectNotFound() + repository_metadata = revisions[0] + return repository_metadata + + +def get_install_info(trans: ProvidesRepositoriesContext, name, owner, changeset_revision) -> LegacyInstallInfoTuple: + app = trans.app + value_mapper = get_value_mapper(app) + # Example URL: + # http:///api/repositories/get_repository_revision_install_info?name=&owner=&changeset_revision= + if name and owner and changeset_revision: + # Get the repository information. + repository = get_repository_by_name_and_owner( + app, name, owner, eagerload_columns=[Repository.downloadable_revisions] + ) + if repository is None: + log.debug(f"Cannot locate repository {name} owned by {owner}") + return {}, {}, {} + encoded_repository_id = app.security.encode_id(repository.id) + repository_dict: dict = repository.to_dict(view="element", value_mapper=value_mapper) + repository_dict["url"] = web.url_for(controller="repositories", action="show", id=encoded_repository_id) + # Get the repository_metadata information. + repository_metadata = get_repository_metadata_by_changeset_revision( + app, encoded_repository_id, changeset_revision + ) + if repository_metadata is None: + # The changeset_revision column in the repository_metadata table has been updated with a new + # value value, so find the changeset_revision to which we need to update. 
+ new_changeset_revision = get_next_downloadable_changeset_revision(app, repository, changeset_revision) + repository_metadata = get_repository_metadata_by_changeset_revision( + app, encoded_repository_id, new_changeset_revision + ) + changeset_revision = new_changeset_revision + if repository_metadata is not None: + encoded_repository_metadata_id = app.security.encode_id(repository_metadata.id) + repository_metadata_dict: RepositoryMetadataInstallInfoDict = cast( + RepositoryMetadataInstallInfoDict, + repository_metadata.to_dict(view="collection", value_mapper=value_mapper), + ) + repository_metadata_dict["url"] = web.url_for( + controller="repository_revisions", action="show", id=encoded_repository_metadata_id ) - except MultipleResultsFound: - raise InconsistentDatabase("Multiple repositories found with the same id.") - except NoResultFound: - raise RequestParameterInvalidException("No repository found with the id provided.") - except Exception: - raise InternalServerError("Error loading from the database.") - return repo - - def list_by_owner(self, trans, user_id): - """ - Return a list of of repositories owned by a given TS user from the DB. - - :returns: query that will emit repositories owned by given user - :rtype: sqlalchemy query - """ - query = trans.sa_session.query(trans.app.model.Repository).filter( - trans.app.model.Repository.table.c.user_id == user_id + if "tools" in repository_metadata.metadata: + repository_metadata_dict["valid_tools"] = repository_metadata.metadata["tools"] + # Get the repo_info_dict for installing the repository. + repo_info_dict: ExtraRepoInfo + ( + repo_info_dict, + includes_tools, + includes_tool_dependencies, + includes_tools_for_display_in_tool_panel, + has_repository_dependencies, + has_repository_dependencies_only_if_compiling_contained_td, + ) = get_repo_info_dict(trans, encoded_repository_id, changeset_revision) + return repository_dict, repository_metadata_dict, repo_info_dict + else: + log.debug( + "Unable to locate repository_metadata record for repository id %s and changeset_revision %s", + repository.id, + changeset_revision, + ) + return repository_dict, {}, {} + else: + debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " + debug_msg += f"Invalid name {name} or owner {owner} or changeset_revision {changeset_revision} received." + log.debug(debug_msg) + return {}, {}, {} + + +def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: + value_mapper = { + "id": app.security.encode_id, + "repository_id": app.security.encode_id, + "user_id": app.security.encode_id, + } + return value_mapper + + +def get_ordered_installable_revisions( + app: ToolShedApp, name: Optional[str], owner: Optional[str], tsr_id: Optional[str] +) -> List[str]: + eagerload_columns = [Repository.downloadable_revisions] + if None not in [name, owner]: + # Get the repository information. + repository = get_repository_by_name_and_owner(app, name, owner, eagerload_columns=eagerload_columns) + if repository is None: + raise ObjectNotFound(f"No repository named {name} found with owner {owner}") + elif tsr_id is not None: + repository = get_repository_in_tool_shed(app, tsr_id, eagerload_columns=eagerload_columns) + else: + error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " + error_message += "invalid parameters received." 
+ log.debug(error_message) + return [] + return [revision[1] for revision in repository.installable_revisions(app, sort_revisions=True)] + + +def get_repository_metadata_dict(app: ToolShedApp, id: str, recursive: bool, downloadable_only: bool) -> Dict[str, Any]: + all_metadata = {} + repository = get_repository_in_tool_shed(app, id, eagerload_columns=[Repository.downloadable_revisions]) + for changeset, changehash in get_metadata_revisions( + app, repository, sort_revisions=True, downloadable=downloadable_only + ): + metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash) + if metadata is None: + continue + metadata_dict = metadata.to_dict( + value_mapper={"id": app.security.encode_id, "repository_id": app.security.encode_id} ) - return query - - def create(self, trans, name, description=""): - """ - Create a new group. - """ - - def update(self, trans, group, name=None, description=None): - """ - Update the given group - """ - - def delete(self, trans, group, undelete=False): - """ - Mark given group deleted/undeleted based on the flag. - """ + metadata_dict["repository"] = repository.to_dict(value_mapper={"id": app.security.encode_id}) + if metadata.has_repository_dependencies and recursive: + metadata_dict["repository_dependencies"] = get_all_dependencies( + app, metadata, processed_dependency_links=[] + ) + else: + metadata_dict["repository_dependencies"] = [] + if metadata.includes_tools: + metadata_dict["tools"] = metadata.metadata["tools"] + metadata_dict["invalid_tools"] = metadata.metadata.get("invalid_tools", []) + all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict + return all_metadata + + +def readmes(app: ToolShedApp, repository: Repository, changeset_revision: str) -> dict: + encoded_repository_id = app.security.encode_id(repository.id) + repository_metadata = get_repository_metadata_by_changeset_revision(app, encoded_repository_id, changeset_revision) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + return build_readme_files_dict(app, repository, changeset_revision, repository_metadata.metadata) + return {} + + +def reset_metadata_on_repository(trans: ProvidesUserContext, repository_id) -> ResetMetadataOnRepositoryResponse: + app: ToolShedApp = trans.app + + def handle_repository(trans, start_time, repository): + results = dict(start_time=start_time, repository_status=[]) + try: + rmm = repository_metadata_manager.RepositoryMetadataManager( + trans, + repository=repository, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False, + persist=False, + ) + rmm.reset_all_metadata_on_repository_in_tool_shed() + rmm_invalid_file_tups = rmm.get_invalid_file_tups() + if rmm_invalid_file_tups: + message = generate_message_for_invalid_tools( + app, rmm_invalid_file_tups, repository, None, as_html=False + ) + results["status"] = "warning" + else: + message = ( + f"Successfully reset metadata on repository {repository.name} owned by {repository.user.username}" + ) + results["status"] = "ok" + except Exception as e: + message = ( + f"Error resetting metadata on repository {repository.name} owned by {repository.user.username}: {e}" + ) + results["status"] = "error" + status = f"{repository.name} : {message}" + results["repository_status"].append(status) + return results + + if repository_id is not None: + repository = get_repository_in_tool_shed(app, repository_id) + start_time = strftime("%Y-%m-%d %H:%M:%S") + log.debug(f"{start_time}...resetting metadata on repository 
{repository.name}") + results = handle_repository(trans, start_time, repository) + stop_time = strftime("%Y-%m-%d %H:%M:%S") + results["stop_time"] = stop_time + return ResetMetadataOnRepositoryResponse(**results) + + +def create_repository(trans: ProvidesUserContext, request: CreateRepositoryRequest) -> Repository: + app: ToolShedApp = trans.app + user = trans.user + assert user + category_ids = listify(request.category_ids) + name = request.name + invalid_message = validate_repository_name(app, name, user) + if invalid_message: + raise RequestParameterInvalidException(invalid_message) + + repo, _ = low_level_create_repository( + app=app, + name=name, + type=request.type_, + description=request.synopsis, + long_description=request.description, + user_id=user.id, + category_ids=category_ids, + remote_repository_url=request.remote_repository_url, + homepage_url=request.homepage_url, + ) + return repo + + +def to_element_dict(app, repository: Repository, include_categories: bool = False) -> Dict[str, Any]: + value_mapper = get_value_mapper(app) + repository_dict = repository.to_dict(view="element", value_mapper=value_mapper) + if include_categories: + repository_dict["category_ids"] = [app.security.encode_id(x.category.id) for x in repository.categories] + return repository_dict + + +def repositories_by_category( + app: ToolShedApp, + category_id: str, + page: Optional[int] = None, + sort_key: str = "name", + sort_order: str = "asc", + installable: bool = True, +): + category = get_category(app, category_id) + category_dict: Dict[str, Any] + if category is None: + category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") + return category_dict + category_dict = category.to_dict(view="element", value_mapper=category_value_mapper(app)) + category_dict["repository_count"] = count_repositories_in_category(app, category_id) + repositories = get_repositories_by_category( + app, category.id, installable=installable, sort_order=sort_order, sort_key=sort_key, page=page + ) + category_dict["repositories"] = repositories + return category_dict + + +def to_model(app, repository: Repository) -> SchemaRepository: + return SchemaRepository(**to_element_dict(app, repository)) + + +def to_detailed_model(app, repository: Repository) -> DetailedRepository: + return DetailedRepository(**to_element_dict(app, repository)) + + +def upload_tar_and_set_metadata( + trans: ProvidesRepositoriesContext, + host: str, + repository: Repository, + uploaded_file, + commit_message: str, + dry_run: bool = False, +): + app = trans.app + user = trans.user + assert user + repo_dir = repository.repo_path(app) + tip = repository.tip() + tar_response = upload_tar( + trans, + user.username, + repository, + uploaded_file, + commit_message, + ) + ( + ok, + message, + _, + content_alert_str, + _, + _, + ) = tar_response + if ok: + # Update the repository files for browsing. + hg_util.update_repository(repo_dir) + # Get the new repository tip. 
+ if tip == repository.tip(): + raise MalformedContents("No changes to repository.") + else: + rmm = repository_metadata_manager.RepositoryMetadataManager(trans, repository=repository) + _, error_message = rmm.set_repository_metadata_due_to_new_tip(host, content_alert_str=content_alert_str) + if error_message: + raise InternalServerError(error_message) + dd = dependency_display.DependencyDisplayer(app) + if str(repository.type) not in [ + rt_util.REPOSITORY_SUITE_DEFINITION, + rt_util.TOOL_DEPENDENCY_DEFINITION, + ]: + # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies + # weren't loaded due to a requirement tag mismatch or some other problem. Tool dependency + # definitions can define orphan tool dependencies (no relationship to any tools contained in the + # repository), so warning messages are important because orphans are always valid. The repository + # owner must be warned in case they did not intend to define an orphan dependency, but simply + # provided incorrect information (tool shed, name, owner, changeset_revision) for the definition. + if repository.metadata_revisions: + # A repository's metadata revisions are ordered descending by update_time, so the zeroth revision + # will be the tip just after an upload. + metadata_dict = repository.metadata_revisions[0].metadata + else: + metadata_dict = {} + orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict) + if orphan_message: + message += orphan_message + else: + raise InternalServerError(message) + return message diff --git a/lib/tool_shed/managers/tools.py b/lib/tool_shed/managers/tools.py new file mode 100644 index 000000000000..bd648d4903a9 --- /dev/null +++ b/lib/tool_shed/managers/tools.py @@ -0,0 +1,44 @@ +from collections import namedtuple + +from galaxy import exceptions +from tool_shed.context import SessionRequestContext +from tool_shed.webapp.search.tool_search import ToolSearch + + +def search(trans: SessionRequestContext, q: str, page: int = 1, page_size: int = 10) -> dict: + """ + Perform the search over TS tools index. + Note that search works over the Whoosh index which you have + to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. + Also TS config option toolshed_search_on has to be True and + whoosh_index_dir has to be specified. + """ + app = trans.app + conf = app.config + if not conf.toolshed_search_on: + raise exceptions.ConfigDoesNotAllowException( + "Searching the TS through the API is turned off for this instance." + ) + if not conf.whoosh_index_dir: + raise exceptions.ConfigDoesNotAllowException( + "There is no directory for the search index specified. Please contact the administrator."
+ ) + search_term = q.strip() + if len(search_term) < 1: + raise exceptions.RequestParameterInvalidException("The search term has to be at least one character long.") + + tool_search = ToolSearch() + + Boosts = namedtuple( + "Boosts", ["tool_name_boost", "tool_description_boost", "tool_help_boost", "tool_repo_owner_username_boost"] + ) + boosts = Boosts( + float(conf.get("tool_name_boost", 1.2)), + float(conf.get("tool_description_boost", 0.6)), + float(conf.get("tool_help_boost", 0.4)), + float(conf.get("tool_repo_owner_username_boost", 0.3)), + ) + + results = tool_search.search(trans.app, search_term, page, page_size, boosts) + results["hostname"] = trans.repositories_hostname + return results diff --git a/lib/tool_shed/managers/trs.py b/lib/tool_shed/managers/trs.py new file mode 100644 index 000000000000..c36488bdb361 --- /dev/null +++ b/lib/tool_shed/managers/trs.py @@ -0,0 +1,153 @@ +from typing import ( + Any, + cast, + Dict, + List, + Optional, + Tuple, +) + +from starlette.datastructures import URL + +from galaxy.exceptions import ObjectNotFound +from galaxy.util.tool_shed.common_util import remove_protocol_and_user_from_clone_url +from galaxy.version import VERSION +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.structured_app import ToolShedApp +from tool_shed.util.metadata_util import get_current_repository_metadata_for_changeset_revision +from tool_shed.webapp.model import ( + Repository, + RepositoryMetadata, +) +from tool_shed_client.schema.trs import ( + DescriptorType, + Tool, + ToolClass, + ToolVersion, +) +from tool_shed_client.schema.trs_service_info import ( + Organization, + Service, + ServiceType, +) +from tool_shed_client.trs_util import decode_identifier +from .repositories import guid_to_repository + +TRS_SERVICE_NAME = "Tool Shed TRS API" +TRS_SERVICE_DESCRIPTION = "Serves tool shed repository tools according to the GA4GH TRS specification" + + +def service_info(app: ToolShedApp, request_url: URL): + components = request_url.components + hostname = components.hostname + assert hostname + default_organization_id = ".".join(reversed(hostname.split("."))) + config = app.config + organization_id = cast(str, config.ga4gh_service_id or default_organization_id) + organization_name = cast(str, config.ga4gh_service_organization_name or organization_id) + organization_url = cast(str, config.ga4gh_service_organization_url or f"{components.scheme}://{components.netloc}") + + organization = Organization( + url=organization_url, + name=organization_name, + ) + service_type = ServiceType( + group="org.ga4gh", + artifact="trs", + version="2.1.0", + ) + environment = config.ga4gh_service_environment + extra_kwds = {} + if environment: + extra_kwds["environment"] = environment + return Service( + id=organization_id + ".trs", + name=TRS_SERVICE_NAME, + description=TRS_SERVICE_DESCRIPTION, + organization=organization, + type=service_type, + version=VERSION, + **extra_kwds, + ) + + +def tool_classes() -> List[ToolClass]: + return [ToolClass(id="galaxy_tool", name="Galaxy Tool", description="Galaxy XML Tools")] + + +def trs_tool_id_to_repository(trans: ProvidesRepositoriesContext, trs_tool_id: str) -> Repository: + guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + guid = remove_protocol_and_user_from_clone_url(guid) + return guid_to_repository(trans.app, guid) + + +def get_repository_metadata_by_tool_version( + app: ToolShedApp, repository: Repository, tool_id: str +) -> Dict[str, RepositoryMetadata]: + versions = {} + for _, changeset in 
repository.installable_revisions(app): + metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changeset) + tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools") + if not tools: + continue + for tool_metadata in tools: + if tool_metadata["id"] != tool_id: + continue + versions[tool_metadata["version"]] = metadata + return versions + + +def get_tools_for(repository_metadata: RepositoryMetadata) -> List[Dict[str, Any]]: + tools: Optional[List[Dict[str, Any]]] = repository_metadata.metadata.get("tools") + assert tools + return tools + + +def trs_tool_id_to_repository_metadata( + trans: ProvidesRepositoriesContext, trs_tool_id: str +) -> Optional[Tuple[Repository, Dict[str, RepositoryMetadata]]]: + tool_guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + tool_guid = remove_protocol_and_user_from_clone_url(tool_guid) + _, tool_id = tool_guid.rsplit("/", 1) + repository = guid_to_repository(trans.app, tool_guid) + app = trans.app + versions: Dict[str, RepositoryMetadata] = get_repository_metadata_by_tool_version(app, repository, tool_id) + if not versions: + return None + + return repository, versions + + +def get_tool(trans: ProvidesRepositoriesContext, trs_tool_id: str) -> Tool: + guid = decode_identifier(trans.repositories_hostname, trs_tool_id) + guid = remove_protocol_and_user_from_clone_url(guid) + repo_metadata = trs_tool_id_to_repository_metadata(trans, trs_tool_id) + if not repo_metadata: + raise ObjectNotFound() + repository, metadata_by_version = repo_metadata + + repo_owner = repository.user.username + aliases: List[str] = [guid] + hostname = remove_protocol_and_user_from_clone_url(trans.repositories_hostname) + url = f"https://{hostname}/repos/{repo_owner}/{repository.name}" + + versions: List[ToolVersion] = [] + for tool_version_str, _ in metadata_by_version.items(): + version_url = url # TODO: + tool_version = ToolVersion( + author=[repo_owner], + containerfile=False, + descriptor_type=[DescriptorType.GALAXY], + id=tool_version_str, + url=version_url, + verified=False, + ) + versions.append(tool_version) + return Tool( + aliases=aliases, + id=trs_tool_id, + url=url, + toolclass=tool_classes()[0], + organization=repo_owner, + versions=versions, + ) diff --git a/lib/tool_shed/managers/users.py b/lib/tool_shed/managers/users.py new file mode 100644 index 000000000000..055aaf1945d1 --- /dev/null +++ b/lib/tool_shed/managers/users.py @@ -0,0 +1,77 @@ +from typing import List + +from galaxy.exceptions import RequestParameterInvalidException +from galaxy.model.base import transaction +from galaxy.security.validate_user_input import ( + validate_email, + validate_password, + validate_publicname, +) +from tool_shed.context import ProvidesUserContext +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import User +from tool_shed_client.schema import ( + CreateUserRequest, + User as ApiUser, +) + + +def index(app: ToolShedApp, deleted: bool) -> List[ApiUser]: + users: List[ApiUser] = [] + for user in ( + app.model.context.query(app.model.User) + .filter(app.model.User.table.c.deleted == deleted) + .order_by(app.model.User.table.c.username) + ): + users.append(get_api_user(app, user)) + return users + + +def create_user(app: ToolShedApp, email: str, username: str, password: str) -> User: + if username == "repos": + raise RequestParameterInvalidException("Cannot create a tool shed user with the username repos") + sa_session = app.model.context + user = User(email=email) + 
user.set_password_cleartext(password) + user.username = username + # API was doing this but mypy doesn't think user has an active boolean attribute. + # if app.config.user_activation_on: + # user.active = False + # else: + # user.active = True # Activation is off, every new user is active by default. + sa_session.add(user) + with transaction(sa_session): + sa_session.commit() + app.security_agent.create_private_user_role(user) + return user + + +def api_create_user(trans: ProvidesUserContext, request: CreateUserRequest) -> ApiUser: + app = trans.app + message = _validate( + trans, email=request.email, password=request.password, confirm=request.password, username=request.username + ) + if message: + raise RequestParameterInvalidException(message) + user = create_user(app, request.email, request.username, request.password) + return get_api_user(app, user) + + +def get_api_user(app: ToolShedApp, user: User) -> ApiUser: + return ApiUser( + id=app.security.encode_id(user.id), + username=user.username, + ) + + +def _validate(trans: ProvidesUserContext, email: str, password: str, confirm: str, username: str) -> str: + if username in ["repos"]: + return f"The term '{username}' is a reserved word in the Tool Shed, so it cannot be used as a public user name." + message = "\n".join( + ( + validate_email(trans, email), + validate_password(trans, password, confirm), + validate_publicname(trans, username), + ) + ).rstrip() + return message diff --git a/lib/tool_shed/metadata/metadata_generator.py b/lib/tool_shed/metadata/metadata_generator.py deleted file mode 100644 index 06b7720d4a43..000000000000 --- a/lib/tool_shed/metadata/metadata_generator.py +++ /dev/null @@ -1,3 +0,0 @@ -from galaxy.tool_shed.metadata.metadata_generator import MetadataGenerator - -__all__ = ("MetadataGenerator",) diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index c6e850acd175..9ec1b29271b2 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -1,16 +1,28 @@ import logging import tempfile +from typing import ( + Any, + Dict, + List, + Optional, +) from sqlalchemy import ( + and_, false, or_, ) from galaxy import util from galaxy.model.base import transaction +from galaxy.tool_shed.metadata.metadata_generator import ( + BaseMetadataGenerator, + HandleResultT, + InvalidFileT, +) from galaxy.util import inflector from galaxy.web.form_builder import SelectField -from tool_shed.metadata import metadata_generator +from tool_shed.context import ProvidesRepositoriesContext from tool_shed.repository_types import util as rt_util from tool_shed.repository_types.metadata import TipOnly from tool_shed.structured_app import ToolShedApp @@ -23,15 +35,200 @@ shed_util_common as suc, tool_util, ) +from tool_shed.util.metadata_util import repository_metadata_by_changeset_revision +from tool_shed.webapp.model import Repository log = logging.getLogger(__name__) -class RepositoryMetadataManager(metadata_generator.MetadataGenerator): +class ToolShedMetadataGenerator(BaseMetadataGenerator): + """A MetadataGenerator building on ToolShed's app and repository constructs.""" + + app: ToolShedApp + repository: Optional[Repository] + + # why is mypy making me re-annotate these things from the base class, it didn't + # when they were in the same file + invalid_file_tups: List[InvalidFileT] + repository_clone_url: Optional[str] + + def __init__( + self, + trans: ProvidesRepositoriesContext, + repository: 
Optional[Repository] = None, + changeset_revision: Optional[str] = None, + repository_clone_url: Optional[str] = None, + shed_config_dict: Optional[Dict[str, Any]] = None, + relative_install_dir=None, + repository_files_dir=None, + resetting_all_metadata_on_repository=False, + updating_installed_repository=False, + persist=False, + metadata_dict=None, + user=None, + ): + self.trans = trans + self.app = trans.app + self.user = user + self.repository = repository + if changeset_revision is None and self.repository is not None: + self.changeset_revision = self.repository.tip() + else: + self.changeset_revision = changeset_revision + if repository_clone_url is None and self.repository is not None: + self.repository_clone_url = common_util.generate_clone_url_for(self.trans, self.repository) + else: + self.repository_clone_url = repository_clone_url + if shed_config_dict is None: + self.shed_config_dict = {} + else: + self.shed_config_dict = shed_config_dict + if relative_install_dir is None and self.repository is not None: + relative_install_dir = self.repository.repo_path(self.app) + if repository_files_dir is None and self.repository is not None: + repository_files_dir = self.repository.repo_path(self.app) + if metadata_dict is None: + self.metadata_dict = {} + else: + self.metadata_dict = metadata_dict + self.relative_install_dir = relative_install_dir + self.repository_files_dir = repository_files_dir + self.resetting_all_metadata_on_repository = resetting_all_metadata_on_repository + self.updating_installed_repository = updating_installed_repository + self.persist = persist + self.invalid_file_tups = [] + self.sa_session = trans.app.model.session + + def initial_metadata_dict(self) -> Dict[str, Any]: + return {} + + def set_repository( + self, repository, relative_install_dir: Optional[str] = None, changeset_revision: Optional[str] = None + ): + self.repository = repository + if relative_install_dir is None and self.repository is not None: + relative_install_dir = repository.repo_path(self.app) + if changeset_revision is None and self.repository is not None: + self.set_changeset_revision(self.repository.tip()) + else: + self.set_changeset_revision(changeset_revision) + self.shed_config_dict = {} + self._reset_attributes_after_repository_update(relative_install_dir) + + def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: + """ + Process the received repository_elem which is a tag either from a + repository_dependencies.xml file or a tool_dependencies.xml file. If the former, + we're generating repository dependencies metadata for a repository in the Tool Shed. + If the latter, we're generating package dependency metadata within Galaxy or the + Tool Shed. + """ + is_valid = True + error_message = "" + toolshed = repository_elem.get("toolshed", None) + name = repository_elem.get("name", None) + owner = repository_elem.get("owner", None) + changeset_revision = repository_elem.get("changeset_revision", None) + prior_installation_required = str(repository_elem.get("prior_installation_required", False)) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + if not toolshed: + # Default to the current tool shed. 
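    # Note: with the url_for global removed, the default shed host now comes
    # from the request-bound transaction, e.g. (illustrative value only)
    # self.trans.repositories_hostname == "https://toolshed.example.org",
    # per SessionRequestContextImpl.repositories_hostname in tool_shed/context.py.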
+ toolshed = self.trans.repositories_hostname + # toolshed = str(url_for("/", qualified=True)).rstrip("/") + repository_dependency_tup[0] = toolshed + else: + toolshed = common_util.remove_protocol_from_tool_shed_url(toolshed) + + if suc.tool_shed_is_this_tool_shed(toolshed, trans=self.trans): + try: + user = ( + self.sa_session.query(self.app.model.User) + .filter(self.app.model.User.table.c.username == owner) + .one() + ) + except Exception: + error_message = ( + f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " + ) + error_message += f"changeset revision {changeset_revision} because the owner is invalid." + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + try: + repository = ( + self.sa_session.query(self.app.model.Repository) + .filter( + and_( + self.app.model.Repository.table.c.name == name, + self.app.model.Repository.table.c.user_id == user.id, + ) + ) + .one() + ) + except Exception: + error_message = f"Ignoring repository dependency definition for tool shed {toolshed}, " + error_message += f"name {name}, owner {owner}, " + error_message += f"changeset revision {changeset_revision} because the name is invalid. " + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + repo = repository.hg_repo + + # The received changeset_revision may be None since defining it in the dependency definition is optional. + # If this is the case, the default will be to set its value to the repository dependency tip revision. + # This probably occurs only when handling circular dependency definitions. + tip_ctx = repo[repo.changelog.tip()] + # Make sure the repo.changelog includes at least 1 revision. + if changeset_revision is None and tip_ctx.rev() >= 0: + changeset_revision = str(tip_ctx) + repository_dependency_tup = [ + toolshed, + name, + owner, + changeset_revision, + prior_installation_required, + str(only_if_compiling_contained_td), + ] + return repository_dependency_tup, is_valid, error_message + else: + # Find the specified changeset revision in the repository's changelog to see if it's valid. + found = False + for changeset in repo.changelog: + changeset_hash = str(repo[changeset]) + if changeset_hash == changeset_revision: + found = True + break + if not found: + error_message = f"Ignoring repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, " + error_message += ( + f"changeset revision {changeset_revision} because the changeset revision is invalid. " + ) + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + else: + # Repository dependencies are currently supported within a single tool shed. + error_message = "Repository dependencies are currently supported only within the same tool shed. Ignoring " + error_message += f"repository dependency definition for tool shed {toolshed}, name {name}, owner {owner}, changeset revision {changeset_revision}. 
" + log.debug(error_message) + is_valid = False + return repository_dependency_tup, is_valid, error_message + return repository_dependency_tup, is_valid, error_message + + +class RepositoryMetadataManager(ToolShedMetadataGenerator): def __init__( self, - app: ToolShedApp, - user, + trans: ProvidesRepositoriesContext, repository=None, changeset_revision=None, repository_clone_url=None, @@ -44,7 +241,7 @@ def __init__( metadata_dict=None, ): super().__init__( - app, + trans, repository, changeset_revision, repository_clone_url, @@ -55,8 +252,11 @@ def __init__( updating_installed_repository, persist, metadata_dict=metadata_dict, - user=user, + user=trans.user, ) + app = trans.app + user = trans.user + self.sa_session = app.model.context self.app = app self.user = user # Repository metadata comparisons for changeset revisions. @@ -66,13 +266,13 @@ def __init__( self.SUBSET = "subset" self.SUBSET_VALUES = [self.EQUAL, self.SUBSET] - def add_tool_versions(self, id, repository_metadata, changeset_revisions): + def _add_tool_versions(self, id: int, repository_metadata, changeset_revisions): # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata. metadata = repository_metadata.metadata tool_versions_dict = {} for tool_dict in metadata.get("tools", []): # We have at least 2 changeset revisions to compare tool guids and tool ids. - parent_id = self.get_parent_id( + parent_id = self._get_parent_id( id, tool_dict["id"], tool_dict["version"], tool_dict["guid"], changeset_revisions ) tool_versions_dict[tool_dict["guid"]] = parent_id @@ -96,13 +296,13 @@ def build_repository_ids_select_field( repositories_select_field.add_option(option_label, option_value) return repositories_select_field - def clean_repository_metadata(self, changeset_revisions): + def _clean_repository_metadata(self, changeset_revisions): + assert self.repository # Delete all repository_metadata records associated with the repository that have # a changeset_revision that is not in changeset_revisions. We sometimes see multiple # records with the same changeset revision value - no idea how this happens. We'll # assume we can delete the older records, so we'll order by update_time descending and # delete records that have the same changeset_revision we come across later. - changeset_revisions_checked = [] for repository_metadata in ( self.sa_session.query(self.app.model.RepositoryMetadata) .filter(self.app.model.RepositoryMetadata.table.c.repository_id == self.repository.id) @@ -112,7 +312,7 @@ def clean_repository_metadata(self, changeset_revisions): ) ): changeset_revision = repository_metadata.changeset_revision - if changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions: + if changeset_revision not in changeset_revisions: self.sa_session.delete(repository_metadata) session = self.sa_session() with transaction(session): @@ -127,63 +327,39 @@ def compare_changeset_revisions(self, ancestor_changeset_revision, ancestor_meta # This changeset_revision is an ancestor of self.changeset_revision which is associated # with self.metadata_dict. A new repository_metadata record will be created only # when this method returns the constant value self.NOT_EQUAL_AND_NOT_SUBSET. 
- ancestor_datatypes = ancestor_metadata_dict.get("datatypes", []) ancestor_tools = ancestor_metadata_dict.get("tools", []) ancestor_guids = [tool_dict["guid"] for tool_dict in ancestor_tools] ancestor_guids.sort() - ancestor_readme_files = ancestor_metadata_dict.get("readme_files", []) ancestor_repository_dependencies_dict = ancestor_metadata_dict.get("repository_dependencies", {}) ancestor_repository_dependencies = ancestor_repository_dependencies_dict.get("repository_dependencies", []) ancestor_tool_dependencies = ancestor_metadata_dict.get("tool_dependencies", {}) - ancestor_workflows = ancestor_metadata_dict.get("workflows", []) ancestor_data_manager = ancestor_metadata_dict.get("data_manager", {}) - current_datatypes = self.metadata_dict.get("datatypes", []) current_tools = self.metadata_dict.get("tools", []) current_guids = [tool_dict["guid"] for tool_dict in current_tools] current_guids.sort() - current_readme_files = self.metadata_dict.get("readme_files", []) current_repository_dependencies_dict = self.metadata_dict.get("repository_dependencies", {}) current_repository_dependencies = current_repository_dependencies_dict.get("repository_dependencies", []) current_tool_dependencies = self.metadata_dict.get("tool_dependencies", {}) - current_workflows = self.metadata_dict.get("workflows", []) current_data_manager = self.metadata_dict.get("data_manager", {}) # Handle case where no metadata exists for either changeset. - no_datatypes = not ancestor_datatypes and not current_datatypes - no_readme_files = not ancestor_readme_files and not current_readme_files no_repository_dependencies = not ancestor_repository_dependencies and not current_repository_dependencies no_tool_dependencies = not ancestor_tool_dependencies and not current_tool_dependencies no_tools = not ancestor_guids and not current_guids - no_workflows = not ancestor_workflows and not current_workflows no_data_manager = not ancestor_data_manager and not current_data_manager - if ( - no_datatypes - and no_readme_files - and no_repository_dependencies - and no_tool_dependencies - and no_tools - and no_workflows - and no_data_manager - ): + if no_repository_dependencies and no_tool_dependencies and no_tools and no_data_manager: return self.NO_METADATA - # Uncomment the following if we decide that README files should affect how installable - # repository revisions are defined. See the NOTE in self.compare_readme_files(). - # readme_file_comparision = self.compare_readme_files( ancestor_readme_files, current_readme_files ) repository_dependency_comparison = self.compare_repository_dependencies( ancestor_repository_dependencies, current_repository_dependencies ) tool_dependency_comparison = self.compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ) - workflow_comparison = self.compare_workflows(ancestor_workflows, current_workflows) - datatype_comparison = self.compare_datatypes(ancestor_datatypes, current_datatypes) data_manager_comparison = self.compare_data_manager(ancestor_data_manager, current_data_manager) # Handle case where all metadata is the same. 
if ( ancestor_guids == current_guids and repository_dependency_comparison == self.EQUAL and tool_dependency_comparison == self.EQUAL - and workflow_comparison == self.EQUAL - and datatype_comparison == self.EQUAL and data_manager_comparison == self.EQUAL ): return self.EQUAL @@ -191,16 +367,8 @@ def compare_changeset_revisions(self, ancestor_changeset_revision, ancestor_meta # readme_file_is_subset = readme_file_comparision in [ self.EQUAL, self.SUBSET ] repository_dependency_is_subset = repository_dependency_comparison in self.SUBSET_VALUES tool_dependency_is_subset = tool_dependency_comparison in self.SUBSET_VALUES - workflow_dependency_is_subset = workflow_comparison in self.SUBSET_VALUES - datatype_is_subset = datatype_comparison in self.SUBSET_VALUES datamanager_is_subset = data_manager_comparison in self.SUBSET_VALUES - if ( - repository_dependency_is_subset - and tool_dependency_is_subset - and workflow_dependency_is_subset - and datatype_is_subset - and datamanager_is_subset - ): + if repository_dependency_is_subset and tool_dependency_is_subset and datamanager_is_subset: is_subset = True for guid in ancestor_guids: if guid not in current_guids: @@ -239,56 +407,6 @@ def __data_manager_dict_to_tuple_list(metadata_dict): return self.SUBSET return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_datatypes(self, ancestor_datatypes, current_datatypes): - """Determine if ancestor_datatypes is the same as or a subset of current_datatypes.""" - # Each datatype dict looks something like: - # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"} - if len(ancestor_datatypes) <= len(current_datatypes): - for ancestor_datatype in ancestor_datatypes: - # Currently the only way to differentiate datatypes is by name. - ancestor_datatype_dtype = ancestor_datatype["dtype"] - ancestor_datatype_extension = ancestor_datatype["extension"] - ancestor_datatype_mimetype = ancestor_datatype.get("mimetype", None) - found_in_current = False - for current_datatype in current_datatypes: - if ( - current_datatype["dtype"] == ancestor_datatype_dtype - and current_datatype["extension"] == ancestor_datatype_extension - and current_datatype.get("mimetype", None) == ancestor_datatype_mimetype - ): - found_in_current = True - break - if not found_in_current: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_datatypes) == len(current_datatypes): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - - def compare_readme_files(self, ancestor_readme_files, current_readme_files): - """Determine if ancestor_readme_files is equal to or a subset of current_readme_files.""" - # NOTE: Although repository README files are considered a Galaxy utility similar to tools, - # repository dependency definition files, etc., we don't define installable repository revisions - # based on changes to README files. To understand why, consider the following scenario: - # 1. Upload the filtering tool to a new repository - this will result in installable revision 0. - # 2. Upload a README file to the repository - this will move the installable revision from revision - # 0 to revision 1. - # 3. Delete the README file from the repository - this will move the installable revision from - # revision 1 to revision 2. - # The above scenario is the current behavior, and that is why this method is not currently called. - # This method exists only in case we decide to change this current behavior. 
- # The lists of readme files looks something like: ["database/community_files/000/repo_2/readme.txt"] - if len(ancestor_readme_files) <= len(current_readme_files): - for ancestor_readme_file in ancestor_readme_files: - if ancestor_readme_file not in current_readme_files: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_readme_files) == len(current_readme_files): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_repository_dependencies(self, ancestor_repository_dependencies, current_repository_dependencies): """ Determine if ancestor_repository_dependencies is the same as or a subset of @@ -369,38 +487,6 @@ def compare_tool_dependencies(self, ancestor_tool_dependencies, current_tool_dep return self.SUBSET return self.NOT_EQUAL_AND_NOT_SUBSET - def compare_workflows(self, ancestor_workflows, current_workflows): - """ - Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows - is a subset of current_workflows. - """ - if len(ancestor_workflows) <= len(current_workflows): - for ancestor_workflow_tup in ancestor_workflows: - # ancestor_workflows is a list of tuples where each contained tuple is - # [ , ] - ancestor_workflow_dict = ancestor_workflow_tup[1] - # Currently the only way to differentiate workflows is by name. - ancestor_workflow_name = ancestor_workflow_dict["name"] - num_ancestor_workflow_steps = len(ancestor_workflow_dict["steps"]) - found_in_current = False - for current_workflow_tup in current_workflows: - current_workflow_dict = current_workflow_tup[1] - # Assume that if the name and number of steps are euqal, then the workflows - # are the same. Of course, this may not be true... - if ( - current_workflow_dict["name"] == ancestor_workflow_name - and len(current_workflow_dict["steps"]) == num_ancestor_workflow_steps - ): - found_in_current = True - break - if not found_in_current: - return self.NOT_EQUAL_AND_NOT_SUBSET - if len(ancestor_workflows) == len(current_workflows): - return self.EQUAL - else: - return self.SUBSET - return self.NOT_EQUAL_AND_NOT_SUBSET - def create_or_update_repository_metadata(self, changeset_revision, metadata_dict): """Create or update a repository_metadata record in the tool shed.""" has_repository_dependencies = False @@ -427,8 +513,9 @@ def create_or_update_repository_metadata(self, changeset_revision, metadata_dict downloadable = True else: downloadable = False - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, self.app.security.encode_id(self.repository.id), changeset_revision + assert self.repository + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, self.repository.id, changeset_revision ) if repository_metadata: repository_metadata.metadata = metadata_dict @@ -453,6 +540,7 @@ def create_or_update_repository_metadata(self, changeset_revision, metadata_dict assert repository_metadata # Always set the default values for the following columns. When resetting all metadata # on a repository this will reset the values. 
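# The create_or_update_repository_metadata() hunk here follows a classic upsert
# shape: look the record up by repository id and changeset revision, mutate it
# if found, otherwise construct a fresh one. A hedged, self-contained sketch of
# that pattern - the session/model names below are generic SQLAlchemy-style
# stand-ins, not the real tool shed model:
from typing import Any, Dict, Optional


def upsert_revision_metadata(session, model_class, repository_id: int, changeset_revision: str, metadata_dict: Dict[str, Any]):
    record: Optional[Any] = (
        session.query(model_class)
        .filter_by(repository_id=repository_id, changeset_revision=changeset_revision)
        .one_or_none()
    )
    if record is None:
        # No row exists for this revision yet, so create one.
        record = model_class(repository_id=repository_id, changeset_revision=changeset_revision)
    # Whether new or existing, always refresh the stored metadata payload
    # (metadata_ mirrors the attribute naming used elsewhere in this patch).
    record.metadata_ = metadata_dict
    session.add(record)
    session.commit()
    return record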
+ assert repository_metadata repository_metadata.missing_test_components = False self.sa_session.add(repository_metadata) session = self.sa_session() @@ -492,14 +580,12 @@ def different_revision_defines_tip_only_repository_dependency(self, rd_tup, repo return isinstance(repository_type_class, TipOnly) return False - def get_parent_id(self, id, old_id, version, guid, changeset_revisions): + def _get_parent_id(self, id: int, old_id, version, guid, changeset_revisions): parent_id = None # Compare from most recent to oldest. changeset_revisions.reverse() for changeset_revision in changeset_revisions: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, id, changeset_revision - ) + repository_metadata = repository_metadata_by_changeset_revision(self.app.model, id, changeset_revision) assert repository_metadata metadata = repository_metadata.metadata tools_dicts = metadata.get("tools", []) @@ -566,43 +652,6 @@ def get_query_for_setting_metadata_on_repositories(self, my_writable=False, orde self.app.model.Repository.table.c.deleted == false() ) - def new_datatypes_metadata_required(self, repository_metadata): - """ - Compare the last saved metadata for each datatype in the repository with the new metadata - in self.metadata_dict to determine if a new repository_metadata table record is required - or if the last saved metadata record can be updated for datatypes instead. - """ - # Datatypes are stored in metadata as a list of dictionaries that looks like: - # [{'dtype': 'galaxy.datatypes.data:Text', 'subclass': 'True', 'extension': 'acedb'}] - if "datatypes" in self.metadata_dict: - current_datatypes = self.metadata_dict["datatypes"] - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if "datatypes" in metadata: - ancestor_datatypes = metadata["datatypes"] - # The saved metadata must be a subset of the new metadata. - datatype_comparison = self.compare_datatypes(ancestor_datatypes, current_datatypes) - if datatype_comparison == self.NOT_EQUAL_AND_NOT_SUBSET: - return True - else: - return False - else: - # The new metadata includes datatypes, but the stored metadata does not, - # so we can update the stored metadata. - return False - else: - # There is no stored metadata, so we can update the metadata column in the - # repository_metadata table. - return False - else: - # There is no stored repository metadata, so we need to create a new repository_metadata - # table record. - return True - # self.metadata_dict includes no metadata for datatypes, so a new repository_metadata - # table record is not needed. - return False - def new_metadata_required_for_utilities(self): """ This method compares the last stored repository_metadata record associated with self.repository @@ -612,65 +661,18 @@ def new_metadata_required_for_utilities(self): self.repository because one or more Galaxy utilities may have been deleted from self.repository in the new tip. """ + assert self.repository repository_metadata = metadata_util.get_latest_repository_metadata( self.app, self.repository.id, downloadable=False ) - datatypes_required = self.new_datatypes_metadata_required(repository_metadata) - # Uncomment the following if we decide that README files should affect how installable - # repository revisions are defined. See the NOTE in the compare_readme_files() method. 
- # readme_files_required = sewlf.new_readme_files_metadata_required( repository_metadata ) repository_dependencies_required = self.new_repository_dependency_metadata_required(repository_metadata) tools_required = self.new_tool_metadata_required(repository_metadata) tool_dependencies_required = self.new_tool_dependency_metadata_required(repository_metadata) - workflows_required = self.new_workflow_metadata_required(repository_metadata) - if ( - datatypes_required - or repository_dependencies_required - or tools_required - or tool_dependencies_required - or workflows_required - ): + data_managers_required = self.new_data_manager_required(repository_metadata) + if repository_dependencies_required or tools_required or tool_dependencies_required or data_managers_required: return True return False - def new_readme_files_metadata_required(self, repository_metadata): - """ - Compare the last saved metadata for each readme file in the repository with the new metadata - in self.metadata_dict to determine if a new repository_metadata table record is required or - if the last saved metadata record can be updated for readme files instead. - """ - # Repository README files are kind of a special case because they have no effect on reproducibility. - # We'll simply inspect the file names to determine if any that exist in the saved metadata are - # eliminated from the new metadata in self.metadata_dict. - if "readme_files" in self.metadata_dict: - current_readme_files = self.metadata_dict["readme_files"] - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if "readme_files" in metadata: - ancestor_readme_files = metadata["readme_files"] - # The saved metadata must be a subset of the new metadata. - readme_file_comparison = self.compare_readme_files(ancestor_readme_files, current_readme_files) - if readme_file_comparison == self.NOT_EQUAL_AND_NOT_SUBSET: - return True - else: - return False - else: - # The new metadata includes readme_files, but the stored metadata does not, so - # we can update the stored metadata. - return False - else: - # There is no stored metadata, so we can update the metadata column in the repository_metadata - # table. - return False - else: - # There is no stored repository metadata, so we need to create a new repository_metadata - # table record. - return True - # self.metadata_dict includes no metadata for readme_files, so a new repository_metadata - # table record is not needed. - return False - def new_repository_dependency_metadata_required(self, repository_metadata): """ Compare the last saved metadata for each repository dependency in the repository @@ -722,6 +724,16 @@ def new_repository_dependency_metadata_required(self, repository_metadata): # record is not needed. return False + def new_data_manager_required(self, repository_metadata): + if self.metadata_dict and repository_metadata and repository_metadata.metadata: + return self.compare_data_manager(self.metadata_dict, repository_metadata.metadata) != self.EQUAL + else: + return bool( + repository_metadata + and repository_metadata.metadata + and repository_metadata.metadata.get("data_managers") + ) + def new_tool_metadata_required(self, repository_metadata): """ Compare the last saved metadata for each tool in the repository with the new metadata in @@ -818,33 +830,15 @@ def new_tool_dependency_metadata_required(self, repository_metadata): # record is not needed. 
         return False
 
-    def new_workflow_metadata_required(self, repository_metadata):
-        """
-        Currently everything about an exported workflow except the name is hard-coded, so
-        there's no real way to differentiate versions of exported workflows. If this changes
-        at some future time, this method should be enhanced accordingly.
-        """
-        if "workflows" in self.metadata_dict:
-            if repository_metadata:
-                # The repository has metadata, so update the workflows value -
-                # no new record is needed.
-                return False
-            else:
-                # There is no saved repository metadata, so we need to create a
-                # new repository_metadata table record.
-                return True
-        # self.metadata_dict includes no metadata for workflows, so a new
-        # repository_metadata table record is not needed.
-        return False
-
-    def reset_all_metadata_on_repository_in_tool_shed(self):
+    def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=None):
         """Reset all metadata on a single repository in a tool shed."""
+        assert self.repository
         log.debug(f"Resetting all metadata on repository: {self.repository.name}")
         repo = self.repository.hg_repo
         # The list of changeset_revisions refers to repository_metadata records that have been created
         # or updated. When the following loop completes, we'll delete all repository_metadata records
         # for this repository that do not have a changeset_revision value in this list.
-        changeset_revisions = []
+        changeset_revisions: List[Optional[str]] = []
         # When a new repository_metadata record is created, it always uses the values of
         # metadata_changeset_revision and metadata_dict.
         metadata_changeset_revision = None
@@ -855,7 +849,9 @@ def reset_all_metadata_on_repository_in_tool_shed(self):
             work_dir = tempfile.mkdtemp(prefix="tmp-toolshed-ramorits")
             ctx = repo[changeset]
             log.debug("Cloning repository changeset revision: %s", str(ctx.rev()))
-            cloned_ok, error_message = hg_util.clone_repository(self.repository_clone_url, work_dir, str(ctx.rev()))
+            assert self.repository_clone_url
+            repository_clone_url = repository_clone_url or self.repository_clone_url
+            cloned_ok, error_message = hg_util.clone_repository(repository_clone_url, work_dir, str(ctx.rev()))
             if cloned_ok:
                 log.debug("Generating metadata for changeset revision: %s", str(ctx.rev()))
                 self.set_changeset_revision(str(ctx))
@@ -873,9 +869,11 @@ def reset_all_metadata_on_repository_in_tool_shed(self):
                     # self.SUBSET - ancestor metadata is a subset of current metadata, so continue from current
                     # self.NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
                     # metadata, so persist ancestor metadata.
+                    log.debug("Ancestor metadata dict: %s", ancestor_metadata_dict)
                     comparison = self.compare_changeset_revisions(
                         ancestor_changeset_revision, ancestor_metadata_dict
                     )
+                    log.debug("Changeset revision comparison result: %s", comparison)
                     if comparison in [self.NO_METADATA, self.EQUAL, self.SUBSET]:
                         ancestor_changeset_revision = self.changeset_revision
                         ancestor_metadata_dict = self.metadata_dict
@@ -909,32 +907,24 @@ def reset_all_metadata_on_repository_in_tool_shed(self):
         basic_util.remove_dir(work_dir)
         # Delete all repository_metadata records for this repository that do not have a changeset_revision
         # value in changeset_revisions.
-        self.clean_repository_metadata(changeset_revisions)
+        self._clean_repository_metadata(changeset_revisions)
         # Set tool version information for all downloadable changeset revisions. Get the list of changeset
         # revisions from the changelog.
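# _reset_all_tool_versions() below rebuilds, for every downloadable revision
# that contains tools, a dictionary mapping each tool guid to its parent. A
# hedged sketch of the parent lookup idea in the spirit of _get_parent_id(),
# using flat per-revision {tool_id: guid} dicts instead of real
# repository_metadata records (all names here are illustrative):
from typing import Dict, List


def get_parent_guid(guid_by_revision: List[Dict[str, str]], tool_id: str, current_guid: str) -> str:
    # Scan older revisions newest-first (mirroring changeset_revisions.reverse()
    # above): the first one carrying a different guid for the same tool id is
    # the parent; if none is found, the bare tool id is recorded as the parent.
    for tools in reversed(guid_by_revision):
        older_guid = tools.get(tool_id)
        if older_guid and older_guid != current_guid:
            return older_guid
    return tool_id


history = [
    {"filter": "shed.example/repos/owner/repo/filter/1.0.0"},
    {"filter": "shed.example/repos/owner/repo/filter/1.0.1"},
]
assert get_parent_guid(history, "filter", "shed.example/repos/owner/repo/filter/1.0.2").endswith("1.0.1")
assert get_parent_guid([], "filter", "shed.example/repos/owner/repo/filter/1.0.0") == "filter"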
- self.reset_all_tool_versions(repo) + self._reset_all_tool_versions(repo) - def reset_all_tool_versions(self, repo): + def _reset_all_tool_versions(self, repo): """Reset tool version lineage for those changeset revisions that include valid tools.""" - encoded_repository_id = self.app.security.encode_id(self.repository.id) - changeset_revisions_that_contain_tools = [] - for changeset in repo.changelog: - changeset_revision = str(repo[changeset]) - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision - ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - if metadata.get("tools", None): - changeset_revisions_that_contain_tools.append(changeset_revision) + assert self.repository + changeset_revisions_that_contain_tools = _get_changeset_revisions_that_contain_tools( + self.app, repo, self.repository + ) # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that # are downloadable and contain tools. If a repository includes tools, build a dictionary of # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision. for index, changeset_revision in enumerate(changeset_revisions_that_contain_tools): tool_versions_dict = {} - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision + repository_metadata = repository_metadata_by_changeset_revision( + self.app.model, self.repository.id, changeset_revision ) assert repository_metadata metadata = repository_metadata.metadata @@ -947,8 +937,8 @@ def reset_all_tool_versions(self, repo): tool_versions_dict[tool_dict["guid"]] = tool_dict["id"] else: for tool_dict in tool_dicts: - parent_id = self.get_parent_id( - encoded_repository_id, + parent_id = self._get_parent_id( + self.repository.id, tool_dict["id"], tool_dict["version"], tool_dict["guid"], @@ -1009,18 +999,19 @@ def reset_metadata_on_selected_repositories(self, **kwd): status = "error" return message, status - def set_repository(self, repository): + def set_repository(self, repository, repository_clone_url=None): super().set_repository(repository) - self.repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(self.user, repository) + self.repository_clone_url = repository_clone_url or common_util.generate_clone_url_for(self.trans, repository) def set_repository_metadata(self, host, content_alert_str="", **kwd): """ Set metadata using the self.repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset has problems. """ + assert self.repository message = "" status = "done" - encoded_id = self.app.security.encode_id(self.repository.id) + repository_id = self.repository.id repo = self.repository.hg_repo self.generate_metadata_for_changeset_revision() if self.metadata_dict: @@ -1040,7 +1031,7 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): else: # Update the latest stored repository metadata with the contents and attributes of self.metadata_dict. 
repository_metadata = metadata_util.get_latest_repository_metadata( - self.app, self.repository.id, downloadable=False + self.app, repository_id, downloadable=False ) if repository_metadata: downloadable = metadata_util.is_downloadable(self.metadata_dict) @@ -1083,11 +1074,9 @@ def set_repository_metadata(self, host, content_alert_str="", **kwd): changeset_revisions = [] for changeset in repo.changelog: changeset_revision = str(repo[changeset]) - if metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_id, changeset_revision - ): + if repository_metadata_by_changeset_revision(self.app.model, repository_id, changeset_revision): changeset_revisions.append(changeset_revision) - self.add_tool_versions(encoded_id, repository_metadata, changeset_revisions) + self._add_tool_versions(repository_id, repository_metadata, changeset_revisions) elif len(repo) == 1 and not self.invalid_file_tups: message = "Revision %s includes no Galaxy utilities for which metadata can " % str( self.repository.tip() @@ -1105,3 +1094,16 @@ def set_repository_metadata_due_to_new_tip(self, host, content_alert_str=None, * """Set metadata on the tip of self.repository in the tool shed.""" error_message, status = self.set_repository_metadata(host, content_alert_str=content_alert_str, **kwd) return status, error_message + + +def _get_changeset_revisions_that_contain_tools(app: "ToolShedApp", repo, repository) -> List[str]: + changeset_revisions_that_contain_tools = [] + for changeset in repo.changelog: + changeset_revision = str(repo[changeset]) + repository_metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changeset_revision) + if repository_metadata: + metadata = repository_metadata.metadata + if metadata: + if metadata.get("tools", None): + changeset_revisions_that_contain_tools.append(changeset_revision) + return changeset_revisions_that_contain_tools diff --git a/lib/tool_shed/py.typed b/lib/tool_shed/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/lib/tool_shed/test/base/api.py b/lib/tool_shed/test/base/api.py index 3cd998769013..0983dd88d3e6 100644 --- a/lib/tool_shed/test/base/api.py +++ b/lib/tool_shed/test/base/api.py @@ -1,19 +1,15 @@ import os -import re +from functools import wraps from typing import ( Any, Dict, Optional, ) -from urllib.parse import urljoin import pytest -import requests from galaxy.tool_util.verify.interactor import GalaxyInteractorApi -from galaxy_test.base import api_asserts from galaxy_test.base.api_util import ( - baseauth_headers, get_admin_api_key as get_galaxy_admin_api_key, get_user_api_key as get_galaxy_user_key, TEST_USER, @@ -22,6 +18,7 @@ from galaxy_test.driver.testcase import DrivenFunctionalTestCase from . 
import driver from .api_util import ( + ensure_user_with_email, get_admin_api_key, get_user_api_key, ShedApiInteractor, @@ -35,13 +32,19 @@ class ShedBaseTestCase(DrivenFunctionalTestCase): @property def populator(self) -> ToolShedPopulator: if self._populator is None: - self._populator = ToolShedPopulator(self.admin_api_interactor, self.api_interactor) + self._populator = self._get_populator(self.api_interactor) return self._populator @property def admin_api_interactor(self) -> ShedApiInteractor: return ShedApiInteractor(self.url, get_admin_api_key()) + def _api_interactor_for_key(self, key: str) -> ShedApiInteractor: + return self._api_interactor(key) + + def populator_for_key(self, key: str) -> ToolShedPopulator: + return self._get_populator(self._api_interactor_for_key(key)) + @property def api_interactor(self) -> ShedApiInteractor: user_api_key = get_user_api_key() @@ -49,17 +52,19 @@ def api_interactor(self) -> ShedApiInteractor: email = TEST_USER password = "testpassword" ensure_user_with_email(self.admin_api_interactor, email, password) - user_api_key = self._api_key(email, password) - return ShedApiInteractor(self.url, user_api_key) - - def _api_key(self, email: str, password: str) -> str: - headers = baseauth_headers(email, password) - url = urljoin(self.url, "api/authenticate/baseauth") - auth_response = requests.get(url, headers=headers) - api_asserts.assert_status_code_is(auth_response, 200) - auth_dict = auth_response.json() - api_asserts.assert_has_keys(auth_dict, "api_key") - return auth_dict["api_key"] + user_api_key = self.admin_api_interactor.create_api_key(email, password) + return self._api_interactor_for_key(user_api_key) + + def _api_interactor_by_credentials(self, email: str, password: str) -> ShedApiInteractor: + ensure_user_with_email(self.admin_api_interactor, email, password) + user_api_key = self.admin_api_interactor.create_api_key(email, password) + return self._api_interactor(user_api_key) + + def _api_interactor(self, api_key: str) -> ShedApiInteractor: + return ShedApiInteractor(self.url, api_key) + + def _get_populator(self, user_api_interactor) -> ToolShedPopulator: + return ToolShedPopulator(self.admin_api_interactor, user_api_interactor) def setUp(self): host = os.environ.get("TOOL_SHED_TEST_HOST") @@ -89,35 +94,6 @@ def _get_driver(self, tool_shed_test_driver): self._test_driver = tool_shed_test_driver -def ensure_user_with_email(admin_api_interactor: ShedApiInteractor, email: str, password: Optional[str]): - all_users_response = admin_api_interactor.get("users") - try: - all_users_response.raise_for_status() - except requests.exceptions.HTTPError as e: - raise Exception( - f"Failed to verify user with email [{email}] exists - perhaps you're targetting the wrong Galaxy server or using an incorrect admin API key. 
HTTP error: {e}" - ) - username = email_to_username(email) - all_users = all_users_response.json() - try: - test_user = [user for user in all_users if user["username"] == username][0] - except IndexError: - password = password or "testpass" - data = dict( - remote_user_email=email, - email=email, - password=password, - username=username, - ) - test_user = admin_api_interactor.post("users", json=data).json() - return test_user - - -def email_to_username(email: str) -> str: - """Pattern used for test user generation - does not use the API.""" - return re.sub(r"[^a-z-\d]", "--", email.lower()) - - class ShedGalaxyInteractorApi(GalaxyInteractorApi): def __init__(self, galaxy_url: str): interactor_kwds: Dict[str, Any] = {} @@ -127,6 +103,25 @@ def __init__(self, galaxy_url: str): super().__init__(**interactor_kwds) +def make_skip_if_api_version_wrapper(version): + def wrapper(method): + @wraps(method) + def wrapped_method(api_test_case, *args, **kwd): + interactor: ShedApiInteractor = api_test_case.api_interactor + api_version = interactor.api_version + if api_version == version: + raise pytest.skip(f"{version} tool shed API found, skipping test") + return method(api_test_case, *args, **kwd) + + return wrapped_method + + return wrapper + + +skip_if_api_v1 = make_skip_if_api_version_wrapper("v1") +skip_if_api_v2 = make_skip_if_api_version_wrapper("v2") + + class ShedApiTestCase(ShedBaseTestCase, UsesShedApi): _galaxy_interactor: Optional[GalaxyInteractorApi] = None diff --git a/lib/tool_shed/test/base/api_util.py b/lib/tool_shed/test/base/api_util.py index 412b92af4dad..e90a495aa69b 100644 --- a/lib/tool_shed/test/base/api_util.py +++ b/lib/tool_shed/test/base/api_util.py @@ -1,12 +1,23 @@ import os +import re from functools import wraps from typing import ( + Any, Callable, + Dict, Optional, ) from urllib.parse import urljoin import requests +from typing_extensions import Literal + +from galaxy_test.base.api_asserts import ( + assert_has_keys, + assert_status_code_is, + assert_status_code_is_ok, +) +from galaxy_test.base.api_util import baseauth_headers DEFAULT_TOOL_SHED_BOOTSTRAP_ADMIN_API_KEY = "TEST1234" DEFAULT_TOOL_SHED_USER_API_KEY = None @@ -52,6 +63,15 @@ def __init__(self, url: str, api_key: str): self.url = url self.api_key = api_key + def create_api_key(self, email: str, password: str) -> str: + headers = baseauth_headers(email, password) + url = urljoin(self.url, "api/authenticate/baseauth") + auth_response = requests.get(url, headers=headers) + assert_status_code_is(auth_response, 200) + auth_dict = auth_response.json() + assert_has_keys(auth_dict, "api_key") + return auth_dict["api_key"] + def _append_headers(self, kwd): if "admin" in kwd: key = get_admin_api_key() @@ -66,3 +86,56 @@ def _append_headers(self, kwd): get = decorate_method(requests.get) post = decorate_method(requests.post) put = decorate_method(requests.put) + delete = decorate_method(requests.delete) + + @property + def api_version(self) -> Literal["v1", "v2"]: + config = self.version() + api_version = config.get("api_version", "v1") + return api_version + + def version(self) -> Dict[str, Any]: + response = self.get("version") + response.raise_for_status() + return response.json() + + @property + def hg_url_base(self): + return self.url + + +def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any], assert_ok=True) -> Dict[str, Any]: + email = user_dict["email"] + if "password" not in user_dict: + user_dict["password"] = "testpass" + if "remote_user_email" not in user_dict: + 
user_dict["remote_user_email"] = email + response = admin_interactor.post("users", json=user_dict) + if assert_ok: + assert_status_code_is_ok(response) + return response.json() + + +def ensure_user_with_email( + admin_api_interactor: ShedApiInteractor, email: str, password: Optional[str] +) -> Dict[str, Any]: + all_users_response = admin_api_interactor.get("users") + try: + all_users_response.raise_for_status() + except requests.exceptions.HTTPError as e: + raise Exception( + f"Failed to verify user with email [{email}] exists - perhaps you're targeting the wrong ToolShed server or using an incorrect admin API key. HTTP error: {e}" + ) + username = email_to_username(email) + all_users = all_users_response.json() + try: + test_user = [user for user in all_users if user["username"] == username][0] + except IndexError: + request = {"email": email, "username": username, "password": password} + test_user = create_user(admin_api_interactor, request, assert_ok=False) + return test_user + + +def email_to_username(email: str) -> str: + """Pattern used for test user generation - does not use the API.""" + return re.sub(r"[^a-z-\d]", "--", email.lower()) diff --git a/lib/tool_shed/test/base/browser.py b/lib/tool_shed/test/base/browser.py new file mode 100644 index 000000000000..559ce7468cf3 --- /dev/null +++ b/lib/tool_shed/test/base/browser.py @@ -0,0 +1,49 @@ +import abc +from typing import ( + List, + Union, +) + +FormValueType = Union[str, bool] + + +class ShedBrowser(metaclass=abc.ABCMeta): + @abc.abstractmethod + def visit_url(self, url: str, allowed_codes: List[int]) -> str: + """Navigate to the supplied URL.""" + + @abc.abstractmethod + def page_content(self) -> str: + """Return the page content for""" + + @abc.abstractmethod + def check_page_for_string(self, patt: str) -> None: + """Looks for 'patt' in the current browser page""" + + @abc.abstractmethod + def check_string_not_in_page(self, patt: str) -> None: + """Looks for 'patt' not being in the current browser page""" + + @abc.abstractmethod + def fill_form_value(self, form_name: str, control_name: str, value: FormValueType): + """Fill in a form value.""" + + @abc.abstractmethod + def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd): + """Submit the target button.""" + + @abc.abstractmethod + def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd): + """Submit the target button.""" + + @abc.abstractproperty + def is_twill(self) -> bool: + """Return whether this is a twill browser.""" + + @abc.abstractmethod + def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: + """Select some new categories and then restore the component.""" + + @abc.abstractmethod + def grant_users_access(self, usernames: List[str]) -> None: + """Select users to grant access to.""" diff --git a/lib/tool_shed/test/base/driver.py b/lib/tool_shed/test/base/driver.py index f8885c50a8e5..7dc778643e81 100644 --- a/lib/tool_shed/test/base/driver.py +++ b/lib/tool_shed/test/base/driver.py @@ -43,6 +43,14 @@ class ToolShedTestDriver(driver_util.TestDriver): def setup(self): """Entry point for test driver script.""" + self.external_shed = bool(os.environ.get("TOOL_SHED_TEST_EXTERNAL", None)) + if not self.external_shed: + self._setup_local() + else: + # Going to also need to set TOOL_SHED_TEST_HOST. 
+ assert os.environ["TOOL_SHED_TEST_HOST"] + + def _setup_local(self): # ---- Configuration ------------------------------------------------------ tool_shed_test_tmp_dir = driver_util.setup_tool_shed_tmp_dir() if not os.path.isdir(tool_shed_test_tmp_dir): diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py new file mode 100644 index 000000000000..d29529cece24 --- /dev/null +++ b/lib/tool_shed/test/base/playwrightbrowser.py @@ -0,0 +1,174 @@ +import time +from typing import List + +from playwright.sync_api import ( + expect, + Locator, + Page, +) + +from .browser import ( + FormValueType, + ShedBrowser, +) + + +class Locators: + toolbar_login = ".toolbar-login" + toolbar_logout = ".toolbar-logout" + login_submit_button = '[name="login_button"]' + register_link = ".register-link" + + +class PlaywrightShedBrowser(ShedBrowser): + _page: Page + + def __init__(self, page: Page): + self._page = page + + def visit_url(self, url: str, allowed_codes: List[int]) -> str: + response = self._page.goto(url) + assert response is not None + return_code = response.status + assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( + return_code, + ", ".join(str(code) for code in allowed_codes), + ) + return response.url + + def page_content(self) -> str: + self._page.wait_for_load_state("networkidle") + return self._page.content() + + def check_page_for_string(self, patt: str) -> None: + """Looks for 'patt' in the current browser page""" + patt = patt.replace("", "").replace("", "") + expect(self._page.locator("body")).to_contain_text(patt) + + def check_string_not_in_page(self, patt: str) -> None: + patt = patt.replace("", "").replace("", "") + expect(self._page.locator("body")).not_to_contain_text(patt) + + def xcheck_page_for_string(self, patt: str) -> None: + page = self.page_content() + if page.find(patt) == -1: + fname = self.write_temp_file(page) + errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]" + raise AssertionError(errmsg) + + def xcheck_string_not_in_page(self, patt: str) -> None: + page = self.page_content() + if page.find(patt) != -1: + fname = self.write_temp_file(page) + errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'" + raise AssertionError(errmsg) + + def write_temp_file(self, content, suffix=".html"): + import tempfile + + from galaxy.util import smart_str + + with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh: + fh.write(smart_str(content)) + return fh.name + + def show_forms(self) -> Locator: + """Shows form, helpful for debugging new tests""" + return self._page.locator("form") + + def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd): + form = self._form_with_name(form_name) + self._submit_form(form, button, **kwd) + + def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd): + """Populates and submits a form from the keyword arguments.""" + # An HTMLForm contains a sequence of Controls. 
Supported control classes are: + # TextControl, FileControl, ListControl, RadioControl, CheckboxControl, SelectControl, + # SubmitControl, ImageControl + if form is None: + try: + form = self.show_forms().nth(form_no) + except IndexError: + raise ValueError("No form to submit found") + self._submit_form(form, button, **kwd) + + def _submit_form(self, form: Locator, button="runtool_btn", **kwd): + for control_name, control_value in kwd.items(): + self._fill_form_value(form, control_name, control_value) + input = self._page.locator(f"[name='{button}']") + if input.count(): + input.click() + else: + submit_input = form.locator("input[type=submit]") + submit_input.click() + time.sleep(0.25) + # tc.submit(button) + + def _form_with_name(self, name: str) -> Locator: + forms = self.show_forms() + count = forms.count() + for i in range(count): + nth_form = self.show_forms().nth(i) + if nth_form.get_attribute("name") == name: + return nth_form + raise KeyError(f"No form with name [{name}]") + + def fill_form_value(self, form_name: str, control_name: str, value: FormValueType): + form: Locator = self._form_with_name(form_name) + self._fill_form_value(form, control_name, value) + + def _fill_form_value(self, form: Locator, control_name: str, value: FormValueType): + input_i = form.locator(f"input[name='{control_name}']") + input_t = form.locator(f"textarea[name='{control_name}']") + input_s = form.locator(f"select[name='{control_name}']") + if input_i.count(): + if control_name in ["redirect"]: + input_i.input_value = value + else: + if isinstance(value, bool): + if value and not input_i.is_checked(): + input_i.check() + elif not value and input_i.is_checked(): + input_i.uncheck() + else: + input_i.fill(value) + if input_t.count(): + input_t.fill(value) + if input_s.count(): + input_s.select_option(value) + + def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: + multi_select = "form[name='categories'] select[name='category_id']" + select_locator = self._page.locator(multi_select) + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=categories_to_add) + self.submit_form_with_name("categories", "manage_categories_button") + + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=categories_to_remove) + self.submit_form_with_name("categories", "manage_categories_button") + + def grant_users_access(self, usernames: List[str]): + multi_select = "form[name='user_access'] select[name='allow_push']" + select_locator = self._page.locator(multi_select) + select_locator.evaluate("node => node.selectedOptions = []") + select_locator.select_option(label=usernames) + self.submit_form_with_name("user_access", "user_access_button") + + @property + def is_twill(self) -> bool: + return False + + def logout_if_logged_in(self, assert_logged_out=True): + self._page.wait_for_selector(f"{Locators.toolbar_login}, {Locators.toolbar_logout}") + logout_locator = self._page.locator(Locators.toolbar_logout) + if logout_locator.is_visible(): + logout_locator.click() + if assert_logged_out: + self.expect_not_logged_in() + + def expect_not_logged_in(self): + expect(self._page.locator(Locators.toolbar_logout)).not_to_be_visible() + + def expect_logged_in(self): + expect(self._page.locator(Locators.toolbar_logout)).to_be_visible() diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py index 01f3f6c0e870..9b63e1f7a559 100644 --- 
a/lib/tool_shed/test/base/populators.py +++ b/lib/tool_shed/test/base/populators.py @@ -8,6 +8,7 @@ ) import requests +from typing_extensions import Protocol from galaxy.util.resources import ( files, @@ -17,6 +18,7 @@ from galaxy_test.base import api_asserts from galaxy_test.base.api_util import random_name from tool_shed_client.schema import ( + BuildSearchIndexResponse, Category, CreateCategoryRequest, CreateRepositoryRequest, @@ -38,8 +40,12 @@ ResetMetadataOnRepositoryResponse, ToolSearchRequest, ToolSearchResults, + Version, +) +from .api_util import ( + ensure_user_with_email, + ShedApiInteractor, ) -from .api_util import ShedApiInteractor HasRepositoryId = Union[str, Repository] @@ -71,6 +77,11 @@ def repo_tars(test_data_path: str) -> List[Path]: return tar_paths +class HostsTestToolShed(Protocol): + host: str + port: int + + class ToolShedPopulator: """Utilities for easy fixture creation of tool shed related things.""" @@ -81,25 +92,72 @@ def __init__(self, admin_api_interactor: ShedApiInteractor, api_interactor: Shed self._admin_api_interactor = admin_api_interactor self._api_interactor = api_interactor - def setup_test_data_repo(self, test_data_path: str) -> Repository: - prefix = test_data_path.replace("_", "") - category_id = self.new_category(prefix=prefix).id - repository = self.new_repository(category_id, prefix=prefix) - repository_id = repository.id + def setup_bismark_repo( + self, + repository_id: Optional[HasRepositoryId] = None, + end: Optional[int] = None, + category_id: Optional[str] = None, + ) -> HasRepositoryId: + if repository_id is None: + category_id = category_id or self.new_category(prefix="testbismark").id + repository_id = self.new_repository(category_id, prefix="testbismark") + return self.setup_test_data_repo_by_id("bismark", repository_id, assert_ok=False, end=end) + + def setup_test_data_repo_by_id( + self, + test_data_path: str, + repository_id: Optional[HasRepositoryId] = None, + assert_ok=True, + start: int = 0, + end: Optional[int] = None, + ) -> HasRepositoryId: + if repository_id is None: + prefix = test_data_path.replace("_", "") + category_id = self.new_category(prefix=prefix).id + repository = self.new_repository(category_id, prefix=prefix) + repository_id = repository.id + assert repository_id for index, repo_tar in enumerate(repo_tars(test_data_path)): + if index < start: + continue + + if end and index >= end: + break + commit_message = f"Updating {test_data_path} with index {index} with tar {repo_tar}" - response = self.upload_revision( - repository_id, - repo_tar, - commit_message=commit_message, - ) - assert response.is_ok + response = self.upload_revision_raw(repository_id, repo_tar, commit_message) + if assert_ok: + api_asserts.assert_status_code_is_ok(response) + assert RepositoryUpdate(__root__=response.json()).is_ok + return repository_id + + def setup_test_data_repo( + self, + test_data_path: str, + repository: Optional[Repository] = None, + assert_ok=True, + start: int = 0, + end: Optional[int] = None, + category_id: Optional[str] = None, + ) -> Repository: + if repository is None: + prefix = test_data_path.replace("_", "") + if category_id is None: + category_id = self.new_category(prefix=prefix).id + repository = self.new_repository(category_id, prefix=prefix) + self.setup_test_data_repo_by_id(test_data_path, repository, assert_ok=assert_ok, start=start, end=end) return repository - def setup_column_maker_repo(self, prefix=DEFAULT_PREFIX) -> Repository: - category_id = self.new_category(prefix=prefix).id + def 
setup_column_maker_repo( + self, + prefix=DEFAULT_PREFIX, + category_id: Optional[str] = None, + ) -> Repository: + if category_id is None: + category_id = self.new_category(prefix=prefix).id + assert category_id repository = self.new_repository(category_id, prefix=prefix) repository_id = repository.id assert repository_id @@ -115,6 +173,11 @@ def setup_column_maker_and_get_metadata(self, prefix=DEFAULT_PREFIX) -> Reposito repository = self.setup_column_maker_repo(prefix=prefix) return self.get_metadata(repository) + def get_install_info_for_repository(self, has_repository_id: HasRepositoryId) -> InstallInfo: + repository_id = self._repository_id(has_repository_id) + metadata = self.get_metadata(repository_id, True) + return self.get_install_info(metadata) + def get_install_info(self, repository_metadata: RepositoryMetadata) -> InstallInfo: revision_metadata = repository_metadata.latest_revision repo = revision_metadata.repository @@ -152,7 +215,7 @@ def upload_revision_raw( def upload_revision( self, repository: HasRepositoryId, path: Traversable, commit_message: str = DEFAULT_COMMIT_MESSAGE ): - response = self.upload_revision_raw(repository, path, commit_message) + response = self.upload_revision_raw(repository, path, commit_message=commit_message) if response.status_code != 200: response_json = None err_msg = None @@ -183,9 +246,10 @@ def create_repository(self, request: CreateRepositoryRequest) -> Repository: api_asserts.assert_status_code_is_ok(response) return Repository(**response.json()) - def reindex(self): + def reindex(self) -> BuildSearchIndexResponse: index_response = self._admin_api_interactor.put("tools/build_search_index") index_response.raise_for_status() + return BuildSearchIndexResponse(**index_response.json()) def new_category( self, name: Optional[str] = None, description: Optional[str] = None, prefix=DEFAULT_PREFIX @@ -243,6 +307,59 @@ def repository_index(self, request: Optional[RepositoryIndexRequest]) -> Reposit api_asserts.assert_status_code_is_ok(repository_response) return RepositoryIndexResponse(__root__=repository_response.json()) + def get_usernames_allowed_to_push(self, repository: HasRepositoryId) -> List[str]: + repository_id = self._repository_id(repository) + show_response = self._api_interactor.get(f"repositories/{repository_id}/allow_push") + show_response.raise_for_status() + as_list = show_response.json() + assert isinstance(as_list, list) + return as_list + + def allow_user_to_push(self, repository: HasRepositoryId, username: str) -> None: + repository_id = self._repository_id(repository) + post_response = self._api_interactor.post(f"repositories/{repository_id}/allow_push/{username}") + post_response.raise_for_status() + + def disallow_user_to_push(self, repository: HasRepositoryId, username: str) -> None: + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete(f"repositories/{repository_id}/allow_push/{username}") + delete_response.raise_for_status() + + def set_malicious(self, repository: HasRepositoryId, changeset_revision: str): + repository_id = self._repository_id(repository) + put_response = self._api_interactor.put( + f"repositories/{repository_id}/revisions/{changeset_revision}/malicious" + ) + put_response.raise_for_status() + + def unset_malicious(self, repository: HasRepositoryId, changeset_revision: str): + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete( + f"repositories/{repository_id}/revisions/{changeset_revision}/malicious" + ) + 
delete_response.raise_for_status() + + def tip_is_malicious(self, repository: HasRepositoryId) -> bool: + repository_metadata = self.get_metadata(repository) + revision = repository_metadata.latest_revision + return revision.malicious + + def set_deprecated(self, repository: HasRepositoryId): + repository_id = self._repository_id(repository) + put_response = self._api_interactor.put(f"repositories/{repository_id}/deprecated") + put_response.raise_for_status() + + def unset_deprecated(self, repository: HasRepositoryId): + repository_id = self._repository_id(repository) + delete_response = self._api_interactor.delete(f"repositories/{repository_id}/deprecated") + delete_response.raise_for_status() + + def is_deprecated(self, repository: HasRepositoryId) -> bool: + repository_id = self._repository_id(repository) + repository_response = self._api_interactor.get(f"repositories/{repository_id}") + repository_response.raise_for_status() + return Repository(**repository_response.json()).deprecated + def get_metadata(self, repository: HasRepositoryId, downloadable_only=True) -> RepositoryMetadata: repository_id = self._repository_id(repository) metadata_response = self._api_interactor.get( @@ -258,6 +375,11 @@ def reset_metadata(self, repository: HasRepositoryId) -> ResetMetadataOnReposito api_asserts.assert_status_code_is_ok(reset_response) return ResetMetadataOnRepositoryResponse(**reset_response.json()) + def version(self) -> Version: + version_response = self._admin_api_interactor.get("version") + api_asserts.assert_status_code_is_ok(version_response) + return Version(**version_response.json()) + def tool_search_query(self, query: str) -> ToolSearchResults: return self.tool_search(ToolSearchRequest(q=query)) @@ -266,6 +388,22 @@ def tool_search(self, search_request: ToolSearchRequest) -> ToolSearchResults: api_asserts.assert_status_code_is_ok(search_response) return ToolSearchResults(**search_response.json()) + def tool_guid( + self, shed_host: HostsTestToolShed, repository: Repository, tool_id: str, tool_version: Optional[str] = None + ) -> str: + owner = repository.owner + name = repository.name + port = shed_host.port + if port in [None, 80, 443]: + host_and_port = shed_host.host + else: + host_and_port = f"{shed_host.host}:{shed_host.port}" + tool_id_base = f"{host_and_port}/repos/{owner}/{name}/{tool_id}" + if tool_version is None: + return tool_id_base + else: + return f"{tool_id_base}/{tool_version}" + def repo_search_query(self, query: str) -> RepositorySearchResults: return self.repo_search(RepositorySearchRequest(q=query)) @@ -274,11 +412,23 @@ def repo_search(self, repo_search_request: RepositorySearchRequest) -> Repositor api_asserts.assert_status_code_is_ok(search_response) return RepositorySearchResults(**search_response.json()) + def delete_api_key(self) -> None: + response = self._api_interactor.delete("users/current/api_key") + response.raise_for_status() + + def create_new_api_key(self) -> str: + response = self._api_interactor.post("users/current/api_key") + response.raise_for_status() + return response.json() + def guid(self, repository: Repository, tool_id: str, tool_version: str) -> str: url = self._api_interactor.url base = url.split("://")[1].split("/")[0] return f"{base}/repos/{repository.owner}/{repository.name}/{tool_id}/{tool_version}" + def new_user(self, username: str, password: str): + return ensure_user_with_email(self._admin_api_interactor, username, password) + def _repository_id(self, has_id: HasRepositoryId) -> str: if isinstance(has_id, Repository): return 
has_id.id diff --git a/lib/tool_shed/test/base/test_db_util.py b/lib/tool_shed/test/base/test_db_util.py index 0ae2c8e76828..9081a007a097 100644 --- a/lib/tool_shed/test/base/test_db_util.py +++ b/lib/tool_shed/test/base/test_db_util.py @@ -1,5 +1,8 @@ import logging -from typing import Optional +from typing import ( + List, + Optional, +) from sqlalchemy import ( and_, @@ -47,23 +50,21 @@ def get_all_repositories(): return sa_session().query(model.Repository).all() -def get_all_installed_repositories(actually_installed=False): - if actually_installed: - return ( - install_session() - .query(galaxy.model.tool_shed_install.ToolShedRepository) - .filter( - and_( - galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == false(), - galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == false(), - galaxy.model.tool_shed_install.ToolShedRepository.table.c.status - == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED, - ) +def get_all_installed_repositories(session=None) -> List[galaxy.model.tool_shed_install.ToolShedRepository]: + if session is None: + session = install_session() + return list( + session.query(galaxy.model.tool_shed_install.ToolShedRepository) + .filter( + and_( + galaxy.model.tool_shed_install.ToolShedRepository.table.c.deleted == false(), + galaxy.model.tool_shed_install.ToolShedRepository.table.c.uninstalled == false(), + galaxy.model.tool_shed_install.ToolShedRepository.table.c.status + == galaxy.model.tool_shed_install.ToolShedRepository.installation_status.INSTALLED, ) - .all() ) - else: - return install_session().query(galaxy.model.tool_shed_install.ToolShedRepository).all() + .all() + ) def get_galaxy_repository_by_name_owner_changeset_revision(repository_name, owner, changeset_revision): @@ -90,15 +91,13 @@ def get_installed_repository_by_id(repository_id): ) -def get_installed_repository_by_name_owner(repository_name, owner, return_multiple=False): - query = ( - install_session() - .query(galaxy.model.tool_shed_install.ToolShedRepository) - .filter( - and_( - galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name, - galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner, - ) +def get_installed_repository_by_name_owner(repository_name, owner, return_multiple=False, session=None): + if session is None: + session = install_session() + query = session.query(galaxy.model.tool_shed_install.ToolShedRepository).filter( + and_( + galaxy.model.tool_shed_install.ToolShedRepository.table.c.name == repository_name, + galaxy.model.tool_shed_install.ToolShedRepository.table.c.owner == owner, ) ) if return_multiple: diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py new file mode 100644 index 000000000000..65cf8c48eda3 --- /dev/null +++ b/lib/tool_shed/test/base/twillbrowser.py @@ -0,0 +1,150 @@ +import tempfile +from typing import ( + Dict, + List, +) + +import twill.commands as tc +from twill.browser import FormElement + +from galaxy.util import smart_str +from .browser import ( + FormValueType, + ShedBrowser, +) + +tc.options["equiv_refresh_interval"] = 0 + + +def visit_url(url: str, allowed_codes: List[int]) -> str: + new_url = tc.go(url) + return_code = tc.browser.code + assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( + return_code, + ", ".join(str(code) for code in allowed_codes), + ) + return new_url + + +def page_content() -> str: + return tc.browser.html + + +class 
TwillShedBrowser(ShedBrowser):
+    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+        return visit_url(url, allowed_codes=allowed_codes)
+
+    def page_content(self) -> str:
+        """
+        Return the last visited page (usually HTML, but can be binary data as
+        well).
+        """
+        return page_content()
+
+    def check_page_for_string(self, patt: str) -> None:
+        page = self.page_content()
+        if page.find(patt) == -1:
+            fname = self.write_temp_file(page)
+            errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]"
+            raise AssertionError(errmsg)
+
+    def check_string_not_in_page(self, patt: str) -> None:
+        page = self.page_content()
+        if page.find(patt) != -1:
+            fname = self.write_temp_file(page)
+            errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'"
+            raise AssertionError(errmsg)
+
+    def write_temp_file(self, content, suffix=".html"):
+        with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh:
+            fh.write(smart_str(content))
+        return fh.name
+
+    def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd):
+        forms_by_name: Dict[str, FormElement] = {f.get("name"): f for f in self._show_forms()}
+        form = forms_by_name[form_name]
+        self._submit_form(form, button, **kwd)
+
+    def _show_forms(self) -> List[FormElement]:
+        """Shows form, helpful for debugging new tests"""
+        return tc.browser.forms
+
+    def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd):
+        if form is None:
+            try:
+                form = self._show_forms()[form_no]
+            except IndexError:
+                raise ValueError("No form to submit found")
+        self._submit_form(form, button, **kwd)
+
+    def _submit_form(self, form, button, **kwd):
+        controls = {c.name: c for c in form.inputs}
+        form_name = form.get("name")
+        for control_name, control_value in kwd.items():
+            if control_name not in controls:
+                continue  # these cannot be handled safely - cause the test to barf out
+            if not isinstance(control_value, list):
+                control_value = [str(control_value)]
+            control = controls[control_name]
+            control_type = getattr(control, "type", None)
+            if control_type in (
+                "text",
+                "textfield",
+                "submit",
+                "password",
+                "TextareaElement",
+                "checkbox",
+                "radio",
+                None,
+            ):
+                for cv in control_value:
+                    tc.fv(form_name, control.name, cv)
+            else:
+                # Add conditions for other control types here when necessary.
+ pass + tc.submit(button) + + def fill_form_value(self, form_name: str, control_name: str, value: FormValueType): + value = str(value) + tc.fv(form_name, control_name, value) + + def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None: + """Select some new categories and then restore the component.""" + strings_displayed = [] + strings_not_displayed = [] + for category in categories_to_add: + self.fill_form_value("categories", "category_id", f"+{category}") + strings_displayed.append(f"selected>{category}") + for category in categories_to_remove: + self.fill_form_value("categories", "category_id", f"-{category}") + strings_not_displayed.append(f"selected>{category}") + self.submit_form_with_name("categories", "manage_categories_button") + self._check_for_strings(strings_displayed, strings_not_displayed) + + strings_displayed = [] + strings_not_displayed = [] + for category in categories_to_remove: + self.fill_form_value("categories", "category_id", f"+{category}") + strings_displayed.append(f"selected>{category}") + for category in categories_to_add: + self.fill_form_value("categories", "category_id", f"-{category}") + strings_not_displayed.append(f"selected>{category}") + self.submit_form_with_name("categories", "manage_categories_button") + self._check_for_strings(strings_displayed, strings_not_displayed) + + def grant_users_access(self, usernames: List[str]): + for username in usernames: + self.fill_form_value("user_access", "allow_push", f"+{username}") + self.submit_form_with_name("user_access", "user_access_button") + + @property + def is_twill(self) -> bool: + return True + + def _check_for_strings(self, strings_displayed: List[str], strings_not_displayed: List[str]): + if strings_displayed: + for check_str in strings_displayed: + self.check_page_for_string(check_str) + if strings_not_displayed: + for check_str in strings_not_displayed: + self.check_string_not_in_page(check_str) diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py index b390561922e8..8c15af3dc210 100644 --- a/lib/tool_shed/test/base/twilltestcase.py +++ b/lib/tool_shed/test/base/twilltestcase.py @@ -1,3 +1,5 @@ +import abc +import contextlib import logging import os import shutil @@ -6,7 +8,11 @@ import tempfile import time from json import loads +from pathlib import Path from typing import ( + Any, + Dict, + Iterator, List, Optional, ) @@ -16,29 +22,47 @@ urlparse, ) +import pytest import requests -import twill.commands as tc from mercurial import ( commands, hg, ui, ) +from playwright.sync_api import Page +from sqlalchemy import ( + and_, + false, +) import galaxy.model.tool_shed_install as galaxy_model +from galaxy.schema.schema import CheckForUpdatesResponse from galaxy.security import idencoding +from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager +from galaxy.tool_shed.galaxy_install.metadata.installed_repository_metadata_manager import ( + InstalledRepositoryMetadataManager, +) +from galaxy.tool_shed.unittest_utils import ( + StandaloneInstallationTarget, + ToolShedTarget, +) +from galaxy.tool_shed.util.dependency_display import build_manage_repository_dict +from galaxy.tool_shed.util.repository_util import check_for_updates from galaxy.util import ( DEFAULT_SOCKET_TIMEOUT, smart_str, - unicodify, ) from galaxy_test.base.api_asserts import assert_status_code_is_ok from 
galaxy_test.base.api_util import get_admin_api_key
 from galaxy_test.base.populators import wait_on_assertion
+from tool_shed.test.base.populators import TEST_DATA_REPO_FILES
 from tool_shed.util import (
     hg_util,
     hgweb_config,
     xml_util,
 )
+from tool_shed.util.repository_content_util import tar_open
 from tool_shed.webapp.model import Repository as DbRepository
 from tool_shed_client.schema import (
     Category,
@@ -50,17 +74,557 @@
     test_db_util,
 )
 from .api import ShedApiTestCase
+from .browser import ShedBrowser
+from .playwrightbrowser import PlaywrightShedBrowser
+from .twillbrowser import (
+    page_content,
+    visit_url,
+)
 
 # Set a 10 minute timeout for repository installation.
 repository_installation_timeout = 600
 
 log = logging.getLogger(__name__)
 
-tc.options["equiv_refresh_interval"] = 0
+
+class ToolShedInstallationClient(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def check_galaxy_repository_tool_panel_section(
+        self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str
+    ) -> None:
+        """Assert the repository's tools are loaded into the expected tool panel section."""
+
+    @abc.abstractmethod
+    def setup(self) -> None:
+        """Set up client interaction."""
+
+    @abc.abstractmethod
+    def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        """Deactivate the supplied repository."""
+
+    @abc.abstractmethod
+    def display_installed_jobs_list_page(
+        self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None
+    ) -> None:
+        """If available, check data manager jobs for supplied strings."""
+
+    @abc.abstractmethod
+    def installed_repository_extended_info(
+        self, installed_repository: galaxy_model.ToolShedRepository
+    ) -> Dict[str, Any]:
+        """Return an extended dictionary describing the installed repository."""
+
+    @abc.abstractmethod
+    def install_repository(
+        self,
+        name: str,
+        owner: str,
+        changeset_revision: str,
+        install_tool_dependencies: bool,
+        install_repository_dependencies: bool,
+        new_tool_panel_section_label: Optional[str],
+    ) -> None:
+        """Install the specified repository revision."""
+
+    @abc.abstractmethod
+    def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        """Reactivate the supplied deactivated repository."""
+
+    @abc.abstractmethod
+    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+        """Reset metadata on the supplied installed repositories."""
+
+    @abc.abstractmethod
+    def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None:
+        """Reset metadata on the supplied installed repository."""
+
+    @abc.abstractmethod
+    def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        """Uninstall the supplied repository."""
+
+    @abc.abstractmethod
+    def update_installed_repository(
+        self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
+    ) -> Dict[str, Any]:
+        """Check the tool shed for updates to the supplied installed repository."""
+
+    @abc.abstractproperty
+    def tool_data_path(self) -> str:
+        """Path to the installation target's tool data directory."""
+
+    @abc.abstractproperty
+    def shed_tool_data_table_conf(self) -> str:
+        """Path to the installation target's shed tool data table configuration."""
+
+    @abc.abstractmethod
+    def get_tool_names(self) -> List[str]:
+        """Return the names of the tools loaded into the installation target."""
+
+    @abc.abstractmethod
+    def get_installed_repository_by_name_owner(
+        self, repository_name: str, repository_owner: str
+    ) -> galaxy_model.ToolShedRepository:
+        """Fetch a single installed repository by name and owner."""
+
+    @abc.abstractmethod
+    def get_installed_repositories_by_name_owner(
+        self, repository_name: str, repository_owner: str
+    ) -> List[galaxy_model.ToolShedRepository]:
+        """Fetch all installed repositories matching name and owner."""
+
+    @abc.abstractmethod
+    def get_installed_repository_for(
+        self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None
+    ) -> Optional[Dict[str, Any]]:
+        """Return a dictionary describing a matching installed repository, if any."""
+
+    @abc.abstractmethod
+    def get_all_installed_repositories(self) ->
List[galaxy_model.ToolShedRepository]:
+        """Return all repositories installed into the target."""
+
+    @abc.abstractmethod
+    def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None:
+        """Refresh the supplied repository object from the database."""
+
+
+class GalaxyInteractorToolShedInstallationClient(ToolShedInstallationClient):
+    """A Galaxy API + database as an installation target for the tool shed."""
+
+    def __init__(self, testcase):
+        self.testcase = testcase
+
+    def setup(self):
+        self._galaxy_login()
+
+    def check_galaxy_repository_tool_panel_section(
+        self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str
+    ) -> None:
+        metadata = repository.metadata_
+        assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}"
+        # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools
+        # from the same repository in different tool panel sections. Getting the first tool guid is ok, because
+        # currently all tools contained in a single repository will be loaded into the same tool panel section.
+        if repository.status in [
+            galaxy_model.ToolShedRepository.installation_status.UNINSTALLED,
+            galaxy_model.ToolShedRepository.installation_status.DEACTIVATED,
+        ]:
+            tool_panel_section = _get_tool_panel_section_from_repository_metadata(metadata)
+        else:
+            tool_panel_section = self._get_tool_panel_section_from_api(metadata)
+        assert (
+            tool_panel_section == expected_tool_panel_section
+        ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n"
+
+    def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        encoded_id = self.testcase.security.encode_id(installed_repository.id)
+        api_key = get_admin_api_key()
+        response = requests.delete(
+            f"{self.testcase.galaxy_url}/api/tool_shed_repositories/{encoded_id}",
+            data={"remove_from_disk": False, "key": api_key},
+            timeout=DEFAULT_SOCKET_TIMEOUT,
+        )
+        assert response.status_code != 403, response.content
+
+    def display_installed_jobs_list_page(
+        self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None
+    ) -> None:
+        data_managers = installed_repository.metadata_.get("data_manager", {}).get("data_managers", {})
+        if data_manager_names:
+            if not isinstance(data_manager_names, list):
+                data_manager_names = [data_manager_names]
+            for data_manager_name in data_manager_names:
+                assert (
+                    data_manager_name in data_managers
+                ), f"The requested Data Manager '{data_manager_name}' was not found in repository metadata."
+        else:
+            data_manager_names = list(data_managers.keys())
+        for data_manager_name in data_manager_names:
+            params = {"id": data_managers[data_manager_name]["guid"]}
+            self._visit_galaxy_url("/data_manager/jobs_list", params=params)
+            content = page_content()
+            for expected in strings_displayed:
+                if content.find(expected) == -1:
+                    raise AssertionError(f"Failed to find pattern {expected} in {content}")
+
+    def installed_repository_extended_info(
+        self, installed_repository: galaxy_model.ToolShedRepository
+    ) -> Dict[str, Any]:
+        params = {"id": self.testcase.security.encode_id(installed_repository.id)}
+        self._visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params)
+        json = page_content()
+        return loads(json)
+
+    def install_repository(
+        self,
+        name: str,
+        owner: str,
+        changeset_revision: str,
+        install_tool_dependencies: bool,
+        install_repository_dependencies: bool,
+        new_tool_panel_section_label: Optional[str],
+    ):
+        payload = {
+            "tool_shed_url": self.testcase.url,
+            "name": name,
+            "owner": owner,
+            "changeset_revision": changeset_revision,
+            "install_tool_dependencies": install_tool_dependencies,
+            "install_repository_dependencies": install_repository_dependencies,
+            "install_resolver_dependencies": False,
+        }
+        if new_tool_panel_section_label:
+            payload["new_tool_panel_section_label"] = new_tool_panel_section_label
+        create_response = self.testcase.galaxy_interactor._post(
+            "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True
+        )
+        assert_status_code_is_ok(create_response)
+        create_response_object = create_response.json()
+        if isinstance(create_response_object, dict):
+            assert "status" in create_response_object
+            assert "ok" == create_response_object["status"]  # repo already installed...
+            return
+        assert isinstance(create_response_object, list)
+        repository_ids = [repo["id"] for repo in create_response.json()]
+        log.debug(f"Waiting for the installation of repository IDs: {repository_ids}")
+        self._wait_for_repository_installation(repository_ids)
+
+    def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        params = dict(id=self.testcase.security.encode_id(installed_repository.id))
+        url = "/admin_toolshed/restore_repository"
+        self._visit_galaxy_url(url, params=params)
+
+    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+        repository_ids = []
+        for repository in repositories:
+            repository_ids.append(self.testcase.security.encode_id(repository.id))
+        api_key = get_admin_api_key()
+        response = requests.post(
+            f"{self.testcase.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories",
+            data={"repository_ids": repository_ids, "key": api_key},
+            timeout=DEFAULT_SOCKET_TIMEOUT,
+        )
+        assert response.status_code != 403, response.content
+
+    def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        encoded_id = self.testcase.security.encode_id(installed_repository.id)
+        api_key = get_admin_api_key()
+        response = requests.delete(
+            f"{self.testcase.galaxy_url}/api/tool_shed_repositories/{encoded_id}",
+            data={"remove_from_disk": True, "key": api_key},
+            timeout=DEFAULT_SOCKET_TIMEOUT,
+        )
+        assert response.status_code != 403, response.content
+
+    def update_installed_repository(
+        self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
+    ) -> Dict[str, Any]:
+        repository_id =
self.testcase.security.encode_id(installed_repository.id) + params = { + "id": repository_id, + } + api_key = get_admin_api_key() + response = requests.get( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/check_for_updates?key={api_key}", + params=params, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + response.raise_for_status() + response_dict = response.json() + if verify_no_updates: + assert "message" in response_dict + message = response_dict["message"] + assert "The status has not changed in the tool shed for repository" in message, str(response_dict) + return response_dict + + def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None: + encoded_id = self.testcase.security.encode_id(repository.id) + api_key = get_admin_api_key() + response = requests.post( + f"{self.testcase.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", + data={"repository_ids": [encoded_id], "key": api_key}, + timeout=DEFAULT_SOCKET_TIMEOUT, + ) + assert response.status_code != 403, response.content + + @property + def tool_data_path(self): + return os.environ.get("GALAXY_TEST_TOOL_DATA_PATH") + + @property + def shed_tool_data_table_conf(self): + return os.environ.get("TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF") + + def get_tool_names(self) -> List[str]: + response = self.testcase.galaxy_interactor._get("tools?in_panel=false") + response.raise_for_status() + tool_list = response.json() + return [t["name"] for t in tool_list] + + def get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + return test_db_util.get_installed_repository_by_name_owner(repository_name, repository_owner) + + def get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, repository_owner, return_multiple=True + ) + + def get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + return self.testcase.get_installed_repository_for(owner=owner, name=name, changeset=changeset) + + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + repositories = test_db_util.get_all_installed_repositories() + for repository in repositories: + test_db_util.ga_refresh(repository) + return repositories + + def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + test_db_util.ga_refresh(repo) + + def _galaxy_login(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): + self._galaxy_logout() + self._create_user_in_galaxy(email=email, password=password, username=username) + params = {"login": email, "password": password, "session_csrf_token": self._galaxy_token()} + self._visit_galaxy_url("/user/login", params=params) + + def _galaxy_logout(self): + self._visit_galaxy_url("/user/logout", params=dict(session_csrf_token=self._galaxy_token())) + + def _create_user_in_galaxy(self, email="test@bx.psu.edu", password="testuser", username="admin-user"): + params = { + "username": username, + "email": email, + "password": password, + "confirm": password, + "session_csrf_token": self._galaxy_token(), + } + self._visit_galaxy_url("/user/create", params=params, allowed_codes=[200, 400]) + + def _galaxy_token(self): + self._visit_galaxy_url("/") + html = page_content() + 
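+        # The Galaxy root page bootstraps client configuration as inline JSON
+        # containing a "session_csrf_token": "<value>" entry; the string
+        # searches below extract the first quoted value following that key.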
token_def_index = html.find("session_csrf_token") + token_sep_index = html.find(":", token_def_index) + token_quote_start_index = html.find('"', token_sep_index) + token_quote_end_index = html.find('"', token_quote_start_index + 1) + token = html[(token_quote_start_index + 1) : token_quote_end_index] + return token + + def _get_tool_panel_section_from_api(self, metadata): + tool_metadata = metadata["tools"] + tool_guid = quote_plus(tool_metadata[0]["guid"], safe="") + api_url = f"/api/tools/{tool_guid}" + self._visit_galaxy_url(api_url) + tool_dict = loads(page_content()) + tool_panel_section = tool_dict["panel_section_name"] + return tool_panel_section + + def _wait_for_repository_installation(self, repository_ids): + # Wait until all repositories are in a final state before returning. This ensures that subsequent tests + # are running against an installed repository, and not one that is still in the process of installing. + if repository_ids: + for repository_id in repository_ids: + galaxy_repository = test_db_util.get_installed_repository_by_id( + self.testcase.security.decode_id(repository_id) + ) + _wait_for_installation(galaxy_repository, test_db_util.ga_refresh) + + def _visit_galaxy_url(self, url, params=None, allowed_codes=None): + if allowed_codes is None: + allowed_codes = [200] + url = f"{self.testcase.galaxy_url}{url}" + url = self.testcase.join_url_and_params(url, params) + return visit_url(url, allowed_codes) + + +class StandaloneToolShedInstallationClient(ToolShedInstallationClient): + def __init__(self, testcase): + self.testcase = testcase + self.temp_directory = Path(tempfile.mkdtemp(prefix="toolshedtestinstalltarget")) + tool_shed_target = ToolShedTarget( + self.testcase.url, + "Tool Shed for Testing", + ) + self._installation_target = StandaloneInstallationTarget(self.temp_directory, tool_shed_target=tool_shed_target) + + def setup(self) -> None: + pass + + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + metadata = repository.metadata_ + assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}" + # TODO: check actual toolbox if tool is already installed... 
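+        # For now the expected section is always derived from repository
+        # metadata (via _get_tool_panel_section_from_repository_metadata),
+        # regardless of the repository's installation status.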
+        tool_panel_section = _get_tool_panel_section_from_repository_metadata(metadata)
+        assert (
+            tool_panel_section == expected_tool_panel_section
+        ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n"
+
+    def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        irm = InstalledRepositoryManager(app=self._installation_target)
+        errors = irm.uninstall_repository(repository=installed_repository, remove_from_disk=False)
+        if errors:
+            raise Exception(
+                f"Attempting to deactivate repository named {installed_repository.name} resulted in errors: {errors}"
+            )
+
+    def display_installed_jobs_list_page(
+        self, installed_repository: galaxy_model.ToolShedRepository, data_manager_names=None, strings_displayed=None
+    ) -> None:
+        raise NotImplementedError()
+
+    def installed_repository_extended_info(
+        self, installed_repository: galaxy_model.ToolShedRepository
+    ) -> Dict[str, Any]:
+        self._installation_target.install_model.context.refresh(installed_repository)
+        return build_manage_repository_dict(self._installation_target, "ok", installed_repository)
+
+    def install_repository(
+        self,
+        name: str,
+        owner: str,
+        changeset_revision: str,
+        install_tool_dependencies: bool,
+        install_repository_dependencies: bool,
+        new_tool_panel_section_label: Optional[str],
+    ):
+        tool_shed_url = self.testcase.url
+        payload = {
+            "tool_shed_url": tool_shed_url,
+            "name": name,
+            "owner": owner,
+            "changeset_revision": changeset_revision,
+            "install_tool_dependencies": install_tool_dependencies,
+            "install_repository_dependencies": install_repository_dependencies,
+            "install_resolver_dependencies": False,
+        }
+        if new_tool_panel_section_label:
+            payload["new_tool_panel_section_label"] = new_tool_panel_section_label
+        irm = InstallRepositoryManager(app=self._installation_target)
+        installed_tool_shed_repositories = irm.install(str(tool_shed_url), name, owner, changeset_revision, payload)
+        for installed_tool_shed_repository in installed_tool_shed_repositories or []:
+            _wait_for_installation(
+                installed_tool_shed_repository, self._installation_target.install_model.context.refresh
+            )
+
+    def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        irm = InstalledRepositoryManager(app=self._installation_target)
+        irm.activate_repository(installed_repository)
+
+    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+        for repository in repositories:
+            irmm = InstalledRepositoryMetadataManager(self._installation_target)
+            irmm.set_repository(repository)
+            irmm.reset_all_metadata_on_installed_repository()
+
+    def reset_installed_repository_metadata(self, repository: galaxy_model.ToolShedRepository) -> None:
+        irmm = InstalledRepositoryMetadataManager(self._installation_target)
+        irmm.set_repository(repository)
+        irmm.reset_all_metadata_on_installed_repository()
+
+    def uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
+        irm = InstalledRepositoryManager(app=self._installation_target)
+        errors = irm.uninstall_repository(repository=installed_repository, remove_from_disk=True)
+        if errors:
+            raise Exception(
+                f"Attempting to uninstall repository named {installed_repository.name} resulted in errors: {errors}"
+            )
+
+    def update_installed_repository(
+        self, installed_repository: galaxy_model.ToolShedRepository,
verify_no_updates: bool = False + ) -> Dict[str, Any]: + message, status = check_for_updates( + self._installation_target.tool_shed_registry, + self._installation_target.install_model.context, + installed_repository.id, + ) + response = CheckForUpdatesResponse(message=message, status=status) + response_dict = response.dict() + if verify_no_updates: + assert "message" in response_dict + message = response_dict["message"] + assert "The status has not changed in the tool shed for repository" in message, str(response_dict) + return response_dict + + def get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, repository_owner, session=self._installation_target.install_model.context + ) + + def get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + return test_db_util.get_installed_repository_by_name_owner( + repository_name, + repository_owner, + return_multiple=True, + session=self._installation_target.install_model.context, + ) + + def get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ) -> Optional[Dict[str, Any]]: + clause_list = [] + if name is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.name == name) + if owner is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.owner == owner) + if changeset is not None: + clause_list.append(galaxy_model.ToolShedRepository.table.c.changeset_revision == changeset) + clause_list.append(galaxy_model.ToolShedRepository.table.c.deleted == false()) + clause_list.append(galaxy_model.ToolShedRepository.table.c.uninstalled == false()) + + query = self._installation_target.install_model.context.query(galaxy_model.ToolShedRepository) + if len(clause_list) > 0: + query = query.filter(and_(*clause_list)) + repository = query.one_or_none() + if repository: + return repository.to_dict() + else: + return None + + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + repositories = test_db_util.get_all_installed_repositories( + session=self._installation_target.install_model.context + ) + for repository in repositories: + self._installation_target.install_model.context.refresh(repository) + return repositories + + def refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + self._installation_target.install_model.context.refresh(repo) + + @property + def shed_tool_data_table_conf(self): + return self._installation_target.config.shed_tool_data_table_config + + @property + def tool_data_path(self): + return self._installation_target.config.tool_data_path + + def get_tool_names(self) -> List[str]: + tool_names = [] + for _, tool in self._installation_target.toolbox.tools(): + tool_names.append(tool.name) + return tool_names + + +@pytest.mark.usefixtures("shed_browser") class ShedTwillTestCase(ShedApiTestCase): """Class of FunctionalTestCase geared toward HTML interactions using the Twill library.""" + requires_galaxy: bool = False + _installation_client = None + __browser: Optional[ShedBrowser] = None + def setUp(self): super().setUp() # Security helper @@ -70,11 +634,37 @@ def setUp(self): self.hgweb_config_manager = hgweb_config.hgweb_config_manager self.hgweb_config_manager.hgweb_config_dir = self.hgweb_config_dir self.tool_shed_test_tmp_dir = 
os.environ.get("TOOL_SHED_TEST_TMP_DIR", None)
-        self.shed_tool_data_table_conf = os.environ.get("TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF")
         self.file_dir = os.environ.get("TOOL_SHED_TEST_FILE_DIR", None)
-        self.tool_data_path = os.environ.get("GALAXY_TEST_TOOL_DATA_PATH")
         self.shed_tool_conf = os.environ.get("GALAXY_TEST_SHED_TOOL_CONF")
         self.test_db_util = test_db_util
+        if os.environ.get("TOOL_SHED_TEST_INSTALL_CLIENT") == "standalone":
+            # TODO: once nose is out of the way - try to get away without
+            # instantiating the unused Galaxy server here.
+            installation_client_class = StandaloneToolShedInstallationClient
+            full_stack_galaxy = False
+        else:
+            installation_client_class = GalaxyInteractorToolShedInstallationClient
+            full_stack_galaxy = True
+        self.full_stack_galaxy = full_stack_galaxy
+        if self.requires_galaxy and (self.__class__._installation_client is None):
+            self.__class__._installation_client = installation_client_class(self)
+            self.__class__._installation_client.setup()
+        self._installation_client = self.__class__._installation_client
+
+    @pytest.fixture(autouse=True)
+    def inject_shed_browser(self, shed_browser: ShedBrowser):
+        self.__browser = shed_browser
+
+    @property
+    def _browser(self) -> ShedBrowser:
+        assert self.__browser
+        return self.__browser
+
+    def _escape_page_content_if_needed(self, content: str) -> str:
+        # if twill browser is being used - replace spaces with "&nbsp;"
+        if self._browser.is_twill:
+            content = content.replace(" ", "&nbsp;")
+        return content
 
     def check_for_strings(self, strings_displayed=None, strings_not_displayed=None):
         strings_displayed = strings_displayed or []
@@ -97,54 +687,59 @@ def check_page(self, strings_displayed, strings_displayed_count, strings_not_dis
 
     def check_page_for_string(self, patt):
         """Looks for 'patt' in the current browser page"""
-        page = unicodify(self.last_page())
-        if page.find(patt) == -1:
-            fname = self.write_temp_file(page)
-            errmsg = f"no match to '{patt}'\npage content written to '{fname}'\npage: [[{page}]]"
-            raise AssertionError(errmsg)
+        self._browser.check_page_for_string(patt)
 
     def check_string_not_in_page(self, patt):
         """Checks to make sure 'patt' is NOT in the page."""
-        page = self.last_page()
-        if page.find(patt) != -1:
-            fname = self.write_temp_file(page)
-            errmsg = f"string ({patt}) incorrectly displayed in page.\npage content written to '{fname}'"
-            raise AssertionError(errmsg)
+        self._browser.check_string_not_in_page(patt)
 
     # Functions associated with user accounts
+    def _submit_register_form(self, email: str, password: str, username: str, redirect: Optional[str] = None):
+        self._browser.fill_form_value("registration", "email", email)
+        if redirect is not None:
+            self._browser.fill_form_value("registration", "redirect", redirect)
+        self._browser.fill_form_value("registration", "password", password)
+        self._browser.fill_form_value("registration", "confirm", password)
+        self._browser.fill_form_value("registration", "username", username)
+        self._browser.submit_form_with_name("registration", "create_user_button")
+
+    @property
+    def invalid_tools_labels(self) -> str:
+        return "Invalid Tools" if self.is_v2 else "Invalid tools"
 
     def create(self, cntrller="user", email="test@bx.psu.edu", password="testuser", username="admin-user", redirect=""):
         # HACK: don't use panels because late_javascripts() messes up the twill browser and it
         # can't find form fields (and hence user can't be logged in).
params = dict(cntrller=cntrller, use_panels=False)
         self.visit_url("/user/create", params)
-        tc.fv("registration", "email", email)
-        tc.fv("registration", "redirect", redirect)
-        tc.fv("registration", "password", password)
-        tc.fv("registration", "confirm", password)
-        tc.fv("registration", "username", username)
-        tc.submit("create_user_button")
+        self._submit_register_form(
+            email,
+            password,
+            username,
+            redirect,
+        )
         previously_created = False
         username_taken = False
         invalid_username = False
-        try:
-            self.check_page_for_string("Created new user account")
-        except AssertionError:
+        if not self.is_v2:
             try:
-                # May have created the account in a previous test run...
-                self.check_page_for_string(f"User with email '{email}' already exists.")
-                previously_created = True
+                self.check_page_for_string("Created new user account")
             except AssertionError:
                 try:
-                    self.check_page_for_string("Public name is taken; please choose another")
-                    username_taken = True
+                    # May have created the account in a previous test run...
+                    self.check_page_for_string(f"User with email '{email}' already exists.")
+                    previously_created = True
                 except AssertionError:
-                    # Note that we're only checking if the usr name is >< 4 chars here...
                     try:
-                        self.check_page_for_string("Public name must be at least 4 characters in length")
-                        invalid_username = True
+                        self.check_page_for_string("Public name is taken; please choose another")
+                        username_taken = True
                     except AssertionError:
-                        pass
+                        # Note that we're only checking if the user name is < 4 chars here...
+                        try:
+                            self.check_page_for_string("Public name must be at least 4 characters in length")
+                            invalid_username = True
+                        except AssertionError:
+                            pass
         return previously_created, username_taken, invalid_username
 
     def last_page(self):
@@ -152,14 +747,27 @@ def last_page(self):
         Return the last visited page (usually HTML, but can binary data as
         well).
         """
-        return tc.browser.html
+        return self._browser.page_content()
 
-    def last_url(self):
-        return tc.browser.url
+    def user_api_interactor(self, email="test@bx.psu.edu", password="testuser"):
+        return self._api_interactor_by_credentials(email, password)
+
+    def user_populator(self, email="test@bx.psu.edu", password="testuser"):
+        return self._get_populator(self.user_api_interactor(email=email, password=password))
 
     def login(
-        self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True
+        self,
+        email: str = "test@bx.psu.edu",
+        password: str = "testuser",
+        username: str = "admin-user",
+        redirect: str = "",
+        logout_first: bool = True,
    ):
+        if self.is_v2:
+            # old version had a logout URL, this one needs to check
+            # page if logged in
+            self.visit_url("/")
+        # Clear cookies.
         if logout_first:
             self.logout()
@@ -167,7 +775,8 @@ def login(
         previously_created, username_taken, invalid_username = self.create(
             email=email, password=password, username=username, redirect=redirect
         )
-        if previously_created:
+        # v2 doesn't log you in on account creation... so force a login here
+        if previously_created or self.is_v2:
             # The acount has previously been created, so just login.
             # HACK: don't use panels because late_javascripts() messes up the twill browser and it
             # can't find form fields (and hence user can't be logged in).
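[Editorial note: the v1/v2 branching above and the installation-client selection in setUp() are both driven by environment variables. A minimal sketch of the client selection, using only names introduced in this diff (illustrative, not part of the changeset):

    import os

    def make_installation_client(testcase):
        # "standalone" installs into a StandaloneInstallationTarget without a
        # running Galaxy server; any other value uses the Galaxy API-backed client.
        if os.environ.get("TOOL_SHED_TEST_INSTALL_CLIENT") == "standalone":
            return StandaloneToolShedInstallationClient(testcase)
        return GalaxyInteractorToolShedInstallationClient(testcase)

Either client satisfies the ToolShedInstallationClient interface, so the same test suite can run against both.]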
@@ -175,73 +784,59 @@ def login(
             self.visit_url("/user/login", params=params)
             self.submit_form(button="login_button", login=email, redirect=redirect, password=password)
 
-    def logout(self):
-        self.visit_url("/user/logout")
-        self.check_page_for_string("You have been logged out")
+    @property
+    def is_v2(self) -> bool:
+        return self.api_interactor.api_version == "v2"
 
-    def showforms(self):
-        """Shows form, helpful for debugging new tests"""
-        return tc.browser.forms
+    @property
+    def _playwright_browser(self) -> PlaywrightShedBrowser:
+        # only meaningful for the v2 UI, which is driven by playwright
+        browser = self._browser
+        assert isinstance(browser, PlaywrightShedBrowser)
+        return browser
+
+    @property
+    def _page(self) -> Page:
+        return self._playwright_browser._page
+
+    def logout(self):
+        if self.is_v2:
+            self._playwright_browser.logout_if_logged_in()
+        else:
+            self.visit_url("/user/logout")
+            self.check_page_for_string("You have been logged out")
 
     def submit_form(self, form_no=-1, button="runtool_btn", form=None, **kwd):
         """Populates and submits a form from the keyword arguments."""
         # An HTMLForm contains a sequence of Controls.  Supported control classes are:
         # TextControl, FileControl, ListControl, RadioControl, CheckboxControl, SelectControl,
         # SubmitControl, ImageControl
-        if form is None:
-            try:
-                form = self.showforms()[form_no]
-            except IndexError:
-                raise ValueError("No form to submit found")
-        controls = {c.name: c for c in form.inputs}
-        form_name = form.get("name")
-        for control_name, control_value in kwd.items():
-            if control_name not in controls:
-                continue  # these cannot be handled safely - cause the test to barf out
-            if not isinstance(control_value, list):
-                control_value = [str(control_value)]
-            control = controls[control_name]
-            control_type = getattr(control, "type", None)
-            if control_type in (
-                "text",
-                "textfield",
-                "submit",
-                "password",
-                "TextareaElement",
-                "checkbox",
-                "radio",
-                None,
-            ):
-                for cv in control_value:
-                    tc.fv(form_name, control.name, cv)
-            else:
-                # Add conditions for other control types here when necessary.
- pass - tc.submit(button) + self._browser.submit_form(form_no, button, form, **kwd) - def visit_url(self, url, params=None, doseq=False, allowed_codes=None): - if allowed_codes is None: - allowed_codes = [200] + def join_url_and_params(self, url: str, params, query=None) -> str: if params is None: params = dict() + if query is None: + query = urlparse(url).query + if query: + for query_parameter in query.split("&"): + key, value = query_parameter.split("=") + params[key] = value + if params: + url += f"?{urlencode(params)}" + return url + + def visit_url(self, url: str, params=None, allowed_codes: Optional[List[int]] = None) -> str: parsed_url = urlparse(url) if len(parsed_url.netloc) == 0: url = f"http://{self.host}:{self.port}{parsed_url.path}" else: url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" - if parsed_url.query: - for query_parameter in parsed_url.query.split("&"): - key, value = query_parameter.split("=") - params[key] = value - if params: - url += f"?{urlencode(params, doseq=doseq)}" - new_url = tc.go(url) - return_code = tc.browser.code - assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format( - return_code, - ", ".join(str(code) for code in allowed_codes), - ) - return new_url + url = self.join_url_and_params(url, params, query=parsed_url.query) + if allowed_codes is None: + allowed_codes = [200] + + return self._browser.visit_url(url, allowed_codes=allowed_codes) def write_temp_file(self, content, suffix=".html"): with tempfile.NamedTemporaryFile(suffix=suffix, prefix="twilltestcase-", delete=False) as fh: @@ -260,12 +855,15 @@ def assign_admin_role(self, repository: Repository, user): self.check_for_strings(strings_displayed=["Role", "has been associated"]) def browse_category(self, category: Category, strings_displayed=None, strings_not_displayed=None): - params = { - "sort": "name", - "operation": "valid_repositories_by_category", - "id": category.id, - } - self.visit_url("/repository/browse_valid_categories", params=params) + if self.is_v2: + self.visit_url(f"/repositories_by_category/{category.id}") + else: + params = { + "sort": "name", + "operation": "valid_repositories_by_category", + "id": category.id, + } + self.visit_url("/repository/browse_valid_categories", params=params) self.check_for_strings(strings_displayed, strings_not_displayed) def browse_repository(self, repository: Repository, strings_displayed=None, strings_not_displayed=None): @@ -279,7 +877,10 @@ def browse_repository_dependencies(self, strings_displayed=None, strings_not_dis self.check_for_strings(strings_displayed, strings_not_displayed) def browse_tool_shed(self, url, strings_displayed=None, strings_not_displayed=None): - url = "/repository/browse_valid_categories" + if self.is_v2: + url = "/repositories_by_category" + else: + url = "/repository/browse_valid_categories" self.visit_url(url) self.check_for_strings(strings_displayed, strings_not_displayed) @@ -305,28 +906,12 @@ def check_for_valid_tools(self, repository, strings_displayed=None, strings_not_ self.display_manage_repository_page(repository, strings_displayed, strings_not_displayed) def check_galaxy_repository_db_status(self, repository_name, owner, expected_status): - installed_repository = test_db_util.get_installed_repository_by_name_owner(repository_name, owner) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, owner) + self._refresh_tool_shed_repository(installed_repository) assert ( installed_repository.status == expected_status ), 
f"Status in database is {installed_repository.status}, expected {expected_status}" - def check_galaxy_repository_tool_panel_section(self, repository, expected_tool_panel_section): - metadata = repository.metadata_ - assert "tools" in metadata, f"Tools not found in repository metadata: {metadata}" - # If integrated_tool_panel.xml is to be tested, this test method will need to be enhanced to handle tools - # from the same repository in different tool panel sections. Getting the first tool guid is ok, because - # currently all tools contained in a single repository will be loaded into the same tool panel section. - if repository.status in [ - galaxy_model.ToolShedRepository.installation_status.UNINSTALLED, - galaxy_model.ToolShedRepository.installation_status.DEACTIVATED, - ]: - tool_panel_section = self.get_tool_panel_section_from_repository_metadata(metadata) - else: - tool_panel_section = self.get_tool_panel_section_from_api(metadata) - assert ( - tool_panel_section == expected_tool_panel_section - ), f"Expected to find tool panel section *{expected_tool_panel_section}*, but instead found *{tool_panel_section}*\nMetadata: {metadata}\n" - def check_repository_changelog(self, repository: Repository, strings_displayed=None, strings_not_displayed=None): params = {"id": repository.id} self.visit_url("/repository/view_changelog", params=params) @@ -335,12 +920,14 @@ def check_repository_changelog(self, repository: Repository, strings_displayed=N def check_repository_dependency( self, repository: Repository, depends_on_repository, depends_on_changeset_revision=None, changeset_revision=None ): - strings_displayed = [depends_on_repository.name, depends_on_repository.owner] - if depends_on_changeset_revision: - strings_displayed.append(depends_on_changeset_revision) - self.display_manage_repository_page( - repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed - ) + if not self.is_v2: + # v2 doesn't display repository repository dependencies, they are deprecated + strings_displayed = [depends_on_repository.name, depends_on_repository.owner] + if depends_on_changeset_revision: + strings_displayed.append(depends_on_changeset_revision) + self.display_manage_repository_page( + repository, changeset_revision=changeset_revision, strings_displayed=strings_displayed + ) def check_repository_metadata(self, repository: Repository, tip_only=True): if tip_only: @@ -429,6 +1016,12 @@ def check_string_count_in_page(self, pattern, min_count, max_count=None): ) raise AssertionError(errmsg) + def check_galaxy_repository_tool_panel_section( + self, repository: galaxy_model.ToolShedRepository, expected_tool_panel_section: str + ) -> None: + assert self._installation_client + self._installation_client.check_galaxy_repository_tool_panel_section(repository, expected_tool_panel_section) + def clone_repository(self, repository: Repository, destination_path: str) -> None: url = f"{self.url}/repos/{repository.owner}/{repository.name}" success, message = hg_util.clone_repository(url, destination_path, self.get_repository_tip(repository)) @@ -450,13 +1043,12 @@ def commit_and_push(self, repository, hgrepo, options, username, password): raise def create_category(self, **kwd) -> Category: - category = self.populator.get_category_with_name(kwd["name"]) + category_name = kwd["name"] + category = self.populator.get_category_with_name(category_name) if category is None: - params = {"operation": "create"} - self.visit_url("/admin/manage_categories", params=params) - 
self.submit_form(button="create_category_button", **kwd) - category = self.populator.get_category_with_name(kwd["name"]) - assert category + # not recreating this functionality in the UI I don't think? + category = self.populator.new_category(category_name) + return category return category def create_repository_dependency( @@ -476,7 +1068,7 @@ def create_repository_dependency( repository_names = [] if complex: filename = "tool_dependencies.xml" - self.generate_complex_dependency_xml( + target = self.generate_complex_dependency_xml( filename=filename, filepath=filepath, repository_tuples=repository_tuples, @@ -488,72 +1080,123 @@ def create_repository_dependency( repository_names.append(name) dependency_description = f"{repository.name} depends on {', '.join(repository_names)}." filename = "repository_dependencies.xml" - self.generate_simple_dependency_xml( + target = self.generate_simple_dependency_xml( repository_tuples=repository_tuples, filename=filename, filepath=filepath, dependency_description=dependency_description, prior_installation_required=prior_installation_required, ) - self.upload_file( - repository, - filename=filename, - filepath=filepath, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message=f"Uploaded dependency on {', '.join(repository_names)}.", - strings_displayed=None, - strings_not_displayed=None, + self.add_file_to_repository(repository, target, filename, strings_displayed=strings_displayed) + + def deactivate_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None: + assert self._installation_client + self._installation_client.deactivate_repository(installed_repository) + + @contextlib.contextmanager + def cloned_repo(self, repository: Repository) -> Iterator[str]: + temp_directory = tempfile.mkdtemp(prefix="toolshedrepowithoutfiles") + try: + self.clone_repository(repository, temp_directory) + shutil.rmtree(os.path.join(temp_directory, ".hg")) + contents = os.listdir(temp_directory) + if len(contents) == 1 and contents[0] == "repo": + yield os.path.join(temp_directory, "repo") + else: + yield temp_directory + finally: + shutil.rmtree(temp_directory) + + def setup_freebayes_0010_repo(self, repository: Repository): + strings_displayed = [ + "Metadata may have been defined", + "This file requires an entry", + "tool_data_table_conf", + ] + self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) + strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed + ) + self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") + target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml") + self.add_file_to_repository( + repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"] ) + target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml") + strings_displayed = [ + "The settings for name, version and type from a contained tool configuration" + ] + # , strings_displayed=strings_displayed + self.add_file_to_repository(repository, target) + target = os.path.join("freebayes", "tool_dependencies.xml") + self.add_file_to_repository(repository, target) - def create_user_in_galaxy( - self, cntrller="user", email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="" + def 
add_file_to_repository( + self, + repository: Repository, + source: str, + target: Optional[str] = None, + strings_displayed=None, + commit_message: Optional[str] = None, ): - params = { - "username": username, - "email": email, - "password": password, - "confirm": password, - "session_csrf_token": self.galaxy_token(), - } - self.visit_galaxy_url("/user/create", params=params, allowed_codes=[200, 400]) + with self.cloned_repo(repository) as temp_directory: + if target is None: + target = os.path.basename(source) + full_target = os.path.join(temp_directory, target) + full_source = TEST_DATA_REPO_FILES.joinpath(source) + shutil.copyfile(str(full_source), full_target) + commit_message = commit_message or "Uploaded revision with added file." + self._upload_dir_to_repository( + repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed + ) - def deactivate_repository(self, installed_repository, strings_displayed=None, strings_not_displayed=None): - encoded_id = self.security.encode_id(installed_repository.id) - api_key = get_admin_api_key() - response = requests.delete( - f"{self.galaxy_url}/api/tool_shed_repositories/{encoded_id}", - data={"remove_from_disk": False, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content + def add_tar_to_repository(self, repository: Repository, source: str, strings_displayed=None): + with self.cloned_repo(repository) as temp_directory: + full_source = TEST_DATA_REPO_FILES.joinpath(source) + tar = tar_open(full_source) + tar.extractall(path=temp_directory) + tar.close() + commit_message = "Uploaded revision with added files from tar." + self._upload_dir_to_repository( + repository, temp_directory, commit_message=commit_message, strings_displayed=strings_displayed + ) + + def commit_tar_to_repository( + self, repository: Repository, source: str, commit_message=None, strings_displayed=None + ): + full_source = TEST_DATA_REPO_FILES.joinpath(source) + assert full_source.is_file(), f"Attempting to upload {full_source} as a tar which is not a file" + populator = self.user_populator() + if strings_displayed is None: + # Just assume this is a valid upload... 
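+            # (no expected strings were supplied, so any upload failure will
+            # surface as an error from upload_revision itself)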
+ populator.upload_revision(repository, full_source, commit_message=commit_message) + else: + response = populator.upload_revision_raw(repository, full_source, commit_message=commit_message) + try: + text = response.json()["message"] + except Exception: + text = response.text + for string_displayed in strings_displayed: + if string_displayed not in text: + raise AssertionError(f"Failed to find {string_displayed} in JSON response {text}") def delete_files_from_repository(self, repository: Repository, filenames: List[str]): - temp_directory = tempfile.mkdtemp(prefix="toolshedrepowithoutfiles") - try: - self.clone_repository(repository, temp_directory) + with self.cloned_repo(repository) as temp_directory: for filename in filenames: to_delete = os.path.join(temp_directory, filename) os.remove(to_delete) - shutil.rmtree(os.path.join(temp_directory, ".hg")) - tf = tempfile.NamedTemporaryFile() - with tarfile.open(tf.name, "w:gz") as tar: - tar.add(temp_directory, arcname="repo") - target = os.path.abspath(tf.name) - self.upload_file( - repository, - filename=os.path.basename(target), - filepath=os.path.dirname(target), - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded revision with deleted files.", - strings_displayed=[], - strings_not_displayed=[], - ) - finally: - shutil.rmtree(temp_directory) + commit_message = "Uploaded revision with deleted files." + self._upload_dir_to_repository(repository, temp_directory, commit_message=commit_message) + + def _upload_dir_to_repository(self, repository: Repository, target, commit_message, strings_displayed=None): + tf = tempfile.NamedTemporaryFile() + with tarfile.open(tf.name, "w:gz") as tar: + tar.add(target, arcname=".") + target = os.path.abspath(tf.name) + self.commit_tar_to_repository( + repository, target, commit_message=commit_message, strings_displayed=strings_displayed + ) def delete_repository(self, repository: Repository) -> None: repository_id = repository.id @@ -565,27 +1208,14 @@ def delete_repository(self, repository: Repository) -> None: self.check_for_strings(strings_displayed, strings_not_displayed) def display_installed_jobs_list_page(self, installed_repository, data_manager_names=None, strings_displayed=None): - data_managers = installed_repository.metadata_.get("data_manager", {}).get("data_managers", {}) - if data_manager_names: - if not isinstance(data_manager_names, list): - data_manager_names = [data_manager_names] - for data_manager_name in data_manager_names: - assert ( - data_manager_name in data_managers - ), f"The requested Data Manager '{data_manager_name}' was not found in repository metadata." 
- else: - data_manager_name = list(data_managers.keys()) - for data_manager_name in data_manager_names: - params = {"id": data_managers[data_manager_name]["guid"]} - self.visit_galaxy_url("/data_manager/jobs_list", params=params) - self.check_for_strings(strings_displayed) + assert self._installation_client + self._installation_client.display_installed_jobs_list_page( + installed_repository, data_manager_names, strings_displayed + ) def display_installed_repository_manage_json(self, installed_repository): - params = {"id": self.security.encode_id(installed_repository.id)} - self.visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params) - import json - - return json.loads(self.last_page()) + assert self._installation_client + return self._installation_client.installed_repository_extended_info(installed_repository) def display_manage_repository_page( self, repository: Repository, changeset_revision=None, strings_displayed=None, strings_not_displayed=None @@ -593,7 +1223,10 @@ def display_manage_repository_page( params = {"id": repository.id} if changeset_revision: params["changeset_revision"] = changeset_revision - self.visit_url("/repository/manage_repository", params=params) + url = "/repository/manage_repository" + if self.is_v2: + url = f"/repositories/{repository.id}" + self.visit_url(url, params=params) self.check_for_strings(strings_displayed, strings_not_displayed) def display_repository_clone_page( @@ -623,33 +1256,15 @@ def display_repository_file_contents( self.check_for_strings(strings_displayed, strings_not_displayed) def edit_repository_categories( - self, repository: Repository, categories_to_add=None, categories_to_remove=None, restore_original=True + self, + repository: Repository, + categories_to_add: List[str], + categories_to_remove: List[str], + restore_original=True, ) -> None: - categories_to_add = categories_to_add or [] - categories_to_remove = categories_to_remove or [] params = {"id": repository.id} self.visit_url("/repository/manage_repository", params=params) - strings_displayed = [] - strings_not_displayed = [] - for category in categories_to_add: - tc.fv("2", "category_id", f"+{category}") - strings_displayed.append(f"selected>{category}") - for category in categories_to_remove: - tc.fv("2", "category_id", f"-{category}") - strings_not_displayed.append(f"selected>{category}") - tc.submit("manage_categories_button") - self.check_for_strings(strings_displayed, strings_not_displayed) - if restore_original: - strings_displayed = [] - strings_not_displayed = [] - for category in categories_to_remove: - tc.fv("2", "category_id", f"+{category}") - strings_displayed.append(f"selected>{category}") - for category in categories_to_add: - tc.fv("2", "category_id", f"-{category}") - strings_not_displayed.append(f"selected>{category}") - tc.submit("manage_categories_button") - self.check_for_strings(strings_displayed, strings_not_displayed) + self._browser.edit_repository_categories(categories_to_add, categories_to_remove) def edit_repository_information(self, repository: Repository, revert=True, **kwd): params = {"id": repository.id} @@ -663,17 +1278,21 @@ def edit_repository_information(self, repository: Repository, revert=True, **kwd strings_displayed = [] for input_elem_name in ["repo_name", "description", "long_description", "repository_type"]: if input_elem_name in kwd: - tc.fv("edit_repository", input_elem_name, kwd[input_elem_name]) + self._browser.fill_form_value("edit_repository", input_elem_name, kwd[input_elem_name]) 
strings_displayed.append(self.escape_html(kwd[input_elem_name]))
-        tc.submit("edit_repository_button")
-        self.check_for_strings(strings_displayed)
+        self._browser.submit_form_with_name("edit_repository", "edit_repository_button")
+        # TODO: come back to this (and similar conditional below), the problem is check
+        # for strings isn't working with textboxes I think?
+        if self._browser.is_twill:
+            self.check_for_strings(strings_displayed)
         if revert:
             strings_displayed = []
             for input_elem_name in ["repo_name", "description", "long_description"]:
-                tc.fv("edit_repository", input_elem_name, original_information[input_elem_name])
+                self._browser.fill_form_value("edit_repository", input_elem_name, original_information[input_elem_name])
                 strings_displayed.append(self.escape_html(original_information[input_elem_name]))
-            tc.submit("edit_repository_button")
-            self.check_for_strings(strings_displayed)
+            self._browser.submit_form_with_name("edit_repository", "edit_repository_button")
+            if self._browser.is_twill:
+                self.check_for_strings(strings_displayed)
 
     def enable_email_alerts(self, repository: Repository, strings_displayed=None, strings_not_displayed=None) -> None:
         repository_id = repository.id
@@ -701,28 +1320,6 @@ def fetch_repository_metadata(self, repository: Repository, strings_displayed=No
         self.visit_url(url)
         self.check_for_strings(strings_displayed, strings_not_displayed)
 
-    def galaxy_token(self):
-        self.visit_galaxy_url("/")
-        html = self.last_page()
-        token_def_index = html.find("session_csrf_token")
-        token_sep_index = html.find(":", token_def_index)
-        token_quote_start_index = html.find('"', token_sep_index)
-        token_quote_end_index = html.find('"', token_quote_start_index + 1)
-        token = html[(token_quote_start_index + 1) : token_quote_end_index]
-        return token
-
-    def galaxy_login(
-        self, email="test@bx.psu.edu", password="testuser", username="admin-user", redirect="", logout_first=True
-    ):
-        if logout_first:
-            self.galaxy_logout()
-        self.create_user_in_galaxy(email=email, password=password, username=username, redirect=redirect)
-        params = {"login": email, "password": password, "session_csrf_token": self.galaxy_token()}
-        self.visit_galaxy_url("/user/login", params=params)
-
-    def galaxy_logout(self):
-        self.visit_galaxy_url("/user/logout", params=dict(session_csrf_token=self.galaxy_token()))
-
     def generate_complex_dependency_xml(self, filename, filepath, repository_tuples, package, version):
         file_path = os.path.join(filepath, filename)
         dependency_entries = []
@@ -745,6 +1342,7 @@ def generate_complex_dependency_xml(self, filename, filepath, repository_tuples,
         )
         # Save the generated xml to the specified location.
         open(file_path, "w").write(repository_dependency_xml)
+        return file_path
 
     def generate_simple_dependency_xml(
         self,
@@ -786,6 +1384,7 @@ def generate_simple_dependency_xml(
         # Save the generated xml to the specified location.
full_path = os.path.join(filepath, filename) open(full_path, "w").write(repository_dependency_xml) + return full_path def generate_temp_path(self, test_script_path, additional_paths=None): additional_paths = additional_paths or [] @@ -794,6 +1393,10 @@ def generate_temp_path(self, test_script_path, additional_paths=None): os.makedirs(temp_path) return temp_path + def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]: + assert self._installation_client + return self._installation_client.get_all_installed_repositories() + def get_filename(self, filename, filepath=None): if filepath is not None: return os.path.abspath(os.path.join(filepath, filename)) @@ -917,6 +1520,10 @@ def get_repository_tip(self, repository: Repository) -> str: repo = self.get_hg_repo(self.get_repo_path(repository)) return str(repo[repo.changelog.tip()]) + def get_repository_first_revision(self, repository: Repository) -> str: + repo = self.get_hg_repo(self.get_repo_path(repository)) + return str(repo[0]) + def _get_metadata_revision_count(self, repository: Repository) -> int: repostiory_metadata: RepositoryMetadata = self.populator.get_metadata(repository, downloadable_only=False) return len(repostiory_metadata.__root__) @@ -942,28 +1549,6 @@ def get_tools_from_repository_metadata(self, repository, include_invalid=False): ) return valid_tools, invalid_tools - def get_tool_panel_section_from_api(self, metadata): - tool_metadata = metadata["tools"] - tool_guid = quote_plus(tool_metadata[0]["guid"], safe="") - api_url = f"/api/tools/{tool_guid}" - self.visit_galaxy_url(api_url) - tool_dict = loads(self.last_page()) - tool_panel_section = tool_dict["panel_section_name"] - return tool_panel_section - - def get_tool_panel_section_from_repository_metadata(self, metadata): - tool_metadata = metadata["tools"] - tool_guid = tool_metadata[0]["guid"] - assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" - tool_panel_section_metadata = metadata["tool_panel_section"] - # tool_section_dict = dict( tool_config=guids_and_configs[ guid ], - # id=section_id, - # name=section_name, - # version=section_version ) - # This dict is appended to tool_panel_section_metadata[ tool_guid ] - tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] - return tool_panel_section - def grant_role_to_user(self, user, role): strings_displayed = [self.security.encode_id(role.id), role.name] strings_not_displayed = [] @@ -999,9 +1584,7 @@ def grant_write_access( usernames = usernames or [] self.display_manage_repository_page(repository) self.check_for_strings(strings_displayed, strings_not_displayed) - for username in usernames: - tc.fv("user_access", "allow_push", f"+{username}") - tc.submit("user_access_button") + self._browser.grant_users_access(usernames) self.check_for_strings(post_submit_strings_displayed, post_submit_strings_not_displayed) def _install_repository( @@ -1025,30 +1608,15 @@ def _install_repository( # repository_id = repository.id if changeset_revision is None: changeset_revision = self.get_repository_tip(repository) - payload = { - "tool_shed_url": self.url, - "name": name, - "owner": owner, - "changeset_revision": changeset_revision, - "install_tool_dependencies": install_tool_dependencies, - "install_repository_dependencies": install_repository_dependencies, - "install_resolver_dependencies": False, - } - if new_tool_panel_section_label: - payload["new_tool_panel_section_label"] = new_tool_panel_section_label - create_response = 
self.galaxy_interactor._post( - "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True + assert self._installation_client + self._installation_client.install_repository( + name, + owner, + changeset_revision, + install_tool_dependencies, + install_repository_dependencies, + new_tool_panel_section_label, ) - assert_status_code_is_ok(create_response) - create_response_object = create_response.json() - if isinstance(create_response_object, dict): - assert "status" in create_response_object - assert "ok" == create_response_object["status"] # repo already installed... - return - assert isinstance(create_response_object, list) - repository_ids = [repo["id"] for repo in create_response.json()] - log.debug(f"Waiting for the installation of repository IDs: {repository_ids}") - self.wait_for_repository_installation(repository_ids) def load_citable_url( self, @@ -1074,17 +1642,20 @@ def load_citable_url( url += f"/{changeset_revision}" self.visit_url(url) self.check_for_strings(strings_displayed, strings_not_displayed) - # Now load the page that should be displayed inside the iframe and check for strings. - if encoded_repository_id: - params = {"id": encoded_repository_id, "operation": "view_or_manage_repository"} - if changeset_revision: - params["changeset_revision"] = changeset_revision - self.visit_url("/repository/view_repository", params=params) - self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) - elif encoded_user_id: - params = {"user_id": encoded_user_id, "operation": "repositories_by_user"} - self.visit_url("/repository/browse_repositories", params=params) + if self.is_v2: self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) + else: + # Now load the page that should be displayed inside the iframe and check for strings. + if encoded_repository_id: + params = {"id": encoded_repository_id, "operation": "view_or_manage_repository"} + if changeset_revision: + params["changeset_revision"] = changeset_revision + self.visit_url("/repository/view_repository", params=params) + self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) + elif encoded_user_id: + params = {"user_id": encoded_user_id, "operation": "repositories_by_user"} + self.visit_url("/repository/browse_repositories", params=params) + self.check_for_strings(strings_displayed_in_iframe, strings_not_displayed_in_iframe) def load_changeset_in_tool_shed( self, repository_id, changeset_revision, strings_displayed=None, strings_not_displayed=None @@ -1150,9 +1721,7 @@ def preview_repository_in_tool_shed( self.check_for_strings(strings_displayed, strings_not_displayed) def reactivate_repository(self, installed_repository): - params = dict(id=self.security.encode_id(installed_repository.id)) - url = "/admin_toolshed/restore_repository" - self.visit_galaxy_url(url, params=params) + self._installation_client.reactivate_repository(installed_repository) def reinstall_repository_api( self, @@ -1163,59 +1732,32 @@ def reinstall_repository_api( ): name = installed_repository.name owner = installed_repository.owner - payload = { - "tool_shed_url": self.url, # wish this used tool_shed. 
- "name": name, - "owner": owner, - "changeset_revision": installed_repository.installed_changeset_revision, - "install_tool_dependencies": install_tool_dependencies, - "install_repository_dependencies": install_repository_dependencies, - "install_resolver_dependencies": False, - } - if new_tool_panel_section_label: - payload["new_tool_panel_section_label"] = new_tool_panel_section_label - create_response = self.galaxy_interactor._post( - "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True + self._installation_client.install_repository( + name, + owner, + installed_repository.installed_changeset_revision, + install_tool_dependencies, + install_repository_dependencies, + new_tool_panel_section_label, ) - assert_status_code_is_ok(create_response) - create_response_object = create_response.json() - if isinstance(create_response_object, dict): - assert "status" in create_response_object - assert "ok" == create_response_object["status"] # repo already installed... - return - assert isinstance(create_response_object, list) - repository_ids = [repo["id"] for repo in create_response.json()] - log.debug(f"Waiting for the installation of repository IDs: {repository_ids}") - self.wait_for_repository_installation(repository_ids) def repository_is_new(self, repository: Repository) -> bool: repo = self.get_hg_repo(self.get_repo_path(repository)) tip_ctx = repo[repo.changelog.tip()] return tip_ctx.rev() < 0 - def reset_installed_repository_metadata(self, repository): - encoded_id = self.security.encode_id(repository.id) - api_key = get_admin_api_key() - response = requests.post( - f"{self.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", - data={"repository_ids": [encoded_id], "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content - def reset_metadata_on_selected_repositories(self, repository_ids): - self.visit_url("/admin/reset_metadata_on_selected_repositories_in_tool_shed") - kwd = dict(repository_ids=repository_ids) - self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) + if self.is_v2: + for repository_id in repository_ids: + self.populator.reset_metadata(repository_id) + else: + self.visit_url("/admin/reset_metadata_on_selected_repositories_in_tool_shed") + kwd = dict(repository_ids=repository_ids) + self.submit_form(button="reset_metadata_on_selected_repositories_button", **kwd) - def reset_metadata_on_selected_installed_repositories(self, repository_ids): - api_key = get_admin_api_key() - response = requests.post( - f"{self.galaxy_url}/api/tool_shed_repositories/reset_metadata_on_selected_installed_repositories", - data={"repository_ids": repository_ids, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content + def reset_metadata_on_installed_repositories(self, repositories): + assert self._installation_client + self._installation_client.reset_metadata_on_installed_repositories(repositories) def reset_repository_metadata(self, repository): params = {"id": repository.id} @@ -1240,44 +1782,11 @@ def search_for_valid_tools( params["galaxy_url"] = self.galaxy_url for field_name, search_string in search_fields.items(): self.visit_url("/repository/find_tools", params=params) - tc.fv("1", "exact_matches", exact_matches) - tc.fv("1", field_name, search_string) - tc.submit() + self._browser.fill_form_value("find_tools", "exact_matches", exact_matches) + self._browser.fill_form_value("find_tools", 
field_name, search_string) + self._browser.submit_form_with_name("find_tools", "find_tools_submit") self.check_for_strings(strings_displayed, strings_not_displayed) - def send_message_to_repository_owner( - self, - repository: Repository, - message: str, - strings_displayed=None, - strings_not_displayed=None, - post_submit_strings_displayed=None, - post_submit_strings_not_displayed=None, - ) -> None: - params = {"id": repository.id} - self.visit_url("/repository/contact_owner", params=params) - self.check_for_strings(strings_displayed, strings_not_displayed) - tc.fv(1, "message", message) - tc.submit() - self.check_for_strings(post_submit_strings_displayed, post_submit_strings_not_displayed) - - def set_form_value(self, form, kwd, field_name, field_value): - """ - Set the form field field_name to field_value if it exists, and return the provided dict containing that value. If - the field does not exist in the provided form, return a dict without that index. - """ - form_id = form.attrib.get("id") - controls = [control for control in form.inputs if str(control.name) == field_name] - if len(controls) > 0: - log.debug(f"Setting field {field_name} of form {form_id} to {field_value}.") - tc.formvalue(form_id, field_name, str(field_value)) - kwd[field_name] = str(field_value) - else: - if field_name in kwd: - log.debug("No field %s in form %s, discarding from return value.", field_name, form_id) - del kwd[field_name] - return kwd - def set_repository_deprecated( self, repository: Repository, set_deprecated=True, strings_displayed=None, strings_not_displayed=None ): @@ -1289,8 +1798,8 @@ def set_repository_malicious( self, repository: Repository, set_malicious=True, strings_displayed=None, strings_not_displayed=None ) -> None: self.display_manage_repository_page(repository) - tc.fv("malicious", "malicious", set_malicious) - tc.submit("malicious_button") + self._browser.fill_form_value("malicious", "malicious", set_malicious) + self._browser.submit_form_with_name("malicious", "malicious_button") self.check_for_strings(strings_displayed, strings_not_displayed) def tip_has_metadata(self, repository: Repository) -> bool: @@ -1305,140 +1814,32 @@ def undelete_repository(self, repository: Repository) -> None: strings_not_displayed: List[str] = [] self.check_for_strings(strings_displayed, strings_not_displayed) - def uninstall_repository(self, installed_repository, strings_displayed=None, strings_not_displayed=None): - encoded_id = self.security.encode_id(installed_repository.id) - api_key = get_admin_api_key() - response = requests.delete( - f"{self.galaxy_url}/api/tool_shed_repositories/{encoded_id}", - data={"remove_from_disk": True, "key": api_key}, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - assert response.status_code != 403, response.content - - def update_installed_repository_api(self, installed_repository, verify_no_updates=False): - repository_id = self.security.encode_id(installed_repository.id) - params = { - "id": repository_id, - } - api_key = get_admin_api_key() - response = requests.get( - f"{self.galaxy_url}/api/tool_shed_repositories/check_for_updates?key={api_key}", - params=params, - timeout=DEFAULT_SOCKET_TIMEOUT, - ) - response.raise_for_status() - response_dict = response.json() - if verify_no_updates: - assert "message" in response_dict - message = response_dict["message"] - assert "The status has not changed in the tool shed for repository" in message, str(response_dict) - return response_dict + def _uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) 
-> None: + assert self._installation_client + self._installation_client.uninstall_repository(installed_repository) - def upload_file( - self, - repository: Repository, - filename, - filepath, - valid_tools_only, - uncompress_file, - remove_repo_files_not_in_tar, - commit_message, - strings_displayed=None, - strings_not_displayed=None, - ): - if strings_displayed is None: - strings_displayed = [] - if strings_not_displayed is None: - strings_not_displayed = [] - removed_message = "files were removed from the repository" - if remove_repo_files_not_in_tar: - if not self.repository_is_new(repository): - if removed_message not in strings_displayed: - strings_displayed.append(removed_message) - else: - if removed_message not in strings_not_displayed: - strings_not_displayed.append(removed_message) - params = {"repository_id": repository.id} - self.visit_url("/upload/upload", params=params) - if valid_tools_only: - strings_displayed.extend(["has been successfully", "uploaded to the repository."]) - tc.formfile("1", "file_data", self.get_filename(filename, filepath)) - if uncompress_file: - tc.fv(1, "uncompress_file", "Yes") - else: - tc.fv(1, "uncompress_file", "No") - if not self.repository_is_new(repository): - if remove_repo_files_not_in_tar: - tc.fv(1, "remove_repo_files_not_in_tar", "Yes") - else: - tc.fv(1, "remove_repo_files_not_in_tar", "No") - tc.fv(1, "commit_message", commit_message) - tc.submit("upload_button") - self.check_for_strings(strings_displayed, strings_not_displayed) - # Uncomment this if it becomes necessary to wait for an asynchronous process to complete after submitting an upload. - # for i in range( 5 ): - # try: - # self.check_for_strings( strings_displayed, strings_not_displayed ) - # break - # except Exception as e: - # if i == 4: - # raise e - # else: - # time.sleep( 1 ) - # continue - - def upload_url( - self, - repository, - url, - filepath, - valid_tools_only, - uncompress_file, - remove_repo_files_not_in_tar, - commit_message, - strings_displayed=None, - strings_not_displayed=None, - ): - removed_message = "files were removed from the repository" - if remove_repo_files_not_in_tar: - if not self.repository_is_new(repository): - if removed_message not in strings_displayed: - strings_displayed.append(removed_message) - else: - if removed_message not in strings_not_displayed: - strings_not_displayed.append(removed_message) - params = {"repository_id": repository.id} - self.visit_url("/upload/upload", params=params) - if valid_tools_only: - strings_displayed.extend(["has been successfully", "uploaded to the repository."]) - tc.fv("1", "url", url) - if uncompress_file: - tc.fv(1, "uncompress_file", "Yes") - else: - tc.fv(1, "uncompress_file", "No") - if not self.repository_is_new(repository): - if remove_repo_files_not_in_tar: - tc.fv(1, "remove_repo_files_not_in_tar", "Yes") - else: - tc.fv(1, "remove_repo_files_not_in_tar", "No") - tc.fv(1, "commit_message", commit_message) - tc.submit("upload_button") - self.check_for_strings(strings_displayed, strings_not_displayed) + def update_installed_repository( + self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False + ) -> Dict[str, Any]: + assert self._installation_client + return self._installation_client.update_installed_repository(installed_repository, verify_no_updates=verify_no_updates) def verify_installed_repositories(self, installed_repositories=None, uninstalled_repositories=None): installed_repositories = installed_repositories or [] uninstalled_repositories = uninstalled_repositories or
[] for repository_name, repository_owner in installed_repositories: - galaxy_repository = test_db_util.get_installed_repository_by_name_owner(repository_name, repository_owner) + galaxy_repository = self._get_installed_repository_by_name_owner(repository_name, repository_owner) if galaxy_repository: assert ( galaxy_repository.status == "Installed" ), f"Repository {repository_name} should be installed, but is {galaxy_repository.status}" def verify_installed_repository_metadata_unchanged(self, name, owner): - installed_repository = test_db_util.get_installed_repository_by_name_owner(name, owner) + installed_repository = self._get_installed_repository_by_name_owner(name, owner) + assert installed_repository metadata = installed_repository.metadata_ - self.reset_installed_repository_metadata(installed_repository) + assert self._installation_client + self._installation_client.reset_installed_repository_metadata(installed_repository) new_metadata = installed_repository.metadata_ assert metadata == new_metadata, f"Metadata for installed repository {name} differs after metadata reset." @@ -1447,9 +1848,27 @@ def verify_installed_repository_no_tool_panel_section(self, repository): metadata = repository.metadata_ assert "tool_panel_section" not in metadata, f"Tool panel section incorrectly found in metadata: {metadata}" + @property + def shed_tool_data_table_conf(self): + return self._installation_client.shed_tool_data_table_conf + + @property + def tool_data_path(self): + return self._installation_client.tool_data_path + + def _refresh_tool_shed_repository(self, repo: galaxy_model.ToolShedRepository) -> None: + assert self._installation_client + self._installation_client.refresh_tool_shed_repository(repo) + def verify_installed_repository_data_table_entries(self, required_data_table_entries): # The value of the received required_data_table_entries will be something like: [ 'sam_fa_indexes' ] - data_tables, error_message = xml_util.parse_xml(self.shed_tool_data_table_conf) + shed_tool_data_table_conf = self.shed_tool_data_table_conf + data_tables, error_message = xml_util.parse_xml(shed_tool_data_table_conf) + with open(shed_tool_data_table_conf) as f: + shed_tool_data_table_conf_contents = f.read() + assert ( + not error_message + ), f"Failed to parse {shed_tool_data_table_conf} properly. File contents [{shed_tool_data_table_conf_contents}]" found = False # With the tool shed, the "path" attribute that is hard-coded into the tool_data_tble_conf.xml # file is ignored. This is because the tool shed requires the directory location to which this @@ -1505,15 +1924,41 @@ def verify_installed_repository_data_table_entries(self, required_data_table_ent break # We better have an entry like: <table comment_char="#" name="sam_fa_indexes"> in our parsed data_tables # or we know that the repository was not correctly installed! - assert found, f"No entry for {required_data_table_entry} in {self.shed_tool_data_table_conf}." + if not found: + if required_data_table_entry is None: + raise AssertionError( + f"No tables found in {shed_tool_data_table_conf}. File contents {shed_tool_data_table_conf_contents}" + ) + else: + raise AssertionError( + f"No entry for {required_data_table_entry} in {shed_tool_data_table_conf}. 
File contents {shed_tool_data_table_conf_contents}" + ) + + def _get_installed_repository_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> galaxy_model.ToolShedRepository: + assert self._installation_client + return self._installation_client.get_installed_repository_by_name_owner(repository_name, repository_owner) + + def _get_installed_repositories_by_name_owner( + self, repository_name: str, repository_owner: str + ) -> List[galaxy_model.ToolShedRepository]: + assert self._installation_client + return self._installation_client.get_installed_repositories_by_name_owner(repository_name, repository_owner) + + def _get_installed_repository_for( + self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None + ): + assert self._installation_client + return self._installation_client.get_installed_repository_for(owner=owner, name=name, changeset=changeset) def _assert_has_installed_repos_with_names(self, *names): for name in names: - assert self.get_installed_repository_for(name=name) + assert self._get_installed_repository_for(name=name) def _assert_has_no_installed_repos_with_names(self, *names): for name in names: - assert not self.get_installed_repository_for(name=name) + assert not self._get_installed_repository_for(name=name) def _assert_has_missing_dependency( self, installed_repository: galaxy_model.ToolShedRepository, repository_name: str @@ -1543,9 +1988,9 @@ def _assert_has_installed_repository_dependency( changeset: Optional[str] = None, ) -> None: json = self.display_installed_repository_manage_json(installed_repository) - assert "repository_dependencies" in json, ( - "No repository dependencies were defined in %s." % installed_repository.name - ) + if "repository_dependencies" not in json: + name = installed_repository.name + raise AssertionError(f"No repository dependencies were defined in {name}. manage json is {json}") repository_dependencies = json["repository_dependencies"] found = False for folder in repository_dependencies.get("folders"): @@ -1577,11 +2022,9 @@ def _assert_is_not_missing_dependency( def _assert_has_valid_tool_with_name(self, tool_name: str) -> None: def assert_has(): - response = self.galaxy_interactor._get("tools?in_panel=false") - response.raise_for_status() - tool_list = response.json() - tool_list = [t for t in tool_list if t["name"] == tool_name] - assert tool_list + assert self._installation_client + tool_names = self._installation_client.get_tool_names() + assert tool_name in tool_names # May need to wait on toolbox reload. wait_on_assertion(assert_has, f"toolbox to contain {tool_name}", 10) @@ -1624,31 +2067,32 @@ def verify_unchanged_repository_metadata(self, repository: Repository): # or if the number of keys differs. assert old_metadata == new_metadata, f"Metadata changed after reset on repository {repository.name}." - def visit_galaxy_url(self, url, params=None, doseq=False, allowed_codes=None): - if allowed_codes is None: - allowed_codes = [200] - url = f"{self.galaxy_url}{url}" - self.visit_url(url, params=params, doseq=doseq, allowed_codes=allowed_codes) - def wait_for_repository_installation(self, repository_ids): - final_states = [ - galaxy_model.ToolShedRepository.installation_status.ERROR, - galaxy_model.ToolShedRepository.installation_status.INSTALLED, - ] - # Wait until all repositories are in a final state before returning. This ensures that subsequent tests - # are running against an installed repository, and not one that is still in the process of installing. 
- if repository_ids: - for repository_id in repository_ids: - galaxy_repository = test_db_util.get_installed_repository_by_id(self.security.decode_id(repository_id)) - timeout_counter = 0 - while galaxy_repository.status not in final_states: - test_db_util.ga_refresh(galaxy_repository) - timeout_counter = timeout_counter + 1 - # This timeout currently defaults to 10 minutes. - if timeout_counter > repository_installation_timeout: - raise AssertionError( - "Repository installation timed out, %d seconds elapsed, repository state is %s." - % (timeout_counter, galaxy_repository.status) - ) - break - time.sleep(1) +def _wait_for_installation(repository: galaxy_model.ToolShedRepository, refresh): + final_states = [ + galaxy_model.ToolShedRepository.installation_status.ERROR, + galaxy_model.ToolShedRepository.installation_status.INSTALLED, + ] + # Wait until the repository is in a final state before returning. This ensures that subsequent tests + # are running against an installed repository, and not one that is still in the process of installing. + timeout_counter = 0 + while repository.status not in final_states: + refresh(repository) + timeout_counter = timeout_counter + 1 + # This timeout currently defaults to 10 minutes. + if timeout_counter > repository_installation_timeout: + raise AssertionError( + "Repository installation timed out, %d seconds elapsed, repository state is %s." + % (timeout_counter, repository.status) + ) + time.sleep(1) + + +def _get_tool_panel_section_from_repository_metadata(metadata): + tool_metadata = metadata["tools"] + tool_guid = tool_metadata[0]["guid"] + assert "tool_panel_section" in metadata, f"Tool panel section not found in metadata: {metadata}" + tool_panel_section_metadata = metadata["tool_panel_section"] + tool_panel_section = tool_panel_section_metadata[tool_guid][0]["name"] + return tool_panel_section diff --git a/lib/tool_shed/test/functional/api_notes.md b/lib/tool_shed/test/functional/api_notes.md index 0e225c08478c..527a71f4a2e9 100644 --- a/lib/tool_shed/test/functional/api_notes.md +++ b/lib/tool_shed/test/functional/api_notes.md @@ -23,7 +23,6 @@ for deletion instead of writing new tests and modernizing the API. - reset_metadata_on_repositories - remove_repository_registry_entry -- get_repository_revision_install_info - get_installable_revisions - The whole Groups API. - The whole Repository Revisions API. @@ -41,4 +40,10 @@ for deletion instead of writing new tests and modernizing the API. | repositories/{repository_id}/changeset_revision | NO | YES | NO | YES | NO | | | POST repositories | NO | YES | NO | YES | NO | | | GET repositories (without search query) | ? | ? |? | True | True | | +| GET /repositories/updates/ | YES | NO | NO | NO | YES | | + Research if searching by tool_ids is used with the repository index API. + +Added in: +- https://github.com/galaxyproject/galaxy/pull/3626/files +- Likely no longer used? 
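The refactored `_wait_for_installation` helper above polls a single repository, via a caller-supplied `refresh` callback, until it reaches a final state. A minimal sketch of wiring it up; the `session` object here is an assumption (a SQLAlchemy-style session standing in for whatever refresh mechanism the installation client actually provides):

```python
# Sketch only: driving _wait_for_installation with a refresh callback.
# Assumes this code lives in the same module as _wait_for_installation
# and that `session` is a SQLAlchemy session (hypothetical stand-in).
from galaxy import model as galaxy_model


def wait_for_installed(session, repository: galaxy_model.ToolShedRepository) -> None:
    def refresh(repo: galaxy_model.ToolShedRepository) -> None:
        # Re-read the row so status changes made by the installing
        # Galaxy process become visible to the polling loop.
        session.refresh(repo)

    # Raises AssertionError if the repository is still installing after
    # repository_installation_timeout iterations (roughly one per second).
    _wait_for_installation(repository, refresh)
```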
diff --git a/lib/tool_shed/test/functional/conftest.py b/lib/tool_shed/test/functional/conftest.py new file mode 100644 index 000000000000..9798b868a212 --- /dev/null +++ b/lib/tool_shed/test/functional/conftest.py @@ -0,0 +1,46 @@ +import os +from typing import ( + Any, + Dict, + Generator, +) + +import pytest +from playwright.sync_api import ( + Browser, + BrowserContext, +) +from typing_extensions import Literal + +from ..base.browser import ShedBrowser +from ..base.playwrightbrowser import PlaywrightShedBrowser +from ..base.twillbrowser import TwillShedBrowser + +DEFAULT_BROWSER: Literal["twill", "playwright"] = "playwright" + + +def twill_browser() -> Generator[ShedBrowser, None, None]: + yield TwillShedBrowser() + + +def playwright_browser(class_context: BrowserContext) -> Generator[ShedBrowser, None, None]: + page = class_context.new_page() + yield PlaywrightShedBrowser(page) + + +if os.environ.get("TOOL_SHED_TEST_BROWSER", DEFAULT_BROWSER) == "twill": + shed_browser = pytest.fixture(scope="class")(twill_browser) +else: + shed_browser = pytest.fixture(scope="class")(playwright_browser) + + +@pytest.fixture(scope="class") +def class_context( + browser: Browser, + browser_context_args: Dict, + pytestconfig: Any, + request: pytest.FixtureRequest, +) -> Generator[BrowserContext, None, None]: + from pytest_playwright.pytest_playwright import context + + yield from context.__pytest_wrapped__.obj(browser, browser_context_args, pytestconfig, request) diff --git a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py index 1bd1e51948fd..c34727257c17 100644 --- a/lib/tool_shed/test/functional/test_0000_basic_repository_features.py +++ b/lib/tool_shed/test/functional/test_0000_basic_repository_features.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -21,6 +22,9 @@ def test_0000_initiate_users(self): self.login(email=common.test_user_2_email, username=common.test_user_2_name) self.login(email=common.admin_email, username=common.admin_username) + @skip_if_api_v2 + # not replicating the functionality in tool shed 2.0, use Planemo + # to create repositories. 
def test_0005_create_repository_without_categories(self): """Verify that a repository cannot be created unless at least one category has been defined.""" strings_displayed = ["No categories have been configured in this instance of the Galaxy Tool Shed"] @@ -69,6 +73,7 @@ def test_0025_change_repository_category(self): categories_to_remove=["Test 0000 Basic Repository Features 1"], ) + @skip_if_api_v2 def test_0030_grant_write_access(self): """Grant write access to another user""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -78,16 +83,8 @@ def test_0030_grant_write_access(self): def test_0035_upload_filtering_1_1_0(self): """Upload filtering_1.1.0.tar to the repository""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded filtering 1.1.0", - strings_displayed=[], - strings_not_displayed=[], + self.commit_tar_to_repository( + repository, "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0" ) def test_0040_verify_repository(self): @@ -116,12 +113,19 @@ def test_0040_verify_repository(self): self.browse_repository( repository, strings_displayed=[f"Repository '{repository.name}' revision", "(repository tip)"] ) + strings = ["Uploaded filtering 1.1.0"] + if self._browser.is_twill: + # this appears in a link - it isn't how one would check this + # in playwright. But also we're testing the mercurial page + # here so this is probably a questionable check overall. + strings += [latest_changeset_revision] self.display_repository_clone_page( common.test_user_1_name, repository_name, - strings_displayed=["Uploaded filtering 1.1.0", latest_changeset_revision], + strings_displayed=strings, ) + @skip_if_api_v2 def test_0045_alter_repository_states(self): """Test toggling the malicious and deprecated repository flags.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -140,89 +144,68 @@ def test_0045_alter_repository_states(self): self.display_manage_repository_page( repository, strings_displayed=strings_displayed, - strings_not_displayed=["Upload files", "Reset all repository metadata"], + strings_not_displayed=["Reset all repository metadata"], ) - self.browse_repository(repository, strings_not_displayed=["Upload files"]) + self.browse_repository(repository) self.set_repository_deprecated( repository, strings_displayed=["has been marked as not deprecated"], set_deprecated=False ) - strings_displayed = ["Mark repository as deprecated", "Upload files", "Reset all repository metadata"] + strings_displayed = ["Mark repository as deprecated", "Reset all repository metadata"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + @skip_if_api_v2 + # probably not porting this functionality - just test + # with Twill for older UI and drop when that is all dropped def test_0050_display_repository_tip_file(self): """Display the contents of filtering.xml in the repository tip revision""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) assert repository - self.display_repository_file_contents( - repository=repository, - filename="filtering.xml", - filepath=None, - strings_displayed=["1.1.0"], - strings_not_displayed=[], - ) + if self._browser.is_twill: + 
self.display_repository_file_contents( + repository=repository, + filename="filtering.xml", + filepath=None, + strings_displayed=["1.1.0"], + strings_not_displayed=[], + ) def test_0055_upload_filtering_txt_file(self): """Upload filtering.txt file associated with tool version 1.1.0.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering.txt", - strings_displayed=[], - strings_not_displayed=[], - ) - self.display_manage_repository_page( - repository, strings_displayed=["Readme file for filtering 1.1.0"] - ) + self.add_file_to_repository(repository, "filtering/filtering_0000.txt") + expected = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") + self.display_manage_repository_page(repository, strings_displayed=[expected]) def test_0060_upload_filtering_test_data(self): """Upload filtering test data.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_test_data.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering test data", - strings_displayed=[], - strings_not_displayed=[], - ) - self.display_repository_file_contents( - repository=repository, - filename="1.bed", - filepath="test-data", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_tar_to_repository(repository, "filtering/filtering_test_data.tar") + if self._browser.is_twill: + # probably not porting this functionality - just test + # with Twill for older UI and drop when that is all dropped + self.display_repository_file_contents( + repository=repository, + filename="1.bed", + filepath="test-data", + strings_displayed=[], + strings_not_displayed=[], + ) self.check_repository_metadata(repository, tip_only=True) def test_0065_upload_filtering_2_2_0(self): """Upload filtering version 2.2.0""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0070_verify_filtering_repository(self): """Verify the new tool versions and repository metadata.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) tip = self.get_repository_tip(repository) self.check_for_valid_tools(repository) - strings_displayed = ["Select a revision"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["Select a revision"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) self.check_count_of_metadata_revisions_associated_with_repository(repository, metadata_count=2) tool_guid = f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/filtering_0000/Filter1/2.2.0" @@ -245,26 +228,16 @@ def test_0070_verify_filtering_repository(self): def test_0075_upload_readme_txt_file(self): """Upload readme.txt file associated with tool version 2.2.0.""" repository = 
self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme.txt", - strings_displayed=[], - strings_not_displayed=[], - ) - self.display_manage_repository_page( - repository, strings_displayed=["This is a readme file."] - ) + self.add_file_to_repository(repository, "readme.txt") + content = self._escape_page_content_if_needed("This is a readme file.") + self.display_manage_repository_page(repository, strings_displayed=[content]) # Verify that there is a different readme file for each metadata revision. + readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") self.display_manage_repository_page( repository, strings_displayed=[ - "Readme file for filtering 1.1.0", - "This is a readme file.", + readme_content, + content, ], ) @@ -273,10 +246,10 @@ def test_0080_delete_readme_txt_file(self): repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.delete_files_from_repository(repository, filenames=["readme.txt"]) self.check_count_of_metadata_revisions_associated_with_repository(repository, metadata_count=2) - self.display_manage_repository_page( - repository, strings_displayed=["Readme file for filtering 1.1.0"] - ) + readme_content = self._escape_page_content_if_needed("Readme file for filtering 1.1.0") + self.display_manage_repository_page(repository, strings_displayed=[readme_content]) + @skip_if_api_v2 # not re-implemented in the UI, there are API tests though def test_0085_search_for_valid_filter_tool(self): """Search for the filtering tool by tool ID, name, and version.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -311,46 +284,35 @@ def test_0100_verify_reserved_username_handling(self): self.login(email="baduser@bx.psu.edu", username="repos") test_user_1 = self.test_db_util.get_user("baduser@bx.psu.edu") assert test_user_1 is None, 'Creating user with public name "repos" succeeded.' - error_message = ( - "The term 'repos' is a reserved word in the Tool Shed, so it cannot be used as a public user name." - ) - self.check_for_strings(strings_displayed=[error_message]) + if not self.is_v2: + # no longer use this terminology but the above test case ensures + # the important thing and caught a bug in v2 + error_message = ( + "The term 'repos' is a reserved word in the Tool Shed, so it cannot be used as a public user name." + ) + self.check_for_strings(strings_displayed=[error_message]) def test_0105_contact_repository_owner(self): - """Fill out and submit the form to contact the owner of a repository. - - This test should not actually send the email, since functional tests are designed to function without - any external network connection. The embedded tool shed server these tests are running against has been configured - with an SMTP server address that will not and should not resolve correctly. However, since the successful sending of - the email is the last step in the process, this will verify functional correctness of all preceding steps. - """ - self.login(email=common.test_user_2_email, username=common.test_user_2_name) - repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - message = "This is a test message." 
- strings_displayed = [ - "Contact the owner of the repository named", - repository.name, - "streamline appropriate communication", - ] - post_submit_strings_displayed = ["An error occurred sending your message by email"] - self.send_message_to_repository_owner( - repository=repository, - message=message, - strings_displayed=strings_displayed, - post_submit_strings_displayed=post_submit_strings_displayed, - ) + """""" + # We no longer implement this. + pass + @skip_if_api_v2 # v2 doesn't implement deleting repositories def test_0110_delete_filtering_repository(self): """Delete the filtering_0000 repository and verify that it no longer has any downloadable revisions.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) self.delete_repository(repository) + metadata = self._populator.get_metadata(repository, downloadable_only=False) + for _, value in metadata.__root__.items(): + assert not value.downloadable # Explicitly reload all metadata revisions from the database, to ensure that we have the current status of the downloadable flag. # for metadata_revision in repository.metadata_revisions: # self.test_db_util.refresh(metadata_revision) # Marking a repository as deleted should result in no metadata revisions being downloadable. - assert True not in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions] + # assert True not in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions] + @skip_if_api_v2 # v2 doesn't implement deleting repositories def test_0115_undelete_filtering_repository(self): """Undelete the filtering_0000 repository and verify that it now has two downloadable revisions.""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) @@ -364,6 +326,7 @@ def test_0115_undelete_filtering_repository(self): assert True in [metadata.downloadable for metadata in self._db_repository(repository).metadata_revisions] assert len(self._db_repository(repository).downloadable_revisions) == 2 + @skip_if_api_v2 # not re-implementing in tool shed 2.0 def test_0120_enable_email_notifications(self): """Enable email notifications for test user 2 on filtering_0000.""" # Log in as test_user_2 @@ -378,20 +341,9 @@ def test_0125_upload_new_readme_file(self): self.login(email=common.test_user_1_email, username=common.test_user_1_name) repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) # Upload readme.txt to the filtering_0000 repository and verify that it is now displayed. 
- self.upload_file( - repository, - filename="filtering/readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new readme.txt with invalid ascii characters.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.display_manage_repository_page( - repository, strings_displayed=["These characters should not"] - ) + self.add_file_to_repository(repository, "filtering/readme.txt") + content = self._escape_page_content_if_needed("These characters should not") + self.display_manage_repository_page(repository, strings_displayed=[content]) def test_0130_verify_handling_of_invalid_characters(self): """Load the above changeset in the change log and confirm that there is no server error displayed.""" @@ -408,13 +360,14 @@ def test_0130_verify_handling_of_invalid_characters(self): break # Check for the changeset revision, repository name, owner username, 'repos' in the clone url, and the captured # unicode decoding error message. + content = self._escape_page_content_if_needed("These characters should not") strings_displayed = [ "%d:%s" % (revision_number, revision_hash), "filtering_0000", "user1", "repos", "added:", - "+These characters should not", + f"+{content}", ] self.load_changeset_in_tool_shed(repository_id, changeset_revision, strings_displayed=strings_displayed) @@ -429,8 +382,11 @@ def test_0140_view_invalid_changeset(self): """View repository using an invalid changeset""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) encoded_repository_id = repository.id + assert encoded_repository_id strings_displayed = ["Invalid+changeset+revision"] - self.visit_url( + view_repo_url = ( f"/repository/view_repository?id={encoded_repository_id}&changeset_revision=nonsensical_changeset" ) - self.check_for_strings(strings_displayed=strings_displayed, strings_not_displayed=[]) + self.visit_url(view_repo_url) + if self._browser.is_twill: + self.check_for_strings(strings_displayed=strings_displayed, strings_not_displayed=[]) diff --git a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py index f4fb76e6acc7..4b9d27ae19d7 100644 --- a/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_0010_repository_with_tool_dependencies.py @@ -1,5 +1,6 @@ import os +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -53,19 +54,13 @@ def test_0010_create_freebayes_repository_and_upload_tool_xml(self): strings_displayed=[], ) assert repository - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=["Metadata may have been defined", "This file requires an entry", "tool_data_table_conf"], - strings_not_displayed=[], - ) + strings_displayed = ["Metadata may have been defined", "This file requires an entry", "tool_data_table_conf"] + self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) + if self.is_v2: + # oops... not good, right? 
+ self.populator.reset_metadata(repository) self.display_manage_repository_page( - repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"] + repository, strings_displayed=[self.invalid_tools_labels], strings_not_displayed=["Valid tools"] ) tip = self.get_repository_tip(repository) strings_displayed = ["requires an entry", "tool_data_table_conf.xml"] @@ -78,19 +73,12 @@ def test_0015_upload_missing_tool_data_table_conf_file(self): Uploading the tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], + strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed ) self.display_manage_repository_page( - repository, strings_displayed=["Invalid tools"], strings_not_displayed=["Valid tools"] + repository, strings_displayed=[self.invalid_tools_labels], strings_not_displayed=["Valid tools"] ) tip = self.get_repository_tip(repository) strings_displayed = ["refers to a file", "sam_fa_indices.loc"] @@ -103,17 +91,7 @@ def test_0020_upload_missing_sample_loc_file(self): Uploading the tool_data_table_conf.xml.sample alone should not make the tool valid, but the error message should change. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") def test_0025_upload_malformed_tool_dependency_xml(self): """Upload tool_dependencies.xml with bad characters in the readme tag. @@ -122,16 +100,9 @@ def test_0025_upload_malformed_tool_dependency_xml(self): Upload a tool_dependencies.xml file that contains <> in the text of the readme tag. This should show an error message about malformed xml. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], + target = os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml") + self.add_file_to_repository( + repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"] ) def test_0030_upload_invalid_tool_dependency_xml(self): @@ -141,19 +112,11 @@ def test_0030_upload_invalid_tool_dependency_xml(self): This should result in a message about the tool dependency configuration not matching the tool's requirements. 
""" repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool configuration" - ], - strings_not_displayed=[], - ) + target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml") + strings_displayed = [ + "The settings for name, version and type from a contained tool configuration" + ] + self.add_file_to_repository(repository, target, strings_displayed=strings_displayed) def test_0035_upload_valid_tool_dependency_xml(self): """Upload tool_dependencies.xml defining version 0.9.4_9696d0ce8a962f7bb61c4791be5ce44312b81cf8 of the freebayes package. @@ -162,18 +125,10 @@ def test_0035_upload_valid_tool_dependency_xml(self): At this stage, there should be no errors on the upload page, as every missing or invalid file has been corrected. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], - ) + target = os.path.join("freebayes", "tool_dependencies.xml") + self.add_file_to_repository(repository, target) + @skip_if_api_v2 def test_0040_verify_tool_dependencies(self): """Verify that the uploaded tool_dependencies.xml specifies the correct package versions. 
@@ -182,7 +137,7 @@ def test_0040_verify_tool_dependencies(self): """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) strings_displayed = ["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Valid tools", "package"] - strings_not_displayed = ["Invalid tools"] + strings_not_displayed = [self.invalid_tools_labels] self.display_manage_repository_page( repository, strings_displayed=strings_displayed, strings_not_displayed=strings_not_displayed ) diff --git a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py index bd4fcd822d3a..f4fce544de59 100644 --- a/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0020_basic_repository_dependencies.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -38,17 +39,7 @@ def test_0010_create_column_maker_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.commit_tar_to_repository(column_maker_repository, "column_maker/column_maker.tar") def test_0020_create_emboss_5_repository_and_upload_files(self): """Create and populate the emboss_5_0020 repository.""" @@ -61,17 +52,7 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], - ) + self.commit_tar_to_repository(repository, "emboss/emboss.tar") def test_0025_generate_and_upload_repository_dependencies_xml(self): """Generate and upload the repository_dependencies.xml file""" @@ -90,6 +71,7 @@ def test_0025_generate_and_upload_repository_dependencies_xml(self): repository=repository, repository_tuples=[repository_tuple], filepath=repository_dependencies_path ) + @skip_if_api_v2 def test_0030_verify_emboss_5_dependencies(self): """Verify that the emboss_5 repository now depends on the emboss_datatypes repository with correct name, owner, and changeset revision.""" repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py index 17a403bea00f..8436d84d8794 100644 --- a/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_0030_repository_dependency_revisions.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -39,17 +40,7 @@ def test_0010_create_emboss_5_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( - repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], 
- ) + self.commit_tar_to_repository(repository, "emboss/emboss.tar", commit_message="Uploaded tool tarball.") def test_0015_create_emboss_6_repository(self): """Create and populate the emboss_6_0030 repository.""" @@ -62,16 +53,10 @@ def test_0015_create_emboss_6_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_dependent_repository(self): @@ -86,16 +71,10 @@ def test_0020_create_dependent_repository(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_emboss_repository(self): @@ -109,16 +88,10 @@ def test_0025_create_emboss_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_generate_repository_dependencies_for_emboss_5(self): @@ -189,6 +162,7 @@ def test_0045_generate_repository_dependency_on_emboss_6(self): repository=emboss_repository, repository_tuples=[emboss_tuple], filepath=repository_dependencies_path ) + @skip_if_api_v2 def test_0050_verify_repository_dependency_revisions(self): """Verify that different metadata revisions of the emboss repository have different repository dependencies.""" repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py index c469597b738b..c1a5f0de3315 100644 --- a/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_0040_repository_circular_dependencies.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -42,16 +43,10 @@ def test_0010_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -66,16 +61,10 @@ def test_0015_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + 
"filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_dependency_on_freebayes(self): @@ -141,6 +130,7 @@ def test_0035_verify_repository_metadata(self): for repository in [freebayes_repository, filtering_repository]: self.verify_unchanged_repository_metadata(repository) + @skip_if_api_v2 def test_0040_verify_tool_dependencies(self): """Verify that freebayes displays tool dependencies.""" repository = self._get_repository_by_name_and_owner(freebayes_repository_name, common.test_user_1_name) diff --git a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py index 1178402c6b2c..c774cf278cf3 100644 --- a/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_0050_circular_dependencies_4_levels.py @@ -1,3 +1,4 @@ +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -51,16 +52,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -74,16 +69,12 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + assert repository + assert repository.id + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_emboss_datatypes_repository(self): @@ -101,16 +92,10 @@ def test_0020_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -124,16 +109,10 @@ def test_0025_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -147,16 +126,10 @@ def test_0030_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded 
freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_bismark_repository(self): @@ -170,17 +143,7 @@ def test_0035_create_bismark_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0040_create_and_upload_dependency_definitions(self): column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name) @@ -283,9 +246,11 @@ def test_0045_verify_repository_dependencies(self): self.check_repository_dependency(filtering_repository, emboss_repository) for repository in [bismark_repository, emboss_repository, column_repository]: self.check_repository_dependency(freebayes_repository, repository) - strings_displayed = ["freebayes_0050 depends on freebayes_0050, emboss_0050, column_maker_0050."] - self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + if not self.is_v2: + strings_displayed = ["freebayes_0050 depends on freebayes_0050, emboss_0050, column_maker_0050."] + self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + @skip_if_api_v2 def test_0050_verify_tool_dependencies(self): """Check that freebayes and emboss display tool dependencies.""" freebayes_repository = self._get_repository_by_name_and_owner( diff --git a/lib/tool_shed/test/functional/test_0070_invalid_tool.py b/lib/tool_shed/test/functional/test_0070_invalid_tool.py index b3fcd3842d5f..df577f7ea4d1 100644 --- a/lib/tool_shed/test/functional/test_0070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_0070_invalid_tool.py @@ -30,30 +30,9 @@ def test_0005_create_category_and_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.display_manage_repository_page(repository, strings_displayed=["Invalid tools"]) - invalid_revision = self.get_repository_tip(repository) - self.upload_file( - repository, - filename="bismark/bismark_methylation_extractor.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded an updated tool xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository) + invalid_revision = self.get_repository_first_revision(repository) + self.display_manage_repository_page(repository, strings_displayed=[self.invalid_tools_labels]) valid_revision = self.get_repository_tip(repository) tool_guid = f"{self.url.replace('http://', '').rstrip('/')}/repos/user1/bismark_0070/bismark_methylation_extractor/0.7.7.3" tool_metadata_strings_displayed = [ diff --git a/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py b/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py index c00941209860..1d063d2e0f9d 100644 --- a/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py +++ 
b/lib/tool_shed/test/functional/test_0080_advanced_circular_dependencies.py @@ -35,16 +35,10 @@ def test_0005_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0005_create_convert_repository(self): @@ -60,16 +54,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependencies(self): diff --git a/lib/tool_shed/test/functional/test_0090_tool_search.py b/lib/tool_shed/test/functional/test_0090_tool_search.py index 566629d42fb5..2ceb5c982509 100644 --- a/lib/tool_shed/test/functional/test_0090_tool_search.py +++ b/lib/tool_shed/test/functional/test_0090_tool_search.py @@ -47,16 +47,10 @@ def test_0005_create_bwa_base_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/bwa_base.tar", commit_message="Uploaded BWA tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_bwa_color_repository(self): @@ -71,16 +65,10 @@ def test_0010_create_bwa_color_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/bwa_color.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/bwa_color.tar", commit_message="Uploaded BWA color tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_repository(self): @@ -94,16 +82,10 @@ def test_0020_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -117,16 +99,10 @@ def test_0025_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -140,16 +116,10 @@ def test_0030_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - 
filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_and_upload_dependency_definitions(self): diff --git a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py index f7841a849445..fb7625860558 100644 --- a/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0100_complex_repository_dependencies.py @@ -1,6 +1,7 @@ import logging import os +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -43,21 +44,12 @@ def test_0005_create_bwa_package_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], - ) - # Visit the manage repository page for package_bwa_0_5_9_0100. - self.display_manage_repository_page( - repository, strings_displayed=["Tool dependencies", "will not be", "to this repository"] - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") + if not self.is_v2: + # Visit the manage repository page for package_bwa_0_5_9_0100. + self.display_manage_repository_page( + repository, strings_displayed=["Tool dependencies", "will not be", "to this repository"] + ) def test_0010_create_bwa_base_repository(self): """Create and populate bwa_base_0100.""" @@ -73,16 +65,10 @@ def test_0010_create_bwa_base_repository(self): strings_displayed=[], ) # Populate the repository named bwa_base_repository_0100 with a bwa_base tool archive. - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_generate_complex_repository_dependency_invalid_shed_url(self): @@ -199,10 +185,12 @@ def test_0035_generate_complex_repository_dependency(self): version="0.5.9", ) self.check_repository_dependency(base_repository, depends_on_repository=tool_repository) - self.display_manage_repository_page( - base_repository, strings_displayed=["bwa", "0.5.9", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + base_repository, strings_displayed=["bwa", "0.5.9", "package", changeset_revision] + ) + @skip_if_api_v2 def test_0040_generate_tool_dependency(self): """Generate and upload a new tool_dependencies.xml file that specifies an arbitrary file on the filesystem, and verify that bwa_base depends on the new changeset revision.""" # The base_repository named bwa_base_repository_0100 is the dependent repository. 
@@ -217,17 +205,7 @@ def test_0040_generate_tool_dependency(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - tool_repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new tool_dependencies.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(tool_repository, xml_filename, "tool_dependencies.xml") # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file. repository_tip = self.get_repository_tip(tool_repository) strings_displayed = ["bwa", "0.5.9", "package"] diff --git a/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py b/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py index a2762fe0c007..60b4ad2a0912 100644 --- a/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_0110_invalid_simple_repository_dependencies.py @@ -39,16 +39,10 @@ def test_0010_create_emboss_datatypes_repository_and_upload_tarball(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_5_repository_and_upload_files(self): @@ -62,16 +56,10 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_generate_repository_dependency_with_invalid_url(self): diff --git a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py index 670f61f1a25a..71cd3322379f 100644 --- a/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_0120_simple_repository_dependency_multiple_owners.py @@ -51,16 +51,10 @@ def test_0005_create_datatypes_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blast_datatypes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blast_datatypes.tar", commit_message="Uploaded blast_datatypes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_datatypes_repository(self): @@ -71,16 +65,8 @@ def test_0010_verify_datatypes_repository(self): the datatypes that are defined in datatypes_conf.xml. 
""" repository = self._get_repository_by_name_and_owner(datatypes_repository_name, common.test_user_2_name) - strings_displayed = [ - "BlastXml", - "BlastNucDb", - "BlastProtDb", - "application/xml", - "text/html", - "blastxml", - "blastdbn", - "blastdbp", - ] + # v2 rightfully doesn't display anything about datatypes... + strings_displayed = ["Galaxy datatypes for the BLAST top hit"] self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0015_create_tool_repository(self): @@ -100,16 +86,10 @@ def test_0015_create_tool_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blastxml_to_top_descr.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blastxml_to_top_descr.tar", commit_message="Uploaded blastxml_to_top_descr tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_verify_tool_repository(self): @@ -120,7 +100,9 @@ def test_0020_verify_tool_repository(self): """ repository = self._get_repository_by_name_and_owner(tool_repository_name, common.test_user_1_name) strings_displayed = ["blastxml_to_top_descr_0120", "BLAST top hit descriptions", "Make a table from BLAST XML"] - strings_displayed.extend(["0.0.1", "Valid tools"]) + strings_displayed.append("0.0.1") + if not self.is_v2: + strings_displayed.append("Valid tools") self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0025_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0140_tool_help_images.py b/lib/tool_shed/test/functional/test_0140_tool_help_images.py index 1bc8e1050b8c..1a8247747700 100644 --- a/lib/tool_shed/test/functional/test_0140_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_0140_tool_help_images.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -50,18 +51,13 @@ def test_0005_create_htseq_count_repository(self): strings_displayed=[], ) # Upload htseq_count.tar to the repository. - self.upload_file( + self.commit_tar_to_repository( repository, - filename="htseq_count/htseq_count.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "htseq_count/htseq_count.tar", commit_message="Uploaded htseq_count.tar.", - strings_displayed=[], - strings_not_displayed=[], ) + @skip_if_api_v2 def test_0010_load_tool_page(self): """Load the tool page and check for the image. 
diff --git a/lib/tool_shed/test/functional/test_0150_prior_installation_required.py b/lib/tool_shed/test/functional/test_0150_prior_installation_required.py index e8d832eede17..22800d42c359 100644 --- a/lib/tool_shed/test/functional/test_0150_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0150_prior_installation_required.py @@ -49,16 +49,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -72,16 +66,10 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py index 822758a6a712..5ce2a7d0f7d6 100644 --- a/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0160_circular_prior_installation_required.py @@ -48,16 +48,10 @@ def test_0005_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -71,16 +65,11 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -94,16 +83,10 @@ def test_0015_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependency(self): diff --git a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py index e37888b4e02f..d0d1b3b1bad4 100644 --- 
a/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_0170_complex_prior_installation_required.py @@ -52,16 +52,11 @@ def test_0005_create_matplotlib_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_matplotlib/package_matplotlib_1_2.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_matplotlib/package_matplotlib_1_2.tar", commit_message="Uploaded matplotlib tool dependency tarball.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], + strings_displayed=["tool_dependencies.xml"], ) def test_0010_create_numpy_repository(self): @@ -79,16 +74,10 @@ def test_0010_create_numpy_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_numpy/package_numpy_1_7.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_numpy/package_numpy_1_7.tar", commit_message="Uploaded numpy tool dependency tarball.", - strings_displayed=["This repository currently contains a single file named tool_dependencies.xml"], - strings_not_displayed=[], ) def test_0015_create_complex_repository_dependency(self): @@ -120,17 +109,7 @@ def test_0015_create_complex_repository_dependency(self): new_xml_file = os.path.join(dependency_xml_path, "tool_dependencies.xml") open(new_xml_file, "w").write(original_xml.replace("", processed_xml)) # Upload the generated complex repository dependency XML to the matplotlib repository. - self.upload_file( - matplotlib_repository, - filename="tool_dependencies.xml", - filepath=dependency_xml_path, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency on numpy 1.7.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(matplotlib_repository, new_xml_file, "tool_dependencies.xml") def test_0020_verify_generated_dependency(self): """Verify that matplotlib now has a package tool dependency and a complex repository dependency. 
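The other recurring move in these files: assertions that scrape rendered repository pages are valid only against the v1 Twill UI, so they are wrapped in an "if not self.is_v2:" guard rather than deleted, while API-backed checks stay unconditional. The next hunk is a representative instance; reassembled as straight Python (is_v2 is the test-case property these hunks rely on, defined outside this diff):

    changeset_revision = self.get_repository_tip(numpy_repository)
    # API-backed assertion: holds for both Tool Shed API versions.
    self.check_repository_dependency(matplotlib_repository, depends_on_repository=numpy_repository)
    if not self.is_v2:
        # Page-scraping assertion: the v2 frontend renders this view differently.
        self.display_manage_repository_page(
            matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision]
        )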
@@ -146,6 +125,7 @@ def test_0020_verify_generated_dependency(self): ) changeset_revision = self.get_repository_tip(numpy_repository) self.check_repository_dependency(matplotlib_repository, depends_on_repository=numpy_repository) - self.display_manage_repository_page( - matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] + ) diff --git a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py index c920dd5c58ff..f20c29b08cbd 100644 --- a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py @@ -65,28 +65,12 @@ def test_0005_create_filtering_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0010_create_freebayes_repository(self): """Create and populate the freebayes_0010 repository.""" @@ -106,50 +90,7 @@ def test_0010_create_freebayes_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded freebayes.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_data_table_conf.xml.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded sam_fa_indices.loc.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0015_create_datatypes_0020_repository(self): """Create and populate the column_maker_0020 repository.""" @@ -169,16 +110,10 @@ def test_0015_create_datatypes_0020_repository(self): category=category_0020, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_0020_repository(self): @@ -199,16 +134,10 @@ def test_0020_create_emboss_0020_repository(self): category=category_0020, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_emboss_datatypes_0030_repository(self): @@ -229,16 +158,10 @@ def test_0025_create_emboss_datatypes_0030_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_emboss_5_repository(self): @@ -259,16 +182,10 @@ def test_0030_create_emboss_5_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_emboss_6_repository(self): @@ -289,16 +206,10 @@ def test_0035_create_emboss_6_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0040_create_emboss_0030_repository(self): @@ -319,16 +230,10 @@ def test_0040_create_emboss_0030_repository(self): category=category_0030, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0045_create_repository_dependencies_for_0030(self): @@ -395,16 +300,10 @@ def test_0050_create_freebayes_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0055_create_filtering_repository(self): @@ -425,16 +324,10 @@ def test_0055_create_filtering_repository(self): category=category_0040, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - 
remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0060_create_dependency_structure(self): @@ -482,16 +375,10 @@ def test_0065_create_convert_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0070_create_column_repository(self): @@ -509,16 +396,10 @@ def test_0070_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0075_create_emboss_datatypes_repository(self): @@ -539,16 +420,10 @@ def test_0080_create_emboss_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0085_create_filtering_repository(self): @@ -566,16 +441,10 @@ def test_0085_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0090_create_freebayes_repository(self): @@ -593,16 +462,10 @@ def test_0090_create_freebayes_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0095_create_bismark_repository(self): @@ -620,17 +483,7 @@ def test_0095_create_bismark_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0100_create_and_upload_dependency_definitions(self): """Create the dependency structure for test 0050.""" diff --git a/lib/tool_shed/test/functional/test_0310_hg_api_features.py b/lib/tool_shed/test/functional/test_0310_hg_api_features.py index c108a2532db5..589d077ef2b5 100644 --- 
a/lib/tool_shed/test/functional/test_0310_hg_api_features.py +++ b/lib/tool_shed/test/functional/test_0310_hg_api_features.py @@ -46,27 +46,10 @@ def test_0005_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=True, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_test_data.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering test data.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_clone(self): diff --git a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py index 323ef273d692..0a46eec3fe7b 100644 --- a/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py +++ b/lib/tool_shed/test/functional/test_0420_citable_urls_for_repositories.py @@ -9,7 +9,7 @@ repository_name = "filtering_0420" repository_description = "Galaxy filtering tool for test 0420" -repository_long_description = "Long description of Galaxy filtering tool for test 0410" +repository_long_description = "Long description of Galaxy filtering tool for test 0420" first_changeset_hash = "" @@ -59,16 +59,10 @@ def test_0005_create_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_2.2.0.tar", commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) # We'll be checking for this hash later, after uploading another file to the repository, making get_repository_tip() not usable. first_changeset_hash = self.get_repository_tip(repository) @@ -81,17 +75,7 @@ def test_0010_upload_new_file_to_repository(self): The repository should now contain two changeset revisions, 0: and 1:. """ repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme.txt.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "readme.txt") def test_0015_load_user_view_page(self): """Load the /view/ page and check for strings. @@ -104,9 +88,12 @@ def test_0015_load_user_view_page(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. 
# The iframe should point to /repository/browse_repositories?user_id=&operation=repositories_by_user - strings_displayed = ["/repository/browse_repositories", encoded_user_id, "operation=repositories_by_user"] - strings_displayed.append(encoded_user_id) - strings_displayed_in_iframe = ["user1", "filtering_0420", "Galaxy filtering tool for test 0420"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["/repository/browse_repositories", encoded_user_id, "operation=repositories_by_user"] + strings_displayed.append(encoded_user_id) + strings_displayed_in_iframe = ["user1", "filtering_0420", repository_description] self.load_citable_url( username="user1", repository_name=None, @@ -131,11 +118,19 @@ def test_0020_load_repository_view_page(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. # The iframe should point to /repository/bview_repository?id= - strings_displayed = ["/repository", "view_repository", "id=", encoded_repository_id] - strings_displayed_in_iframe = ["user1", "filtering_0420", "Galaxy filtering tool for test 0420"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["/repository", "view_repository", "id=", encoded_repository_id] + strings_displayed_in_iframe = [ + "user1", + "filtering_0420", + self._escape_page_content_if_needed(repository_long_description), + ] strings_displayed_in_iframe.append(self.get_repository_tip(repository)) - strings_displayed_in_iframe.append("Link to this repository:") - strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420") + if not self.is_v2: + strings_displayed_in_iframe.append("Link to this repository:") + strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420") self.load_citable_url( username="user1", repository_name="filtering_0420", @@ -161,15 +156,19 @@ def test_0025_load_view_page_for_previous_revision(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. 
# The iframe should point to /repository/view_repository?id= - strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"] + if self.is_v2: + strings_displayed = [] + else: + strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"] strings_displayed_in_iframe = [ "user1", "filtering_0420", - "Galaxy filtering tool for test 0420", + self._escape_page_content_if_needed(repository_long_description), first_changeset_hash, ] - strings_displayed_in_iframe.append("Link to this repository revision:") - strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420/{first_changeset_hash}") + if not self.is_v2: + strings_displayed_in_iframe.append("Link to this repository revision:") + strings_displayed_in_iframe.append(f"{self.url}/view/user1/filtering_0420/{first_changeset_hash}") strings_not_displayed_in_iframe = [] self.load_citable_url( username="user1", @@ -189,13 +188,16 @@ def test_0030_load_sharable_url_with_invalid_changeset_revision(self): encoded_user_id = self.security.encode_id(test_user_1.id) encoded_repository_id = repository.id invalid_changeset_hash = "invalid" - # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, - # then directly load the url that the iframe should be loading and check for the expected strings. - # The iframe should point to /repository/view_repository?id=&status=error - strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"] - strings_displayed.extend( - ["The+change+log", "does+not+include+revision", invalid_changeset_hash, "status=error"] - ) + if not self.is_v2: + # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, + # then directly load the url that the iframe should be loading and check for the expected strings. + # The iframe should point to /repository/view_repository?id=&status=error + strings_displayed = ["/repository", "view_repository", f"id={encoded_repository_id}"] + strings_displayed.extend( + ["The+change+log", "does+not+include+revision", invalid_changeset_hash, "status=error"] + ) + else: + strings_displayed = ["The change log does not include revision " + invalid_changeset_hash] self.load_citable_url( username="user1", repository_name="filtering_0420", @@ -216,12 +218,16 @@ def test_0035_load_sharable_url_with_invalid_repository_name(self): # Since twill does not load the contents of an iframe, we need to check that the iframe has been generated correctly, # then directly load the url that the iframe should be loading and check for the expected strings. # The iframe should point to /repository/browse_repositories?user_id=&operation=repositories_by_user - strings_displayed = ["/repository", "browse_repositories", "user1"] - strings_displayed.extend( - ["list+of+repositories+owned", "does+not+include+one+named", "%21%21invalid%21%21", "status=error"] - ) - strings_displayed_in_iframe = ["user1", "filtering_0420"] - strings_displayed_in_iframe.append("Repositories Owned by user1") + if not self.is_v2: + strings_displayed = ["/repository", "browse_repositories", "user1"] + strings_displayed.extend( + ["list+of+repositories+owned", "does+not+include+one+named", "%21%21invalid%21%21", "status=error"] + ) + strings_displayed_in_iframe = ["user1", "filtering_0420"] + strings_displayed_in_iframe.append("Repositories Owned by user1") + else: + strings_displayed = ["Repository user1/!!invalid!! 
is not found"] + strings_displayed_in_iframe = [] self.load_citable_url( username="user1", repository_name="!!invalid!!", @@ -238,7 +244,10 @@ def test_0040_load_sharable_url_with_invalid_owner(self): We are at step 8. Visit the following url and check for appropriate strings: /view/!!invalid!! """ - strings_displayed = ["The tool shed", self.url, "contains no repositories owned by", "!!invalid!!"] + if not self.is_v2: + strings_displayed = ["The tool shed", self.url, "contains no repositories owned by", "!!invalid!!"] + else: + strings_displayed = ["No repositories found"] self.load_citable_url( username="!!invalid!!", repository_name=None, diff --git a/lib/tool_shed/test/functional/test_0430_browse_utilities.py b/lib/tool_shed/test/functional/test_0430_browse_utilities.py index 4a817c031199..0202c5baf2ca 100644 --- a/lib/tool_shed/test/functional/test_0430_browse_utilities.py +++ b/lib/tool_shed/test/functional/test_0430_browse_utilities.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -54,16 +55,10 @@ def test_0010_create_emboss_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_tool_dependency_repository(self): @@ -85,18 +80,13 @@ def test_0020_create_tool_dependency_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes.tar.", - strings_displayed=[], - strings_not_displayed=[], ) + @skip_if_api_v2 def test_0030_browse_tools(self): """Load the page to browse tools. @@ -108,6 +98,7 @@ def test_0030_browse_tools(self): strings_displayed = ["EMBOSS", "antigenic1", "5.0.0", changeset_revision, "user1", "emboss_0430"] self.browse_tools(strings_displayed=strings_displayed) + @skip_if_api_v2 def test_0040_browse_tool_dependencies(self): """Browse tool dependencies and look for the right versions of freebayes and samtools. 
diff --git a/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py b/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py index 21313d07287f..1da5abd03751 100644 --- a/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py +++ b/lib/tool_shed/test/functional/test_0440_deleting_dependency_definitions.py @@ -83,16 +83,10 @@ def test_0005_create_column_maker_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column maker tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_convert_chars_repository(self): @@ -111,16 +105,10 @@ def test_0010_create_convert_chars_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert chars tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_dependency_on_convert_chars(self): @@ -213,17 +201,7 @@ def test_0030_create_bwa_package_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") def test_0035_create_bwa_base_repository(self): """Create and populate the bwa_base_0440 repository. @@ -242,16 +220,10 @@ def test_0035_create_bwa_base_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded BWA nucleotide space mapping tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0040_create_dependency_on_bwa_package_repository(self): @@ -342,17 +314,7 @@ def test_0055_create_bwa_tool_dependency_repository(self): category=category, strings_displayed=strings_displayed, ) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") def test_0060_delete_bwa_tool_dependency_definition(self): """Delete the tool_dependencies.xml file from bwa_tool_dependency_0440. @@ -400,17 +362,7 @@ def test_0065_reupload_bwa_tool_dependency_definition(self): # Record the current tip, so we can verify that it's still not a downloadable revision after tool_dependencies.xml # is re-uploaded and a new downloadable revision is created. 
old_changeset_revision = self.get_repository_tip(repository) - self.upload_file( - repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded package tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "bwa/complex/tool_dependencies.xml") new_changeset_revision = self.get_repository_tip(repository) # Check that the old changeset revision is still downloadable. metadata_record = self._get_repository_metadata_by_changeset_revision(repository, old_changeset_revision) diff --git a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py index 07b43d6f9d28..d1c04840ef8e 100644 --- a/lib/tool_shed/test/functional/test_0460_upload_to_repository.py +++ b/lib/tool_shed/test/functional/test_0460_upload_to_repository.py @@ -87,16 +87,10 @@ def test_0010_create_bwa_package_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.add_file_to_repository( repository, - filename="bwa/complex/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "bwa/complex/tool_dependencies.xml", commit_message="Populate package_bwa_0_5_9_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_tool_dependency_repositories(self): @@ -134,23 +128,14 @@ def test_0020_populate_complex_dependency_test_1_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_1_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( - repository, - filename="0460_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] - ) + self.add_file_to_repository(repository, "0460_files/tool_dependencies.xml") + if not self.is_v2: + changeset_revision = self.get_repository_tip(package_repository) + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0025_populate_complex_dependency_test_2_0460(self): """Populate complex_dependency_test_2_0460. 
@@ -160,23 +145,18 @@ def test_0025_populate_complex_dependency_test_2_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_2_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_root.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/tool_dependencies_in_root.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] ) + if not self.is_v2: + changeset_revision = self.get_repository_tip(package_repository) + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0030_populate_complex_dependency_test_3_0460(self): """Populate complex_dependency_test_3_0460. @@ -186,23 +166,21 @@ def test_0030_populate_complex_dependency_test_3_0460(self): """ repository = self._get_repository_by_name_and_owner("complex_dependency_test_3_0460", common.test_user_1_name) package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/tool_dependencies_in_subfolder.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", filepath="subfolder", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, + filename="tool_dependencies.xml", + filepath="subfolder", + strings_displayed=[changeset_revision], + ) def test_0035_create_repositories_for_url_upload(self): """Create and populate hg_tool_dependency_0460 and hg_subfolder_tool_dependency_0460. 
@@ -218,17 +196,7 @@ def test_0035_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0460_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate hg_tool_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/tool_dependencies.xml") repository = self.get_or_create_repository( name="hg_subfolder_tool_dependency_0460", description=bwa_repository_description, @@ -237,68 +205,10 @@ def test_0035_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/tool_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "0460_files/tool_dependencies_in_subfolder.tar", commit_message="Populate hg_subfolder_tool_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - - def test_0040_url_upload_to_complex_test(self): - """Populate complex_dependency_test_4_0460. - - This is step 8 - Upload to complex_dependency_test_4_0460 using the url hg:///repos/user1/hg_tool_dependency_0460. - """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_tool_dependency_0460" - repository = self._get_repository_by_name_and_owner("complex_dependency_test_4_0460", common.test_user_1_name) - package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", strings_displayed=[changeset_revision] - ) - - def test_0045_url_upload_to_complex_test(self): - """Populate complex_dependency_test_4_0460. - - This is step 9 - Upload to complex_dependency_test_5_0460 using the url hg:///repos/user1/hg_subfolder_tool_dependency_0460. 
- """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_subfolder_tool_dependency_0460" - repository = self._get_repository_by_name_and_owner("complex_dependency_test_5_0460", common.test_user_1_name) - package_repository = self._get_repository_by_name_and_owner("package_bwa_0_5_9_0460", common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = ["package_bwa_0_5_9_0460", "bwa", "0.5.9", "package", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="tool_dependencies.xml", filepath="subfolder", strings_displayed=[changeset_revision] ) def test_0050_create_repositories_for_simple_dependencies(self): @@ -338,23 +248,14 @@ def test_0055_populate_repository_dependency_test_1_0460(self): "repository_dependency_test_1_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="0460_files/repository_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/repository_dependencies.xml") changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0060_populate_repository_dependency_test_2_0460(self): """Populate repository_dependency_test_2_0460. 
@@ -365,23 +266,18 @@ def test_0060_populate_repository_dependency_test_2_0460(self): "repository_dependency_test_2_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_root/repository_dependencies_in_root.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/in_root/repository_dependencies_in_root.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] + ) def test_0065_populate_repository_dependency_test_3_0460(self): """Populate repository_dependency_test_3_0460. @@ -393,26 +289,21 @@ def test_0065_populate_repository_dependency_test_3_0460(self): "repository_dependency_test_3_0460", common.test_user_1_name ) package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, + "0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", commit_message="Uploaded complex repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], ) changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, - filename="repository_dependencies.xml", - filepath="subfolder", - strings_displayed=[changeset_revision], - ) + if not self.is_v2: + strings_displayed = [bwa_repository_name, "user1", changeset_revision] + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + self.display_repository_file_contents( + repository, + filename="repository_dependencies.xml", + filepath="subfolder", + strings_displayed=[changeset_revision], + ) def test_0070_create_repositories_for_url_upload(self): """Create and populate hg_repository_dependency_0460 and hg_subfolder_repository_dependency_0460. 
@@ -429,17 +320,7 @@ def test_0070_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0460_files/repository_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate hg_repository_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "0460_files/repository_dependencies.xml") repository = self.get_or_create_repository( name="hg_subfolder_repository_dependency_0460", description=bwa_repository_description, @@ -448,75 +329,8 @@ def test_0070_create_repositories_for_url_upload(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "0460_files/in_subfolder/repository_dependencies_in_subfolder.tar", commit_message="Populate hg_subfolder_repository_dependency_0460 with a tool dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - - def test_0075_url_upload_to_complex_test(self): - """Populate repository_dependency_test_4_0460. - - This is step 15 - Upload to repository_dependency_test_4_0460 using the url - hg:///repos/user1/hg_repository_dependency_0460. - """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_repository_dependency_0460" - repository = self._get_repository_by_name_and_owner( - "repository_dependency_test_4_0460", common.test_user_1_name - ) - package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, filename="repository_dependencies.xml", strings_displayed=[changeset_revision] - ) - - def test_0080_url_upload_to_complex_test(self): - """Populate repository_dependency_test_4_0460. - - This is step 16 - Upload to repository_dependency_test_5_0460 using the url - hg:///repos/user1/hg_subfolder_repository_dependency_0460. 
- """ - url = f"hg://{self.host}:{self.port}/repos/user1/hg_subfolder_repository_dependency_0460" - repository = self._get_repository_by_name_and_owner( - "repository_dependency_test_5_0460", common.test_user_1_name - ) - package_repository = self._get_repository_by_name_and_owner(bwa_repository_name, common.test_user_1_name) - self.upload_url( - repository, - url=url, - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=True, - commit_message="Uploaded repository dependency definition.", - strings_displayed=[], - strings_not_displayed=[], - ) - changeset_revision = self.get_repository_tip(package_repository) - strings_displayed = [bwa_repository_name, "user1", changeset_revision] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) - self.display_repository_file_contents( - repository, - filename="repository_dependencies.xml", - filepath="subfolder", - strings_displayed=[changeset_revision], ) diff --git a/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py b/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py index 8086ac37b12b..79df2baff835 100644 --- a/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py +++ b/lib/tool_shed/test/functional/test_0470_tool_dependency_repository_type.py @@ -90,17 +90,7 @@ def test_0005_create_libx11_repository(self): strings_displayed=[], ) # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository. - self.upload_file( - repository, - filename="emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("libx11_proto", repository, end=1) def test_0010_create_emboss_5_0_0_repository(self): """Create and populate package_emboss_5_0_0_0470. @@ -121,17 +111,7 @@ def test_0010_create_emboss_5_0_0_repository(self): strings_displayed=[], ) # Upload the edited tool dependency definition to the package_emboss_5_0_0 repository. - self.upload_file( - repository, - filename="emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_emboss_5_0_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("package_emboss_5_0_0_0470", repository, end=1) def test_0015_create_emboss_5_repository(self): """Create and populate emboss_5_0470. @@ -152,17 +132,7 @@ def test_0015_create_emboss_5_repository(self): strings_displayed=[], ) # Populate emboss_5 with tool and dependency definitions. - self.upload_file( - repository, - filename="emboss/0470_files/emboss_complex_dependency.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Populate emboss_5 with tool and dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("emboss_5_0470", repository, end=1) def test_0020_upload_updated_tool_dependency_to_package_x11(self): """Upload a new tool_dependencies.xml to package_x11_client_1_5_proto_7_0_0470. 
@@ -174,17 +144,7 @@ def test_0020_upload_updated_tool_dependency_to_package_x11(self): package_libx11_repository_name, common.test_user_1_name ) # Upload the tool dependency definition to the package_x11_client_1_5_proto_7_0_0470 repository. - self.upload_file( - package_x11_repository, - filename="emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_x11_client_1_5_proto_7_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("libx11_proto", package_x11_repository, start=1, end=2) count = self._get_metadata_revision_count(package_x11_repository) assert count == 1, ( "package_x11_client_1_5_proto_7_0_0470 has incorrect number of metadata revisions, expected 1 but found %d" @@ -202,16 +162,8 @@ def test_0025_upload_updated_tool_dependency_to_package_emboss(self): package_emboss_repository_name, common.test_user_1_name ) # Populate package_emboss_5_0_0_0470 with updated tool dependency definition. - self.upload_file( - package_emboss_repository, - filename="emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_emboss_5_0_0_0470 with tool dependency definitions.", - strings_displayed=[], - strings_not_displayed=[], + self.user_populator().setup_test_data_repo( + "package_emboss_5_0_0_0470", package_emboss_repository, start=1, end=2 ) count = self._get_metadata_revision_count(package_emboss_repository) assert count == 2, ( @@ -227,17 +179,7 @@ def test_0030_upload_updated_tool_dependency_to_emboss_5_repository(self): """ emboss_repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) # Populate package_emboss_5_0_0_0470 with updated tool dependency definition. 
- self.upload_file( - emboss_repository, - filename="emboss/0470_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Upload updated complex repository dependency definition to emboss_5_0470.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("emboss_5_0470", emboss_repository, start=1, end=2) count = self._get_metadata_revision_count(emboss_repository) assert count == 2, "package_emboss_5_0_0_0470 has incorrect number of metadata revisions" diff --git a/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py b/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py index 45510dd92d5d..48f121ead36b 100644 --- a/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py +++ b/lib/tool_shed/test/functional/test_0480_tool_dependency_xml_verification.py @@ -48,17 +48,7 @@ def test_0005_create_tool_dependency_repository(self): category=category, strings_displayed=[], ) - self.upload_file( - repository, - filename="0480_files/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Populate package_invalid_tool_dependency_xml_1_0_0 with an improperly defined tool dependency.", - strings_displayed=["package cannot be installed because", "missing either an <actions> tag set"], - strings_not_displayed=[], - ) + self.user_populator().setup_test_data_repo("0480", repository, assert_ok=False) def test_0010_populate_tool_dependency_repository(self): """Verify package_invalid_tool_dependency_xml_1_0_0. diff --git a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py index 3465af3c582c..348f70281e88 100644 --- a/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py +++ b/lib/tool_shed/test/functional/test_0530_repository_admin_feature.py @@ -1,5 +1,6 @@ import logging +from ..base.api import skip_if_api_v2 from ..base.twilltestcase import ( common, ShedTwillTestCase, @@ -57,16 +58,10 @@ def test_0005_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_repository_admin_role_exists(self): @@ -104,6 +99,7 @@ def test_0020_rename_repository(self): repository = self._get_repository_by_name_and_owner("renamed_filtering_0530", common.test_user_1_name) assert repository.name == "renamed_filtering_0530", "Repository was not renamed to renamed_filtering_0530." + @skip_if_api_v2 def test_0030_verify_access_denied(self): """Make sure a non-admin user can't modify the repository. 
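Every hunk above applies the same mechanical refactoring, so it is worth restating the pattern once in plain Python rather than re-reading it per file. The sketch below is method-body code assuming the ShedTwillTestCase context used throughout these tests; the filtering tarball and the "libx11_proto" fixture are borrowed from the hunks above purely as illustrative payloads.

# Before: one monolithic helper, with most flags passed as boilerplate.
self.upload_file(
    repository,
    filename="filtering/filtering_1.1.0.tar",
    filepath=None,
    valid_tools_only=True,
    uncompress_file=True,
    remove_repo_files_not_in_tar=False,
    commit_message="Uploaded filtering 1.1.0 tarball.",
    strings_displayed=[],
    strings_not_displayed=[],
)

# After: narrow helpers that take only the arguments that actually vary.
self.commit_tar_to_repository(
    repository,
    "filtering/filtering_1.1.0.tar",
    commit_message="Uploaded filtering 1.1.0 tarball.",
)
self.add_file_to_repository(repository, "filtering/filtering_0000.txt")

# Multi-step canned fixtures move behind the test-data populator, which
# replays a slice of a repository's commit history.
self.user_populator().setup_test_data_repo("libx11_proto", repository, start=1, end=2)

# Page-scraping assertions exist only in the v1 (Twill) UI, so they are
# either guarded inline like this, or the whole test method is skipped
# via the @skip_if_api_v2 decorator imported above.
if not self.is_v2:
    self.display_manage_repository_page(repository, strings_displayed=["Valid tools"])

The deleted test_0075/test_0080 methods fit the same picture: they exercised upload_url, for which the hunks above introduce no replacement helper.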
diff --git a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py index 2787292505f1..8c5b08cec3f9 100644 --- a/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py +++ b/lib/tool_shed/test/functional/test_0550_metadata_updated_dependencies.py @@ -67,21 +67,15 @@ def test_0005_freebayes_repository(self): strings_displayed=[], ) assert freebayes is not None, f"Error creating freebayes {repositories['freebayes']['name']}" - self.upload_file( + self.commit_tar_to_repository( freebayes, - filename="0550_files/package_freebayes_1_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded.", - strings_displayed=["has been successfully uploaded to the repository", "contains a single file"], - strings_not_displayed=None, - ) - # Visit the manage repository page for package_freebayes_0_5_9_0100. - self.display_manage_repository_page( - freebayes, strings_displayed=["Tool dependencies", "will not be", "to this repository"] + "0550_files/package_freebayes_1_0550.tgz", ) + if not self.is_v2: + # Visit the manage repository page for package_freebayes_0_5_9_0100. + self.display_manage_repository_page( + freebayes, strings_displayed=["Tool dependencies", "will not be", "to this repository"] + ) def test_0010_create_samtools_repository(self): """Create and populate the package_samtools_0550 repository.""" @@ -95,16 +89,10 @@ def test_0010_create_samtools_repository(self): strings_displayed=[], ) assert samtools is not None, f"Error creating samtools {repositories['samtools']['name']}" - self.upload_file( + self.commit_tar_to_repository( samtools, - filename="0550_files/package_samtools_1_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_samtools_1_0550.tgz", commit_message="Uploaded samtools 1.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -119,16 +107,10 @@ def test_0015_create_filtering_repository(self): strings_displayed=[], ) assert repository is not None, f"Error creating repository {repositories['filtering']['name']}" - self.upload_file( + self.commit_tar_to_repository( repository, - filename="0550_files/filtering_1.0.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/filtering_1.0.tgz", commit_message="Uploaded filtering 1.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) def test_0020_check_repository_dependency(self): @@ -137,7 +119,8 @@ def test_0020_check_repository_dependency(self): samtools = self._get_repository_by_name_and_owner(repositories["samtools"]["name"], common.test_user_1_name) filtering = self._get_repository_by_name_and_owner(repositories["filtering"]["name"], common.test_user_1_name) strings_displayed = [freebayes.id, samtools.id] - self.display_manage_repository_page(filtering, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(filtering, strings_displayed=strings_displayed) def test_0025_update_dependent_repositories(self): """ @@ -146,27 +129,15 @@ def test_0025_update_dependent_repositories(self): freebayes = 
self._get_repository_by_name_and_owner(repositories["freebayes"]["name"], common.test_user_1_name) samtools = self._get_repository_by_name_and_owner(repositories["samtools"]["name"], common.test_user_1_name) filtering = self._get_repository_by_name_and_owner(repositories["filtering"]["name"], common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( freebayes, - filename="0550_files/package_freebayes_2_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_freebayes_2_0550.tgz", commit_message="Uploaded freebayes 2.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( samtools, - filename="0550_files/package_samtools_2_0550.tgz", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "0550_files/package_samtools_2_0550.tgz", commit_message="Uploaded samtools 2.0.", - strings_displayed=["has been successfully uncompressed and uploaded to the repository"], - strings_not_displayed=[], ) strings_displayed = [ repositories["freebayes"]["name"], diff --git a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py index 605290d8e95d..81760f008c34 100644 --- a/lib/tool_shed/test/functional/test_1000_install_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1000_install_basic_repository.py @@ -10,11 +10,12 @@ class TestBasicToolShedFeatures(ShedTwillTestCase): """Test installing a basic repository.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0000 category and upload the filtering repository to it, if necessary.""" @@ -36,54 +37,17 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 1.1.0", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 2.2.0", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_file_to_repository(repository, 
"filtering/filtering_0000.txt") + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") + self.add_file_to_repository(repository, "readme.txt") def test_0010_browse_tool_sheds(self): """Browse the available tool sheds in this Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed( url=self.url, strings_displayed=["Test 0000 Basic Repository Features 1", "Test 0000 Basic Repository Features 2"], @@ -107,19 +71,15 @@ def test_0025_install_filtering_repository(self): "Test 0000 Basic Repository Features 1", new_tool_panel_section_label="test_1000", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repo_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repo_name, common.test_user_1_name) changeset = str(installed_repository.installed_changeset_revision) - assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset) + assert self._get_installed_repository_for(common.test_user_1, repo_name, changeset) self._assert_has_valid_tool_with_name("Filter1") self._assert_repo_has_tool_with_id(installed_repository, "Filter1") def test_0030_install_filtering_repository_again(self): """Attempt to install the already installed filtering repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repo_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repo_name, common.test_user_1_name) # Just make sure the repo is still installed, used to monitoring tests but we've # removed that page. self._install_repository( @@ -128,7 +88,7 @@ def test_0030_install_filtering_repository_again(self): "Test 0000 Basic Repository Features 1", ) changeset = str(installed_repository.installed_changeset_revision) - assert self.get_installed_repository_for(common.test_user_1, repo_name, changeset) + assert self._get_installed_repository_for(common.test_user_1, repo_name, changeset) def test_0035_verify_installed_repository_metadata(self): """Verify that resetting the metadata on an installed repository does not change the metadata.""" diff --git a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py index 370d150b7048..976d2c354ecc 100644 --- a/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1010_install_repository_with_tool_dependencies.py @@ -1,5 +1,4 @@ import logging -import os from ..base.twilltestcase import ( common, @@ -16,9 +15,10 @@ class TestToolWithToolDependencies(ShedTwillTestCase): """Test installing a repository with tool dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -36,89 +36,18 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=[ - 
"Metadata may have been defined", - "This file requires an entry", - "tool_data_table_conf", - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool configuration" - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the freebayes tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=[category_name]) category = self.populator.get_category_with_name(category_name) self.browse_category(category, strings_displayed=[repository_name]) - strings_displayed = [repository_name, "Valid tools", "Tool dependencies"] - self.preview_repository_in_tool_shed( - repository_name, common.test_user_1_name, strings_displayed=strings_displayed - ) + if not self.is_v2: + strings_displayed = [repository_name, "Valid tools", "Tool dependencies"] + self.preview_repository_in_tool_shed( + repository_name, common.test_user_1_name, strings_displayed=strings_displayed + ) def test_0015_install_freebayes_repository(self): """Install the freebayes repository without installing tool dependencies.""" @@ -129,10 +58,8 @@ def test_0015_install_freebayes_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1010", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - assert self.get_installed_repository_for( + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + assert self._get_installed_repository_for( common.test_user_1, repository_name, installed_repository.installed_changeset_revision ) self._assert_has_valid_tool_with_name("FreeBayes") diff --git 
a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py index a109ec0b0133..b965011e7ca0 100644 --- a/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1020_install_repository_with_repository_dependencies.py @@ -15,11 +15,12 @@ class TestToolWithRepositoryDependencies(ShedTwillTestCase): """Test installing a repository with repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0020 category and any missing repositories.""" @@ -36,16 +37,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_repository = self.get_or_create_repository( name=emboss_repository_name, @@ -55,16 +50,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1020", additional_paths=["emboss", "5"]) repository_tuple = ( @@ -81,13 +70,15 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the emboss tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=["Test 0020 Basic Repository Dependencies"]) category = self.populator.get_category_with_name("Test 0020 Basic Repository Dependencies") self.browse_category(category, strings_displayed=[emboss_repository_name]) - self.preview_repository_in_tool_shed( - emboss_repository_name, common.test_user_1_name, strings_displayed=[emboss_repository_name, "Valid tools"] - ) + if not self.is_v2: + self.preview_repository_in_tool_shed( + emboss_repository_name, + common.test_user_1_name, + strings_displayed=[emboss_repository_name, "Valid tools"], + ) def test_0015_install_emboss_repository(self): """Install the emboss repository without installing tool dependencies.""" @@ -98,10 +89,10 @@ def test_0015_install_emboss_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1020", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - assert 
self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, emboss_repository_name, installed_repository.installed_changeset_revision ) self._assert_has_valid_tool_with_name("antigenic") @@ -113,14 +104,14 @@ def test_0020_verify_installed_repository_metadata(self): def test_0025_deactivate_datatypes_repository(self): """Deactivate the emboss_datatypes repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( column_maker_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_repository) def test_0030_reactivate_datatypes_repository(self): """Reactivate the datatypes repository and verify that the datatypes are again present.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( column_maker_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_repository) diff --git a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py index 8c5464484b7a..3cc894c038cb 100644 --- a/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1030_install_repository_with_dependency_revisions.py @@ -19,11 +19,12 @@ class TestRepositoryWithDependencyRevisions(ShedTwillTestCase): """Test installing a repository with dependency revisions.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0030 category and add repositories to it, if necessary.""" @@ -42,16 +43,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): ) if self.repository_is_new(column_maker_repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_5_repository = self.get_or_create_repository( name=emboss_5_repository_name, @@ -61,16 +56,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) column_maker_tuple = ( @@ -92,16 +81,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - 
filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "6"]) column_maker_tuple = ( @@ -123,16 +106,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -160,13 +137,15 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the emboss tool.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=["Test 0030 Repository Dependency Revisions"]) category = self.populator.get_category_with_name("Test 0030 Repository Dependency Revisions") self.browse_category(category, strings_displayed=[emboss_repository_name]) - self.preview_repository_in_tool_shed( - emboss_repository_name, common.test_user_1_name, strings_displayed=[emboss_repository_name, "Valid tools"] - ) + if not self.is_v2: + self.preview_repository_in_tool_shed( + emboss_repository_name, + common.test_user_1_name, + strings_displayed=[emboss_repository_name, "Valid tools"], + ) def test_0015_install_emboss_repository(self): """Install the emboss repository without installing tool dependencies.""" @@ -178,15 +157,15 @@ def test_0015_install_emboss_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1030", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, emboss_repository_name, installed_repository.installed_changeset_revision ) self._assert_repo_has_tool_with_id(installed_repository, "EMBOSS: antigenic1") self._assert_has_valid_tool_with_name("antigenic") - self.update_installed_repository_api(installed_repository, verify_no_updates=True) + self.update_installed_repository(installed_repository, verify_no_updates=True) def test_0025_verify_installed_repository_metadata(self): """Verify that resetting the metadata on an installed repository does not change the metadata.""" diff --git a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py index ff1b9ea7b392..60748c5b2763 100644 --- a/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py +++ b/lib/tool_shed/test/functional/test_1040_install_repository_basic_circular_dependencies.py @@ -19,6 +19,8 @@ class TestInstallingCircularDependencies(ShedTwillTestCase): """Verify that the code correctly handles installing repositories with circular dependencies.""" + requires_galaxy = True + 
def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -41,16 +43,10 @@ def test_0005_create_freebayes_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -70,16 +66,10 @@ def test_0015_create_filtering_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependencies(self): @@ -122,7 +112,6 @@ def test_0020_create_repository_dependencies(self): def test_0025_install_freebayes_repository(self): """Install freebayes with blank tool panel section, without tool dependencies but with repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( freebayes_repository_name, common.test_user_1_name, @@ -133,26 +122,26 @@ def test_0025_install_freebayes_repository(self): def test_0030_uninstall_filtering_repository(self): """Deactivate filtering, verify tool panel section and missing repository dependency.""" - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_filtering_repository) - self.test_db_util.ga_refresh(installed_filtering_repository) + self._refresh_tool_shed_repository(installed_filtering_repository) self._assert_has_missing_dependency(installed_freebayes_repository, filtering_repository_name) self.check_galaxy_repository_db_status(filtering_repository_name, common.test_user_1_name, "Deactivated") def test_0035_reactivate_filtering_repository(self): """Reinstall filtering into 'filtering' tool panel section.""" - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -161,51 +150,51 @@ def test_0035_reactivate_filtering_repository(self): 
install_repository_dependencies=True, new_tool_panel_section_label="filtering", ) - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) self._assert_is_not_missing_dependency(installed_freebayes_repository, filtering_repository_name) def test_0040_uninstall_freebayes_repository(self): """Deactivate freebayes, verify tool panel section and missing repository dependency.""" - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_freebayes_repository) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - self.test_db_util.ga_refresh(installed_freebayes_repository) + self._refresh_tool_shed_repository(installed_filtering_repository) self._assert_has_missing_dependency(installed_filtering_repository, freebayes_repository_name) self.check_galaxy_repository_db_status("freebayes_0040", "user1", "Deactivated") def test_0045_deactivate_filtering_repository(self): """Deactivate filtering, verify tool panel section.""" - installed_filtering_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_filtering_repository = self._get_installed_repository_by_name_owner( filtering_repository_name, common.test_user_1_name ) - installed_freebayes_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_freebayes_repository = self._get_installed_repository_by_name_owner( freebayes_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) self.deactivate_repository(installed_filtering_repository) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, freebayes_repository_name, installed_freebayes_repository.installed_changeset_revision ) - assert not self.get_installed_repository_for( + assert not self._get_installed_repository_for( common.test_user_1, filtering_repository_name, installed_filtering_repository.installed_changeset_revision ) - self.test_db_util.ga_refresh(installed_filtering_repository) + self._refresh_tool_shed_repository(installed_freebayes_repository) self._assert_has_missing_dependency(installed_freebayes_repository, filtering_repository_name) self.check_galaxy_repository_db_status(filtering_repository_name, common.test_user_1_name, "Deactivated") diff --git 
a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py index 6cc8abc38a20..e57633296faa 100644 --- a/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py +++ b/lib/tool_shed/test/functional/test_1050_circular_dependencies_4_levels.py @@ -36,6 +36,8 @@ class TestInstallRepositoryCircularDependencies(ShedTwillTestCase): """Verify that the code correctly handles circular dependencies down to n levels.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -56,16 +58,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -80,16 +76,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_emboss_datatypes_repository(self): @@ -108,16 +98,10 @@ def test_0020_create_emboss_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_filtering_repository(self): @@ -132,16 +116,10 @@ def test_0025_create_filtering_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0030_create_freebayes_repository(self): @@ -156,16 +134,10 @@ def test_0030_create_freebayes_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0035_create_bismark_repository(self): @@ -180,17 +152,7 @@ def test_0035_create_bismark_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - 
valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository, end=1) def test_0040_create_and_upload_dependency_definitions(self): """Set up the dependency structure.""" @@ -307,7 +269,8 @@ def test_0045_verify_repository_dependencies(self): strings_displayed = [ f"{freebayes_repository.name} depends on {', '.join(repo.name for repo in freebayes_dependencies)}." ] - self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(freebayes_repository, strings_displayed=strings_displayed) def test_0050_verify_tool_dependencies(self): """Check that freebayes and emboss display tool dependencies.""" @@ -315,17 +278,17 @@ def test_0050_verify_tool_dependencies(self): freebayes_repository_name, common.test_user_1_name ) emboss_repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) - self.display_manage_repository_page( - freebayes_repository, - strings_displayed=["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Tool dependencies"], - ) - self.display_manage_repository_page( - emboss_repository, strings_displayed=["Tool dependencies", "emboss", "5.0.0", "package"] - ) + if not self.is_v2: + self.display_manage_repository_page( + freebayes_repository, + strings_displayed=["freebayes", "0.9.4_9696d0ce8a9", "samtools", "0.1.18", "Tool dependencies"], + ) + self.display_manage_repository_page( + emboss_repository, strings_displayed=["Tool dependencies", "emboss", "5.0.0", "package"] + ) def test_0055_install_column_repository(self): """Install column_maker with repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, @@ -381,16 +344,13 @@ def test_0060_install_emboss_repository(self): def test_0065_deactivate_bismark_repository(self): """Deactivate bismark and verify things are okay.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - bismark_repository_name, common.test_user_1_name - ) + repository = self._get_installed_repository_by_name_owner(bismark_repository_name, common.test_user_1_name) self.deactivate_repository(repository) # Now we have emboss, bismark, column_maker, and convert_chars installed, filtering and freebayes never installed. 
installed_repositories = [ (column_repository_name, common.test_user_1_name), (emboss_repository_name, common.test_user_1_name), (convert_repository_name, common.test_user_1_name), - (bismark_repository_name, common.test_user_1_name), ] strings_displayed = ["emboss_0050", "column_maker_0050", "convert_chars_0050"] strings_not_displayed = ["bismark", "filtering_0050", "freebayes_0050"] @@ -400,12 +360,10 @@ def test_0065_deactivate_bismark_repository(self): def test_0070_uninstall_emboss_repository(self): """Uninstall the emboss_5 repository.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - emboss_repository_name, common.test_user_1_name - ) - self.uninstall_repository(repository) + repository = self._get_installed_repository_by_name_owner(emboss_repository_name, common.test_user_1_name) + self._uninstall_repository(repository) self._assert_has_no_installed_repos_with_names(repository.name) - self.test_db_util.ga_refresh(repository) + self._refresh_tool_shed_repository(repository) self.check_galaxy_repository_tool_panel_section(repository, "emboss_5_0050") # Now we have bismark, column_maker, and convert_chars installed, filtering and freebayes never installed, # and emboss uninstalled. diff --git a/lib/tool_shed/test/functional/test_1070_invalid_tool.py b/lib/tool_shed/test/functional/test_1070_invalid_tool.py index d0ce2fa0a196..0f8e08e822e1 100644 --- a/lib/tool_shed/test/functional/test_1070_invalid_tool.py +++ b/lib/tool_shed/test/functional/test_1070_invalid_tool.py @@ -13,9 +13,10 @@ class TestFreebayesRepository(ShedTwillTestCase): """Test repository with multiple revisions with invalid tools.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -33,32 +34,10 @@ def test_0005_ensure_existence_of_repository_and_category(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="bismark/bismark.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="bismark/bismark_methylation_extractor.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded an updated tool xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.user_populator().setup_bismark_repo(repository) def test_0010_browse_tool_shed(self): """Browse the available tool sheds in this Galaxy instance and preview the bismark repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.browse_tool_shed(url=self.url, strings_displayed=[category_name]) category = self.populator.get_category_with_name(category_name) self.browse_category(category, strings_displayed=[repository_name]) @@ -75,11 +54,9 @@ def test_0015_install_freebayes_repository(self): install_tool_dependencies=False, new_tool_panel_section_label="test_1070", ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - assert self.get_installed_repository_for( + 
installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + assert self._get_installed_repository_for( common.test_user_1, repository_name, installed_repository.installed_changeset_revision ) - self.update_installed_repository_api(installed_repository, verify_no_updates=True) + self.update_installed_repository(installed_repository, verify_no_updates=True) self._assert_repo_has_invalid_tool_in_file(installed_repository, "bismark_bowtie_wrapper.xml") diff --git a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py index 2db80b7569e0..b9f82f4c098c 100644 --- a/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py +++ b/lib/tool_shed/test/functional/test_1080_advanced_circular_dependency_installation.py @@ -24,9 +24,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Testing uninstalling and reinstalling repository dependencies, and setting tool panel sections.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -44,16 +45,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) running_standalone = True @@ -72,16 +67,10 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) running_standalone = True @@ -119,7 +108,6 @@ def test_0015_upload_dependency_xml_if_needed(self): def test_0020_install_convert_repository(self): """Install convert_chars without repository dependencies into convert_chars tool panel section.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( convert_repository_name, common.test_user_1_name, @@ -128,22 +116,25 @@ def test_0020_install_convert_repository(self): install_repository_dependencies=False, new_tool_panel_section_label="convert_chars", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( - common.test_user_1, "convert_chars_0080", 
installed_convert_repository.installed_changeset_revision + assert self._get_installed_repository_for( + common.test_user_1, convert_repository_name, installed_convert_repository.installed_changeset_revision ) - self._assert_has_installed_repository_dependency( - installed_convert_repository, "column_maker_0080", installed_column_repository.installed_changeset_revision - ) - # installed_convert_repository has required_repositories and the following string - # is included when not installing via the API. This distrubs me but we've not installed - # not from the API for a long time so I'm just dropping the check. -John - # "Missing repository dependencies", + if self.full_stack_galaxy: + # This branch has been broken since we switched from mako to API for installing... + self._assert_has_installed_repository_dependency( + installed_convert_repository, + column_repository_name, + installed_column_repository.installed_changeset_revision, + ) + else: + # Previous mako had some string checks and such equivalent to this. + self._assert_has_missing_dependency(installed_convert_repository, column_repository_name) def test_0025_install_column_repository(self): """Install column maker with repository dependencies into column_maker tool panel section.""" @@ -154,16 +145,16 @@ def test_0025_install_column_repository(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "convert_chars_0080", installed_convert_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "column_maker_0080", installed_column_repository.installed_changeset_revision ) self._assert_has_installed_repository_dependency( @@ -172,10 +163,10 @@ def test_0025_install_column_repository(self): def test_0030_deactivate_convert_repository(self): """Deactivate convert_chars, verify that column_maker is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -183,10 +174,10 @@ def test_0030_deactivate_convert_repository(self): def test_0035_reactivate_convert_repository(self): """Reactivate convert_chars, both convert_chars and column_maker should now show as installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + 
installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_convert_repository) @@ -196,10 +187,10 @@ def test_0035_reactivate_convert_repository(self): def test_0040_deactivate_column_repository(self): """Deactivate column_maker, verify that convert_chars is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_column_repository) @@ -207,7 +198,7 @@ def test_0040_deactivate_column_repository(self): def test_0045_deactivate_convert_repository(self): """Deactivate convert_chars, verify that both convert_chars and column_maker are deactivated.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -215,7 +206,7 @@ def test_0045_deactivate_convert_repository(self): def test_0050_reactivate_column_repository(self): """Reactivate column_maker. This should not automatically reactivate convert_chars, so column_maker should be displayed as installed but missing repository dependencies.""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_column_repository) @@ -223,10 +214,10 @@ def test_0050_reactivate_column_repository(self): def test_0055_reactivate_convert_repository(self): """Activate convert_chars. Both convert_chars and column_maker should now show as installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_convert_repository) @@ -236,21 +227,21 @@ def test_0055_reactivate_convert_repository(self): def test_0060_uninstall_column_repository(self): """Uninstall column_maker. 
Verify that convert_chars is installed and missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) + self._uninstall_repository(installed_column_repository) self._assert_has_missing_dependency(installed_convert_repository, "column_maker_0080") def test_0065_reinstall_column_repository(self): """Reinstall column_maker without repository dependencies, verify both convert_chars and column_maker are installed.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_column_repository, install_repository_dependencies=False) @@ -260,10 +251,10 @@ def test_0065_reinstall_column_repository(self): def test_0070_uninstall_convert_repository(self): """Uninstall convert_chars, verify column_maker installed but missing repository dependencies.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_convert_repository) @@ -271,7 +262,7 @@ def test_0070_uninstall_convert_repository(self): def test_0075_uninstall_column_repository(self): """Uninstall column_maker, verify that both convert_chars and column_maker are uninstalled.""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_column_repository) @@ -279,10 +270,10 @@ def test_0075_uninstall_column_repository(self): def test_0080_reinstall_convert_repository(self): """Reinstall convert_chars with repository dependencies, verify that this installs both convert_chars and column_maker.""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_convert_repository, install_repository_dependencies=True) diff --git a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py 
b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py index 7ecbb727afc6..37192cf55b45 100644 --- a/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py +++ b/lib/tool_shed/test/functional/test_1090_repository_dependency_handling.py @@ -22,9 +22,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Testing the behavior of repository dependencies with tool panel sections.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -41,16 +42,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_and_populate_convert_repository(self): @@ -66,16 +61,10 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_and_upload_dependency_files(self): @@ -103,7 +92,6 @@ def test_0015_create_and_upload_dependency_files(self): def test_0020_install_repositories(self): """Install column_maker into column_maker tool panel section and install repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, @@ -112,32 +100,32 @@ def test_0020_install_repositories(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) + self._refresh_tool_shed_repository(installed_convert_repository) self._assert_has_installed_repos_with_names("convert_chars_1085", "column_maker_1085") self._assert_is_not_missing_dependency(installed_convert_repository, "column_maker_1085") def test_0025_uninstall_column_repository(self): """uninstall column_maker, verify same section""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) - self.test_db_util.ga_refresh(installed_column_repository) + self._uninstall_repository(installed_column_repository) self.check_galaxy_repository_tool_panel_section(installed_column_repository, "column_maker") def test_0030_uninstall_convert_repository(self): - installed_convert_repository = 
self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_convert_repository) - self.test_db_util.ga_refresh(installed_convert_repository) + self._uninstall_repository(installed_convert_repository) + self._refresh_tool_shed_repository(installed_convert_repository) self.check_galaxy_repository_tool_panel_section(installed_convert_repository, "column_maker") def test_0035_reinstall_column_repository(self): """reinstall column_maker into new section 'new_column_maker' (no_changes = false), no dependencies""" - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -150,7 +138,7 @@ def test_0035_reinstall_column_repository(self): def test_0040_reinstall_convert_repository(self): """reinstall convert_chars into new section 'new_convert_chars' (no_changes = false), no dependencies""" - installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.reinstall_repository_api( @@ -166,12 +154,12 @@ def test_0040_reinstall_convert_repository(self): # https://jenkins.galaxyproject.org/job/docker-toolshed/5198/ # def test_0045_uninstall_and_verify_tool_panel_sections( self ): # '''uninstall both and verify tool panel sections''' - # installed_convert_repository = self.test_db_util.get_installed_repository_by_name_owner( convert_repository_name, + # installed_convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, # common.test_user_1_name ) - # installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( column_repository_name, + # installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, # common.test_user_1_name ) - # self.uninstall_repository( installed_convert_repository ) - # self.uninstall_repository( installed_column_repository ) + # self._uninstall_repository( installed_convert_repository ) + # self._uninstall_repository( installed_column_repository ) # self.test_db_util.ga_refresh( installed_convert_repository ) # self.test_db_util.ga_refresh( installed_column_repository ) # self.check_galaxy_repository_tool_panel_section( installed_column_repository, 'new_column_maker' ) diff --git a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py index 3451407adf4a..b824593a34d4 100644 --- a/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py +++ b/lib/tool_shed/test/functional/test_1100_install_updated_repository_dependencies.py @@ -18,9 +18,10 @@ class TestRepositoryDependencies(ShedTwillTestCase): """Test installing a repository, then updating it to include repository dependencies.""" + requires_galaxy = True + def test_0000_create_or_login_admin_user(self): """Create necessary user accounts and login as an admin user.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, 
username=common.admin_username) @@ -37,16 +38,10 @@ def test_0005_create_and_populate_column_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_and_populate_convert_repository(self): @@ -63,21 +58,14 @@ def test_0010_create_and_populate_convert_repository(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_install_and_uninstall_column_repository(self): """Install and uninstall the column_maker repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( column_repository_name, common.test_user_1_name, @@ -86,10 +74,10 @@ def test_0015_install_and_uninstall_column_repository(self): install_repository_dependencies=True, new_tool_panel_section_label="column_maker", ) - installed_column_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_column_repository) + self._uninstall_repository(installed_column_repository) def test_0020_upload_dependency_xml(self): """Upload a repository_dependencies.xml file to column_maker that specifies convert_chars.""" @@ -115,7 +103,6 @@ def test_0025_verify_repository_dependency(self): def test_0030_reinstall_column_repository(self): """Reinstall column_maker and verify it installs repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) strings_not_displayed = ["column_maker_1087"] self._assert_has_no_installed_repos_with_names(*strings_not_displayed) self._install_repository( diff --git a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py index 50bd7449cfa9..a4c0fcf55550 100644 --- a/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py +++ b/lib/tool_shed/test/functional/test_1120_install_repository_with_complex_dependencies.py @@ -25,6 +25,8 @@ class TestInstallingComplexRepositoryDependencies(ShedTwillTestCase): """Test features related to installing repositories with complex repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -51,19 +53,7 @@ def test_0005_create_bwa_package_repository(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - 
remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, xml_filename, "tool_dependencies.xml") self.display_manage_repository_page( repository, strings_displayed=["Tool dependencies", "consider setting its type"] ) @@ -83,16 +73,10 @@ def test_0010_create_bwa_base_repository(self): strings_displayed=[], ) self._get_repository_by_name_and_owner(bwa_package_repository_name, common.test_user_1_name) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="bwa/complex/bwa_base.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "bwa/complex/bwa_base.tar", commit_message="Uploaded bwa_base.tar with tool wrapper XML, but without tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_generate_complex_repository_dependency_invalid_shed_url(self): @@ -235,17 +219,7 @@ def test_0040_update_tool_repository(self): open(xml_filename, "w").write( open(old_tool_dependency).read().replace("__PATH__", self.get_filename("bwa/complex")) ) - self.upload_file( - tool_repository, - filename=xml_filename, - filepath=new_tool_dependency_path, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded new tool_dependencies.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(tool_repository, xml_filename, "tool_dependencies.xml") # Verify that the dependency display has been updated as a result of the new tool_dependencies.xml file. self.display_manage_repository_page( base_repository, @@ -255,29 +229,27 @@ def test_0040_update_tool_repository(self): def test_0045_install_base_repository(self): """Verify installation of the repository with complex repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) tool_repository = self._get_repository_by_name_and_owner(bwa_package_repository_name, common.test_user_1_name) preview_strings_displayed = [tool_repository.name, self.get_repository_tip(tool_repository)] self._install_repository( bwa_base_repository_name, common.test_user_1_name, category_name, - install_tool_dependencies=True, preview_strings_displayed=preview_strings_displayed, ) def test_0050_verify_installed_repositories(self): """Verify that the installed repositories are displayed properly.""" - base_repository = self.test_db_util.get_installed_repository_by_name_owner( + base_repository = self._get_installed_repository_by_name_owner( bwa_base_repository_name, common.test_user_1_name ) - tool_repository = self.test_db_util.get_installed_repository_by_name_owner( + tool_repository = self._get_installed_repository_by_name_owner( bwa_package_repository_name, common.test_user_1_name ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "bwa_base_repository_0100", base_repository.installed_changeset_revision ) - assert self.get_installed_repository_for( + assert self._get_installed_repository_for( common.test_user_1, "package_bwa_0_5_9_0100", tool_repository.installed_changeset_revision ) self._assert_has_installed_repository_dependency(base_repository, "package_bwa_0_5_9_0100") diff --git a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py 
b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py index aa68f5c33f09..db5139d4247d 100644 --- a/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py +++ b/lib/tool_shed/test/functional/test_1130_install_repository_with_invalid_repository_dependency.py @@ -19,6 +19,8 @@ class TestBasicRepositoryDependencies(ShedTwillTestCase): """Testing emboss 5 with repository dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -43,16 +45,10 @@ def test_0010_create_emboss_dependendent_column_maker_repository_and_upload_tarb ) if self.repository_is_new(column_maker_repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_emboss_5_repository_and_upload_files(self): @@ -68,16 +64,10 @@ def test_0020_create_emboss_5_repository_and_upload_files(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_generate_repository_dependency_with_invalid_url(self): @@ -168,7 +158,6 @@ def test_0040_generate_repository_dependency_with_invalid_changeset_revision(sel def test_0045_install_repository_with_invalid_repository_dependency(self): """Install the repository and verify that galaxy detects invalid repository dependencies.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) repository = self._get_repository_by_name_and_owner(emboss_repository_name, common.test_user_1_name) preview_strings_displayed = [ "emboss_0110", @@ -183,7 +172,7 @@ def test_0045_install_repository_with_invalid_repository_dependency(self): install_repository_dependencies=True, preview_strings_displayed=preview_strings_displayed, ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) json = self.display_installed_repository_manage_json(installed_repository) diff --git a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py index 354528effcb5..12067ac1bce8 100644 --- a/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py +++ b/lib/tool_shed/test/functional/test_1140_simple_repository_dependency_multiple_owners.py @@ -30,6 +30,8 @@ class TestInstallRepositoryMultipleOwners(ShedTwillTestCase): + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user. 
@@ -58,16 +60,10 @@ def test_0005_create_datatypes_repository(self): strings_displayed=strings_displayed, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blast_datatypes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "blast/blast_datatypes.tar", commit_message="Uploaded blast_datatypes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_verify_datatypes_repository(self): @@ -88,7 +84,8 @@ def test_0010_verify_datatypes_repository(self): "blastdbn", "blastdbp", ] - self.display_manage_repository_page(repository, strings_displayed=strings_displayed) + if not self.is_v2: + self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0015_create_tool_repository(self): """Create and populate the blastxml_to_top_descr_0120 repository @@ -110,13 +107,13 @@ def test_0015_create_tool_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="blast/blastxml_to_top_descr.tar", + "blast/blastxml_to_top_descr.tar", filepath=None, valid_tools_only=True, uncompress_file=True, - remove_repo_files_not_in_tar=False, + remove_repo_files_not_in_tar=True, commit_message="Uploaded blastxml_to_top_descr tarball.", strings_displayed=[], strings_not_displayed=[], @@ -130,7 +127,7 @@ def test_0020_verify_tool_repository(self): """ repository = self._get_repository_by_name_and_owner(tool_repository_name, common.test_user_1_name) strings_displayed = ["blastxml_to_top_descr_0120", "BLAST top hit descriptions", "Make a table from BLAST XML"] - strings_displayed.extend(["0.0.1", "Valid tools"]) + strings_displayed.extend(["0.0.1"]) self.display_manage_repository_page(repository, strings_displayed=strings_displayed) def test_0025_create_repository_dependency(self): @@ -174,7 +171,6 @@ def test_0045_install_blastxml_to_top_descr(self): We are at step 1, Galaxy side. Install blastxml_to_top_descr_0120 to Galaxy, with repository dependencies, so that the datatypes repository is also installed. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( name="blastxml_to_top_descr_0120", owner=common.test_user_1_name, @@ -191,8 +187,6 @@ def test_0050_verify_repository_installation(self): are now new datatypes in the registry matching the ones defined in blast_datatypes_0120. Also check that blast_datatypes_0120 is labeled as an installed repository dependency of blastxml_to_top_descr_0120. 
""" - tool_repository = self.test_db_util.get_installed_repository_by_name_owner( - tool_repository_name, common.test_user_1_name - ) + tool_repository = self._get_installed_repository_by_name_owner(tool_repository_name, common.test_user_1_name) self._assert_has_valid_tool_with_name("BLAST top hit") self._assert_repo_has_tool_with_id(tool_repository, "blastxml_to_top_descr") diff --git a/lib/tool_shed/test/functional/test_1160_tool_help_images.py b/lib/tool_shed/test/functional/test_1160_tool_help_images.py index 72717272d19b..add92d62dc55 100644 --- a/lib/tool_shed/test/functional/test_1160_tool_help_images.py +++ b/lib/tool_shed/test/functional/test_1160_tool_help_images.py @@ -23,6 +23,8 @@ class TestToolHelpImages(ShedTwillTestCase): """Test features related to tool help images.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -47,16 +49,10 @@ def test_0005_create_htseq_count_repository(self): ) if self.repository_is_new(repository): # Upload htseq_count.tar to the repository if it hasn't already been populated. - self.upload_file( + self.commit_tar_to_repository( repository, - filename="htseq_count/htseq_count.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "htseq_count/htseq_count.tar", commit_message="Uploaded htseq_count.tar.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_load_tool_page(self): @@ -73,6 +69,8 @@ def test_0010_load_tool_page(self): # should be the tool that contains a link to the image. repository_metadata = self._db_repository(repository).metadata_revisions[0].metadata tool_path = repository_metadata["tools"][0]["tool_config"] - self.load_display_tool_page( - repository, tool_path, changeset_revision, strings_displayed=[image_path], strings_not_displayed=[] - ) + # V2 is not going to have this page right? So... do we need this test at all or that route? Likely not? 
+ if self._browser.is_twill and not self.is_v2: + self.load_display_tool_page( + repository, tool_path, changeset_revision, strings_displayed=[image_path], strings_not_displayed=[] + ) diff --git a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py index 486db3241632..c126a29ca4b8 100644 --- a/lib/tool_shed/test/functional/test_1170_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1170_prior_installation_required.py @@ -38,9 +38,10 @@ class TestSimplePriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -59,16 +60,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -84,16 +79,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_repository_dependency(self): @@ -133,7 +122,6 @@ def test_0020_verify_repository_dependency(self): def test_0025_install_column_repository(self): """Install column_maker_0150.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) column_repository = self._get_repository_by_name_and_owner(column_repository_name, common.test_user_1_name) preview_strings_displayed = ["column_maker_0150", self.get_repository_tip(column_repository)] self._install_repository( @@ -147,10 +135,10 @@ def test_0025_install_column_repository(self): def test_0030_verify_installation_order(self): """Verify that convert_chars_0150 was installed before column_maker_0150.""" - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Column maker was selected for installation, so convert chars should have been installed first, as reflected by the update_time field. 
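Note for reviewers: the change repeated across these files swaps the twill-era upload_file helper for the slimmer commit_tar_to_repository and add_file_to_repository helpers, and drops the explicit galaxy_login calls that the new class-level requires_galaxy = True flag makes unnecessary. A minimal before/after sketch of the pattern, assembled only from the hunks above (the dropped keyword defaults are assumed from this diff, not verified against the helper signatures):

    # Before: twill-oriented helper with browser-era keyword arguments.
    self.upload_file(
        repository,
        filename="convert_chars/convert_chars.tar",
        filepath=None,
        valid_tools_only=True,
        uncompress_file=True,
        remove_repo_files_not_in_tar=False,
        commit_message="Uploaded convert_chars tarball.",
        strings_displayed=[],
        strings_not_displayed=[],
    )

    # After: API-oriented helper; only the repository, the tarball path,
    # and the commit message remain.
    self.commit_tar_to_repository(
        repository,
        "convert_chars/convert_chars.tar",
        commit_message="Uploaded convert_chars tarball.",
    )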
diff --git a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py index 621688811161..755c823211cc 100644 --- a/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1180_circular_prior_installation_required.py @@ -51,9 +51,10 @@ class TestSimplePriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -72,16 +73,10 @@ def test_0005_create_convert_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="convert_chars/convert_chars.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "convert_chars/convert_chars.tar", commit_message="Uploaded convert_chars tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -97,16 +92,10 @@ def test_0010_create_column_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_filtering_repository(self): @@ -122,16 +111,10 @@ def test_0015_create_filtering_repository(self): strings_displayed=[], ) if running_standalone: - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_create_repository_dependency(self): @@ -214,7 +197,6 @@ def test_0025_verify_repository_dependency(self): def test_0030_install_filtering_repository(self): """Install the filtering_0160 repository.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) filter_repository = self._get_repository_by_name_and_owner(filter_repository_name, common.test_user_1_name) preview_strings_displayed = ["filtering_0160", self.get_repository_tip(filter_repository)] self._install_repository( @@ -228,13 +210,13 @@ def test_0030_install_filtering_repository(self): def test_0035_verify_installation_order(self): """Verify that convert_chars_0160 and column_maker_0160 were installed before filtering_0160.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = 
self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Filtering was selected for installation, so convert chars and column maker should have been installed first. @@ -247,13 +229,13 @@ def test_0035_verify_installation_order(self): def test_0040_deactivate_all_repositories(self): """Uninstall convert_chars_0160, column_maker_0160, and filtering_0160.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) self.deactivate_repository(filter_repository) @@ -262,7 +244,7 @@ def test_0040_deactivate_all_repositories(self): def test_0045_reactivate_filter_repository(self): """Reinstall the filtering_0160 repository.""" - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) self.reactivate_repository(filter_repository) @@ -273,18 +255,19 @@ def test_0045_reactivate_filter_repository(self): def test_0050_verify_reinstallation_order(self): """Verify that convert_chars_0160 and column_maker_0160 were reinstalled before filtering_0160.""" # Fixme: this test is not covering any important behavior since repositories were only deactivated and not uninstalled. - filter_repository = self.test_db_util.get_installed_repository_by_name_owner( + filter_repository = self._get_installed_repository_by_name_owner( filter_repository_name, common.test_user_1_name ) - column_repository = self.test_db_util.get_installed_repository_by_name_owner( + column_repository = self._get_installed_repository_by_name_owner( column_repository_name, common.test_user_1_name ) - convert_repository = self.test_db_util.get_installed_repository_by_name_owner( + convert_repository = self._get_installed_repository_by_name_owner( convert_repository_name, common.test_user_1_name ) # Filtering was selected for reinstallation, so convert chars and column maker should have been installed first. 
- for repo in [convert_repository, column_repository, filter_repository]: - self.test_db_util.install_session().refresh(repo) + if self.full_stack_galaxy: + for repo in [convert_repository, column_repository, filter_repository]: + self.test_db_util.install_session().refresh(repo) assert ( filter_repository.update_time > convert_repository.update_time ), "Prior installed convert_chars_0160 shows a later update time than filtering_0160" diff --git a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py index af5b08617a20..b49ae8998ca2 100644 --- a/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py +++ b/lib/tool_shed/test/functional/test_1190_complex_prior_installation_required.py @@ -36,6 +36,8 @@ class TestComplexPriorInstallation(ShedTwillTestCase): """Test features related to datatype converters.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -59,18 +61,10 @@ def test_0005_create_matplotlib_repository(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_matplotlib/package_matplotlib_1_2.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_matplotlib/package_matplotlib_1_2.tar", commit_message="Uploaded matplotlib tool dependency tarball.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], ) def test_0010_create_numpy_repository(self): @@ -90,18 +84,10 @@ def test_0010_create_numpy_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="package_numpy/package_numpy_1_7.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "package_numpy/package_numpy_1_7.tar", commit_message="Uploaded numpy tool dependency tarball.", - strings_displayed=[ - "This repository currently contains a single file named tool_dependencies.xml" - ], - strings_not_displayed=[], ) def test_0015_create_complex_repository_dependency(self): @@ -134,18 +120,7 @@ def test_0015_create_complex_repository_dependency(self): dependency_xml_path = self.generate_temp_path("test_0170", additional_paths=["matplotlib"]) new_xml_file = os.path.join(dependency_xml_path, "tool_dependencies.xml") open(new_xml_file, "w").write(original_xml.replace("", processed_xml)) - # Upload the generated complex repository dependency XML to the matplotlib repository. - self.upload_file( - matplotlib_repository, - filename="tool_dependencies.xml", - filepath=dependency_xml_path, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded complex repository dependency on numpy 1.7.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(matplotlib_repository, dependency_xml_path, "tool_dependencies.xml") def test_0020_verify_generated_dependency(self): """Verify that matplotlib now has a package tool dependency and a complex repository dependency. 
@@ -161,16 +136,16 @@ def test_0020_verify_generated_dependency(self): ) changeset_revision = self.get_repository_tip(numpy_repository) self.check_repository_dependency(matplotlib_repository, depends_on_repository=numpy_repository) - self.display_manage_repository_page( - matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] - ) + if not self.is_v2: + self.display_manage_repository_page( + matplotlib_repository, strings_displayed=["numpy", "1.7", "package", changeset_revision] + ) def test_0025_install_matplotlib_repository(self): """Install the package_matplotlib_1_2_0170 repository. This is step 4 - Install package_matplotlib_1_2_0170 with repository dependencies. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) matplotlib_repository = self._get_repository_by_name_and_owner( matplotlib_repository_name, common.test_user_1_name ) @@ -192,12 +167,10 @@ def test_0030_verify_installation_order(self): prior_installation_required attribute set. Confirm that this resulted in package_numpy_1_7_0170 being installed before package_matplotlib_1_2_0170. """ - matplotlib_repository = self.test_db_util.get_installed_repository_by_name_owner( + matplotlib_repository = self._get_installed_repository_by_name_owner( matplotlib_repository_name, common.test_user_1_name ) - numpy_repository = self.test_db_util.get_installed_repository_by_name_owner( - numpy_repository_name, common.test_user_1_name - ) + numpy_repository = self._get_installed_repository_by_name_owner(numpy_repository_name, common.test_user_1_name) assert ( matplotlib_repository.update_time > numpy_repository.update_time ), "Error: package_numpy_1_7_0170 shows a later update time than package_matplotlib_1_2_0170" diff --git a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py index f896bc68ab5d..ca7d95a38507 100644 --- a/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py +++ b/lib/tool_shed/test/functional/test_1200_uninstall_and_reinstall_basic_repository.py @@ -7,11 +7,12 @@ class TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a basic repository.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_ensure_repositories_and_categories_exist(self): """Create the 0000 category and upload the filtering repository to the tool shed, if necessary.""" @@ -30,54 +31,17 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_0000.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 1.1.0", - strings_displayed=[], - 
strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme for 2.2.0", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_file_to_repository(repository, "filtering/filtering_0000.txt") + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") + self.add_file_to_repository(repository, "readme.txt") def test_0010_install_filtering_repository(self): """Install the filtering repository into the Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "filtering_0000", common.test_user_1_name, @@ -88,17 +52,13 @@ def test_0010_install_filtering_repository(self): def test_0015_uninstall_filtering_repository(self): """Uninstall the filtering repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) - self.uninstall_repository(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filtering_0000") def test_0020_reinstall_filtering_repository(self): """Reinstall the filtering repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.reinstall_repository_api(installed_repository) self._assert_has_installed_repos_with_names("filtering_0000") self._assert_has_valid_tool_with_name("Filter1") @@ -106,17 +66,13 @@ def test_0020_reinstall_filtering_repository(self): def test_0025_deactivate_filtering_repository(self): """Deactivate the filtering repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.deactivate_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filtering_0000") def test_0030_reactivate_filtering_repository(self): """Reactivate the filtering repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filtering_0000", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("filtering_0000", common.test_user_1_name) self.reactivate_repository(installed_repository) self._assert_has_installed_repos_with_names("filtering_0000") self._assert_has_valid_tool_with_name("Filter1") diff --git a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py index f79935b79f49..66be32573652 100644 --- 
a/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py +++ b/lib/tool_shed/test/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py @@ -9,9 +9,10 @@ class TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a repository with tool dependencies.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -30,82 +31,31 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool xml.", - strings_displayed=[ - "Metadata may have been defined", - "This file requires an entry", - "tool_data_table_conf", - ], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded the tool data table sample file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool data table .loc file.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "malformed_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded malformed tool dependency XML.", - strings_displayed=["Exception attempting to parse", "invalid element name"], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=False, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded invalid tool dependency XML.", - strings_displayed=[ - "The settings for name, version and type from a contained tool configuration" - ], - strings_not_displayed=[], + strings_displayed = [ + "Metadata may have been defined", + "This file requires an entry", + "tool_data_table_conf", + ] + self.add_file_to_repository(repository, "freebayes/freebayes.xml", strings_displayed=strings_displayed) + strings_displayed = ["Upload a file named sam_fa_indices.loc.sample"] + self.add_file_to_repository( + repository, "freebayes/tool_data_table_conf.xml.sample", strings_displayed=strings_displayed ) - self.upload_file( - repository, - filename=os.path.join("freebayes", "tool_dependencies.xml"), - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded valid tool dependency XML.", - strings_displayed=[], - strings_not_displayed=[], + self.add_file_to_repository(repository, "freebayes/sam_fa_indices.loc.sample") + target = os.path.join("freebayes", "malformed_tool_dependencies", 
"tool_dependencies.xml") + self.add_file_to_repository( + repository, target, strings_displayed=["Exception attempting to parse", "invalid element name"] ) + target = os.path.join("freebayes", "invalid_tool_dependencies", "tool_dependencies.xml") + strings_displayed = [ + "The settings for name, version and type from a contained tool configuration" + ] + self.add_file_to_repository(repository, target, strings_displayed=strings_displayed) + target = os.path.join("freebayes", "tool_dependencies.xml") + self.add_file_to_repository(repository, target) def test_0010_install_freebayes_repository(self): """Install the freebayes repository into the Galaxy instance.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "freebayes_0010", common.test_user_1_name, @@ -116,17 +66,13 @@ def test_0010_install_freebayes_repository(self): def test_0015_uninstall_freebayes_repository(self): """Uninstall the freebayes repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) - self.uninstall_repository(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("freebayes_0010") def test_0020_reinstall_freebayes_repository(self): """Reinstall the freebayes repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.reinstall_repository_api(installed_repository) self._assert_has_installed_repos_with_names("freebayes_0010") self._assert_has_valid_tool_with_name("FreeBayes") @@ -134,17 +80,13 @@ def test_0020_reinstall_freebayes_repository(self): def test_0025_deactivate_freebayes_repository(self): """Deactivate the freebayes repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.deactivate_repository(installed_repository) self._assert_has_no_installed_repos_with_names("freebayes_0010") def test_0030_reactivate_freebayes_repository(self): """Reactivate the freebayes repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "freebayes_0010", common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner("freebayes_0010", common.test_user_1_name) self.reactivate_repository(installed_repository) self._assert_has_installed_repos_with_names("freebayes_0010") self._assert_has_valid_tool_with_name("FreeBayes") diff --git a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py index a48611c0f473..91720f67bfe1 100644 --- a/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py +++ b/lib/tool_shed/test/functional/test_1230_uninstall_reinstall_repository_with_dependency_revisions.py @@ -19,9 +19,10 @@ class 
TestUninstallingAndReinstallingRepositories(ShedTwillTestCase): """Test uninstalling and reinstalling a repository with repository dependency revisions.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) @@ -51,16 +52,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], + "column_maker/column_maker.tar", + commit_message="Uploaded column maker tarball.", ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) column_maker_tuple = ( @@ -82,16 +77,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "6"]) column_maker_tuple = ( @@ -113,16 +102,10 @@ def test_0005_ensure_repositories_and_categories_exist(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_1030", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -151,7 +134,6 @@ def test_0005_ensure_repositories_and_categories_exist(self): def test_0010_install_emboss_repository(self): """Install the emboss repository into the Galaxy instance.""" global running_standalone - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( emboss_repository_name, common.test_user_1_name, @@ -162,15 +144,15 @@ def test_0010_install_emboss_repository(self): def test_0015_uninstall_emboss_repository(self): """Uninstall the emboss repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) - self.uninstall_repository(installed_repository) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names(emboss_repository_name) def test_0020_reinstall_emboss_repository(self): """Reinstall the emboss repository.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.reinstall_repository_api(installed_repository) @@ 
-179,7 +161,7 @@ def test_0020_reinstall_emboss_repository(self): def test_0025_deactivate_emboss_repository(self): """Deactivate the emboss repository without removing it from disk.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.deactivate_repository(installed_repository) @@ -187,7 +169,7 @@ def test_0025_deactivate_emboss_repository(self): def test_0030_reactivate_emboss_repository(self): """Reactivate the emboss repository and verify that it now shows up in the list of installed repositories.""" - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( + installed_repository = self._get_installed_repository_by_name_owner( emboss_repository_name, common.test_user_1_name ) self.reactivate_repository(installed_repository) diff --git a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py index 424f059c9685..7115bf47d02f 100644 --- a/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py +++ b/lib/tool_shed/test/functional/test_1300_reset_all_metadata.py @@ -44,6 +44,8 @@ class TestResetInstalledRepositoryMetadata(ShedTwillTestCase): """Verify that the "Reset selected metadata" feature works.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -79,28 +81,12 @@ def test_0010_create_repositories_from_0000_series(self): ) if self.repository_is_new(repository): running_standalone = True - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="filtering/filtering_2.2.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded filtering 2.2.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) + self.add_tar_to_repository(repository, "filtering/filtering_2.2.0.tar") def test_0015_create_repositories_from_0010_series(self): """Create repository freebayes_0010.""" @@ -114,50 +100,7 @@ def test_0015_create_repositories_from_0010_series(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( - repository, - filename="freebayes/freebayes.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded freebayes.xml.", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/tool_data_table_conf.xml.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_data_table_conf.xml.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - self.upload_file( - repository, - filename="freebayes/sam_fa_indices.loc.sample", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded sam_fa_indices.loc.sample", - strings_displayed=[], - strings_not_displayed=[], - ) - 
self.upload_file( - repository, - filename="freebayes/tool_dependencies.xml", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded tool_dependencies.xml", - strings_displayed=[], - strings_not_displayed=[], - ) + self.setup_freebayes_0010_repo(repository) def test_0020_create_repositories_from_0020_series(self): """Create repositories emboss_0020 and column_maker_0020 if necessary.""" @@ -171,16 +114,10 @@ def test_0020_create_repositories_from_0020_series(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded column_maker tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self.get_or_create_repository( name="emboss_0020", @@ -190,16 +127,10 @@ def test_0020_create_repositories_from_0020_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) def test_0025_create_repositories_from_0030_series(self): @@ -215,16 +146,10 @@ def test_0025_create_repositories_from_0030_series(self): strings_displayed=[], ) if self.repository_is_new(column_maker_repository): - self.upload_file( + self.commit_tar_to_repository( column_maker_repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=False, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Uploaded bismark tarball.", - strings_displayed=[], - strings_not_displayed=[], ) emboss_5_repository = self.get_or_create_repository( name="emboss_5_0030", @@ -234,16 +159,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_5_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -265,16 +184,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_6_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "6"]) dependency_tuple = ( @@ -296,16 +209,10 @@ def test_0025_create_repositories_from_0030_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - 
uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0330", additional_paths=["emboss", "5"]) dependency_tuple = ( @@ -343,16 +250,10 @@ def test_0030_create_repositories_from_0040_series(self): strings_displayed=[], ) if self.repository_is_new(repository): - self.upload_file( + self.commit_tar_to_repository( repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded the tool tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self.get_or_create_repository( name="filtering_0040", @@ -362,16 +263,10 @@ def test_0030_create_repositories_from_0040_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded the tool tarball for filtering 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) repository = self._get_repository_by_name_and_owner("freebayes_0040", common.test_user_1_name) filtering_repository = self._get_repository_by_name_and_owner("filtering_0040", common.test_user_1_name) @@ -430,38 +325,20 @@ def test_0035_create_repositories_from_0050_series(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( emboss_repository, - filename="emboss/emboss.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "emboss/emboss.tar", commit_message="Uploaded emboss.tar", - strings_displayed=[], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( freebayes_repository, - filename="freebayes/freebayes.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "freebayes/freebayes.tar", commit_message="Uploaded freebayes tarball.", - strings_displayed=[], - strings_not_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( filtering_repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) repository_dependencies_path = self.generate_temp_path("test_0350", additional_paths=["emboss"]) repository_dependencies_path = self.generate_temp_path("test_0350", additional_paths=["filtering"]) @@ -516,7 +393,6 @@ def test_0035_create_repositories_from_0050_series(self): def test_9900_install_all_missing_repositories(self): """Call the install_repository method to ensure that all required repositories are installed.""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository("filtering_0000", common.test_user_1_name, category_0000_name) self._install_repository("freebayes_0010", common.test_user_1_name, category_0010_name) self._install_repository("emboss_0020", common.test_user_1_name, category_0020_name) @@ -525,14 +401,13 @@ def test_9900_install_all_missing_repositories(self): def 
test_9905_reset_metadata_on_all_repositories(self): """Reset metadata on all repositories, then verify that it has not changed.""" - repository_metadata = dict() - repositories = self.test_db_util.get_all_installed_repositories(actually_installed=True) - for repository in repositories: - repository_metadata[self.security.encode_id(repository.id)] = repository.metadata_ - self.reset_metadata_on_selected_installed_repositories(list(repository_metadata.keys())) + repositories = self.get_all_installed_repositories() + repository_metadata = {} for repository in repositories: - self.test_db_util.ga_refresh(repository) - old_metadata = repository_metadata[self.security.encode_id(repository.id)] + repository_metadata[repository.id] = repository.metadata_ + self.reset_metadata_on_installed_repositories(repositories) + for repository in self.get_all_installed_repositories(): + old_metadata = repository_metadata[repository.id] # When a repository with tools to be displayed in a tool panel section is deactivated and reinstalled, # the tool panel section remains in the repository metadata. However, when the repository's metadata # is subsequently reset, the tool panel section is removed from the repository metadata. While this diff --git a/lib/tool_shed/test/functional/test_1410_update_manager.py b/lib/tool_shed/test/functional/test_1410_update_manager.py index 98164c77daa3..7debeaf57c90 100644 --- a/lib/tool_shed/test/functional/test_1410_update_manager.py +++ b/lib/tool_shed/test/functional/test_1410_update_manager.py @@ -26,6 +26,8 @@ class TestUpdateManager(ShedTwillTestCase): """Test the Galaxy update manager.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts and login as an admin user. @@ -34,7 +36,6 @@ def test_0000_initiate_users(self): """ self.login(email=common.test_user_1_email, username=common.test_user_1_name) self.login(email=common.admin_email, username=common.admin_username) - self.galaxy_login(email=common.admin_email, username=common.admin_username) def test_0005_create_filtering_repository(self): """Create and populate the filtering_1410 repository. @@ -52,16 +53,10 @@ def test_0005_create_filtering_repository(self): owner=common.test_user_1_name, category=category, ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=True, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_install_filtering_repository(self): @@ -70,13 +65,10 @@ def test_0010_install_filtering_repository(self): We are at step 2 - Install filtering_1410 to Galaxy. Install the filtering repository to Galaxy. 
""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( repository_name, common.test_user_1_name, category_name, new_tool_panel_section_label="test_1410" ) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self._assert_has_installed_repos_with_names(repository_name) self._assert_has_valid_tool_with_name("Filter") self._assert_repo_has_tool_with_id(installed_repository, "Filter1") @@ -91,17 +83,7 @@ def test_0015_upload_readme_file(self): """ self.login(email=common.test_user_1_email, username=common.test_user_1_name) repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme.txt", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "readme.txt") def test_0020_check_for_displayed_update(self): """Browse installed repositories and verify update. @@ -111,10 +93,7 @@ def test_0020_check_for_displayed_update(self): """ # Wait 3 seconds, just to be sure we're past hours_between_check. time.sleep(3) - self.galaxy_login(email=common.admin_email, username=common.admin_username) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - response = self.update_installed_repository_api(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + response = self.update_installed_repository(installed_repository) assert response["status"] == "ok" assert "has been updated" in response["message"] diff --git a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py index e4bf404185e6..01cbdbea57e5 100644 --- a/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py +++ b/lib/tool_shed/test/functional/test_1430_repair_installed_repository.py @@ -37,6 +37,8 @@ class TestRepairRepository(ShedTwillTestCase): """Test repairing an installed repository.""" + requires_galaxy = True + def test_0000_initiate_users_and_category(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.admin_email, username=common.admin_username) @@ -60,16 +62,10 @@ def test_0005_create_filter_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Populate filter_1430 with version 1.1.0.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_create_column_repository(self): @@ -88,16 +84,10 @@ def test_0010_create_column_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="column_maker/column_maker.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "column_maker/column_maker.tar", commit_message="Populate column_1430 
with tool definitions.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0015_create_repository_dependency(self): @@ -123,7 +113,6 @@ def test_0020_install_column_repository(self): handle repository dependencies so that the filter_1430 repository is also installed. Make sure to install the repositories in a specified section of the tool panel. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( "column_1430", common.test_user_1_name, @@ -138,8 +127,6 @@ def test_0025_uninstall_filter_repository(self): This is step 2 - Uninstall the filter_1430 repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - "filter_1430", common.test_user_1_name - ) - self.uninstall_repository(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner("filter_1430", common.test_user_1_name) + self._uninstall_repository(installed_repository) self._assert_has_no_installed_repos_with_names("filter_1430") diff --git a/lib/tool_shed/test/functional/test_1460_data_managers.py b/lib/tool_shed/test/functional/test_1460_data_managers.py index ec514898e7af..42ad60b4ea6d 100644 --- a/lib/tool_shed/test/functional/test_1460_data_managers.py +++ b/lib/tool_shed/test/functional/test_1460_data_managers.py @@ -30,6 +30,8 @@ class TestDataManagers(ShedTwillTestCase): """Test installing a repository containing a Data Manager.""" + requires_galaxy = True + def test_0000_initiate_users_and_category(self): """Create necessary user accounts and login as an admin user.""" self.login(email=common.admin_email, username=common.admin_username) @@ -53,17 +55,12 @@ def test_0010_create_data_manager_repository(self): category=category, strings_displayed=[], ) + assert repository, f"No repository created with name {data_manager_repository_name}" # Upload the data manager files to the repository. - self.upload_file( + self.commit_tar_to_repository( repository, - filename=data_manager_tar_file, - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + data_manager_tar_file, commit_message=f"Populate {data_manager_repository_name} with a data manager configuration.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0020_install_data_manager_repository(self): @@ -71,23 +68,21 @@ def test_0020_install_data_manager_repository(self): This is step 3 - Attempt to install the repository into a galaxy instance, verify that it is installed.
""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( data_manager_repository_name, common.test_user_1_name, category_name, - install_tool_dependencies=True, + install_tool_dependencies=False, ) def test_0030_verify_data_manager_tool(self): """Verify that the data_manager_1460 repository is installed and Data Manager tool appears in list in Galaxy.""" - repository = self.test_db_util.get_installed_repository_by_name_owner( - data_manager_repository_name, common.test_user_1_name - ) - strings_displayed = ["status", "jobs", data_manager_name] - self.display_installed_jobs_list_page( - repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed - ) + repository = self._get_installed_repository_by_name_owner(data_manager_repository_name, common.test_user_1_name) + if self.full_stack_galaxy: + strings_displayed = ["status", "jobs", data_manager_name] + self.display_installed_jobs_list_page( + repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed + ) def test_0040_verify_data_manager_data_table(self): """Verify that the installed repository populated shed_tool_data_table.xml and the sample files.""" diff --git a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py index 6a868e4a2654..fa3ac3cf55be 100644 --- a/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py +++ b/lib/tool_shed/test/functional/test_1470_updating_installed_repositories.py @@ -29,6 +29,8 @@ class TestUpdateInstalledRepository(ShedTwillTestCase): """Verify that the code correctly handles updating an installed repository, then uninstalling and reinstalling.""" + requires_galaxy = True + def test_0000_initiate_users(self): """Create necessary user accounts.""" self.login(email=common.test_user_1_email, username=common.test_user_1_name) @@ -46,16 +48,10 @@ def test_0005_create_filtering_repository(self): category=category, strings_displayed=[], ) - self.upload_file( + self.commit_tar_to_repository( repository, - filename="filtering/filtering_1.1.0.tar", - filepath=None, - valid_tools_only=True, - uncompress_file=True, - remove_repo_files_not_in_tar=False, + "filtering/filtering_1.1.0.tar", commit_message="Uploaded filtering 1.1.0 tarball.", - strings_displayed=[], - strings_not_displayed=[], ) def test_0010_install_filtering_to_galaxy(self): @@ -63,7 +59,6 @@ def test_0010_install_filtering_to_galaxy(self): This is step 1 - Install a repository into Galaxy. """ - self.galaxy_login(email=common.admin_email, username=common.admin_username) self._install_repository( repository_name, common.test_user_1_name, @@ -84,47 +79,30 @@ def test_0015_update_repository(self): """ self.login(email=common.test_user_1_email, username=common.test_user_1_name) repository = self._get_repository_by_name_and_owner(repository_name, common.test_user_1_name) - self.upload_file( - repository, - filename="filtering/readme.txt", - filepath=None, - valid_tools_only=True, - uncompress_file=False, - remove_repo_files_not_in_tar=False, - commit_message="Uploaded readme.", - strings_displayed=[], - strings_not_displayed=[], - ) + self.add_file_to_repository(repository, "filtering/readme.txt") def test_0020_get_repository_updates(self): """Get updates to the installed repository. This is step 3 - In Galaxy, get updates to the repository. 
""" - self.galaxy_login(email=common.admin_email, username=common.admin_username) - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - self.update_installed_repository_api(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + self.update_installed_repository(installed_repository) def test_0025_uninstall_repository(self): """Uninstall the filtering_1470 repository. This is step 4 - In Galaxy, uninstall the repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) - self.uninstall_repository(installed_repository) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) + self._uninstall_repository(installed_repository) def test_0030_reinstall_repository(self): """Reinstall the filtering_1470 repository. This is step 5 - In Galaxy, reinstall the repository. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name - ) + installed_repository = self._get_installed_repository_by_name_owner(repository_name, common.test_user_1_name) self.reinstall_repository_api(installed_repository) def test_0035_verify_absence_of_ghosts(self): @@ -132,9 +110,7 @@ def test_0035_verify_absence_of_ghosts(self): This is step 6 - Make sure step 5 created no white ghosts. """ - installed_repository = self.test_db_util.get_installed_repository_by_name_owner( - repository_name, common.test_user_1_name, return_multiple=True - ) + installed_repository = self._get_installed_repositories_by_name_owner(repository_name, common.test_user_1_name) assert ( len(installed_repository) == 1 ), 'Multiple filtering repositories found in the Galaxy database, possibly indicating a "white ghost" scenario.' 
diff --git a/lib/tool_shed/test/functional/test_frontend_login.py b/lib/tool_shed/test/functional/test_frontend_login.py new file mode 100644 index 000000000000..1e752c37e934 --- /dev/null +++ b/lib/tool_shed/test/functional/test_frontend_login.py @@ -0,0 +1,76 @@ +from playwright.sync_api import ( + expect, + Page, +) + +from galaxy_test.base.api_util import random_name +from ..base.api import skip_if_api_v1 +from ..base.playwrightbrowser import ( + Locators, + PlaywrightShedBrowser, +) +from ..base.twilltestcase import ShedTwillTestCase + + +class PlaywrightTestCase(ShedTwillTestCase): + @property + def _playwright_browser(self) -> PlaywrightShedBrowser: + browser = self._browser + assert isinstance(browser, PlaywrightShedBrowser) + return browser + + @property + def _page(self) -> Page: + return self._playwright_browser._page + + +TEST_PASSWORD = "testpass" + + +class TestFrontendLogin(PlaywrightTestCase): + @skip_if_api_v1 + def test_register(self): + self.visit_url("/") + page = self._page + expect(page.locator(Locators.toolbar_login)).to_be_visible() + page.click(Locators.toolbar_login) + expect(page.locator(Locators.login_submit_button)).to_be_visible() + expect(page.locator(Locators.register_link)).to_be_visible() + page.click(Locators.register_link) + user = random_name(prefix="shduser") + self._submit_register_form( + f"{user}@galaxyproject.org", + TEST_PASSWORD, + user, + ) + expect(page.locator(Locators.login_submit_button)).to_be_visible() + + @skip_if_api_v1 + def test_create(self): + user = random_name(prefix="shduser") + self.create( + email=f"{user}@galaxyproject.org", + password=TEST_PASSWORD, + username=user, + ) + + @skip_if_api_v1 + def test_logout(self): + self._create_and_login() + self._playwright_browser.expect_logged_in() + self._playwright_browser.logout_if_logged_in() + self._playwright_browser.expect_not_logged_in() + + @skip_if_api_v1 + def test_change_password(self): + self._create_and_login() + + def _create_and_login(self): + user = random_name(prefix="shduser") + email = f"{user}@galaxyproject.org" + self.create( + email=email, + password=TEST_PASSWORD, + username=user, + ) + self.login(email, TEST_PASSWORD, username=user, redirect=None) diff --git a/lib/tool_shed/test/functional/test_galaxy_install.py b/lib/tool_shed/test/functional/test_galaxy_install.py index d46fa0a9b45b..227e41aa49c1 100644 --- a/lib/tool_shed/test/functional/test_galaxy_install.py +++ b/lib/tool_shed/test/functional/test_galaxy_install.py @@ -12,7 +12,7 @@ def test_install_simple_tool(self): self.install_repository(owner, name, latest_install_revision, tool_shed_url=self.url) response = self.galaxy_interactor._get("tools?in_panel=False") response.raise_for_status() - expected_tool = f"{self.host}:{self.port}/repos/{owner}/{name}/Add_a_column1/1.1.0" + expected_tool = populator.tool_guid(self, repository, "Add_a_column1", "1.1.0") tool_ids = [t["id"] for t in response.json()] assert expected_tool in tool_ids, f"Didn't find {expected_tool} in {tool_ids}" diff --git a/lib/tool_shed/test/functional/test_shed_configuration.py b/lib/tool_shed/test/functional/test_shed_configuration.py new file mode 100644 index 000000000000..5e631f2dfeb9 --- /dev/null +++ b/lib/tool_shed/test/functional/test_shed_configuration.py @@ -0,0 +1,8 @@ +from ..base.api import ShedApiTestCase + + +class TestShedConfigurationApi(ShedApiTestCase): + def test_version(self) -> None: + version = self.populator.version() + assert version.version + assert version.version_major diff --git 
a/lib/tool_shed/test/functional/test_shed_graphql.py b/lib/tool_shed/test/functional/test_shed_graphql.py new file mode 100644 index 000000000000..c427732872d8 --- /dev/null +++ b/lib/tool_shed/test/functional/test_shed_graphql.py @@ -0,0 +1,21 @@ +from galaxy_test.base.api_asserts import assert_status_code_is_ok +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) + + +class TestShedGraphqlApi(ShedApiTestCase): + @skip_if_api_v1 + def test_graphql_query(self): + populator = self.populator + category = populator.new_category(prefix="testcreate") + json = {"query": r"query { categories { name } }"} + response = self.api_interactor.post("graphql/", json=json) + assert_status_code_is_ok(response) + result = response.json() + assert "data" in result + data = result["data"] + assert "categories" in data + categories = data["categories"] + assert category.name in [c["name"] for c in categories] diff --git a/lib/tool_shed/test/functional/test_shed_repositories.py b/lib/tool_shed/test/functional/test_shed_repositories.py index f2fba654d2ad..90e9b8134059 100644 --- a/lib/tool_shed/test/functional/test_shed_repositories.py +++ b/lib/tool_shed/test/functional/test_shed_repositories.py @@ -5,12 +5,22 @@ from galaxy.util.compression_utils import CompressedFile from galaxy.util.resources import resource_path from galaxy_test.base import api_asserts -from tool_shed.test.base.populators import repo_tars -from ..base.api import ShedApiTestCase +from tool_shed.test.base.api_util import create_user +from tool_shed.test.base.populators import ( + HasRepositoryId, + repo_tars, +) +from tool_shed_client.schema import RepositoryRevisionMetadata +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, + skip_if_api_v2, +) COLUMN_MAKER_PATH = resource_path(__package__, "../test_data/column_maker/column_maker.tar") +# test_0000 tests commit_message - find a way to test it here class TestShedRepositoriesApi(ShedApiTestCase): def test_create(self): populator = self.populator @@ -51,7 +61,17 @@ def test_metadata_simple(self): assert only_revision.downloadable assert not only_revision.malicious + def test_metadata_invalid_tools(self): + populator = self.populator + repository = populator.setup_bismark_repo() + repository_metadata = populator.get_metadata(repository) + assert repository_metadata + for _, value in repository_metadata.__root__.items(): + assert value.invalid_tools + def test_index_simple(self): + # Logic and typing are pretty different if given a tool id to search for - this should + # be tested or dropped in v2.
populator = self.populator repo = populator.setup_column_maker_repo(prefix="repoforindex") repository_id = repo.id @@ -66,6 +86,64 @@ def test_index_simple(self): assert repository.owner == repo.owner assert repository.name == repo.name + @skip_if_api_v1 + def test_allow_push(self): + populator = self.populator + request = { + "email": "sharewith@galaxyproject.org", + "username": "sharewith", + "password": "pAssworD1", + } + create_user(self.admin_api_interactor, request) + request = { + "email": "alsosharewith@galaxyproject.org", + "username": "alsosharewith", + "password": "pAssworD2", + } + create_user(self.admin_api_interactor, request) + + repo = populator.setup_column_maker_repo(prefix="repoforindex") + assert "sharewith" not in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" not in populator.get_usernames_allowed_to_push(repo) + + populator.allow_user_to_push(repo, "sharewith") + assert "sharewith" in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" not in populator.get_usernames_allowed_to_push(repo) + + populator.allow_user_to_push(repo, "alsosharewith") + assert "sharewith" in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" in populator.get_usernames_allowed_to_push(repo) + + populator.disallow_user_to_push(repo, "sharewith") + assert "sharewith" not in populator.get_usernames_allowed_to_push(repo) + assert "alsosharewith" in populator.get_usernames_allowed_to_push(repo) + + @skip_if_api_v1 + def test_set_malicious(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="repoformalicious") + + only_revision = self._get_only_revision(repository) + assert only_revision.downloadable + assert not only_revision.malicious + + assert not populator.tip_is_malicious(repository) + populator.set_malicious(repository, only_revision.changeset_revision) + assert populator.tip_is_malicious(repository) + populator.unset_malicious(repository, only_revision.changeset_revision) + assert not populator.tip_is_malicious(repository) + + @skip_if_api_v1 + def test_set_deprecated(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="repofordeprecated") + assert not repository.deprecated + assert not populator.is_deprecated(repository) + populator.set_deprecated(repository) + assert populator.is_deprecated(repository) + populator.unset_deprecated(repository) + assert not populator.is_deprecated(repository) + def test_install_info(self): # actually installing requires a whole Galaxy setup and the install manager but # we can test the response validates against the future facing InstallInfo pydandic @@ -123,6 +201,19 @@ def test_repo_tars(self): else: raise AssertionError("Wrong number of repo tars returned...") + @skip_if_api_v1 + def test_readmes(self): + populator = self.populator + repository = populator.setup_test_data_repo("column_maker_with_readme") + only_revision = self._get_only_revision(repository) + populator.assert_has_n_installable_revisions(repository, 1) + response = self.api_interactor.get( + f"repositories/{repository.id}/revisions/{only_revision.changeset_revision}/readmes" + ) + api_asserts.assert_status_code_is_ok(response) + readme_dicts = response.json() + assert "readme.txt" in readme_dicts + def test_reset_on_simple_repository(self): populator = self.populator repository = populator.setup_test_data_repo("column_maker") @@ -144,6 +235,7 @@ def test_reset_with_uninstallable_revisions(self): api_asserts.assert_status_code_is_ok(response) 
populator.assert_has_n_installable_revisions(repository, 3) + @skip_if_api_v2 def test_reset_all(self): populator = self.populator repository = populator.setup_test_data_repo("column_maker_with_download_gaps") @@ -156,3 +248,14 @@ def test_reset_all(self): ) api_asserts.assert_status_code_is_ok(response) populator.assert_has_n_installable_revisions(repository, 3) + + def _get_only_revision(self, repository: HasRepositoryId) -> RepositoryRevisionMetadata: + populator = self.populator + repository_metadata = populator.get_metadata(repository) + metadata_for_revisions = repository_metadata.__root__ + assert len(metadata_for_revisions) == 1 + only_key = list(metadata_for_revisions.keys())[0] + assert only_key.startswith("0:") + only_revision = list(metadata_for_revisions.values())[0] + assert only_revision + return only_revision diff --git a/lib/tool_shed/test/functional/test_shed_tools.py b/lib/tool_shed/test/functional/test_shed_tools.py index 103cfeecfbeb..8d91ce088b73 100644 --- a/lib/tool_shed/test/functional/test_shed_tools.py +++ b/lib/tool_shed/test/functional/test_shed_tools.py @@ -1,4 +1,12 @@ -from ..base.api import ShedApiTestCase +from tool_shed_client.schema.trs import ( + Tool, + ToolClass, +) +from tool_shed_client.trs_util import encode_identifier +from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) class TestShedToolsApi(ShedApiTestCase): @@ -32,3 +40,31 @@ def test_tool_search(self): # but if this tool has been installed a bunch by other tests - it might not be. tool_search_hit = response.find_search_hit(repository) assert tool_search_hit + + @skip_if_api_v1 + def test_trs_service_info(self): + service_info = self.api_interactor.get("ga4gh/trs/v2/service-info") + service_info.raise_for_status() + + @skip_if_api_v1 + def test_trs_tool_classes(self): + classes_response = self.api_interactor.get("ga4gh/trs/v2/toolClasses") + classes_response.raise_for_status() + classes = classes_response.json() + assert isinstance(classes, list) + assert len(classes) == 1 + class0 = classes[0] + assert ToolClass(**class0) + + @skip_if_api_v1 + def test_trs_tool_list(self): + populator = self.populator + repository = populator.setup_column_maker_repo(prefix="toolstrsindex") + tool_id = populator.tool_guid(self, repository, "Add_a_column1") + tool_shed_base, encoded_tool_id = encode_identifier(tool_id) + print(encoded_tool_id) + url = f"ga4gh/trs/v2/tools/{encoded_tool_id}" + print(url) + tool_response = self.api_interactor.get(url) + tool_response.raise_for_status() + assert Tool(**tool_response.json()) diff --git a/lib/tool_shed/test/functional/test_shed_users.py b/lib/tool_shed/test/functional/test_shed_users.py index fdb8c21373a6..0fa6fce83e27 100644 --- a/lib/tool_shed/test/functional/test_shed_users.py +++ b/lib/tool_shed/test/functional/test_shed_users.py @@ -8,11 +8,14 @@ User, ) from ..base.api import ( + ShedApiTestCase, + skip_if_api_v1, +) +from ..base.api_util import ( email_to_username, ensure_user_with_email, - ShedApiTestCase, + get_admin_api_key, ) -from ..base.api_util import get_admin_api_key class TestShedUsersApi(ShedApiTestCase): @@ -77,5 +80,24 @@ def test_simple_index_and_user(self): assert show_response.json()["username"] == username assert show_response.json()["id"] == user_id - def _verify_username_password(self, email, password): - self._api_key(email, password) + @skip_if_api_v1 + def test_api_key_endpoints(self): + email = "testindexapi@bx.psu.edu" + password = "mycoolpassword123" + ensure_user_with_email(self.admin_api_interactor, email, password) + 
api_key = self._verify_username_password(email, password) + second_try_api_key = self._verify_username_password(email, password) + assert api_key == second_try_api_key + + user_populator = self.populator_for_key(api_key) + user_populator.delete_api_key() + new_api_key = self._verify_username_password(email, password) + assert api_key != new_api_key + + user_populator = self.populator_for_key(new_api_key) + another_new_api_key = user_populator.create_new_api_key() + assert new_api_key != another_new_api_key + assert new_api_key != api_key + + def _verify_username_password(self, email: str, password: str) -> str: + return self.api_interactor.create_api_key(email, password) diff --git a/lib/tool_shed/test/test_data/bismark/bismark.tar b/lib/tool_shed/test/test_data/bismark/bismark.tar deleted file mode 100644 index e24183c72963..000000000000 Binary files a/lib/tool_shed/test/test_data/bismark/bismark.tar and /dev/null differ diff --git a/lib/tool_shed/test/test_data/emboss/0470_files/emboss_complex_dependency.tar b/lib/tool_shed/test/test_data/emboss/0470_files/emboss_complex_dependency.tar deleted file mode 100644 index 27189a12120e..000000000000 Binary files a/lib/tool_shed/test/test_data/emboss/0470_files/emboss_complex_dependency.tar and /dev/null differ diff --git a/lib/tool_shed/test/test_data/0480_files/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/0480/0/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/0480_files/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/0480/0/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie2_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie2_wrapper.xml new file mode 100644 index 000000000000..68238f4ddd85 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie2_wrapper.xml @@ -0,0 +1,616 @@ + + + bisulfite mapper (bowtie2) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + --bowtie2 + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. 
+ #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. + ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). 
+Each of these reads are then aligned to bisulfite treated forward strand index of a reference genome +(C->T converted) and a bisulfite treated reverse strand index of the genome (G->A conversion of the +forward strand, by doing this alignments will produce the same positions). These 4 instances of Bowtie (1 or 2) +are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original +sequence from the genome and determine if there were any protected C's present or not. + +.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel, the 4 strand mode can be +re-enabled by using non_directional mode. + +It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2. + +------ + +**Know what you are doing** + +.. class:: warningmark + +There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words = running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy. + + .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +------ + +**Input formats** + +Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files. + +------ + +**A Note on Built-in Reference Genomes** + +The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY. + +------ + +The final output of Bismark is in SAM format by default. + +**Outputs** + +The output is in SAM format, and has the following columns:: + + Column Description + -------- -------------------------------------------------------- + 1 QNAME seq-ID + 2 FLAG this flag tries to take the strand a bisulfite read + originated from into account + (this is different from ordinary DNA alignment flags!) + 3 RNAME chromosome + 4 POS start position + 5 MAPQ always 255 + 6 CIGAR extended CIGAR string + 7 MRNM Mate Reference sequence NaMe ('=' if same as RNAME) + 8 MPOS 1-based Mate POSition + 9 ISIZE Inferred insert SIZE + 10 SEQ query SEQuence on the same strand as the reference + 11 QUAL Phred33 scale + 12 NM-tag edit distance to the reference) + 13 XX-tag base-by-base mismatches to the reference. + This does not include indels. + 14 XM-tag methylation call string + 15 XR-tag read conversion state for the alignment + 16 XG-tag genome conversion state for the alignment + + +Each read of paired-end alignments is written out in a separate line in the above format. 
+ + +It looks like this (scroll sideways to see the entire example):: + + QNAME FLAG RNAME POS MAPQ CIAGR MRNM MPOS ISIZE SEQ QUAL OPT + HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh + HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh + +------- + +**Bismark settings** + +All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin. + +------ + +**Bismark parameter list** + +This is an exhaustive list of Bismark options: + +------ + +**OPTIONS** + + +Input:: + + --singles A comma- or space-separated list of files containing the reads to be aligned (e.g. + lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will + produce one mapping result and one report file per input file. + + -1 mates1 Comma-separated list of files containing the #1 mates (filename usually includes + "_1"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates2. + Reads may be a mix of different lengths. Bismark will produce one mapping result + and one report file per paired-end input file pair. + + -2 mates2 Comma-separated list of files containing the #2 mates (filename usually includes + "_2"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates1. + Reads may be a mix of different lengths. + + -q/--fastq The query input files (specified as mate1,mate2 or singles are FASTQ + files (usually having extension .fg or .fastq). This is the default. See also + --solexa-quals. + + -f/--fasta The query input files (specified as mate1,mate2 or singles are FASTA + files (usually havin extension .fa, .mfa, .fna or similar). All quality values + are assumed to be 40 on the Phred scale. + + -s/--skip INT Skip (i.e. do not align) the first INT reads or read pairs from the input. + + -u/--upto INT Only aligns the first INT reads or read pairs from the input. Default: no limit. + + --phred33-quals FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on. + + --phred64-quals FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off. + + --solexa-quals Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled + (which can't). The formula for conversion is: + phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This + is usually the right option for use with (unconverted) reads emitted by the GA + Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off. + + --solexa1.3-quals Same as --phred64-quals. This is usually the right option for use with (unconverted) + reads emitted by GA Pipeline version 1.3 or later. Default: off. + + +Alignment:: + + -n/--seedmms INT The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs + of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the + default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N). + + -l/--seedlen The "seed length"; i.e., the number of bases of the high quality end of the read to + which the -n ceiling applies. 
The default is 28. Bowtie (and thus Bismark) is faster for + larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L). + + -e/--maqerr INT Maximum permitted total of quality values at all mismatched read positions throughout + the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds + quality values to the nearest 10 and saturates at 30. This value is not relevant for + Bowtie 2. + + --chunkmbs INT The number of megabytes of memory a given thread is given to store path descriptors in + --best mode. Best-first search must keep track of many paths at once to ensure it is + always extending the path with the lowest cumulative cost. Bowtie tries to minimize the + memory impact of the descriptors, but they can still grow very large in some cases. If + you receive an error message saying that chunk memory has been exhausted in --best mode, + try adjusting this parameter up to dedicate more memory to the descriptors. This value + is not relevant for Bowtie 2. Default: 512. + + -I/--minins INT The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and + a paired-end alignment consists of two 20-bp alignments in the appropriate orientation + with a 20-bp gap between them, that alignment is considered valid (as long as -X is also + satisfied). A 19-bp gap would not be valid in that case. Default: 0. + + -X/--maxins INT The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and + a paired-end alignment consists of two 20-bp alignments in the proper orientation with a + 60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied). + A 61-bp gap would not be valid in that case. Default: 500. + + + +Output:: + + --non_directional The sequencing library was constructed in a non strand-specific manner, alignments to all four + bisulfite strands will be reported. Default: OFF. + + (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary + to the original strands are merely theoretical and should not exist in reality. Specifying directional + alignments (which is the default) will only run 2 alignment threads to the original top (OT) + or bottom (OB) strands in parallel and report these alignments. This is the recommended option + for sprand-specific libraries). + + --sam-no-hd Suppress SAM header lines (starting with @). This might be useful when very large input files are + split up into several smaller files to run concurrently and the output files are to be merged. + + --quiet Print nothing besides alignments. + + --vanilla Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead + of SAM format output. + + -un/--unmapped Write all reads that could not be aligned to a file in the output directory. Written reads will + appear as they did in the input, without any translation of quality values that may have + taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1 + and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and unmapped_reads_2.txt. Reads + with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping) + are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well. 
+ + --ambiguous Write all reads which produce more than one valid alignment with the same number of lowest + mismatches or other reads that fail to align uniquely to a file in the output directory. + Written reads will appear as they did in the input, without any of the translation of quality + values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two + parallel files with _1 and _2 inserted in theit filenames, i.e. _ambiguous_reads_1.txt and + _ambiguous_reads_2.txt. These reads are not written to the file specified with --un. + + -o/--output_dir DIR Write all output files into this directory. By default the output files will be written into + the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt + to create it first. The path to the output folder can be either relative or absolute. + + --temp_dir DIR Write temporary files to this directory instead of into the same directory as the input files. If + the specified folder does not exist, Bismark will attempt to create it first. The path to the + temporary folder can be either relative or absolute. + +------ + +Bowtie 2 alignment options:: + + -N INT Sets the number of mismatches to allowed in a seed alignment during multiseed alignment. + Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower) + but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for + Bowtie 1 see -n). + + -L INT Sets the length of the seed substrings to align during multiseed alignment. Smaller values + make alignment slower but more senstive. Default: the --sensitive preset of Bowtie 2 is + used by default, which sets -L to 20. This option is only available for Bowtie 2 (for + Bowtie 1 see -l). + + --ignore-quals When calculating a mismatch penalty, always consider the quality value at the mismatched + position to be the highest possible, regardless of the actual value. I.e. input is treated + as though all quality values are high. This is also the default behavior when the input + doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default. + + +Bowtie 2 paired-end options:: + + --no-mixed This option disables Bowtie 2's behavior to try to find alignments for the individual mates if + it cannot find a concordant or discordant alignment for a pair. This option is invariable and + and on by default. + + --no-discordant Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments. + A discordant alignment is an alignment where both mates align uniquely, but that does not + satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior + and it is on by default. + + +Bowtie 2 effort options:: + + -D INT Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using + the alignments found so far. A seed extension "fails" if it does not yield a new best or a + new second-best alignment. Default: 15. + + -R INT INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds. + When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of + mismatches allowed) at different offsets and searches for more alignments. A read is considered + to have repetitive seeds if the total number of seed hits divided by the number of seeds + that aligned at least once is greater than 300. Default: 2. 
+ + +Bowtie 2 Scoring options:: + + --score_min "func" Sets a function governing the minimum alignment score needed for an alignment to be considered + "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying + L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length. + See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is + L,0,-0.2. + + +Bowtie 2 Reporting options:: + + --most_valid_alignments INT This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is + deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the + default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the + effort expended to find valid alignments. + + For reference, this used to be the old (now deprecated) description of -M: + Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it + can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever + happens first. Only the best alignment is reported. Information from the other alignments is used to + estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes + Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that + aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not + guarantee that the alignment reported is the best possible in terms of alignment score. -M is + always used and its default value is set to 10. + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml new file mode 100644 index 000000000000..6e4e4def6200 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_bowtie_wrapper.xml @@ -0,0 +1,614 @@ + + + bisulfite mapper (bowtie) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. 
+ #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. + ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). 
+Each of these reads is then aligned to a bisulfite-treated forward strand index of a reference genome
+(C->T converted) and a bisulfite-treated reverse strand index of the genome (G->A conversion of the
+forward strand; by doing this, alignments will produce the same positions). These 4 instances of Bowtie (1 or 2)
+are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original
+sequence from the genome and determine if there were any protected C's present or not.
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel; the 4 strand mode can be
+re-enabled by using the --non_directional mode.
+
+It is developed by Krueger F and Andrews SR at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+------
+
+**Know what you are doing**
+
+.. class:: warningmark
+
+There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words, running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy.
+
+ .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+------
+
+**Input formats**
+
+Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files.
+
+------
+
+**A Note on Built-in Reference Genomes**
+
+The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example, a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY.
+
+------
+
+The final output of Bismark is in SAM format by default.
+
+**Outputs**
+
+The output is in SAM format, and has the following columns::
+
+ Column Description
+ -------- --------------------------------------------------------
+ 1 QNAME seq-ID
+ 2 FLAG this flag tries to take the strand a bisulfite read
+ originated from into account
+ (this is different from ordinary DNA alignment flags!)
+ 3 RNAME chromosome
+ 4 POS start position
+ 5 MAPQ always 255
+ 6 CIGAR extended CIGAR string
+ 7 MRNM Mate Reference sequence NaMe ('=' if same as RNAME)
+ 8 MPOS 1-based Mate POSition
+ 9 ISIZE Inferred insert SIZE
+ 10 SEQ query SEQuence on the same strand as the reference
+ 11 QUAL Phred33 scale
+ 12 NM-tag edit distance to the reference
+ 13 XX-tag base-by-base mismatches to the reference.
+ This does not include indels.
+ 14 XM-tag methylation call string
+ 15 XR-tag read conversion state for the alignment
+ 16 XG-tag genome conversion state for the alignment
+
+
+Each read of paired-end alignments is written out in a separate line in the above format.
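+
+The XM-tag (column 14) holds the per-base methylation call string; as the methylation extractor
+help further below documents, 'Z'/'z' mark methylated/unmethylated cytosines in CpG context. A
+minimal sketch of tallying CpG calls from that tag (Python; illustrative only, not part of
+Bismark itself)::
+
+    def count_cpg_calls(xm_string):
+        # 'Z' = methylated CpG call, 'z' = unmethylated CpG call
+        return xm_string.count("Z"), xm_string.count("z")
+
+    # count_cpg_calls("..z..Z....x..") -> (1, 1)
+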
+
+
+It looks like this (scroll sideways to see the entire example)::
+
+ QNAME FLAG RNAME POS MAPQ CIGAR MRNM MPOS ISIZE SEQ QUAL OPT
+ HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh
+ HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Bismark parameter list**
+
+This is an exhaustive list of Bismark options:
+
+------
+
+**OPTIONS**
+
+
+Input::
+
+ --singles A comma- or space-separated list of files containing the reads to be aligned (e.g.
+ lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will
+ produce one mapping result and one report file per input file.
+
+ -1 mates1 Comma-separated list of files containing the #1 mates (filename usually includes
+ "_1"), e.g. flyA_1.fq,flyB_1.fq. Sequences specified with this option must
+ correspond file-for-file and read-for-read with those specified in mates2.
+ Reads may be a mix of different lengths. Bismark will produce one mapping result
+ and one report file per paired-end input file pair.
+
+ -2 mates2 Comma-separated list of files containing the #2 mates (filename usually includes
+ "_2"), e.g. flyA_2.fq,flyB_2.fq. Sequences specified with this option must
+ correspond file-for-file and read-for-read with those specified in mates1.
+ Reads may be a mix of different lengths.
+
+ -q/--fastq The query input files (specified as mate1,mate2 or singles) are FASTQ
+ files (usually having extension .fq or .fastq). This is the default. See also
+ --solexa-quals.
+
+ -f/--fasta The query input files (specified as mate1,mate2 or singles) are FASTA
+ files (usually having extension .fa, .mfa, .fna or similar). All quality values
+ are assumed to be 40 on the Phred scale.
+
+ -s/--skip INT Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+ -u/--upto INT Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+ --phred33-quals FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+ --phred64-quals FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+ --solexa-quals Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+ (which can't). The formula for conversion is:
+ phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+ is usually the right option for use with (unconverted) reads emitted by the GA
+ Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+ --solexa1.3-quals Same as --phred64-quals. This is usually the right option for use with (unconverted)
+ reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+ -n/--seedmms INT The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+ of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+ default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+ -l/--seedlen The "seed length"; i.e., the number of bases of the high quality end of the read to
+ which the -n ceiling applies.
The default is 28. Bowtie (and thus Bismark) is faster for
+ larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+ -e/--maqerr INT Maximum permitted total of quality values at all mismatched read positions throughout
+ the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+ quality values to the nearest 10 and saturates at 30. This value is not relevant for
+ Bowtie 2.
+
+ --chunkmbs INT The number of megabytes of memory a given thread is given to store path descriptors in
+ --best mode. Best-first search must keep track of many paths at once to ensure it is
+ always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+ memory impact of the descriptors, but they can still grow very large in some cases. If
+ you receive an error message saying that chunk memory has been exhausted in --best mode,
+ try adjusting this parameter up to dedicate more memory to the descriptors. This value
+ is not relevant for Bowtie 2. Default: 512.
+
+ -I/--minins INT The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+ a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+ with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+ satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+ -X/--maxins INT The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+ a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+ 60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+ A 61-bp gap would not be valid in that case. Default: 500.
+
+
+
+Output::
+
+ --non_directional The sequencing library was constructed in a non strand-specific manner; alignments to all four
+ bisulfite strands will be reported. Default: OFF.
+
+ (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+ to the original strands are merely theoretical and should not exist in reality. Specifying directional
+ alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+ or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+ for strand-specific libraries).
+
+ --sam-no-hd Suppress SAM header lines (starting with @). This might be useful when very large input files are
+ split up into several smaller files to run concurrently and the output files are to be merged.
+
+ --quiet Print nothing besides alignments.
+
+ --vanilla Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+ of SAM format output.
+
+ -un/--unmapped Write all reads that could not be aligned to a file in the output directory. Written reads will
+ appear as they did in the input, without any translation of quality values that may have
+ taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+ and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+ with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+ are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+ --ambiguous Write all reads which produce more than one valid alignment with the same number of lowest
+ mismatches or other reads that fail to align uniquely to a file in the output directory.
+ Written reads will appear as they did in the input, without any of the translation of quality
+ values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+ parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+ _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+ -o/--output_dir DIR Write all output files into this directory. By default the output files will be written into
+ the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+ to create it first. The path to the output folder can be either relative or absolute.
+
+ --temp_dir DIR Write temporary files to this directory instead of into the same directory as the input files. If
+ the specified folder does not exist, Bismark will attempt to create it first. The path to the
+ temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+ -N INT Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+ Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+ but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+ Bowtie 1 see -n).
+
+ -L INT Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+ make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+ used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+ Bowtie 1 see -l).
+
+ --ignore-quals When calculating a mismatch penalty, always consider the quality value at the mismatched
+ position to be the highest possible, regardless of the actual value. I.e. input is treated
+ as though all quality values are high. This is also the default behavior when the input
+ doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+ --no-mixed This option disables Bowtie 2's behavior to try to find alignments for the individual mates if
+ it cannot find a concordant or discordant alignment for a pair. This option is invariable
+ and on by default.
+
+ --no-discordant Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+ A discordant alignment is an alignment where both mates align uniquely, but that does not
+ satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+ and it is on by default.
+
+
+Bowtie 2 effort options::
+
+ -D INT Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+ the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+ new second-best alignment. Default: 15.
+
+ -R INT INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+ When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+ mismatches allowed) at different offsets and searches for more alignments. A read is considered
+ to have repetitive seeds if the total number of seed hits divided by the number of seeds
+ that aligned at least once is greater than 300. Default: 2.
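+
+The -I/--minins and -X/--maxins constraints described under the alignment options above reduce
+to a simple range check on the observed insert size. A minimal sketch (Python; the helper is
+ours for illustration, not part of Bismark)::
+
+    def insert_size_ok(insert_size, minins=0, maxins=500):
+        # Defaults mirror the documented -I/-X defaults.
+        return minins <= insert_size <= maxins
+
+    # With -I 60, two 20-bp mates separated by a 20-bp gap span 60 bp -> valid,
+    # while a 19-bp gap spans only 59 bp -> invalid.
+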
+
+
+Bowtie 2 Scoring options::
+
+ --score_min "func" Sets a function governing the minimum alignment score needed for an alignment to be considered
+ "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+ L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+ See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+ L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+ --most_valid_alignments INT This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+ deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+ default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+ effort expended to find valid alignments.
+
+ For reference, this used to be the old (now deprecated) description of -M:
+ Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+ can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+ happens first. Only the best alignment is reported. Information from the other alignments is used to
+ estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+ Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+ aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+ guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+ always used and its default value is set to 10.
+
+
+
diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation
new file mode 100755
index 000000000000..1895a296632c
--- /dev/null
+++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_genome_preparation
@@ -0,0 +1,492 @@
+#!/usr/bin/perl --
+use strict;
+use warnings;
+use Cwd;
+use File::Path qw(rmtree);
+$|++;
+
+
+## This program is Copyright (C) 2010-12, Felix Krueger (felix.krueger@bbsrc.ac.uk)
+
+## This program is free software: you can redistribute it and/or modify
+## it under the terms of the GNU General Public License as published by
+## the Free Software Foundation, either version 3 of the License, or
+## (at your option) any later version.
+
+## This program is distributed in the hope that it will be useful,
+## but WITHOUT ANY WARRANTY; without even the implied warranty of
+## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+## GNU General Public License for more details.
+
+## You should have received a copy of the GNU General Public License
+## along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+use Getopt::Long;
+use Cwd;
+
+my $verbose;
+my $help;
+my $version;
+my $man;
+my $path_to_bowtie;
+my $multi_fasta;
+my $single_fasta;
+my $bowtie2;
+
+my $bismark_version = 'v0.7.7';
+
+GetOptions ('verbose' => \$verbose,
+ 'help' => \$help,
+ 'man' => \$man,
+ 'version' => \$version,
+ 'path_to_bowtie:s' => \$path_to_bowtie,
+ 'single_fasta' => \$single_fasta,
+ 'bowtie2' => \$bowtie2,
+ );
+
+my $genome_folder = shift @ARGV; # mandatory
+my $CT_dir;
+my $GA_dir;
+
+if ($help or $man){
+ print_helpfile();
+ exit;
+}
+
+if ($version){
+ print << "VERSION";
+
+ Bismark - Bisulfite Mapper and Methylation Caller.
+ + Bismark Genome Preparation Version: $bismark_version + Copyright 2010-12 Felix Krueger, Babraham Bioinformatics + www.bioinformatics.babraham.ac.uk/projects/ + +VERSION + exit; +} + +if ($single_fasta){ + print "Writing individual genomes out into single-entry fasta files (one per chromosome)\n\n"; + $multi_fasta = 0; +} +else{ + print "Writing bisulfite genomes out into a single MFA (multi FastA) file\n\n"; + $single_fasta = 0; + $multi_fasta = 1; +} + +my @filenames = create_bisulfite_genome_folders(); + +process_sequence_files (); + +launch_bowtie_indexer(); + +sub launch_bowtie_indexer{ + if ($bowtie2){ + print "Bismark Genome Preparation - Step III: Launching the Bowtie 2 indexer\n"; + } + else{ + print "Bismark Genome Preparation - Step III: Launching the Bowtie (1) indexer\n"; + } + print "Please be aware that this process can - depending on genome size - take up to several hours!\n"; + sleep(5); + + ### if the path to bowtie was specfified explicitely + if ($path_to_bowtie){ + if ($bowtie2){ + $path_to_bowtie =~ s/$/bowtie2-build/; + } + else{ + $path_to_bowtie =~ s/$/bowtie-build/; + } + } + ### otherwise we assume that bowtie-build is in the path + else{ + if ($bowtie2){ + $path_to_bowtie = 'bowtie2-build'; + } + else{ + $path_to_bowtie = 'bowtie-build'; + } + } + + $verbose and print "\n"; + + ### Forking the program to run 2 instances of Bowtie-build or Bowtie2-build (= the Bowtie (1/2) indexer) + my $pid = fork(); + + # parent process + if ($pid){ + sleep(1); + chdir $CT_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of CT converted genome in $CT_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Parent process: Starting to index C->T converted genome with the following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_CT\n\n"; + + sleep (11); + exec ("$path_to_bowtie","-f","$file_list","BS_CT"); + } + + # child process + elsif ($pid == 0){ + sleep(2); + chdir $GA_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of GA converted genome in $GA_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Child process: Starting to index G->A converted genome with the following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_GA\n\n"; + $verbose and print "(starting in 10 seconds)\n"; + sleep(10); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } + + # if the platform doesn't support the fork command we will run the indexing processes one after the other + else{ + print "Forking process was not successful, therefore performing the indexing sequentially instead\n"; + sleep(10); + + ### moving to CT genome folder + $verbose and warn "Preparing to index CT converted genome in $CT_dir\n"; + chdir $CT_dir or die "Unable to change directory: $!\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + system ("$path_to_bowtie","-f","$file_list","BS_CT"); + @fasta_files=(); + $file_list= ''; + + ### moving to GA genome folder + $verbose and warn "Preparing to index GA converted genome in $GA_dir\n"; + chdir $GA_dir or die "Unable to change directory: $!\n"; + @fasta_files = <*.fa>; + $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } +} + + +sub process_sequence_files { + + my 
($total_CT_conversions,$total_GA_conversions) = (0,0);
+
+ $verbose and print "Bismark Genome Preparation - Step II: Bisulfite converting reference genome\n\n";
+ sleep (3);
+
+ $verbose and print "conversions performed:\n";
+ $verbose and print join("\t",'chromosome','C->T','G->A'),"\n";
+
+
+ ### If someone wants to index a genome which consists of thousands of contig and scaffold files we need to write the genome conversions into an MFA file
+ ### Otherwise the list of comma separated chromosomes we provide for bowtie-build will get too long for the kernel to handle
+ ### This is now the default option
+
+ if ($multi_fasta){
+ ### Here we just use one multi FastA file name, append .CT_conversion or .GA_conversion and print all sequence conversions into these files
+ my $bisulfite_CT_conversion_filename = "$CT_dir/genome_mfa.CT_conversion.fa";
+ open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+ my $bisulfite_GA_conversion_filename = "$GA_dir/genome_mfa.GA_conversion.fa";
+ open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+ }
+
+ foreach my $filename(@filenames){
+ my ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+ open (IN,$filename) or die "Failed to read from sequence file $filename $!\n";
+ # warn "Reading chromosome information from $filename\n\n";
+
+ ### first line needs to be a fastA header
+ my $first_line = <IN>;
+ chomp $first_line;
+
+ ### Extracting chromosome name from the FastA header
+ my $chromosome_name = extract_chromosome_name($first_line);
+
+ ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+ unless ($multi_fasta){
+ my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name";
+ $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/;
+ open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n";
+
+ my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name";
+ $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/;
+ open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n";
+ }
+
+ print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; # first entry
+ print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; # first entry
+
+
+ while (<IN>){
+
+ ### in case the line is a new fastA header
+ if ($_ =~ /^>/){
+ ### printing out the stats for the previous chromosome
+ $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n";
+ ### resetting the chromosome transliteration counters
+ ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0);
+
+ ### Extracting chromosome name from the additional FastA header
+ $chromosome_name = extract_chromosome_name($_);
+
+ ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes.
+ unless ($multi_fasta){ + my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name"; + $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/; + open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n"; + + my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name"; + $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/; + open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n"; + } + + print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; + print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; + } + + else{ + my $sequence = uc$_; + + ### (I) First replacing all ambiguous sequence characters (such as M,S,R....) by N (G,A,T,C,N and the line endings \r and \n are added to a character group) + + $sequence =~ s/[^ATCGN\n\r]/N/g; + + ### (II) Writing the chromosome out into a C->T converted version (equals forward strand conversion) + + my $CT_sequence = $sequence; + my $CT_transliterations_performed = ($CT_sequence =~ tr/C/T/); # converts all Cs into Ts + $total_CT_conversions += $CT_transliterations_performed; + $chromosome_CT_conversions += $CT_transliterations_performed; + + print CT_CONVERT $CT_sequence; + + ### (III) Writing the chromosome out in a G->A converted version of the forward strand (this is equivalent to reverse- + ### complementing the forward strand and then C->T converting it) + + my $GA_sequence = $sequence; + my $GA_transliterations_performed = ($GA_sequence =~ tr/G/A/); # converts all Gs to As on the forward strand + $total_GA_conversions += $GA_transliterations_performed; + $chromosome_GA_conversions += $GA_transliterations_performed; + + print GA_CONVERT $GA_sequence; + + } + } + $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n"; + } + close (CT_CONVERT) or die "Failed to close filehandle: $!\n"; + close (GA_CONVERT) or die "Failed to close filehandle: $!\n"; + + + print "\nTotal number of conversions performed:\n"; + print "C->T:\t$total_CT_conversions\n"; + print "G->A:\t$total_GA_conversions\n"; + + warn "\nStep II - Genome bisulfite conversions - completed\n\n\n"; +} + +sub extract_chromosome_name { + + my $header = shift; + + ## Bowtie extracts the first string after the initial > in the FASTA file, so we are doing this as well + + if ($header =~ s/^>//){ + my ($chromosome_name) = split (/\s+/,$header); + return $chromosome_name; + } + else{ + die "The specified chromosome file doesn't seem to be in FASTA format as required! $!\n"; + } +} + +sub create_bisulfite_genome_folders{ + + $verbose and print "Bismark Genome Preparation - Step I: Preparing folders\n\n"; + + # Ensuring a genome folder has been specified + if ($genome_folder){ + unless ($genome_folder =~ /\/$/){ + $genome_folder =~ s/$/\//; + } + $verbose and print "Path to genome folder specified: $genome_folder\n"; + chdir $genome_folder or die "Could't move to directory $genome_folder. Make sure the directory exists! 
$!";
+
+ # making the genome folder path absolute so it won't break if the path was specified relative
+ $genome_folder = getcwd;
+ unless ($genome_folder =~ /\/$/){
+ $genome_folder =~ s/$/\//;
+ }
+ }
+
+ else{
+ $verbose and print "Genome folder was not provided as argument ";
+ while (1){
+ print "Please specify a genome folder to be bisulfite converted:\n";
+ $genome_folder = <STDIN>;
+ chomp $genome_folder;
+
+ # adding a trailing slash unless already present
+ unless ($genome_folder =~ /\/$/){
+ $genome_folder =~ s/$/\//;
+ }
+ if (chdir $genome_folder){
+ last;
+ }
+ else{
+ warn "Couldn't move to directory $genome_folder! $!";
+ }
+ }
+ }
+
+ if ($path_to_bowtie){
+ unless ($path_to_bowtie =~ /\/$/){
+ $path_to_bowtie =~ s/$/\//;
+ }
+ if (chdir $path_to_bowtie){
+ if ($bowtie2){
+ $verbose and print "Path to Bowtie 2 specified: $path_to_bowtie\n";
+ }
+ else{
+ $verbose and print "Path to Bowtie (1) specified: $path_to_bowtie\n";
+ }
+ }
+ else{
+ die "There was an error with the path to bowtie: $!\n";
+ }
+ }
+
+ chdir $genome_folder or die "Couldn't move to directory $genome_folder. Make sure the directory exists! $!";
+
+
+ # Exiting unless there are fastA files in the folder
+ my @filenames = <*.fa>;
+
+ ### if there aren't any genomic files with the extension .fa we will look for files with the extension .fasta
+ unless (@filenames){
+ @filenames = <*.fasta>;
+ }
+
+ unless (@filenames){
+ die "The specified genome folder $genome_folder does not contain any sequence files in FastA format (with .fa or .fasta file extensions)\n";
+ }
+
+ warn "Bisulfite Genome Indexer version $bismark_version (last modified 17 Nov 2011)\n\n";
+ sleep (3);
+
+ # creating a directory inside the genome folder to store the bisulfite genomes unless it already exists
+ my $bisulfite_dir = "${genome_folder}Bisulfite_Genome/";
+ unless (-d $bisulfite_dir){
+ mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+ $verbose and print "Created Bisulfite Genome folder $bisulfite_dir\n";
+ }
+ else{
+ while (1){
+ print "\nA directory called $bisulfite_dir already exists.
Bisulfite converted sequences and/or already existing Bowtie (1 or 2) indexes might be overwritten!\nDo you want to continue anyway?\t";
+ my $proceed = <STDIN>;
+ chomp $proceed;
+ if ($proceed =~ /^y/i ){
+ last;
+ }
+ elsif ($proceed =~ /^n/i){
+ die "Terminated by user\n\n";
+ }
+ }
+ }
+
+ ### as of version 0.6.0 the Bismark indexer will no longer delete the Bisulfite_Genome directory if it was present already, since it could store the Bowtie 1 or 2 indexes already
+ # removing any existing files and subfolders in the bisulfite directory (the specified directory won't be deleted)
+ # rmtree($bisulfite_dir, {verbose => 1,keep_root => 1});
+ # unless (-d $bisulfite_dir){ # had to add this after changing remove_tree to rmtree // suggested by Samantha Cooper @ Illumina
+ # mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n";
+ # }
+ # }
+
+ chdir $bisulfite_dir or die "Unable to move to $bisulfite_dir\n";
+ $CT_dir = "${bisulfite_dir}CT_conversion/";
+ $GA_dir = "${bisulfite_dir}GA_conversion/";
+
+ # creating 2 subdirectories to store a C->T (forward strand conversion) and a G->A (reverse strand conversion)
+ # converted version of the genome
+ unless (-d $CT_dir){
+ mkdir $CT_dir or die "Unable to create directory $CT_dir $!\n";
+ $verbose and print "Created Bisulfite Genome folder $CT_dir\n";
+ }
+ unless (-d $GA_dir){
+ mkdir $GA_dir or die "Unable to create directory $GA_dir $!\n";
+ $verbose and print "Created Bisulfite Genome folder $GA_dir\n";
+ }
+
+ # moving back to the original genome folder
+ chdir $genome_folder or die "Couldn't move to directory $genome_folder $!";
+ # $verbose and print "Moved back to genome folder $genome_folder\n";
+ warn "\nStep I - Prepare genome folders - completed\n\n\n";
+ return @filenames;
+}
+
+sub print_helpfile{
+ print << 'HOW_TO';
+
+
+DESCRIPTION
+
+This script is supposed to convert a specified reference genome into two different bisulfite
+converted versions and index them for alignments with Bowtie 1 (default), or Bowtie 2. The first
+bisulfite genome will have all Cs converted to Ts (C->T), and the other one will have all Gs
+converted to As (G->A). Both bisulfite genomes will be stored in subfolders within the reference
+genome folder. Once the bisulfite conversion has been completed the program will fork and launch
+two simultaneous instances of the bowtie 1 or 2 indexer (bowtie-build or bowtie2-build). Be aware
+that the indexing process can take up to several hours; this will mainly depend on genome size
+and system resources.
+
+
+
+
+The following is a brief description of command line options and arguments to control the
+Bismark Genome Preparation script:
+
+
+USAGE: bismark_genome_preparation [options] <arguments>
+
+
+OPTIONS:
+
+--help/--man Displays this help file and exits.
+
+--version Displays version information and exits.
+
+--verbose Print verbose output for more details or debugging.
+
+--path_to_bowtie The full path to the Bowtie 1 or Bowtie 2 installation on your system. If
+ the path is not provided as an option you will be prompted for it.
+
+--bowtie2 This will create bisulfite indexes for Bowtie 2. (Default: Bowtie 1).
+
+--single_fasta Instruct the Bismark Indexer to write the converted genomes into
+ single-entry FastA files instead of making one multi-FastA file (MFA)
+ per chromosome. This might be useful if individual bisulfite converted
+ chromosomes are needed (e.g.
for debugging), however it can cause a + problem with indexing if the number of chromosomes is vast (this is likely + to be in the range of several thousand files; the operating system can + only handle lists up to a certain length, and some newly assembled + genomes may contain 20000-50000 contigs of scaffold files which do exceed + this list length limit). + + +ARGUMENTS: + + The path to the folder containing the genome to be bisulfite converted. + At the current time Bismark Genome Preparation expects one or more fastA + files in the folder (with the file extension: .fa or .fasta). If the path + is not provided as an argument you will be prompted for it. + + + +This script was last modified on 18 Nov 2011. +HOW_TO +} diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py new file mode 100644 index 000000000000..cb79d1ecf590 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import zipfile +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def zipper(dir, zip_file): + zip = zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED) + root_len = len(os.path.abspath(dir)) + for root, dirs, files in os.walk(dir): + archive_root = os.path.abspath(root)[root_len:] + for f in files: + fullpath = os.path.join(root, f) + archive_name = os.path.join(archive_root, f) + zip.write(fullpath, archive_name, zipfile.ZIP_DEFLATED) + zip.close() + return zip_file + +def __main__(): + #Parse Command Line + parser = argparse.ArgumentParser(description='Wrapper for the bismark methylation caller.') + + # input options + parser.add_argument( '--infile', help='Input file in SAM format.' 
) + parser.add_argument( '--single-end', dest='single_end', action="store_true" ) + parser.add_argument( '--paired-end', dest='paired_end', action="store_true" ) + + parser.add_argument( '--report-file', dest='report_file' ) + parser.add_argument( '--comprehensive', action="store_true" ) + parser.add_argument( '--merge-non-cpg', dest='merge_non_cpg', action="store_true" ) + parser.add_argument( '--no-overlap', dest='no_overlap', action="store_true" ) + parser.add_argument( '--compress' ) + parser.add_argument( '--ignore-bps', dest='ignore_bps', type=int ) + + # OT - original top strand + parser.add_argument( '--cpg_ot' ) + parser.add_argument( '--chg_ot' ) + parser.add_argument( '--chh_ot' ) + # CTOT - complementary to original top strand + parser.add_argument( '--cpg_ctot' ) + parser.add_argument( '--chg_ctot' ) + parser.add_argument( '--chh_ctot' ) + # OB - original bottom strand + parser.add_argument( '--cpg_ob' ) + parser.add_argument( '--chg_ob' ) + parser.add_argument( '--chh_ob' ) + # CTOT - complementary to original bottom strand + parser.add_argument( '--cpg_ctob' ) + parser.add_argument( '--chg_ctob' ) + parser.add_argument( '--chh_ctob' ) + + parser.add_argument( '--cpg_context' ) + parser.add_argument( '--chg_context' ) + parser.add_argument( '--chh_context' ) + + parser.add_argument( '--non_cpg_context' ) + + parser.add_argument( '--non_cpg_context_ot' ) + parser.add_argument( '--non_cpg_context_ctot' ) + parser.add_argument( '--non_cpg_context_ob' ) + parser.add_argument( '--non_cpg_context_ctob' ) + + args = parser.parse_args() + + + # Build methylation extractor command + output_dir = tempfile.mkdtemp() + cmd = 'bismark_methylation_extractor --no_header -o %s %s %s' + + additional_opts = '' + # Set up all options + if args.single_end: + additional_opts += ' --single-end ' + else: + additional_opts += ' --paired-end ' + if args.no_overlap: + additional_opts += ' --no_overlap ' + if args.ignore_bps: + additional_opts += ' --ignore %s ' % args.ignore_bps + if args.comprehensive: + additional_opts += ' --comprehensive ' + if args.merge_non_cpg: + additional_opts += ' --merge_non_CpG ' + if args.report_file: + additional_opts += ' --report ' + + + # Final command: + cmd = cmd % (output_dir, additional_opts, args.infile) + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. 
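+ # Note on the loop above: stderr is accumulated in 1 MiB (buffsize) chunks so
+ # that a very large error stream is not slurped in a single read; the loop
+ # exits once a short read (length not a multiple of buffsize) signals EOF.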
+ except Exception, e: + stop_err( 'Error in bismark methylation extractor:\n' + str( e ) ) + + + # collect and copy output files + + if args.compress: + zipper(output_dir, args.compress) + + + if args.cpg_ot: + shutil.move( glob(os.path.join( output_dir, '*CpG_OT_*'))[0], args.cpg_ot ) + if args.chg_ot: + shutil.move( glob(os.path.join( output_dir, '*CHG_OT_*'))[0], args.chg_ot ) + if args.chh_ot: + shutil.move( glob(os.path.join( output_dir, '*CHH_OT_*'))[0], args.chh_ot ) + if args.cpg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOT_*'))[0], args.cpg_ctot ) + if args.chg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOT_*'))[0], args.chg_ctot ) + if args.chh_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOT_*'))[0], args.chh_ctot ) + if args.cpg_ob: + shutil.move( glob(os.path.join( output_dir, '*CpG_OB_*'))[0], args.cpg_ob ) + if args.chg_ob: + shutil.move( glob(os.path.join( output_dir, '*CHG_OB_*'))[0], args.chg_ob ) + if args.chh_ob: + shutil.move( glob(os.path.join( output_dir, '*CHH_OB_*'))[0], args.chh_ob ) + if args.cpg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOB_*'))[0], args.cpg_ctob ) + if args.chg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOB_*'))[0], args.chg_ctob ) + if args.chh_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOB_*'))[0], args.chh_ctob ) + + # context-dependent methylation output files + if args.cpg_context: + shutil.move( glob(os.path.join( output_dir, '*CpG_context_*'))[0], args.cpg_context ) + if args.chg_context: + shutil.move( glob(os.path.join( output_dir, '*CHG_context_*'))[0], args.chg_context ) + if args.chh_context: + shutil.move( glob(os.path.join( output_dir, '*CHH_context_*'))[0], args.chh_context ) + + if args.non_cpg_context: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_context_*'))[0], args.non_cpg_context ) + + if args.non_cpg_context_ot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OT_*'))[0], args.non_cpg_context_ot ) + if args.non_cpg_context_ctot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOT_*'))[0], args.non_cpg_context_ctot ) + if args.non_cpg_context_ob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OB_*'))[0], args.non_cpg_context_ob ) + if args.non_cpg_context_ctob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOB_*'))[0], args.non_cpg_context_ctob ) + + + + if args.report_file: + shutil.move( glob(os.path.join( output_dir, '*_splitting_report*'))[0], args.report_file ) + + + # Clean up temp dirs + if os.path.exists( output_dir ): + shutil.rmtree( output_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml new file mode 100644 index 000000000000..141ec5805d7f --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_methylation_extractor.xml @@ -0,0 +1,306 @@ + + + methylation extractor + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_methylation_extractor.py + + --infile $input + + --bismark_path \$SCRIPT_PATH + + #if $singlePaired.sPaired == "single": + --single-end + #else: + --paired-end + $no_overlap + #end if + + #if str($ignore_bps) != "0": + --ignore $ignore_bps + #end if + + #if $report: + --report-file $o_report + #end if + + #if $comprehensive: + --comprehensive + #end if + + #if $merge_non_cpg: + --merge-non-cpg + #end if + + #if $compress: + --compress $compressed_output + #else: + #if 
$comprehensive == False and $merge_non_cpg == False: + ##twelfe files + --cpg_ot $cpg_ot + --chg_ot $chg_ot + --chh_ot $chh_ot + --cpg_ctot $cpg_ctot + --chg_ctot $chg_ctot + --chh_ctot $chh_ctot + --cpg_ob $cpg_ob + --chg_ob $chg_ob + --chh_ob $chh_ob + --cpg_ctob $cpg_ctob + --chg_ctob $chg_ctob + --chh_ctob $chh_ctob + #elif $merge_non_cpg and $comprehensive: + ## two files + --non_cpg_context $non_cpg_context + --cpg_context $cpg_context + #elif $comprehensive: + ## three files + --cpg_context $cpg_context + --chg_context $chg_context + --chh_context $chh_context + #elif $merge_non_cpg: + ## eight files + --non_cpg_context_ctot $non_cpg_context_ctot + --non_cpg_context_ot $non_cpg_context_ot + --non_cpg_context_ob $non_cpg_context_ob + --non_cpg_context_ctob $non_cpg_context_ctob + --cpg_ot $cpg_ot + --cpg_ctot $cpg_ctot + --cpg_ob $cpg_ob + --cpg_ctob $cpg_ctob + #end if + ## end compress + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + ( report is True ) + + + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + ( compress == False and comprehensive == False and merge_non_CpG == False) + + + + + ( compress == False and comprehensive) + + + ( compress == False and comprehensive and merge_non_CpG == False) + + + ( compress == False and comprehensive and merge_non_CpG == False) + + + + ( compress == False and comprehensive and merge_non_cpg) + + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + ( compress == False and comprehensive == False and merge_non_cpg) + + + + ( compress ) + + + + + + + + +**What it does** + +The following is a brief description of all options to control the Bismark_ +methylation extractor. The script reads in a bisulfite read alignment results file +produced by the Bismark bisulfite mapper and extracts the methylation information +for individual cytosines. This information is found in the methylation call field +which can contain the following characters: + + + - X = for methylated C in CHG context (was protected) + - x = for not methylated C CHG (was converted) + - H = for methylated C in CHH context (was protected) + - h = for not methylated C in CHH context (was converted) + - Z = for methylated C in CpG context (was protected) + - z = for not methylated C in CpG context (was converted) + - . = for any bases not involving cytosines + + +The methylation extractor outputs result files for cytosines in CpG, CHG and CHH +context (this distinction is actually already made in Bismark itself). 
As the methylation
+information for every C analysed can produce files which easily have tens or even hundreds of
+millions of lines, file sizes can become very large and more difficult to handle. The C
+methylation information is additionally split up by the four possible strands a given
+bisulfite read aligned against:
+
+ - OT = original top strand
+ - CTOT = complementary to original top strand
+
+ - OB = original bottom strand
+ - CTOB = complementary to original bottom strand
+
+Thus, by default twelve individual output files are generated per input file (unless
+--comprehensive is specified, see below). The output files can be imported into a genome
+viewer, such as SeqMonk, and re-combined into a single data group if desired (in fact
+unless the bisulfite reads were generated preserving directionality it doesn't make any
+sense to look at the data in a strand-specific manner). Strand-specific output files can
+optionally be skipped, in which case only three output files for CpG, CHG or CHH context
+will be generated. For both the strand-specific and comprehensive outputs there is also
+the option to merge both non-CpG contexts (CHG and CHH) into one single non-CpG context.
+
+
+.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/
+
+
+It is developed by Krueger F and Andrews SR at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2.
+
+-------
+
+**Bismark settings**
+
+All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin.
+
+------
+
+**Outputs**
+
+The output files are in the following format (tab delimited)::
+
+
+ Column Description
+ -------- --------------------------------------------------------
+ 1 seq-ID
+ 2 strand
+ 3 chromosome
+ 4 position
+ 5 methylation call
+
+
+ * Methylated cytosines receive a '+' orientation,
+ * Unmethylated cytosines receive a '-' orientation.
+
+------
+
+**OPTIONS**
+
+Input::
+
+ -s/--single-end Input file(s) are Bismark result file(s) generated from single-end
+ read data. Specifying either --single-end or --paired-end is
+ mandatory.
+
+ -p/--paired-end Input file(s) are Bismark result file(s) generated from paired-end
+ read data. Specifying either --paired-end or --single-end is
+ mandatory.
+
+ --no_overlap For paired-end reads it is theoretically possible that read_1 and
+ read_2 overlap. This option avoids scoring overlapping methylation
+ calls twice. Whilst this removes a bias towards more methylation calls
+ towards the center of sequenced fragments, it can de facto remove
+ a good proportion of the data.
+
+ --ignore INT Ignore the first INT bp at the 5' end of each read when processing the
+ methylation call string. This can remove e.g. a restriction enzyme site
+ at the start of each read.
+
+Output::
+
+ --comprehensive Specifying this option will merge all four possible strand-specific
+ methylation info into context-dependent output files. The default
+ contexts are:
+ - CpG context
+ - CHG context
+ - CHH context
+
+ --merge_non_CpG This will produce two output files (in --comprehensive mode) or eight
+ strand-specific output files (default) for Cs in
+ - CpG context
+ - non-CpG context
+
+ --report Prints out a short methylation summary as well as the parameters used to run
+ this script.
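+
+For downstream sanity checks, the tab-delimited output described under **Outputs** above is easy
+to aggregate. A minimal sketch (Python; illustrative only, the helper and its name are ours, not
+part of Bismark)::
+
+    from collections import defaultdict
+
+    def methylation_fraction(path):
+        # Column 2 ("strand") encodes the call orientation:
+        # '+' = methylated cytosine, '-' = unmethylated cytosine.
+        counts = defaultdict(lambda: [0, 0])  # (chromosome, position) -> [methylated, total]
+        with open(path) as handle:
+            for line in handle:
+                seq_id, strand, chrom, pos, call = line.rstrip("\n").split("\t")
+                key = (chrom, int(pos))
+                counts[key][1] += 1
+                if strand == "+":
+                    counts[key][0] += 1
+        return {key: m / float(total) for key, (m, total) in counts.items()}
+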
+ + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py new file mode 100644 index 000000000000..606fa428bd77 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bismark_wrapper.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import fileinput +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def __main__(): + #Parse Command Line + parser = argparse.ArgumentParser(description='Wrapper for the bismark bisulfite mapper.') + parser.add_argument( '-p', '--num-threads', dest='num_threads', + type=int, default=4, help='Use this many threads to align reads. The default is 4.' ) + + parser.add_argument( '--bismark_path', dest='bismark_path', help='Path to the bismark perl scripts' ) + + parser.add_argument( '--bowtie2', action='store_true', default=False, help='Running bismark with bowtie2 and not with bowtie.' ) + + # input options + parser.add_argument( '--own-file', dest='own_file', help='' ) + parser.add_argument( '-D', '--indexes-path', dest='index_path', help='Indexes directory; location of .ebwt and .fa files.' ) + parser.add_argument( '-O', '--output', dest='output' ) + parser.add_argument( '--output-report-file', dest='output_report_file' ) + parser.add_argument( '--suppress-header', dest='suppress_header', action="store_true" ) + + parser.add_argument( '--mate-paired', dest='mate_paired', action='store_true', help='Reads are mate-paired', default=False) + + + parser.add_argument( '-1', '--mate1', dest='mate1', + help='The forward reads file in Sanger FASTQ or FASTA format.' ) + parser.add_argument( '-2', '--mate2', dest='mate2', + help='The reverse reads file in Sanger FASTQ or FASTA format.' ) + + parser.add_argument( '--output-unmapped-reads', dest='output_unmapped_reads', + help='Additional output file with unmapped reads (single-end).' ) + parser.add_argument( '--output-unmapped-reads-l', dest='output_unmapped_reads_l', + help='File name for unmapped reads (left, paired-end).' ) + parser.add_argument( '--output-unmapped-reads-r', dest='output_unmapped_reads_r', + help='File name for unmapped reads (right, paired-end).' ) + + + parser.add_argument( '--output-suppressed-reads', dest='output_suppressed_reads', + help='Additional output file with suppressed reads (single-end).' ) + parser.add_argument( '--output-suppressed-reads-l', dest='output_suppressed_reads_l', + help='File name for suppressed reads (left, paired-end).' ) + parser.add_argument( '--output-suppressed-reads-r', dest='output_suppressed_reads_r', + help='File name for suppressed reads (right, paired-end).' ) + + + parser.add_argument( '--single-paired', dest='single_paired', + help='The single-end reads file in Sanger FASTQ or FASTA format.' 
) + + parser.add_argument( '--fastq', action='store_true', help='Query filetype is in FASTQ format') + parser.add_argument( '--fasta', action='store_true', help='Query filetype is in FASTA format') + parser.add_argument( '--phred64-quals', dest='phred64', action="store_true" ) + + + parser.add_argument( '--skip-reads', dest='skip_reads', type=int ) + parser.add_argument( '--qupto', type=int) + + + # paired end options + parser.add_argument( '-I', '--minins', dest='min_insert' ) + parser.add_argument( '-X', '--maxins', dest='max_insert' ) + parser.add_argument( '--no-mixed', dest='no_mixed', action="store_true" ) + parser.add_argument( '--no-discordant', dest='no_discordant', action="store_true" ) + + #parse general options + # default 20 + parser.add_argument( '--seed-len', dest='seed_len', type=int) + # default 15 + parser.add_argument( '--seed-extention-attempts', dest='seed_extention_attempts', type=int ) + # default 0 + parser.add_argument( '--seed-mismatches', dest='seed_mismatches', type=int ) + # default 2 + parser.add_argument( '--max-reseed', dest='max_reseed', type=int ) + """ + # default 70 + parser.add_argument( '--maqerr', dest='maqerr', type=int ) + """ + + """ + The number of megabytes of memory a given thread is given to store path + descriptors in --best mode. Best-first search must keep track of many paths + at once to ensure it is always extending the path with the lowest cumulative + cost. Bowtie tries to minimize the memory impact of the descriptors, but + they can still grow very large in some cases. If you receive an error message + saying that chunk memory has been exhausted in --best mode, try adjusting + this parameter up to dedicate more memory to the descriptors. Default: 512. + """ + parser.add_argument( '--chunkmbs', type=int, default=512 ) + + args = parser.parse_args() + + # Create bismark index if necessary. + index_dir = "" + if args.own_file: + """ + Create a temporary index with the offered files from the user. + Utilizing the script: bismark_genome_preparation + bismark_genome_preparation --bowtie2 hg19/ + """ + tmp_index_dir = tempfile.mkdtemp() + index_path = os.path.join( tmp_index_dir, '.'.join( os.path.split( args.own_file )[1].split( '.' )[:-1] ) ) + try: + """ + Create a hard link pointing to args.own_file named 'index_path'.fa. 
+ """ + os.symlink( args.own_file, index_path + '.fa' ) + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error in linking the reference database.\n' + str( e ) ) + # bismark_genome_preparation needs the complete path to the folder in which the database is stored + if args.bowtie2: + cmd_index = 'bismark_genome_preparation --bowtie2 %s ' % ( tmp_index_dir ) + else: + cmd_index = 'bismark_genome_preparation %s ' % ( tmp_index_dir ) + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd_index = '%s/%s' % (args.bismark_path, cmd_index) + try: + tmp = tempfile.NamedTemporaryFile( dir=tmp_index_dir ).name + tmp_stderr = open( tmp, 'wb' ) + proc = subprocess.Popen( args=cmd_index, shell=True, cwd=tmp_index_dir, stdout=open(os.devnull, 'wb'), stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error indexing reference sequence\n' + str( e ) ) + index_dir = tmp_index_dir + else: + index_dir = args.index_path + + # Build bismark command + tmp_bismark_dir = tempfile.mkdtemp() + output_dir = os.path.join( tmp_bismark_dir, 'results') + cmd = 'bismark %(args)s --temp_dir %(tmp_bismark_dir)s -o %(output_dir)s --quiet %(genome_folder)s %(reads)s' + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd = '%s/%s' % (args.bismark_path, cmd) + + arguments = { + 'genome_folder': index_dir, + 'args': '', + 'tmp_bismark_dir': tmp_bismark_dir, + 'output_dir': output_dir, + } + + additional_opts = '' + # Set up the reads + if args.mate_paired: + # paired-end reads library + reads = '-1 %s ' % ( args.mate1 ) + reads += ' -2 %s ' % ( args.mate2 ) + additional_opts += ' -I %s -X %s ' % (args.min_insert, args.max_insert) + else: + # single paired reads library + reads = ' %s ' % ( args.single_paired ) + + + if not args.bowtie2: + # use bowtie specific options + additional_opts += ' --best ' + if args.seed_mismatches: + # --seedmms + additional_opts += ' -n %s ' % args.seed_mismatches + if args.seed_len: + # --seedlen + additional_opts += ' -l %s ' % args.seed_len + + # alignment options + if args.bowtie2: + additional_opts += ' -p %s --bowtie2 ' % args.num_threads + if args.seed_mismatches: + additional_opts += ' -N %s ' % args.seed_mismatches + if args.seed_len: + additional_opts += ' -L %s ' % args.seed_len + if args.seed_extention_attempts: + additional_opts += ' -D %s ' % args.seed_extention_attempts + if args.max_reseed: + additional_opts += ' -R %s ' % args.max_reseed + if args.no_discordant: + additional_opts += ' --no-discordant ' + if args.no_mixed: + additional_opts += ' --no-mixed ' + """ + if args.maqerr: + additional_opts += ' --maqerr %s ' % args.maqerr + """ + if args.skip_reads: + additional_opts += ' --skip %s ' % args.skip_reads + if args.qupto: + additional_opts += ' --qupto %s ' % args.qupto + if args.phred64: + additional_opts += ' --phred64-quals ' + if args.suppress_header: + additional_opts += ' --sam-no-hd ' + if args.output_unmapped_reads or ( args.output_unmapped_reads_l and 
args.output_unmapped_reads_r): + additional_opts += ' --un ' + if args.output_suppressed_reads or ( args.output_suppressed_reads_l and args.output_suppressed_reads_r): + additional_opts += ' --ambiguous ' + + arguments.update( {'args': additional_opts, 'reads': reads} ) + + # Final command: + cmd = cmd % arguments + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. + except Exception, e: + stop_err( 'Error in bismark:\n' + str( e ) ) + + + # collect and copy output files + """ + if args.output_report_file: + output_report_file = open(args.output_report_file, 'w+') + for line in fileinput.input(glob( os.path.join( output_dir, '*.txt') )): + output_report_file.write(line) + output_report_file.close() + """ + + if args.output_suppressed_reads: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads.txt'))[0], args.output_suppressed_reads ) + if args.output_suppressed_reads_l: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_1.txt'))[0], args.output_suppressed_reads_l ) + if args.output_suppressed_reads_r: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_2.txt'))[0], args.output_suppressed_reads_r ) + + if args.output_unmapped_reads: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads.txt'))[0], args.output_unmapped_reads ) + if args.output_unmapped_reads_l: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_1.txt'))[0], args.output_unmapped_reads_l ) + if args.output_unmapped_reads_r: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_2.txt'))[0], args.output_unmapped_reads_r ) + + shutil.move( glob( os.path.join( output_dir, '*.sam'))[0] , args.output) + + # Clean up temp dirs + if args.own_file: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + if os.path.exists( tmp_bismark_dir ): + shutil.rmtree( tmp_bismark_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc new file mode 100755 index 000000000000..61663caa7c70 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc @@ -0,0 +1,37 @@ +# bowtie2_indices.loc.sample +# This is a *.loc.sample file distributed with Galaxy that enables tools +# to use a directory of indexed data files. This one is for Bowtie2 and Tophat2. +# See the wiki: http://wiki.galaxyproject.org/Admin/NGS%20Local%20Setup +# First create these data files and save them in your own data directory structure. +# Then, create a bowtie_indices.loc file to use those indexes with tools. +# Copy this file, save it with the same name (minus the .sample), +# follow the format examples, and store the result in this directory. +# The file should include an one line entry for each index set. 
+# The path points to the "basename" for the set, not a specific file. +# It has four text columns seperated by TABS. +# +# +# +# So, for example, if you had hg18 indexes stored in: +# +# /depot/data2/galaxy/hg19/bowtie2/ +# +# containing hg19 genome and hg19.*.bt2 files, such as: +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.fa +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 18:56 hg19canon.2.bt2 +# -rw-rw-r-- 1 james james 3.3K Feb 10 16:54 hg19canon.3.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 16:54 hg19canon.4.bt2 +# -rw-rw-r-- 1 james james 914M Feb 10 20:45 hg19canon.rev.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 20:45 hg19canon.rev.2.bt2 +# +# then the bowtie2_indices.loc entry could look like this: +# +#hg19 hg19 Human (hg19) /depot/data2/galaxy/hg19/bowtie2/hg19canon +# +#More examples: +# +#mm10 mm10 Mouse (mm10) /depot/data2/galaxy/mm10/bowtie2/mm10 +#dm3 dm3 D. melanogaster (dm3) /depot/data2/galaxy/mm10/bowtie2/dm3 +# +# diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample new file mode 100755 index 000000000000..61663caa7c70 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/bowtie2_indices.loc.sample @@ -0,0 +1,37 @@ +# bowtie2_indices.loc.sample +# This is a *.loc.sample file distributed with Galaxy that enables tools +# to use a directory of indexed data files. This one is for Bowtie2 and Tophat2. +# See the wiki: http://wiki.galaxyproject.org/Admin/NGS%20Local%20Setup +# First create these data files and save them in your own data directory structure. +# Then, create a bowtie_indices.loc file to use those indexes with tools. +# Copy this file, save it with the same name (minus the .sample), +# follow the format examples, and store the result in this directory. +# The file should include an one line entry for each index set. +# The path points to the "basename" for the set, not a specific file. +# It has four text columns seperated by TABS. +# +# +# +# So, for example, if you had hg18 indexes stored in: +# +# /depot/data2/galaxy/hg19/bowtie2/ +# +# containing hg19 genome and hg19.*.bt2 files, such as: +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.fa +# -rw-rw-r-- 1 james james 914M Feb 10 18:56 hg19canon.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 18:56 hg19canon.2.bt2 +# -rw-rw-r-- 1 james james 3.3K Feb 10 16:54 hg19canon.3.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 16:54 hg19canon.4.bt2 +# -rw-rw-r-- 1 james james 914M Feb 10 20:45 hg19canon.rev.1.bt2 +# -rw-rw-r-- 1 james james 683M Feb 10 20:45 hg19canon.rev.2.bt2 +# +# then the bowtie2_indices.loc entry could look like this: +# +#hg19 hg19 Human (hg19) /depot/data2/galaxy/hg19/bowtie2/hg19canon +# +#More examples: +# +#mm10 mm10 Mouse (mm10) /depot/data2/galaxy/mm10/bowtie2/mm10 +#dm3 dm3 D. melanogaster (dm3) /depot/data2/galaxy/mm10/bowtie2/dm3 +# +# diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample b/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample new file mode 100644 index 000000000000..9e62763c741d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/tool_data_table_conf.xml.sample @@ -0,0 +1,13 @@ + + + +
+<tables>
+    <!-- Locations of indexes in the Bowtie2 mapper format -->
+    <table name="bowtie2_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bowtie2_indices.loc" />
+    </table>
+    <!-- Locations of indexes in the Bowtie2 Tophat2 mapper format -->
+    <table name="tophat2_indexes" comment_char="#">
+        <columns>value, dbkey, name, path</columns>
+        <file path="tool-data/bowtie2_indices.loc" />
+    </table>
+</tables>
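The four TAB-separated columns declared above map one-to-one onto each non-comment line of the .loc file. An illustrative parser (a sketch, not shipped with the repository) could look like this::

    # Sketch: read a bowtie2_indices.loc-style file into
    # (value, dbkey, name, path) tuples, matching the <columns>
    # declaration above. Lines starting with '#' are comments and
    # fields are TAB-separated.
    def read_loc(path):
        entries = []
        with open(path) as handle:
            for line in handle:
                line = line.rstrip("\r\n")
                if not line or line.startswith("#"):
                    continue
                fields = line.split("\t")
                if len(fields) == 4:
                    entries.append(tuple(fields))
        return entries

    # usage:
    # for value, dbkey, name, index_path in read_loc("bowtie2_indices.loc"):
    #     print(value, "->", index_path)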
+ diff --git a/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml new file mode 100644 index 000000000000..776d2aa33442 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/0/tool_dependencies.xml @@ -0,0 +1,61 @@ + + + + $REPOSITORY_INSTALL_DIR + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.0.0-beta7/bowtie2-2.0.0-beta7-source.zip + make + + bowtie2 + $INSTALL_DIR/bin + + + bowtie2-align + $INSTALL_DIR/bin + + + bowtie2-build + $INSTALL_DIR/bin + + chmod +x $INSTALL_DIR/bin/bowtie2 + + $INSTALL_DIR/bin + + + + +Compiling bowtie2 requires zlib and libpthread to be present on your system. + + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie/0.12.8/bowtie-0.12.8-src.zip + make + + bowtie + $INSTALL_DIR/bin + + + bowtie-inspect + $INSTALL_DIR/bin + + + bowtie-build + $INSTALL_DIR/bin + + + $INSTALL_DIR/bin + + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml new file mode 100644 index 000000000000..68238f4ddd85 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie2_wrapper.xml @@ -0,0 +1,616 @@ + + + bisulfite mapper (bowtie2) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + --bowtie2 + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. + #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. 
+ ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). +Each of these reads are then aligned to bisulfite treated forward strand index of a reference genome +(C->T converted) and a bisulfite treated reverse strand index of the genome (G->A conversion of the +forward strand, by doing this alignments will produce the same positions). These 4 instances of Bowtie (1 or 2) +are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original +sequence from the genome and determine if there were any protected C's present or not. + +.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel, the 4 strand mode can be +re-enabled by using non_directional mode. + +It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2. + +------ + +**Know what you are doing** + +.. class:: warningmark + +There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words = running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy. + + .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +------ + +**Input formats** + +Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files. 
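The wrapper above switches on the declared Galaxy datatype (fastqsanger vs. fastqillumina) to decide whether to pass --phred64-quals. For readers unsure which datatype applies, a rough heuristic for guessing the encoding from the quality characters themselves (a sketch based on the usual ASCII ranges, not part of the wrapper) might look like::

    # Sketch: guess whether a FASTQ file is Phred+33 (fastqsanger) or
    # Phred+64 (fastqillumina) by scanning quality strings. Characters
    # below ';' (ASCII 59) normally occur only in Phred+33 data, and
    # characters above 'J' (ASCII 74) only in Phred+64 data.
    # Returns None if the sample was ambiguous.
    def guess_quality_offset(path, max_reads=1000):
        with open(path) as handle:
            for i, line in enumerate(handle):
                if i // 4 >= max_reads:
                    break
                if i % 4 == 3:  # every fourth line is the quality string
                    for char in line.rstrip("\r\n"):
                        if ord(char) < 59:
                            return 33
                        if ord(char) > 74:
                            return 64
        return None

    # usage (file name is hypothetical):
    # offset = guess_quality_offset("reads.fastq")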
+ +------ + +**A Note on Built-in Reference Genomes** + +The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY. + +------ + +The final output of Bismark is in SAM format by default. + +**Outputs** + +The output is in SAM format, and has the following columns:: + + Column Description + -------- -------------------------------------------------------- + 1 QNAME seq-ID + 2 FLAG this flag tries to take the strand a bisulfite read + originated from into account + (this is different from ordinary DNA alignment flags!) + 3 RNAME chromosome + 4 POS start position + 5 MAPQ always 255 + 6 CIGAR extended CIGAR string + 7 MRNM Mate Reference sequence NaMe ('=' if same as RNAME) + 8 MPOS 1-based Mate POSition + 9 ISIZE Inferred insert SIZE + 10 SEQ query SEQuence on the same strand as the reference + 11 QUAL Phred33 scale + 12 NM-tag edit distance to the reference) + 13 XX-tag base-by-base mismatches to the reference. + This does not include indels. + 14 XM-tag methylation call string + 15 XR-tag read conversion state for the alignment + 16 XG-tag genome conversion state for the alignment + + +Each read of paired-end alignments is written out in a separate line in the above format. + + +It looks like this (scroll sideways to see the entire example):: + + QNAME FLAG RNAME POS MAPQ CIAGR MRNM MPOS ISIZE SEQ QUAL OPT + HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh + HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh + +------- + +**Bismark settings** + +All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin. + +------ + +**Bismark parameter list** + +This is an exhaustive list of Bismark options: + +------ + +**OPTIONS** + + +Input:: + + --singles A comma- or space-separated list of files containing the reads to be aligned (e.g. + lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will + produce one mapping result and one report file per input file. + + -1 mates1 Comma-separated list of files containing the #1 mates (filename usually includes + "_1"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates2. + Reads may be a mix of different lengths. Bismark will produce one mapping result + and one report file per paired-end input file pair. + + -2 mates2 Comma-separated list of files containing the #2 mates (filename usually includes + "_2"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates1. + Reads may be a mix of different lengths. 
+
+  -q/--fastq               The query input files (specified as mate1,mate2 or singles) are FASTQ
+                           files (usually having extension .fq or .fastq). This is the default. See also
+                           --solexa-quals.
+
+  -f/--fasta               The query input files (specified as mate1,mate2 or singles) are FASTA
+                           files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                           are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT            Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT            Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals          FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals          FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals           Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                           (which can't). The formula for conversion is:
+                           phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                           is usually the right option for use with (unconverted) reads emitted by the GA
+                           Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals        Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                           reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT         The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                           of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                           default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen             The "seed length"; i.e., the number of bases of the high quality end of the read to
+                           which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                           larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT          Maximum permitted total of quality values at all mismatched read positions throughout
+                           the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                           quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                           Bowtie 2.
+
+  --chunkmbs INT           The number of megabytes of memory a given thread is given to store path descriptors in
+                           --best mode. Best-first search must keep track of many paths at once to ensure it is
+                           always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                           memory impact of the descriptors, but they can still grow very large in some cases. If
+                           you receive an error message saying that chunk memory has been exhausted in --best mode,
+                           try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                           is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT          The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                           a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                           with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                           satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT          The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                           a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                           60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                           A 61-bp gap would not be valid in that case. Default: 500.
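The -I/-X examples above reduce to simple arithmetic on the fragment length (mate + gap + mate); the following sketch just replays those numbers::

    # Sketch: check the worked -I/--minins and -X/--maxins examples above.
    # For two mates of equal length separated by a gap, the fragment
    # (insert) length is mate_len + gap + mate_len.
    def insert_ok(mate_len, gap, min_ins=0, max_ins=500):
        fragment = mate_len + gap + mate_len
        return min_ins <= fragment <= max_ins

    print(insert_ok(20, 20, min_ins=60))   # True: 60-bp fragment meets -I 60
    print(insert_ok(20, 19, min_ins=60))   # False: 59-bp fragment is too short
    print(insert_ok(20, 60, max_ins=100))  # True: 100-bp fragment meets -X 100
    print(insert_ok(20, 61, max_ins=100))  # False: 101 bp exceeds -X 100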
+
+
+
+Output::
+
+  --non_directional        The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                           bisulfite strands will be reported. Default: OFF.
+
+                           (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                           to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                           alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                           or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                           for strand-specific libraries).
+
+  --sam-no-hd              Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                           split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                  Print nothing besides alignments.
+
+  --vanilla                Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                           of SAM format output.
+
+  --un/--unmapped          Write all reads that could not be aligned to a file in the output directory. Written reads will
+                           appear as they did in the input, without any translation of quality values that may have
+                           taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                           and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                           with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                           are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous              Write all reads which produce more than one valid alignment with the same number of lowest
+                           mismatches or other reads that fail to align uniquely to a file in the output directory.
+                           Written reads will appear as they did in the input, without any of the translation of quality
+                           values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                           parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                           _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR      Write all output files into this directory. By default the output files will be written into
+                           the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                           to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR           Write temporary files to this directory instead of into the same directory as the input files. If
+                           the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                           temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                   Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                           Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                           but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                           Bowtie 1 see -n).
+
+  -L INT                   Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                           make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                           used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                           Bowtie 1 see -l).
+
+  --ignore-quals           When calculating a mismatch penalty, always consider the quality value at the mismatched
+                           position to be the highest possible, regardless of the actual value. I.e. input is treated
+                           as though all quality values are high. This is also the default behavior when the input
+                           doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed               This option disables Bowtie 2's behavior of trying to find alignments for the individual mates if
+                           it cannot find a concordant or discordant alignment for a pair. This option is invariable and
+                           on by default.
+
+  --no-discordant          Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                           A discordant alignment is an alignment where both mates align uniquely, but that does not
+                           satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                           and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                   Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                           the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                           new second-best alignment. Default: 15.
+
+  -R INT                   INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                           When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                           mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                           to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                           that aligned at least once is greater than 300. Default: 2.
+
+
+Bowtie 2 Scoring options::
+
+  --score_min "func"       Sets a function governing the minimum alignment score needed for an alignment to be considered
+                           "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+                           L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+                           See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+                           L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+  --most_valid_alignments INT   This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+                           deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+                           default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+                           effort expended to find valid alignments.
+
+                           For reference, this used to be the old (now deprecated) description of -M:
+                           Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+                           can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+                           happens first. Only the best alignment is reported. Information from the other alignments is used to
+                           estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+                           Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+                           aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+                           guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+                           always used and its default value is set to 10.
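The L,a,b notation used by --score_min is just a linear function of read length; a small sketch of the default L,0,-0.2 threshold::

    # Sketch: evaluate Bowtie 2's '--score_min L,a,b' threshold, which
    # means f(x) = a + b * x for a read of length x (default: L,0,-0.2).
    def score_min(read_length, constant=0.0, coefficient=-0.2):
        return constant + coefficient * read_length

    for length in (50, 100, 150):
        print(length, "->", score_min(length))  # e.g. 100 -> -20.0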
+ + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml new file mode 100644 index 000000000000..6e4e4def6200 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_bowtie_wrapper.xml @@ -0,0 +1,614 @@ + + + bisulfite mapper (bowtie) + + + SCRIPT_PATH + bowtie + bowtie2 + + + + bismark_wrapper.py + + ## Change this to accommodate the number of threads you have available. + --num-threads 4 + + --bismark_path \$SCRIPT_PATH + + ## + ## Bismark Genome Preparation, if desired. + ## + + ## Handle reference file. + #if $refGenomeSource.genomeSource == "history": + --own-file=$refGenomeSource.ownFile + #else: + --indexes-path ${refGenomeSource.index.fields.path} + #end if + + + ## + ## Input parameters + ## + + + #if $singlePaired.sPaired == "single": + --single-paired $singlePaired.input_singles + + #if $singlePaired.input_singles.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_singles.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_singles.ext == "fasta": + --fasta + #end if + #else: + --mate-paired + --mate1 $singlePaired.input_mate1 + --mate2 $singlePaired.input_mate2 + + #if $singlePaired.input_mate1.ext == "fastqillumina": + --phred64-quals + --fastq + #elif $singlePaired.input_mate1.ext == "fastqsanger": + --fastq + #elif $singlePaired.input_mate1.ext == "fasta": + --fasta + #end if + + -I $singlePaired.minInsert + -X $singlePaired.maxInsert + #end if + + + ## for now hardcode the value for the required memory per thread in --best mode + --chunkmbs 512 + + + #if $params.settingsType == "custom": + + ## default 20 + --seed-len $params.seed_len + ## default 0 + --seed-mismatches $params.seed_mismatches + ## default 15 + --seed-extention-attempts $params.seed_extention_attempts + ## default 2 + --max-reseed $params.max_reseed + + ## default 70 + ##--maqerr $params.maqerr + + ## default unlimited + #if $params.qupto != 0: + --qupto $params.qupto + #end if + #if $params.skip_reads != 0: + --skip-reads $params.skip_reads + #end if + + ## if set, disable the original behaviour + $params.no_mixed + ## if set, disable the original behaviour + $params.no_discordant + + + ###if str($params.isReportOutput) == "yes": + ## --output-report-file $report_file + ###end if + + #end if + + ## + ## Output parameters. 
+ ## + --output $output + $suppress_header + + #if str( $singlePaired.sPaired ) == "single" + #if $output_unmapped_reads_l + --output-unmapped-reads $output_unmapped_reads_l + #end if + #if $output_suppressed_reads_l + --output-suppressed-reads $output_suppressed_reads_l + #end if + #else + #if $output_unmapped_reads_l and $output_unmapped_reads_r + --output-unmapped-reads-l $output_unmapped_reads_l + --output-unmapped-reads-r $output_unmapped_reads_r + #end if + #if $output_suppressed_reads_l and $output_suppressed_reads_l + --output-suppressed-reads-l $output_suppressed_reads_l + --output-suppressed-reads-r $output_suppressed_reads_r + #end if + #end if + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['suppressed_read_file'] is True + )) + + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['supressed_read_file'] is True + + + + + + + + + + + + + + + + + + (( + params['settingsType'] == "custom" and + params['unmapped_read_file'] is True + )) + + + + + + + + + + + + + + + + singlePaired['sPaired'] == "paired" + params['settingsType'] == "custom" + params['unmapped_read_file'] is True + + + + + + + + + + + + + + + + + + + + + + +**What it does** + +Bismark_ is a bisulfite mapper and methylation caller. Bismark takes in FastA or FastQ files and aligns the +reads to a specified bisulfite genome. Sequence reads are transformed into a bisulfite converted forward strand +version (C->T conversion) or into a bisulfite treated reverse strand (G->A conversion of the forward strand). +Each of these reads are then aligned to bisulfite treated forward strand index of a reference genome +(C->T converted) and a bisulfite treated reverse strand index of the genome (G->A conversion of the +forward strand, by doing this alignments will produce the same positions). These 4 instances of Bowtie (1 or 2) +are run in parallel. The sequence file(s) are then read in again sequence by sequence to pull out the original +sequence from the genome and determine if there were any protected C's present or not. + +.. _Bismark: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +As of version 0.7.0 Bismark will only run 2 alignment threads for OT and OB in parallel, the 4 strand mode can be +re-enabled by using non_directional mode. + +It is developed by Krueger F and Andrews SR. at the Babraham Institute. Krueger F, Andrews SR. (2011) Bismark: a flexible aligner and methylation caller for Bisulfite-Seq applications. Bioinformatics, 27, 1571-2. + +------ + +**Know what you are doing** + +.. class:: warningmark + +There is no such thing (yet) as an automated gearshift in short read mapping. It is all like stick-shift driving in San Francisco. In other words = running this tool with default parameters will probably not give you meaningful results. A way to deal with this is to **understand** the parameters by carefully reading the `documentation`__ and experimenting. Fortunately, Galaxy makes experimenting easy. + + .. __: http://www.bioinformatics.babraham.ac.uk/projects/bismark/ + +------ + +**Input formats** + +Bismark accepts files in either Sanger FASTQ format (galaxy type *fastqsanger*), Illumina FASTQ format (galaxy type *fastqillumina*) or FASTA format (galaxy type *fasta*). Use the FASTQ Groomer to prepare your files. 
+ +------ + +**A Note on Built-in Reference Genomes** + +The default variant for all genomes is "Full", defined as all primary chromosomes (or scaffolds/contigs) including mitochondrial plus associated unmapped, plasmid, and other segments. When only one version of a genome is available in this tool, it represents the default "Full" variant. Some genomes will have more than one variant available. The "Canonical Male" or sometimes simply "Canonical" variant contains the primary chromosomes for a genome. For example a human "Canonical" variant contains chr1-chr22, chrX, chrY, and chrM. The "Canonical Female" variant contains the primary chromosomes excluding chrY. + +------ + +The final output of Bismark is in SAM format by default. + +**Outputs** + +The output is in SAM format, and has the following columns:: + + Column Description + -------- -------------------------------------------------------- + 1 QNAME seq-ID + 2 FLAG this flag tries to take the strand a bisulfite read + originated from into account + (this is different from ordinary DNA alignment flags!) + 3 RNAME chromosome + 4 POS start position + 5 MAPQ always 255 + 6 CIGAR extended CIGAR string + 7 MRNM Mate Reference sequence NaMe ('=' if same as RNAME) + 8 MPOS 1-based Mate POSition + 9 ISIZE Inferred insert SIZE + 10 SEQ query SEQuence on the same strand as the reference + 11 QUAL Phred33 scale + 12 NM-tag edit distance to the reference) + 13 XX-tag base-by-base mismatches to the reference. + This does not include indels. + 14 XM-tag methylation call string + 15 XR-tag read conversion state for the alignment + 16 XG-tag genome conversion state for the alignment + + +Each read of paired-end alignments is written out in a separate line in the above format. + + +It looks like this (scroll sideways to see the entire example):: + + QNAME FLAG RNAME POS MAPQ CIAGR MRNM MPOS ISIZE SEQ QUAL OPT + HWI-EAS91_1_30788AAXX:1:1:1761:343 4 * 0 0 * * 0 0 AAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAACNNANNGAGTNGNNNNNNNGCTTCCCACAGNNCTGG hhhhhhh;;hhhhhhhhhhh^hOhhhhghhhfhhhgh;;h;;hhhh;h;;;;;;;hhhhhhghhhh;;Phhh + HWI-EAS91_1_30788AAXX:1:1:1578:331 4 * 0 0 * * 0 0 GTATAGANNAATAAGAAAAAAAAAAATGAAGACTTTCNNANNTCTGNANNNNNNNTCTTTTTTCAGNNGTAG hhhhhhh;;hhhhhhhhhhhhhhhhhhhhhhhhhhhh;;h;;hhhh;h;;;;;;;hhhhhhhhhhh;;hhVh + +------- + +**Bismark settings** + +All of the options have a default value. You can change any of them. If any Bismark function is missing please contact the tool author or your Galaxy admin. + +------ + +**Bismark parameter list** + +This is an exhaustive list of Bismark options: + +------ + +**OPTIONS** + + +Input:: + + --singles A comma- or space-separated list of files containing the reads to be aligned (e.g. + lane1.fq,lane2.fq lane3.fq). Reads may be a mix of different lengths. Bismark will + produce one mapping result and one report file per input file. + + -1 mates1 Comma-separated list of files containing the #1 mates (filename usually includes + "_1"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates2. + Reads may be a mix of different lengths. Bismark will produce one mapping result + and one report file per paired-end input file pair. + + -2 mates2 Comma-separated list of files containing the #2 mates (filename usually includes + "_2"), e.g. flyA_1.fq,flyB_1.fq). Sequences specified with this option must + correspond file-for-file and read-for-read with those specified in mates1. + Reads may be a mix of different lengths. 
+
+  -q/--fastq               The query input files (specified as mate1,mate2 or singles) are FASTQ
+                           files (usually having extension .fq or .fastq). This is the default. See also
+                           --solexa-quals.
+
+  -f/--fasta               The query input files (specified as mate1,mate2 or singles) are FASTA
+                           files (usually having extension .fa, .mfa, .fna or similar). All quality values
+                           are assumed to be 40 on the Phred scale.
+
+  -s/--skip INT            Skip (i.e. do not align) the first INT reads or read pairs from the input.
+
+  -u/--upto INT            Only aligns the first INT reads or read pairs from the input. Default: no limit.
+
+  --phred33-quals          FASTQ qualities are ASCII chars equal to the Phred quality plus 33. Default: on.
+
+  --phred64-quals          FASTQ qualities are ASCII chars equal to the Phred quality plus 64. Default: off.
+
+  --solexa-quals           Convert FASTQ qualities from solexa-scaled (which can be negative) to phred-scaled
+                           (which can't). The formula for conversion is:
+                           phred-qual = 10 * log(1 + 10 ** (solexa-qual/10.0)) / log(10). Used with -q. This
+                           is usually the right option for use with (unconverted) reads emitted by the GA
+                           Pipeline versions prior to 1.3. Works only for Bowtie 1. Default: off.
+
+  --solexa1.3-quals        Same as --phred64-quals. This is usually the right option for use with (unconverted)
+                           reads emitted by GA Pipeline version 1.3 or later. Default: off.
+
+
+Alignment::
+
+  -n/--seedmms INT         The maximum number of mismatches permitted in the "seed", i.e. the first L base pairs
+                           of the read (where L is set with -l/--seedlen). This may be 0, 1, 2 or 3 and the
+                           default is 1. This option is only available for Bowtie 1 (for Bowtie 2 see -N).
+
+  -l/--seedlen             The "seed length"; i.e., the number of bases of the high quality end of the read to
+                           which the -n ceiling applies. The default is 28. Bowtie (and thus Bismark) is faster for
+                           larger values of -l. This option is only available for Bowtie 1 (for Bowtie 2 see -L).
+
+  -e/--maqerr INT          Maximum permitted total of quality values at all mismatched read positions throughout
+                           the entire alignment, not just in the "seed". The default is 70. Like Maq, bowtie rounds
+                           quality values to the nearest 10 and saturates at 30. This value is not relevant for
+                           Bowtie 2.
+
+  --chunkmbs INT           The number of megabytes of memory a given thread is given to store path descriptors in
+                           --best mode. Best-first search must keep track of many paths at once to ensure it is
+                           always extending the path with the lowest cumulative cost. Bowtie tries to minimize the
+                           memory impact of the descriptors, but they can still grow very large in some cases. If
+                           you receive an error message saying that chunk memory has been exhausted in --best mode,
+                           try adjusting this parameter up to dedicate more memory to the descriptors. This value
+                           is not relevant for Bowtie 2. Default: 512.
+
+  -I/--minins INT          The minimum insert size for valid paired-end alignments. E.g. if -I 60 is specified and
+                           a paired-end alignment consists of two 20-bp alignments in the appropriate orientation
+                           with a 20-bp gap between them, that alignment is considered valid (as long as -X is also
+                           satisfied). A 19-bp gap would not be valid in that case. Default: 0.
+
+  -X/--maxins INT          The maximum insert size for valid paired-end alignments. E.g. if -X 100 is specified and
+                           a paired-end alignment consists of two 20-bp alignments in the proper orientation with a
+                           60-bp gap between them, that alignment is considered valid (as long as -I is also satisfied).
+                           A 61-bp gap would not be valid in that case. Default: 500.
+
+
+
+Output::
+
+  --non_directional        The sequencing library was constructed in a non strand-specific manner; alignments to all four
+                           bisulfite strands will be reported. Default: OFF.
+
+                           (The current Illumina protocol for BS-Seq is directional, in which case the strands complementary
+                           to the original strands are merely theoretical and should not exist in reality. Specifying directional
+                           alignments (which is the default) will only run 2 alignment threads to the original top (OT)
+                           or bottom (OB) strands in parallel and report these alignments. This is the recommended option
+                           for strand-specific libraries).
+
+  --sam-no-hd              Suppress SAM header lines (starting with @). This might be useful when very large input files are
+                           split up into several smaller files to run concurrently and the output files are to be merged.
+
+  --quiet                  Print nothing besides alignments.
+
+  --vanilla                Performs bisulfite mapping with Bowtie 1 and prints the 'old' output (as in Bismark 0.5.X) instead
+                           of SAM format output.
+
+  --un/--unmapped          Write all reads that could not be aligned to a file in the output directory. Written reads will
+                           appear as they did in the input, without any translation of quality values that may have
+                           taken place within Bowtie or Bismark. Paired-end reads will be written to two parallel files with _1
+                           and _2 inserted in their filenames, i.e. _unmapped_reads_1.txt and _unmapped_reads_2.txt. Reads
+                           with more than one valid alignment with the same number of lowest mismatches (ambiguous mapping)
+                           are also written to _unmapped_reads.txt unless the option --ambiguous is specified as well.
+
+  --ambiguous              Write all reads which produce more than one valid alignment with the same number of lowest
+                           mismatches or other reads that fail to align uniquely to a file in the output directory.
+                           Written reads will appear as they did in the input, without any of the translation of quality
+                           values that may have taken place within Bowtie or Bismark. Paired-end reads will be written to two
+                           parallel files with _1 and _2 inserted in their filenames, i.e. _ambiguous_reads_1.txt and
+                           _ambiguous_reads_2.txt. These reads are not written to the file specified with --un.
+
+  -o/--output_dir DIR      Write all output files into this directory. By default the output files will be written into
+                           the same folder as the input file(s). If the specified folder does not exist, Bismark will attempt
+                           to create it first. The path to the output folder can be either relative or absolute.
+
+  --temp_dir DIR           Write temporary files to this directory instead of into the same directory as the input files. If
+                           the specified folder does not exist, Bismark will attempt to create it first. The path to the
+                           temporary folder can be either relative or absolute.
+
+------
+
+Bowtie 2 alignment options::
+
+  -N INT                   Sets the number of mismatches allowed in a seed alignment during multiseed alignment.
+                           Can be set to 0 or 1. Setting this higher makes alignment slower (often much slower)
+                           but increases sensitivity. Default: 0. This option is only available for Bowtie 2 (for
+                           Bowtie 1 see -n).
+
+  -L INT                   Sets the length of the seed substrings to align during multiseed alignment. Smaller values
+                           make alignment slower but more sensitive. Default: the --sensitive preset of Bowtie 2 is
+                           used by default, which sets -L to 20. This option is only available for Bowtie 2 (for
+                           Bowtie 1 see -l).
+
+  --ignore-quals           When calculating a mismatch penalty, always consider the quality value at the mismatched
+                           position to be the highest possible, regardless of the actual value. I.e. input is treated
+                           as though all quality values are high. This is also the default behavior when the input
+                           doesn't specify quality values (e.g. in -f mode). This option is invariable and on by default.
+
+
+Bowtie 2 paired-end options::
+
+  --no-mixed               This option disables Bowtie 2's behavior of trying to find alignments for the individual mates if
+                           it cannot find a concordant or discordant alignment for a pair. This option is invariable and
+                           on by default.
+
+  --no-discordant          Normally, Bowtie 2 looks for discordant alignments if it cannot find any concordant alignments.
+                           A discordant alignment is an alignment where both mates align uniquely, but that does not
+                           satisfy the paired-end constraints (--fr/--rf/--ff, -I, -X). This option disables that behavior
+                           and it is on by default.
+
+
+Bowtie 2 effort options::
+
+  -D INT                   Up to INT consecutive seed extension attempts can "fail" before Bowtie 2 moves on, using
+                           the alignments found so far. A seed extension "fails" if it does not yield a new best or a
+                           new second-best alignment. Default: 15.
+
+  -R INT                   INT is the maximum number of times Bowtie 2 will "re-seed" reads with repetitive seeds.
+                           When "re-seeding," Bowtie 2 simply chooses a new set of reads (same length, same number of
+                           mismatches allowed) at different offsets and searches for more alignments. A read is considered
+                           to have repetitive seeds if the total number of seed hits divided by the number of seeds
+                           that aligned at least once is greater than 300. Default: 2.
+
+
+Bowtie 2 Scoring options::
+
+  --score_min "func"       Sets a function governing the minimum alignment score needed for an alignment to be considered
+                           "valid" (i.e. good enough to report). This is a function of read length. For instance, specifying
+                           L,0,-0.2 sets the minimum-score function f to f(x) = 0 + -0.2 * x, where x is the read length.
+                           See also: setting function options at http://bowtie-bio.sourceforge.net/bowtie2. The default is
+                           L,0,-0.2.
+
+
+Bowtie 2 Reporting options::
+
+  --most_valid_alignments INT   This used to be the Bowtie 2 parameter -M. As of Bowtie 2 version 2.0.0 beta7 the option -M is
+                           deprecated. It will be removed in subsequent versions. What used to be called -M mode is still the
+                           default mode, but adjusting the -M setting is deprecated. Use the -D and -R options to adjust the
+                           effort expended to find valid alignments.
+
+                           For reference, this used to be the old (now deprecated) description of -M:
+                           Bowtie 2 searches for at most INT+1 distinct, valid alignments for each read. The search terminates when it
+                           can't find more distinct valid alignments, or when it finds INT+1 distinct alignments, whichever
+                           happens first. Only the best alignment is reported. Information from the other alignments is used to
+                           estimate mapping quality and to set SAM optional fields, such as AS:i and XS:i. Increasing -M makes
+                           Bowtie 2 slower, but increases the likelihood that it will pick the correct alignment for a read that
+                           aligns many places. For reads that have more than INT+1 distinct, valid alignments, Bowtie 2 does not
+                           guarantee that the alignment reported is the best possible in terms of alignment score. -M is
+                           always used and its default value is set to 10.
+ + + diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation new file mode 100755 index 000000000000..1895a296632c --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_genome_preparation @@ -0,0 +1,492 @@ +#!/usr/bin/perl -- +use strict; +use warnings; +use Cwd; +use File::Path qw(rmtree); +$|++; + + +## This program is Copyright (C) 2010-12, Felix Krueger (felix.krueger@bbsrc.ac.uk) + +## This program is free software: you can redistribute it and/or modify +## it under the terms of the GNU General Public License as published by +## the Free Software Foundation, either version 3 of the License, or +## (at your option) any later version. + +## This program is distributed in the hope that it will be useful, +## but WITHOUT ANY WARRANTY; without even the implied warranty of +## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +## GNU General Public License for more details. + +## You should have received a copy of the GNU General Public License +## along with this program. If not, see . + +use Getopt::Long; +use Cwd; + +my $verbose; +my $help; +my $version; +my $man; +my $path_to_bowtie; +my $multi_fasta; +my $single_fasta; +my $bowtie2; + +my $bismark_version = 'v0.7.7'; + +GetOptions ('verbose' => \$verbose, + 'help' => \$help, + 'man' => \$man, + 'version' => \$version, + 'path_to_bowtie:s' => \$path_to_bowtie, + 'single_fasta' => \$single_fasta, + 'bowtie2' => \$bowtie2, + ); + +my $genome_folder = shift @ARGV; # mandatory +my $CT_dir; +my $GA_dir; + +if ($help or $man){ + print_helpfile(); + exit; +} + +if ($version){ + print << "VERSION"; + + Bismark - Bisulfite Mapper and Methylation Caller. + + Bismark Genome Preparation Version: $bismark_version + Copyright 2010-12 Felix Krueger, Babraham Bioinformatics + www.bioinformatics.babraham.ac.uk/projects/ + +VERSION + exit; +} + +if ($single_fasta){ + print "Writing individual genomes out into single-entry fasta files (one per chromosome)\n\n"; + $multi_fasta = 0; +} +else{ + print "Writing bisulfite genomes out into a single MFA (multi FastA) file\n\n"; + $single_fasta = 0; + $multi_fasta = 1; +} + +my @filenames = create_bisulfite_genome_folders(); + +process_sequence_files (); + +launch_bowtie_indexer(); + +sub launch_bowtie_indexer{ + if ($bowtie2){ + print "Bismark Genome Preparation - Step III: Launching the Bowtie 2 indexer\n"; + } + else{ + print "Bismark Genome Preparation - Step III: Launching the Bowtie (1) indexer\n"; + } + print "Please be aware that this process can - depending on genome size - take up to several hours!\n"; + sleep(5); + + ### if the path to bowtie was specfified explicitely + if ($path_to_bowtie){ + if ($bowtie2){ + $path_to_bowtie =~ s/$/bowtie2-build/; + } + else{ + $path_to_bowtie =~ s/$/bowtie-build/; + } + } + ### otherwise we assume that bowtie-build is in the path + else{ + if ($bowtie2){ + $path_to_bowtie = 'bowtie2-build'; + } + else{ + $path_to_bowtie = 'bowtie-build'; + } + } + + $verbose and print "\n"; + + ### Forking the program to run 2 instances of Bowtie-build or Bowtie2-build (= the Bowtie (1/2) indexer) + my $pid = fork(); + + # parent process + if ($pid){ + sleep(1); + chdir $CT_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of CT converted genome in $CT_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Parent process: Starting to index C->T converted genome with the 
following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_CT\n\n"; + + sleep (11); + exec ("$path_to_bowtie","-f","$file_list","BS_CT"); + } + + # child process + elsif ($pid == 0){ + sleep(2); + chdir $GA_dir or die "Unable to change directory: $!\n"; + $verbose and warn "Preparing indexing of GA converted genome in $GA_dir\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "Child process: Starting to index G->A converted genome with the following command:\n\n"; + $verbose and print "$path_to_bowtie -f $file_list BS_GA\n\n"; + $verbose and print "(starting in 10 seconds)\n"; + sleep(10); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } + + # if the platform doesn't support the fork command we will run the indexing processes one after the other + else{ + print "Forking process was not successful, therefore performing the indexing sequentially instead\n"; + sleep(10); + + ### moving to CT genome folder + $verbose and warn "Preparing to index CT converted genome in $CT_dir\n"; + chdir $CT_dir or die "Unable to change directory: $!\n"; + my @fasta_files = <*.fa>; + my $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + system ("$path_to_bowtie","-f","$file_list","BS_CT"); + @fasta_files=(); + $file_list= ''; + + ### moving to GA genome folder + $verbose and warn "Preparing to index GA converted genome in $GA_dir\n"; + chdir $GA_dir or die "Unable to change directory: $!\n"; + @fasta_files = <*.fa>; + $file_list = join (',',@fasta_files); + $verbose and print "$file_list\n\n"; + sleep(2); + exec ("$path_to_bowtie","-f","$file_list","BS_GA"); + } +} + + +sub process_sequence_files { + + my ($total_CT_conversions,$total_GA_conversions) = (0,0); + $verbose and print "Bismark Genome Preparation - Step II: Bisulfite converting reference genome\n\n"; + sleep (3); + + $verbose and print "conversions performed:\n"; + $verbose and print join("\t",'chromosome','C->T','G->A'),"\n"; + + + ### If someone wants to index a genome which consists of thousands of contig and scaffold files we need to write the genome conversions into an MFA file + ### Otherwise the list of comma separated chromosomes we provide for bowtie-build will get too long for the kernel to handle + ### This is now the default option + + if ($multi_fasta){ + ### Here we just use one multi FastA file name, append .CT_conversion or .GA_conversion and print all sequence conversions into these files + my $bisulfite_CT_conversion_filename = "$CT_dir/genome_mfa.CT_conversion.fa"; + open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n"; + + my $bisulfite_GA_conversion_filename = "$GA_dir/genome_mfa.GA_conversion.fa"; + open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n"; + } + + foreach my $filename(@filenames){ + my ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0); + open (IN,$filename) or die "Failed to read from sequence file $filename $!\n"; + # warn "Reading chromosome information from $filename\n\n"; + + ### first line needs to be a fastA header + my $first_line = ; + chomp $first_line; + + ### Extracting chromosome name from the FastA header + my $chromosome_name = extract_chromosome_name($first_line); + + ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes. 
+ unless ($multi_fasta){ + my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name"; + $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/; + open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n"; + + my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name"; + $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/; + open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n"; + } + + print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; # first entry + print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; # first entry + + + while (){ + + ### in case the line is a new fastA header + if ($_ =~ /^>/){ + ### printing out the stats for the previous chromosome + $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n"; + ### resetting the chromosome transliteration counters + ($chromosome_CT_conversions,$chromosome_GA_conversions) = (0,0); + + ### Extracting chromosome name from the additional FastA header + $chromosome_name = extract_chromosome_name($_); + + ### alternatively, chromosomes can be written out into single-entry FastA files. This will only work for genomes with up to a few hundred chromosomes. + unless ($multi_fasta){ + my $bisulfite_CT_conversion_filename = "$CT_dir/$chromosome_name"; + $bisulfite_CT_conversion_filename =~ s/$/.CT_conversion.fa/; + open (CT_CONVERT,'>',$bisulfite_CT_conversion_filename) or die "Can't write to file $bisulfite_CT_conversion_filename: $!\n"; + + my $bisulfite_GA_conversion_filename = "$GA_dir/$chromosome_name"; + $bisulfite_GA_conversion_filename =~ s/$/.GA_conversion.fa/; + open (GA_CONVERT,'>',$bisulfite_GA_conversion_filename) or die "Can't write to file $bisulfite_GA_conversion_filename: $!\n"; + } + + print CT_CONVERT ">",$chromosome_name,"_CT_converted\n"; + print GA_CONVERT ">",$chromosome_name,"_GA_converted\n"; + } + + else{ + my $sequence = uc$_; + + ### (I) First replacing all ambiguous sequence characters (such as M,S,R....) 
by N (G,A,T,C,N and the line endings \r and \n are added to a character group) + + $sequence =~ s/[^ATCGN\n\r]/N/g; + + ### (II) Writing the chromosome out into a C->T converted version (equals forward strand conversion) + + my $CT_sequence = $sequence; + my $CT_transliterations_performed = ($CT_sequence =~ tr/C/T/); # converts all Cs into Ts + $total_CT_conversions += $CT_transliterations_performed; + $chromosome_CT_conversions += $CT_transliterations_performed; + + print CT_CONVERT $CT_sequence; + + ### (III) Writing the chromosome out in a G->A converted version of the forward strand (this is equivalent to reverse- + ### complementing the forward strand and then C->T converting it) + + my $GA_sequence = $sequence; + my $GA_transliterations_performed = ($GA_sequence =~ tr/G/A/); # converts all Gs to As on the forward strand + $total_GA_conversions += $GA_transliterations_performed; + $chromosome_GA_conversions += $GA_transliterations_performed; + + print GA_CONVERT $GA_sequence; + + } + } + $verbose and print join ("\t",$chromosome_name,$chromosome_CT_conversions,$chromosome_GA_conversions),"\n"; + } + close (CT_CONVERT) or die "Failed to close filehandle: $!\n"; + close (GA_CONVERT) or die "Failed to close filehandle: $!\n"; + + + print "\nTotal number of conversions performed:\n"; + print "C->T:\t$total_CT_conversions\n"; + print "G->A:\t$total_GA_conversions\n"; + + warn "\nStep II - Genome bisulfite conversions - completed\n\n\n"; +} + +sub extract_chromosome_name { + + my $header = shift; + + ## Bowtie extracts the first string after the initial > in the FASTA file, so we are doing this as well + + if ($header =~ s/^>//){ + my ($chromosome_name) = split (/\s+/,$header); + return $chromosome_name; + } + else{ + die "The specified chromosome file doesn't seem to be in FASTA format as required! $!\n"; + } +} + +sub create_bisulfite_genome_folders{ + + $verbose and print "Bismark Genome Preparation - Step I: Preparing folders\n\n"; + + # Ensuring a genome folder has been specified + if ($genome_folder){ + unless ($genome_folder =~ /\/$/){ + $genome_folder =~ s/$/\//; + } + $verbose and print "Path to genome folder specified: $genome_folder\n"; + chdir $genome_folder or die "Could't move to directory $genome_folder. Make sure the directory exists! $!"; + + # making the genome folder path abolsolute so it won't break if the path was specified relative + $genome_folder = getcwd; + unless ($genome_folder =~ /\/$/){ + $genome_folder =~ s/$/\//; + } + } + + else{ + $verbose and print "Genome folder was not provided as argument "; + while (1){ + print "Please specify a genome folder to be bisulfite converted:\n"; + $genome_folder = ; + chomp $genome_folder; + + # adding a trailing slash unless already present + unless ($genome_folder =~ /\/$/){ + $genome_folder =~ s/$/\//; + } + if (chdir $genome_folder){ + last; + } + else{ + warn "Could't move to directory $genome_folder! $!"; + } + } + } + + if ($path_to_bowtie){ + unless ($path_to_bowtie =~ /\/$/){ + $path_to_bowtie =~ s/$/\//; + } + if (chdir $path_to_bowtie){ + if ($bowtie2){ + $verbose and print "Path to Bowtie 2 specified: $path_to_bowtie\n"; + } + else{ + $verbose and print "Path to Bowtie (1) specified: $path_to_bowtie\n"; + } + } + else{ + die "There was an error with the path to bowtie: $!\n"; + } + } + + chdir $genome_folder or die "Could't move to directory $genome_folder. Make sure the directory exists! 
$!"; + + + # Exiting unless there are fastA files in the folder + my @filenames = <*.fa>; + + ### if there aren't any genomic files with the extension .fa we will look for files with the extension .fasta + unless (@filenames){ + @filenames = <*.fasta>; + } + + unless (@filenames){ + die "The specified genome folder $genome_folder does not contain any sequence files in FastA format (with .fa or .fasta file extensions\n"; + } + + warn "Bisulfite Genome Indexer version $bismark_version (last modified 17 Nov 2011)\n\n"; + sleep (3); + + # creating a directory inside the genome folder to store the bisfulfite genomes unless it already exists + my $bisulfite_dir = "${genome_folder}Bisulfite_Genome/"; + unless (-d $bisulfite_dir){ + mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n"; + $verbose and print "Created Bisulfite Genome folder $bisulfite_dir\n"; + } + else{ + while (1){ + print "\nA directory called $bisulfite_dir already exists. Bisulfite converted sequences and/or already existing Bowtie (1 or 2) indexes might be overwritten!\nDo you want to continue anyway?\t"; + my $proceed = ; + chomp $proceed; + if ($proceed =~ /^y/i ){ + last; + } + elsif ($proceed =~ /^n/i){ + die "Terminated by user\n\n"; + } + } + } + + ### as of version 0.6.0 the Bismark indexer will no longer delete the Bisulfite_Genome directory if it was present already, since it could store the Bowtie 1 or 2 indexes already + # removing any existing files and subfolders in the bisulfite directory (the specified directory won't be deleted) + # rmtree($bisulfite_dir, {verbose => 1,keep_root => 1}); + # unless (-d $bisulfite_dir){ # had to add this after changing remove_tree to rmtree // suggested by Samantha Cooper @ Illumina + # mkdir $bisulfite_dir or die "Unable to create directory $bisulfite_dir $!\n"; + # } + # } + + chdir $bisulfite_dir or die "Unable to move to $bisulfite_dir\n"; + $CT_dir = "${bisulfite_dir}CT_conversion/"; + $GA_dir = "${bisulfite_dir}GA_conversion/"; + + # creating 2 subdirectories to store a C->T (forward strand conversion) and a G->A (reverse strand conversion) + # converted version of the genome + unless (-d $CT_dir){ + mkdir $CT_dir or die "Unable to create directory $CT_dir $!\n"; + $verbose and print "Created Bisulfite Genome folder $CT_dir\n"; + } + unless (-d $GA_dir){ + mkdir $GA_dir or die "Unable to create directory $GA_dir $!\n"; + $verbose and print "Created Bisulfite Genome folder $GA_dir\n"; + } + + # moving back to the original genome folder + chdir $genome_folder or die "Could't move to directory $genome_folder $!"; + # $verbose and print "Moved back to genome folder folder $genome_folder\n"; + warn "\nStep I - Prepare genome folders - completed\n\n\n"; + return @filenames; +} + +sub print_helpfile{ + print << 'HOW_TO'; + + +DESCRIPTION + +This script is supposed to convert a specified reference genome into two different bisulfite +converted versions and index them for alignments with Bowtie 1 (default), or Bowtie 2. The first +bisulfite genome will have all Cs converted to Ts (C->T), and the other one will have all Gs +converted to As (G->A). Both bisulfite genomes will be stored in subfolders within the reference +genome folder. Once the bisulfite conversion has been completed the program will fork and launch +two simultaneous instances of the bowtie 1 or 2 indexer (bowtie-build or bowtie2-build). Be aware +that the indexing process can take up to several hours; this will mainly depend on genome size +and system resources. 
+ + + + +The following is a brief description of command line options and arguments to control the +Bismark Genome Preparation script: + + +USAGE: bismark_genome_preparation [options] + + +OPTIONS: + +--help/--man Displays this help filea and exits. + +--version Displays version information and exits. + +--verbose Print verbose output for more details or debugging. + +--path_to_bowtie The full path to the Bowtie 1 or Bowtie 2 installation on your system.If + the path is not provided as an option you will be prompted for it. + +--bowtie2 This will create bisulfite indexes for Bowtie 2. (Default: Bowtie 1). + +--single_fasta Instruct the Bismark Indexer to write the converted genomes into + single-entry FastA files instead of making one multi-FastA file (MFA) + per chromosome. This might be useful if individual bisulfite converted + chromosomes are needed (e.g. for debugging), however it can cause a + problem with indexing if the number of chromosomes is vast (this is likely + to be in the range of several thousand files; the operating system can + only handle lists up to a certain length, and some newly assembled + genomes may contain 20000-50000 contigs of scaffold files which do exceed + this list length limit). + + +ARGUMENTS: + + The path to the folder containing the genome to be bisulfite converted. + At the current time Bismark Genome Preparation expects one or more fastA + files in the folder (with the file extension: .fa or .fasta). If the path + is not provided as an argument you will be prompted for it. + + + +This script was last modified on 18 Nov 2011. +HOW_TO +} diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py new file mode 100644 index 000000000000..cb79d1ecf590 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import zipfile +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def zipper(dir, zip_file): + zip = zipfile.ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED) + root_len = len(os.path.abspath(dir)) + for root, dirs, files in os.walk(dir): + archive_root = os.path.abspath(root)[root_len:] + for f in files: + fullpath = os.path.join(root, f) + archive_name = os.path.join(archive_root, f) + zip.write(fullpath, archive_name, zipfile.ZIP_DEFLATED) + zip.close() + return zip_file + +def __main__(): + #Parse Command Line + parser = argparse.ArgumentParser(description='Wrapper for the bismark methylation caller.') + + # input options + parser.add_argument( '--infile', help='Input file in SAM format.' 
) + parser.add_argument( '--single-end', dest='single_end', action="store_true" ) + parser.add_argument( '--paired-end', dest='paired_end', action="store_true" ) + + parser.add_argument( '--report-file', dest='report_file' ) + parser.add_argument( '--comprehensive', action="store_true" ) + parser.add_argument( '--merge-non-cpg', dest='merge_non_cpg', action="store_true" ) + parser.add_argument( '--no-overlap', dest='no_overlap', action="store_true" ) + parser.add_argument( '--compress' ) + parser.add_argument( '--ignore-bps', dest='ignore_bps', type=int ) + + # OT - original top strand + parser.add_argument( '--cpg_ot' ) + parser.add_argument( '--chg_ot' ) + parser.add_argument( '--chh_ot' ) + # CTOT - complementary to original top strand + parser.add_argument( '--cpg_ctot' ) + parser.add_argument( '--chg_ctot' ) + parser.add_argument( '--chh_ctot' ) + # OB - original bottom strand + parser.add_argument( '--cpg_ob' ) + parser.add_argument( '--chg_ob' ) + parser.add_argument( '--chh_ob' ) + # CTOT - complementary to original bottom strand + parser.add_argument( '--cpg_ctob' ) + parser.add_argument( '--chg_ctob' ) + parser.add_argument( '--chh_ctob' ) + + parser.add_argument( '--cpg_context' ) + parser.add_argument( '--chg_context' ) + parser.add_argument( '--chh_context' ) + + parser.add_argument( '--non_cpg_context' ) + + parser.add_argument( '--non_cpg_context_ot' ) + parser.add_argument( '--non_cpg_context_ctot' ) + parser.add_argument( '--non_cpg_context_ob' ) + parser.add_argument( '--non_cpg_context_ctob' ) + + args = parser.parse_args() + + + # Build methylation extractor command + output_dir = tempfile.mkdtemp() + cmd = 'bismark_methylation_extractor --no_header -o %s %s %s' + + additional_opts = '' + # Set up all options + if args.single_end: + additional_opts += ' --single-end ' + else: + additional_opts += ' --paired-end ' + if args.no_overlap: + additional_opts += ' --no_overlap ' + if args.ignore_bps: + additional_opts += ' --ignore %s ' % args.ignore_bps + if args.comprehensive: + additional_opts += ' --comprehensive ' + if args.merge_non_cpg: + additional_opts += ' --merge_non_CpG ' + if args.report_file: + additional_opts += ' --report ' + + + # Final command: + cmd = cmd % (output_dir, additional_opts, args.infile) + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. 
+ except Exception, e: + stop_err( 'Error in bismark methylation extractor:\n' + str( e ) ) + + + # collect and copy output files + + if args.compress: + zipper(output_dir, args.compress) + + + if args.cpg_ot: + shutil.move( glob(os.path.join( output_dir, '*CpG_OT_*'))[0], args.cpg_ot ) + if args.chg_ot: + shutil.move( glob(os.path.join( output_dir, '*CHG_OT_*'))[0], args.chg_ot ) + if args.chh_ot: + shutil.move( glob(os.path.join( output_dir, '*CHH_OT_*'))[0], args.chh_ot ) + if args.cpg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOT_*'))[0], args.cpg_ctot ) + if args.chg_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOT_*'))[0], args.chg_ctot ) + if args.chh_ctot: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOT_*'))[0], args.chh_ctot ) + if args.cpg_ob: + shutil.move( glob(os.path.join( output_dir, '*CpG_OB_*'))[0], args.cpg_ob ) + if args.chg_ob: + shutil.move( glob(os.path.join( output_dir, '*CHG_OB_*'))[0], args.chg_ob ) + if args.chh_ob: + shutil.move( glob(os.path.join( output_dir, '*CHH_OB_*'))[0], args.chh_ob ) + if args.cpg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CpG_CTOB_*'))[0], args.cpg_ctob ) + if args.chg_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHG_CTOB_*'))[0], args.chg_ctob ) + if args.chh_ctob: + shutil.move( glob(os.path.join( output_dir, '*CHH_CTOB_*'))[0], args.chh_ctob ) + + # context-dependent methylation output files + if args.cpg_context: + shutil.move( glob(os.path.join( output_dir, '*CpG_context_*'))[0], args.cpg_context ) + if args.chg_context: + shutil.move( glob(os.path.join( output_dir, '*CHG_context_*'))[0], args.chg_context ) + if args.chh_context: + shutil.move( glob(os.path.join( output_dir, '*CHH_context_*'))[0], args.chh_context ) + + if args.non_cpg_context: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_context_*'))[0], args.non_cpg_context ) + + if args.non_cpg_context_ot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OT_*'))[0], args.non_cpg_context_ot ) + if args.non_cpg_context_ctot: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOT_*'))[0], args.non_cpg_context_ctot ) + if args.non_cpg_context_ob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_OB_*'))[0], args.non_cpg_context_ob ) + if args.non_cpg_context_ctob: + shutil.move( glob(os.path.join( output_dir, '*Non_CpG_CTOB_*'))[0], args.non_cpg_context_ctob ) + + + + if args.report_file: + shutil.move( glob(os.path.join( output_dir, '*_splitting_report*'))[0], args.report_file ) + + + # Clean up temp dirs + if os.path.exists( output_dir ): + shutil.rmtree( output_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/bismark/bismark_methylation_extractor.xml b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.xml similarity index 100% rename from lib/tool_shed/test/test_data/bismark/bismark_methylation_extractor.xml rename to lib/tool_shed/test/test_data/repos/bismark/1/bismark_methylation_extractor.xml diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py new file mode 100644 index 000000000000..606fa428bd77 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/bismark_wrapper.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python + +import argparse, os, shutil, subprocess, sys, tempfile, fileinput +import fileinput +from glob import glob + +def stop_err( msg ): + sys.stderr.write( "%s\n" % msg ) + sys.exit() + +def __main__(): + #Parse Command Line 
+ parser = argparse.ArgumentParser(description='Wrapper for the bismark bisulfite mapper.') + parser.add_argument( '-p', '--num-threads', dest='num_threads', + type=int, default=4, help='Use this many threads to align reads. The default is 4.' ) + + parser.add_argument( '--bismark_path', dest='bismark_path', help='Path to the bismark perl scripts' ) + + parser.add_argument( '--bowtie2', action='store_true', default=False, help='Running bismark with bowtie2 and not with bowtie.' ) + + # input options + parser.add_argument( '--own-file', dest='own_file', help='' ) + parser.add_argument( '-D', '--indexes-path', dest='index_path', help='Indexes directory; location of .ebwt and .fa files.' ) + parser.add_argument( '-O', '--output', dest='output' ) + parser.add_argument( '--output-report-file', dest='output_report_file' ) + parser.add_argument( '--suppress-header', dest='suppress_header', action="store_true" ) + + parser.add_argument( '--mate-paired', dest='mate_paired', action='store_true', help='Reads are mate-paired', default=False) + + + parser.add_argument( '-1', '--mate1', dest='mate1', + help='The forward reads file in Sanger FASTQ or FASTA format.' ) + parser.add_argument( '-2', '--mate2', dest='mate2', + help='The reverse reads file in Sanger FASTQ or FASTA format.' ) + + parser.add_argument( '--output-unmapped-reads', dest='output_unmapped_reads', + help='Additional output file with unmapped reads (single-end).' ) + parser.add_argument( '--output-unmapped-reads-l', dest='output_unmapped_reads_l', + help='File name for unmapped reads (left, paired-end).' ) + parser.add_argument( '--output-unmapped-reads-r', dest='output_unmapped_reads_r', + help='File name for unmapped reads (right, paired-end).' ) + + + parser.add_argument( '--output-suppressed-reads', dest='output_suppressed_reads', + help='Additional output file with suppressed reads (single-end).' ) + parser.add_argument( '--output-suppressed-reads-l', dest='output_suppressed_reads_l', + help='File name for suppressed reads (left, paired-end).' ) + parser.add_argument( '--output-suppressed-reads-r', dest='output_suppressed_reads_r', + help='File name for suppressed reads (right, paired-end).' ) + + + parser.add_argument( '--single-paired', dest='single_paired', + help='The single-end reads file in Sanger FASTQ or FASTA format.' 
) + + parser.add_argument( '--fastq', action='store_true', help='Query filetype is in FASTQ format') + parser.add_argument( '--fasta', action='store_true', help='Query filetype is in FASTA format') + parser.add_argument( '--phred64-quals', dest='phred64', action="store_true" ) + + + parser.add_argument( '--skip-reads', dest='skip_reads', type=int ) + parser.add_argument( '--qupto', type=int) + + + # paired end options + parser.add_argument( '-I', '--minins', dest='min_insert' ) + parser.add_argument( '-X', '--maxins', dest='max_insert' ) + parser.add_argument( '--no-mixed', dest='no_mixed', action="store_true" ) + parser.add_argument( '--no-discordant', dest='no_discordant', action="store_true" ) + + #parse general options + # default 20 + parser.add_argument( '--seed-len', dest='seed_len', type=int) + # default 15 + parser.add_argument( '--seed-extention-attempts', dest='seed_extention_attempts', type=int ) + # default 0 + parser.add_argument( '--seed-mismatches', dest='seed_mismatches', type=int ) + # default 2 + parser.add_argument( '--max-reseed', dest='max_reseed', type=int ) + """ + # default 70 + parser.add_argument( '--maqerr', dest='maqerr', type=int ) + """ + + """ + The number of megabytes of memory a given thread is given to store path + descriptors in --best mode. Best-first search must keep track of many paths + at once to ensure it is always extending the path with the lowest cumulative + cost. Bowtie tries to minimize the memory impact of the descriptors, but + they can still grow very large in some cases. If you receive an error message + saying that chunk memory has been exhausted in --best mode, try adjusting + this parameter up to dedicate more memory to the descriptors. Default: 512. + """ + parser.add_argument( '--chunkmbs', type=int, default=512 ) + + args = parser.parse_args() + + # Create bismark index if necessary. + index_dir = "" + if args.own_file: + """ + Create a temporary index with the offered files from the user. + Utilizing the script: bismark_genome_preparation + bismark_genome_preparation --bowtie2 hg19/ + """ + tmp_index_dir = tempfile.mkdtemp() + index_path = os.path.join( tmp_index_dir, '.'.join( os.path.split( args.own_file )[1].split( '.' )[:-1] ) ) + try: + """ + Create a hard link pointing to args.own_file named 'index_path'.fa. 
+ """ + os.symlink( args.own_file, index_path + '.fa' ) + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error in linking the reference database.\n' + str( e ) ) + # bismark_genome_preparation needs the complete path to the folder in which the database is stored + if args.bowtie2: + cmd_index = 'bismark_genome_preparation --bowtie2 %s ' % ( tmp_index_dir ) + else: + cmd_index = 'bismark_genome_preparation %s ' % ( tmp_index_dir ) + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd_index = '%s/%s' % (args.bismark_path, cmd_index) + try: + tmp = tempfile.NamedTemporaryFile( dir=tmp_index_dir ).name + tmp_stderr = open( tmp, 'wb' ) + proc = subprocess.Popen( args=cmd_index, shell=True, cwd=tmp_index_dir, stdout=open(os.devnull, 'wb'), stderr=tmp_stderr.fileno() ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + except Exception, e: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + stop_err( 'Error indexing reference sequence\n' + str( e ) ) + index_dir = tmp_index_dir + else: + index_dir = args.index_path + + # Build bismark command + tmp_bismark_dir = tempfile.mkdtemp() + output_dir = os.path.join( tmp_bismark_dir, 'results') + cmd = 'bismark %(args)s --temp_dir %(tmp_bismark_dir)s -o %(output_dir)s --quiet %(genome_folder)s %(reads)s' + if args.bismark_path: + # add the path to the bismark perl scripts, that is needed for galaxy + cmd = '%s/%s' % (args.bismark_path, cmd) + + arguments = { + 'genome_folder': index_dir, + 'args': '', + 'tmp_bismark_dir': tmp_bismark_dir, + 'output_dir': output_dir, + } + + additional_opts = '' + # Set up the reads + if args.mate_paired: + # paired-end reads library + reads = '-1 %s ' % ( args.mate1 ) + reads += ' -2 %s ' % ( args.mate2 ) + additional_opts += ' -I %s -X %s ' % (args.min_insert, args.max_insert) + else: + # single paired reads library + reads = ' %s ' % ( args.single_paired ) + + + if not args.bowtie2: + # use bowtie specific options + additional_opts += ' --best ' + if args.seed_mismatches: + # --seedmms + additional_opts += ' -n %s ' % args.seed_mismatches + if args.seed_len: + # --seedlen + additional_opts += ' -l %s ' % args.seed_len + + # alignment options + if args.bowtie2: + additional_opts += ' -p %s --bowtie2 ' % args.num_threads + if args.seed_mismatches: + additional_opts += ' -N %s ' % args.seed_mismatches + if args.seed_len: + additional_opts += ' -L %s ' % args.seed_len + if args.seed_extention_attempts: + additional_opts += ' -D %s ' % args.seed_extention_attempts + if args.max_reseed: + additional_opts += ' -R %s ' % args.max_reseed + if args.no_discordant: + additional_opts += ' --no-discordant ' + if args.no_mixed: + additional_opts += ' --no-mixed ' + """ + if args.maqerr: + additional_opts += ' --maqerr %s ' % args.maqerr + """ + if args.skip_reads: + additional_opts += ' --skip %s ' % args.skip_reads + if args.qupto: + additional_opts += ' --qupto %s ' % args.qupto + if args.phred64: + additional_opts += ' --phred64-quals ' + if args.suppress_header: + additional_opts += ' --sam-no-hd ' + if args.output_unmapped_reads or ( args.output_unmapped_reads_l and 
args.output_unmapped_reads_r): + additional_opts += ' --un ' + if args.output_suppressed_reads or ( args.output_suppressed_reads_l and args.output_suppressed_reads_r): + additional_opts += ' --ambiguous ' + + arguments.update( {'args': additional_opts, 'reads': reads} ) + + # Final command: + cmd = cmd % arguments + + # Run + try: + tmp_out = tempfile.NamedTemporaryFile().name + tmp_stdout = open( tmp_out, 'wb' ) + tmp_err = tempfile.NamedTemporaryFile().name + tmp_stderr = open( tmp_err, 'wb' ) + proc = subprocess.Popen( args=cmd, shell=True, cwd=".", stdout=tmp_stdout, stderr=tmp_stderr ) + returncode = proc.wait() + tmp_stderr.close() + # get stderr, allowing for case where it's very large + tmp_stderr = open( tmp_err, 'rb' ) + stderr = '' + buffsize = 1048576 + try: + while True: + stderr += tmp_stderr.read( buffsize ) + if not stderr or len( stderr ) % buffsize != 0: + break + except OverflowError: + pass + tmp_stdout.close() + tmp_stderr.close() + if returncode != 0: + raise Exception, stderr + + # TODO: look for errors in program output. + except Exception, e: + stop_err( 'Error in bismark:\n' + str( e ) ) + + + # collect and copy output files + """ + if args.output_report_file: + output_report_file = open(args.output_report_file, 'w+') + for line in fileinput.input(glob( os.path.join( output_dir, '*.txt') )): + output_report_file.write(line) + output_report_file.close() + """ + + if args.output_suppressed_reads: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads.txt'))[0], args.output_suppressed_reads ) + if args.output_suppressed_reads_l: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_1.txt'))[0], args.output_suppressed_reads_l ) + if args.output_suppressed_reads_r: + shutil.move( glob(os.path.join( output_dir, '*ambiguous_reads_2.txt'))[0], args.output_suppressed_reads_r ) + + if args.output_unmapped_reads: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads.txt'))[0], args.output_unmapped_reads ) + if args.output_unmapped_reads_l: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_1.txt'))[0], args.output_unmapped_reads_l ) + if args.output_unmapped_reads_r: + shutil.move( glob(os.path.join( output_dir, '*unmapped_reads_2.txt'))[0], args.output_unmapped_reads_r ) + + shutil.move( glob( os.path.join( output_dir, '*.sam'))[0] , args.output) + + # Clean up temp dirs + if args.own_file: + if os.path.exists( tmp_index_dir ): + shutil.rmtree( tmp_index_dir ) + if os.path.exists( tmp_bismark_dir ): + shutil.rmtree( tmp_bismark_dir ) + +if __name__=="__main__": __main__() diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample b/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample new file mode 100644 index 000000000000..9e62763c741d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/tool_data_table_conf.xml.sample @@ -0,0 +1,13 @@ + + + + + value, dbkey, name, path + +
+ + + value, dbkey, name, path +
+
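Only the `value, dbkey, name, path` column lists of the two tables in this sample survive above; a hedged sketch of consuming such a config with the standard library (tag and attribute names follow Galaxy's documented tool-data-table layout; the fixture's actual table names are not reproduced here):

```python
# Hypothetical reader for a tool_data_table_conf.xml.sample whose <table>
# entries each declare <columns>value, dbkey, name, path</columns>; the
# table names in this fixture are assumptions left out of the sketch.
import xml.etree.ElementTree as ET


def table_columns(sample_path: str) -> dict:
    columns_by_table = {}
    root = ET.parse(sample_path).getroot()
    for table in root.findall("table"):
        raw = table.findtext("columns", default="")
        columns_by_table[table.get("name", "?")] = [c.strip() for c in raw.split(",")]
    return columns_by_table


# For this sample, every table is expected to map to
# ["value", "dbkey", "name", "path"].
```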
diff --git a/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml new file mode 100644 index 000000000000..776d2aa33442 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/bismark/1/tool_dependencies.xml @@ -0,0 +1,61 @@ + + + + $REPOSITORY_INSTALL_DIR + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.0.0-beta7/bowtie2-2.0.0-beta7-source.zip + make + + bowtie2 + $INSTALL_DIR/bin + + + bowtie2-align + $INSTALL_DIR/bin + + + bowtie2-build + $INSTALL_DIR/bin + + chmod +x $INSTALL_DIR/bin/bowtie2 + + $INSTALL_DIR/bin + + + + +Compiling bowtie2 requires zlib and libpthread to be present on your system. + + + + + + http://downloads.sourceforge.net/project/bowtie-bio/bowtie/0.12.8/bowtie-0.12.8-src.zip + make + + bowtie + $INSTALL_DIR/bin + + + bowtie-inspect + $INSTALL_DIR/bin + + + bowtie-build + $INSTALL_DIR/bin + + + $INSTALL_DIR/bin + + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml new file mode 100644 index 000000000000..e3f8ff296c69 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/column_maker.xml @@ -0,0 +1,83 @@ + + an expression on every row + + column_maker.py $input $out_file1 "$cond" $round ${input.metadata.columns} "${input.metadata.column_types}" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + .. class:: infomark + +**TIP:** If your data is not TAB delimited, use *Text Manipulation->Convert* + +----- + +**What it does** + +This tool computes an expression for every row of a query and appends the result as a new column (field). + +- Columns are referenced with **c** and a **number**. For example, **c1** refers to the first column of a tab-delimited file + +- **c3-c2** will add a length column to the query if **c2** and **c3** are start and end position + +----- + +**Example** + +If this is your input:: + + chr1 151077881 151077918 2 200 - + chr1 151081985 151082078 3 500 + + +computing "c4*c5" will produce:: + + chr1 151077881 151077918 2 200 - 400.0 + chr1 151081985 151082078 3 500 + 1500.0 + +if, at the same time, "Round result?" is set to **YES** results will look like this:: + + chr1 151077881 151077918 2 200 - 400 + chr1 151081985 151082078 3 500 + 1500 + +You can also use this tool to evaluate expressions. For example, computing "c3>=c2" for Input will result in the following:: + + chr1 151077881 151077918 2 200 - True + chr1 151081985 151082078 3 500 + True + +or computing "type(c2)==type('') for Input will return:: + + chr1 151077881 151077918 2 200 - False + chr1 151081985 151082078 3 500 + False + + + diff --git a/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt new file mode 100644 index 000000000000..9956da05797d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/column_maker_with_readme/1/readme.txt @@ -0,0 +1 @@ +This is a readme that will be rendered as rst. 
diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml new file mode 100644 index 000000000000..81dce17225ff --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/0/data_manager_conf.xml @@ -0,0 +1,16 @@ + + + + + + + + testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml new file mode 100644 index 000000000000..62ddce2c99f7 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/1/data_manager_conf.xml @@ -0,0 +1,17 @@ + + + + + + + + + testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml new file mode 100644 index 000000000000..36891279dc41 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager.xml @@ -0,0 +1,18 @@ + + + {"data_tables": {"testbeta": [{"value": "newvalue", "path": "newvalue.txt"}]}} + + + mkdir $out_file.files_path ; + echo "A new value" > '$out_file.files_path/newvalue.txt'; + cp '$static_test_data' '$out_file'; + exit $exit_code + + + + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml new file mode 100644 index 000000000000..200ae00d8be2 --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/data_manager_gaps/2/data_manager_conf.xml @@ -0,0 +1,28 @@ + + + + + + + + testbeta/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta/${value}/${path} + abspath + + + + + + + + + testbeta2/${value} + + ${GALAXY_DATA_MANAGER_DATA_PATH}/testbeta2/${value}/${path} + abspath + + + + + diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml new file mode 100644 index 000000000000..1f3793c6007d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_antigenic.xml @@ -0,0 +1,58 @@ + + Predicts potentially 
antigenic regions of a protein sequence, using the method of Kolaskar and Tongaonkar. + emboss + antigenic -sequence $input1 -outfile $out_file1 -minlen $minlen -rformat2 $out_format1 -auto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + You can view the original documentation here_. + + .. _here: http://emboss.sourceforge.net/apps/release/5.0/emboss/apps/antigenic.html + +------ + +**Citation** + +For the underlying tool, please cite `Rice P, Longden I, Bleasby A. EMBOSS: the European Molecular Biology Open Software Suite. Trends Genet. 2000 Jun;16(6):276-7. <http://www.ncbi.nlm.nih.gov/pubmed/10827456>`_ + +If you use this tool in Galaxy, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. <http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_ + + + \ No newline at end of file diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py new file mode 100644 index 000000000000..3591cd8feaeb --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/emboss_format_corrector.py @@ -0,0 +1,53 @@ +#EMBOSS format corrector + +import operator +#from galaxy import datatypes + +#Properly set file formats after job run +def exec_after_process( app, inp_data, out_data, param_dict,tool, stdout, stderr): +#Properly set file formats before job run +#def exec_before_job(trans, inp_data, out_data, param_dict,tool): + #why isn't items an ordered list? + items = out_data.items() + #lets sort it ourselves.... + items = sorted(items, key=operator.itemgetter(0)) + #items is now sorted... 
+ + #normal filetype correction + data_count=1 + for name, data in items: + outputType = param_dict.get( 'out_format'+str(data_count), None ) + #print "data_count",data_count, "name", name, "outputType", outputType + if outputType !=None: + if outputType == 'ncbi': + outputType = "fasta" + elif outputType == 'excel': + outputType = "tabular" + elif outputType == 'text': + outputType = "txt" + data = app.datatypes_registry.change_datatype(data, outputType) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #html filetype correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'html_out'+str(data_count), None ) + ext = "html" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #png file correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'plot'+str(data_count), None ) + ext = "png" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml new file mode 100644 index 000000000000..3c9b8f43ec1e --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/0/tool_dependencies.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml new file mode 100644 index 000000000000..1f3793c6007d --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_antigenic.xml @@ -0,0 +1,58 @@ + + Predicts potentially antigenic regions of a protein sequence, using the method of Kolaskar and Tongaonkar. + emboss + antigenic -sequence $input1 -outfile $out_file1 -minlen $minlen -rformat2 $out_format1 -auto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + You can view the original documentation here_. + + .. _here: http://emboss.sourceforge.net/apps/release/5.0/emboss/apps/antigenic.html + +------ + +**Citation** + +For the underlying tool, please cite `Rice P, Longden I, Bleasby A. EMBOSS: the European Molecular Biology Open Software Suite. Trends Genet. 2000 Jun;16(6):276-7. <http://www.ncbi.nlm.nih.gov/pubmed/10827456>`_ + +If you use this tool in Galaxy, please cite `Blankenberg D, Taylor J, Schenck I, He J, Zhang Y, Ghent M, Veeraraghavan N, Albert I, Miller W, Makova KD, Hardison RC, Nekrutenko A. A framework for collaborative analysis of ENCODE data: making large-scale analyses biologist-friendly. Genome Res. 2007 Jun;17(6):960-4. 
<http://www.ncbi.nlm.nih.gov/pubmed/17568012>`_ + + + \ No newline at end of file diff --git a/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py new file mode 100644 index 000000000000..3591cd8feaeb --- /dev/null +++ b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/emboss_format_corrector.py @@ -0,0 +1,53 @@ +#EMBOSS format corrector + +import operator +#from galaxy import datatypes + +#Properly set file formats after job run +def exec_after_process( app, inp_data, out_data, param_dict,tool, stdout, stderr): +#Properly set file formats before job run +#def exec_before_job(trans, inp_data, out_data, param_dict,tool): + #why isn't items an ordered list? + items = out_data.items() + #lets sort it ourselves.... + items = sorted(items, key=operator.itemgetter(0)) + #items is now sorted... + + #normal filetype correction + data_count=1 + for name, data in items: + outputType = param_dict.get( 'out_format'+str(data_count), None ) + #print "data_count",data_count, "name", name, "outputType", outputType + if outputType !=None: + if outputType == 'ncbi': + outputType = "fasta" + elif outputType == 'excel': + outputType = "tabular" + elif outputType == 'text': + outputType = "txt" + data = app.datatypes_registry.change_datatype(data, outputType) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #html filetype correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'html_out'+str(data_count), None ) + ext = "html" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 + + #png file correction + data_count=1 + for name, data in items: + wants_plot = param_dict.get( 'plot'+str(data_count), None ) + ext = "png" + if wants_plot == "yes": + data = app.datatypes_registry.change_datatype(data, ext) + app.model.context.add( data ) + app.model.context.flush() + data_count+=1 diff --git a/lib/tool_shed/test/test_data/emboss/0470_files/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/emboss_5_0470/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/0470_files/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/emboss_5_0470/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/libx11_proto/0/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/libx11_proto/first_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/libx11_proto/0/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/libx11_proto/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/libx11_proto/second_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/libx11_proto/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/0/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/emboss_5_0_0/first_tool_dependency/tool_dependencies.xml rename to 
lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/0/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml b/lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/1/tool_dependencies.xml similarity index 100% rename from lib/tool_shed/test/test_data/emboss/emboss_5_0_0/second_tool_dependency/tool_dependencies.xml rename to lib/tool_shed/test/test_data/repos/package_emboss_5_0_0_0470/1/tool_dependencies.xml diff --git a/lib/tool_shed/test/test_data/safetar_with_symlink.tar b/lib/tool_shed/test/test_data/safetar_with_symlink.tar new file mode 100644 index 000000000000..a7810960f691 Binary files /dev/null and b/lib/tool_shed/test/test_data/safetar_with_symlink.tar differ diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py index 51940220d918..e3e3148343c1 100644 --- a/lib/tool_shed/util/commit_util.py +++ b/lib/tool_shed/util/commit_util.py @@ -137,28 +137,6 @@ def get_change_lines_in_file_for_tag(tag, change_dict): return cleaned_lines -def get_upload_point(repository: "Repository", **kwd) -> Optional[str]: - upload_point = kwd.get("upload_point", None) - if upload_point is not None: - # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed - if os.path.exists(upload_point): - if os.path.isfile(upload_point): - # Get the parent directory - upload_point, not_needed = os.path.split(upload_point) - # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/ - upload_point = upload_point.split("repo_%d" % repository.id)[1] - if upload_point: - upload_point = upload_point.lstrip("/") - upload_point = upload_point.rstrip("/") - # Now the value of uplaod_point will be something like: / - if upload_point == "/": - upload_point = None - else: - # Must have been an error selecting something that didn't exist, so default to repository root - upload_point = None - return upload_point - - def handle_bz2(repository: "Repository", uploaded_file_name): with tempfile.NamedTemporaryFile( mode="wb", @@ -183,7 +161,7 @@ def handle_bz2(repository: "Repository", uploaded_file_name): def handle_directory_changes( - app, + app: "ToolShedApp", host: str, username: str, repository: "Repository", @@ -194,8 +172,10 @@ def handle_directory_changes( commit_message: str, undesirable_dirs_removed: int, undesirable_files_removed: int, + repo_path: Optional[str] = None, + dry_run: bool = False, ) -> ChangeResponseT: - repo_path = repository.repo_path(app) + repo_path = repo_path or repository.repo_path(app) content_alert_str = "" files_to_remove = [] filenames_in_archive = [os.path.normpath(os.path.join(full_path, name)) for name in filenames_in_archive] @@ -250,9 +230,15 @@ def handle_directory_changes( ) hg_util.commit_changeset(repo_path, full_path_to_changeset=full_path, username=username, message=commit_message) admin_only = len(repository.downloadable_revisions) != 1 - suc.handle_email_alerts( - app, host, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only - ) + if not dry_run: + suc.handle_email_alerts( + app, + host, + repository, + content_alert_str=content_alert_str, + new_repo_alert=new_repo_alert, + admin_only=admin_only, + ) return True, "", files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed diff --git a/lib/tool_shed/util/common_util.py b/lib/tool_shed/util/common_util.py index 00ce0d153ff0..b05e34332eb6 100644 --- 
a/lib/tool_shed/util/common_util.py +++ b/lib/tool_shed/util/common_util.py @@ -1,8 +1,14 @@ +from typing import ( + Optional, + TYPE_CHECKING, +) + +from routes import url_for + from galaxy.util.tool_shed.common_util import ( accumulate_tool_dependencies, check_tool_tag_set, generate_clone_url_for_installed_repository, - generate_clone_url_for_repository_in_tool_shed, generate_clone_url_from_repo_info_tup, get_protocol_from_tool_shed_url, get_repository_dependencies, @@ -18,6 +24,26 @@ remove_protocol_from_tool_shed_url, ) +if TYPE_CHECKING: + from tool_shed.context import ProvidesRepositoriesContext + from tool_shed.webapp.model import ( + Repository, + User, + ) + + +def generate_clone_url_for(trans: "ProvidesRepositoriesContext", repository: "Repository") -> str: + return generate_clone_url_for_repository_in_tool_shed(trans.user, repository, trans.repositories_hostname) + + +def generate_clone_url_for_repository_in_tool_shed( + user: Optional["User"], repository: "Repository", hostname: Optional[str] = None +) -> str: + """Generate the URL for cloning a repository that is in the tool shed.""" + base_url = hostname or url_for("/", qualified=True).rstrip("/") + return f"{base_url}/repos/{repository.user.username}/{repository.name}" + + __all__ = ( "accumulate_tool_dependencies", "check_tool_tag_set", diff --git a/lib/tool_shed/util/hg_util.py b/lib/tool_shed/util/hg_util.py index f2c81c34d285..965013846c4f 100644 --- a/lib/tool_shed/util/hg_util.py +++ b/lib/tool_shed/util/hg_util.py @@ -85,7 +85,11 @@ def create_hgrc_file(app, repository): fp.write("default = .\n") fp.write("default-push = .\n") fp.write("[web]\n") - fp.write(f"allow_push = {repository.user.username}\n") + if app.config.config_hg_for_dev: + allow_push = "*" + else: + allow_push = repository.user.username + fp.write(f"allow_push = {allow_push}\n") fp.write(f"name = {repository.name}\n") fp.write("push_ssl = false\n") fp.write("[extensions]\n") diff --git a/lib/tool_shed/util/metadata_util.py b/lib/tool_shed/util/metadata_util.py index 47d82928f0b6..c41b62b7a7c0 100644 --- a/lib/tool_shed/util/metadata_util.py +++ b/lib/tool_shed/util/metadata_util.py @@ -45,6 +45,7 @@ def get_all_dependencies(app, metadata_entry, processed_dependency_links=None): dependency_dict["repository"] = repository.to_dict(value_mapper=value_mapper) if dependency_metadata.includes_tools: dependency_dict["tools"] = dependency_metadata.metadata["tools"] + dependency_dict["invalid_tools"] = dependency_metadata.metadata.get("invalid_tools", []) dependency_dict["repository_dependencies"] = [] if dependency_dict["includes_tool_dependencies"]: dependency_dict["tool_dependencies"] = repository.get_tool_dependencies( diff --git a/lib/tool_shed/util/repository_content_util.py b/lib/tool_shed/util/repository_content_util.py index 705f8fc3e9a0..10b59af6f5be 100644 --- a/lib/tool_shed/util/repository_content_util.py +++ b/lib/tool_shed/util/repository_content_util.py @@ -1,38 +1,83 @@ import os import shutil +import tarfile +import tempfile +from typing import ( + Optional, + TYPE_CHECKING, +) import tool_shed.repository_types.util as rt_util -from tool_shed.util import ( - commit_util, - xml_util, +from galaxy.tool_shed.util.hg_util import clone_repository +from galaxy.util import checkers +from tool_shed.dependencies.attribute_handlers import ( + RepositoryDependencyAttributeHandler, + ToolDependencyAttributeHandler, +) +from tool_shed.util import xml_util +from tool_shed.util.commit_util import ( + ChangeResponseT, + check_archive, + 
handle_directory_changes, ) +if TYPE_CHECKING: + from tool_shed.context import ProvidesRepositoriesContext + from tool_shed.webapp.model import Repository + + +def tar_open(uploaded_file): + isgzip = False + isbz2 = False + isgzip = checkers.is_gzip(uploaded_file) + if not isgzip: + isbz2 = checkers.is_bz2(uploaded_file) + if isgzip or isbz2: + # Open for reading with transparent compression. + tar = tarfile.open(uploaded_file, "r:*") + else: + tar = tarfile.open(uploaded_file) + return tar + def upload_tar( - trans, - rdah, - tdah, - repository, - tar, + trans: "ProvidesRepositoriesContext", + username: str, + repository: "Repository", uploaded_file, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, -): + commit_message: str, + dry_run: bool = False, + remove_repo_files_not_in_tar: bool = True, + new_repo_alert: bool = False, + tar=None, + rdah: Optional[RepositoryDependencyAttributeHandler] = None, + tdah: Optional[ToolDependencyAttributeHandler] = None, +) -> ChangeResponseT: + host = trans.repositories_hostname + app = trans.app + if tar is None: + tar = tar_open(uploaded_file) + rdah = rdah or RepositoryDependencyAttributeHandler(trans, unpopulate=False) + tdah = tdah or ToolDependencyAttributeHandler(trans, unpopulate=False) # Upload a tar archive of files. undesirable_dirs_removed = 0 undesirable_files_removed = 0 - check_results = commit_util.check_archive(repository, tar) + check_results = check_archive(repository, tar) if check_results.invalid: tar.close() - uploaded_file.close() - message = "{} Invalid paths were: {}".format(" ".join(check_results.errors), ", ".join(check_results.invalid)) + try: + uploaded_file.close() + except AttributeError: + pass + message = "{} Invalid paths were: {}".format( + " ".join(check_results.errors), ", ".join([i.name for i in check_results.invalid]) + ) return False, message, [], "", undesirable_dirs_removed, undesirable_files_removed else: - repo_dir = repository.repo_path(trans.app) - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point)) + repo_dir = repository.repo_path(app) + if dry_run: + full_path = tempfile.mkdtemp() + clone_repository(repo_dir, full_path) else: full_path = os.path.abspath(repo_dir) undesirable_files_removed = len(check_results.undesirable_files) @@ -41,7 +86,10 @@ def upload_tar( # Extract the uploaded tar to the load_point within the repository hierarchy. tar.extractall(path=full_path, members=check_results.valid) tar.close() - uploaded_file.close() + try: + uploaded_file.close() + except AttributeError: + pass for filename in filenames_in_archive: uploaded_file_name = os.path.join(full_path, filename) if os.path.split(uploaded_file_name)[-1] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: @@ -49,7 +97,7 @@ def upload_tar( # are missing and if so, set them appropriately. altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) if error_message: - return False, error_message, [], "", [], [] + return False, error_message, [], "", 0, 0 elif altered: tmp_filename = xml_util.create_and_write_tmp_file(root_elem) shutil.move(tmp_filename, uploaded_file_name) @@ -58,14 +106,14 @@ def upload_tar( # attributes are missing and if so, set them appropriately. 
altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) if error_message: - return False, error_message, [], "", [], [] + return False, error_message, [], "", 0, 0 if altered: tmp_filename = xml_util.create_and_write_tmp_file(root_elem) shutil.move(tmp_filename, uploaded_file_name) - return commit_util.handle_directory_changes( - trans.app, - trans.request.host, - trans.user.username, + return handle_directory_changes( + app, + host, + username, repository, full_path, filenames_in_archive, @@ -74,4 +122,5 @@ def upload_tar( commit_message, undesirable_dirs_removed, undesirable_files_removed, + repo_path=full_path, ) diff --git a/lib/tool_shed/util/repository_util.py b/lib/tool_shed/util/repository_util.py index afd773f05242..2f242a5d66f0 100644 --- a/lib/tool_shed/util/repository_util.py +++ b/lib/tool_shed/util/repository_util.py @@ -48,7 +48,7 @@ repository_was_previously_installed, set_repository_attributes, ) -from galaxy.util.tool_shed import common_util +from tool_shed.util.common_util import generate_clone_url_for from tool_shed.util.hg_util import ( changeset2rev, create_hgrc_file, @@ -58,10 +58,14 @@ from tool_shed.util.metadata_util import ( get_next_downloadable_changeset_revision, get_repository_metadata_by_changeset_revision, + repository_metadata_by_changeset_revision, ) if TYPE_CHECKING: - from tool_shed.context import ProvidesUserContext + from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, + ) from tool_shed.structured_app import ToolShedApp from tool_shed.webapp.model import Repository @@ -82,6 +86,7 @@ def create_repo_info_dict( repository_metadata=None, tool_dependencies=None, repository_dependencies=None, + trans=None, ): """ Return a dictionary that includes all of the information needed to install a repository into a local @@ -109,15 +114,16 @@ def create_repo_info_dict( repository = get_repository_by_name_and_owner(app, repository_name, repository_owner) if app.name == "tool_shed": # We're in the tool shed. - repository_metadata = get_repository_metadata_by_changeset_revision( - app, app.security.encode_id(repository.id), changeset_revision - ) + repository_metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changeset_revision) if repository_metadata: metadata = repository_metadata.metadata if metadata: - tool_shed_url = web.url_for("/", qualified=True).rstrip("/") + if trans is not None: + tool_shed_url = trans.repositories_hostname + else: + tool_shed_url = web.url_for("/", qualified=True).rstrip("/") rb = tool_shed.dependencies.repository.relation_builder.RelationBuilder( - app, repository, repository_metadata, tool_shed_url + app, repository, repository_metadata, tool_shed_url, trans=trans ) # Get a dictionary of all repositories upon which the contents of the received repository depends. 
repository_dependencies = rb.get_repository_dependencies_for_changeset_revision() @@ -258,9 +264,10 @@ def get_repository_in_tool_shed(app, id, eagerload_columns=None): return q.get(app.security.decode_id(id)) -def get_repo_info_dict(app: "ToolShedApp", user, repository_id, changeset_revision): +def get_repo_info_dict(trans: "ProvidesRepositoriesContext", repository_id, changeset_revision): + app = trans.app repository = get_repository_in_tool_shed(app, repository_id) - repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed(user, repository) + repository_clone_url = generate_clone_url_for(trans, repository) repository_metadata = get_repository_metadata_by_changeset_revision(app, repository_id, changeset_revision) if not repository_metadata: # The received changeset_revision is no longer installable, so get the next changeset_revision @@ -313,6 +320,7 @@ def get_repo_info_dict(app: "ToolShedApp", user, repository_id, changeset_revisi repository_metadata=repository_metadata, tool_dependencies=None, repository_dependencies=None, + trans=trans, ) return ( repo_info_dict, @@ -368,8 +376,7 @@ def get_repositories_by_category( repository_dict = repository.to_dict(value_mapper=default_value_mapper) repository_dict["metadata"] = {} for changeset, changehash in repository.installable_revisions(app): - encoded_id = app.security.encode_id(repository.id) - metadata = get_repository_metadata_by_changeset_revision(app, encoded_id, changehash) + metadata = repository_metadata_by_changeset_revision(app.model, repository.id, changehash) assert metadata repository_dict["metadata"][f"{changeset}:{changehash}"] = metadata.to_dict( value_mapper=default_value_mapper diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py index 85bde805a932..548f8ede40e4 100644 --- a/lib/tool_shed/util/shed_util_common.py +++ b/lib/tool_shed/util/shed_util_common.py @@ -23,12 +23,12 @@ get_user, have_shed_tool_conf_for_install, set_image_paths, - tool_shed_is_this_tool_shed, ) from galaxy.util import ( checkers, unicodify, ) +from galaxy.web import url_for from tool_shed.util import ( basic_util, common_util, @@ -87,20 +87,6 @@ ----------------------------------------------------------------------------- """ -contact_owner_template = """ -GALAXY TOOL SHED REPOSITORY MESSAGE ------------------------- - -The user '${username}' sent you the following message regarding your tool shed -repository named '${repository_name}'. You can respond by sending a reply to -the user's email address: ${email}. 
------------------------------------------------------------------------------ -${message} ------------------------------------------------------------------------------ -This message was sent from the Galaxy Tool Shed instance hosted on the server -'${host}' -""" - def count_repositories_in_category(app: "ToolShedApp", category_id: str) -> int: sa_session = app.model.session @@ -449,6 +435,14 @@ def open_repository_files_folder(app, folder_path, repository_id, is_admin=False return folder_contents +def tool_shed_is_this_tool_shed(toolshed_base_url, trans=None): + """Determine if a tool shed is the current tool shed.""" + cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url(toolshed_base_url) + hostname = trans.repositories_hostname if trans else str(url_for("/", qualified=True)) + cleaned_tool_shed = common_util.remove_protocol_from_tool_shed_url(hostname) + return cleaned_toolshed_base_url == cleaned_tool_shed + + __all__ = ( "can_eliminate_repository_dependency", "clean_dependency_relationships", diff --git a/lib/tool_shed/webapp/api/categories.py b/lib/tool_shed/webapp/api/categories.py index c0ca91ef8704..b0eb31cf4cca 100644 --- a/lib/tool_shed/webapp/api/categories.py +++ b/lib/tool_shed/webapp/api/categories.py @@ -1,34 +1,34 @@ import logging from typing import ( Any, - Callable, Dict, + List, ) import tool_shed.util.shed_util_common as suc +import tool_shed_client.schema from galaxy import ( - exceptions, util, web, ) -from galaxy.model.base import transaction from galaxy.web import ( expose_api, expose_api_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController -from tool_shed.util import repository_util +from galaxy.webapps.galaxy.api import depends +from tool_shed.managers.categories import CategoryManager +from tool_shed.managers.repositories import repositories_by_category +from tool_shed.webapp.model import Category +from . import BaseShedAPIController log = logging.getLogger(__name__) -class CategoriesController(BaseAPIController): +class CategoriesController(BaseShedAPIController): """RESTful controller for interactions with categories in the Tool Shed.""" - def __get_value_mapper(self, trans) -> Dict[str, Callable]: - value_mapper = {"id": trans.security.encode_id} - return value_mapper + category_manager: CategoryManager = depends(CategoryManager) @expose_api @require_admin @@ -46,27 +46,13 @@ def create(self, trans, payload, **kwd): Content-Disposition: form-data; name="description" Category_Description """ category_dict = dict(message="", status="ok") - name = payload.get("name", "") - if name: - description = payload.get("description", "") - if not description: - # Default the description to the name. 
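The relocated tool_shed_is_this_tool_shed compares protocol-stripped hostnames and prefers trans.repositories_hostname, so it also works outside a WSGI request. With an illustrative URL (assuming remove_protocol_from_tool_shed_url normalizes as its name suggests):

from tool_shed.util import common_util

# Both sides of the comparison are normalized first, so the scheme
# (and a trailing slash) should not matter:
common_util.remove_protocol_from_tool_shed_url("https://toolshed.g2.bx.psu.edu/")
# -> "toolshed.g2.bx.psu.edu"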
- description = name - if suc.get_category_by_name(self.app, name): - raise exceptions.Conflict("A category with that name already exists.") - else: - # Create the category - category = self.app.model.Category(name=name, description=description) - trans.sa_session.add(category) - with transaction(trans.sa_session): - trans.sa_session.commit() - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - category_dict["message"] = f"Category '{str(category.name)}' has been created" - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - else: - raise exceptions.RequestParameterMissingException('Missing required parameter "name".') + request = tool_shed_client.schema.CreateCategoryRequest( + name=payload.get("name"), + description=payload.get("description", ""), + ) + category: Category = self.category_manager.create(trans, request) + category_dict = self.category_manager.to_dict(category) + category_dict["message"] = f"Category '{str(category.name)}' has been created" return category_dict @expose_api_anonymous_and_sessionless @@ -86,24 +72,19 @@ def get_repositories(self, trans, category_id, **kwd): sort_key = kwd.get("sort_key", "name") sort_order = kwd.get("sort_order", "asc") page = kwd.get("page", None) - category = suc.get_category(self.app, category_id) - category_dict: Dict[str, Any] - if category is None: - category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") - return category_dict - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - category_dict["repository_count"] = suc.count_repositories_in_category(self.app, category_id) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - repositories = repository_util.get_repositories_by_category( - self.app, category.id, installable=installable, sort_order=sort_order, sort_key=sort_key, page=page + category_dict = repositories_by_category( + self.app, + category_id, + page=page, + sort_key=sort_key, + sort_order=sort_order, + installable=installable, ) - category_dict["repositories"] = repositories + category_dict["url"] = web.url_for(controller="categories", action="show", id=category_dict["id"]) return category_dict @expose_api_anonymous_and_sessionless - def index(self, trans, deleted=False, **kwd): + def index(self, trans, deleted=False, **kwd) -> List[Dict[str, Any]]: """ GET /api/categories Return a list of dictionaries that contain information about each Category. 
@@ -112,24 +93,8 @@ def index(self, trans, deleted=False, **kwd): Example: GET localhost:9009/api/categories """ - category_dicts = [] deleted = util.asbool(deleted) - if deleted and not trans.user_is_admin: - raise exceptions.AdminRequiredException("Only administrators can query deleted categories.") - for category in ( - trans.sa_session.query(self.app.model.Category) - .filter(self.app.model.Category.table.c.deleted == deleted) - .order_by(self.app.model.Category.table.c.name) - ): - category_dict = category.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) - category_dict[ - "repositories" - ] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get(category.name, 0) - category_dicts.append(category_dict) - return category_dicts + return self.category_manager.index(trans, deleted) @expose_api_anonymous_and_sessionless def show(self, trans, id, **kwd): @@ -145,8 +110,5 @@ def show(self, trans, id, **kwd): if category is None: category_dict = dict(message=f"Unable to locate category record for id {str(id)}.", status="error") return category_dict - category_dict = category.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - category_dict["url"] = web.url_for( - controller="categories", action="show", id=trans.security.encode_id(category.id) - ) + category_dict = self.category_manager.to_dict(category) return category_dict diff --git a/lib/tool_shed/webapp/api/configuration.py b/lib/tool_shed/webapp/api/configuration.py index fb4495976071..920baeb670b5 100644 --- a/lib/tool_shed/webapp/api/configuration.py +++ b/lib/tool_shed/webapp/api/configuration.py @@ -5,15 +5,12 @@ import logging from galaxy.web import expose_api_anonymous_and_sessionless -from galaxy.webapps.base.controller import BaseAPIController +from . import BaseShedAPIController log = logging.getLogger(__name__) -class ConfigurationController(BaseAPIController): - def __init__(self, app): - super().__init__(app) - +class ConfigurationController(BaseShedAPIController): @expose_api_anonymous_and_sessionless def version(self, trans, **kwds): """ diff --git a/lib/tool_shed/webapp/api/groups.py b/lib/tool_shed/webapp/api/groups.py index 05f46673fc8b..72329e20b787 100644 --- a/lib/tool_shed/webapp/api/groups.py +++ b/lib/tool_shed/webapp/api/groups.py @@ -22,16 +22,17 @@ expose_api_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController from tool_shed.managers import groups +from tool_shed.structured_app import ToolShedApp +from . 
import BaseShedAPIController log = logging.getLogger(__name__) -class GroupsController(BaseAPIController): +class GroupsController(BaseShedAPIController): """RESTful controller for interactions with groups in the Tool Shed.""" - def __init__(self, app): + def __init__(self, app: ToolShedApp): super().__init__(app) self.group_manager = groups.GroupManager() diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py index c5d3806e2b02..4499534c0cdf 100644 --- a/lib/tool_shed/webapp/api/repositories.py +++ b/lib/tool_shed/webapp/api/repositories.py @@ -1,8 +1,6 @@ import json import logging import os -import tarfile -from collections import namedtuple from io import StringIO from time import strftime from typing import ( @@ -10,10 +8,6 @@ Dict, ) -from sqlalchemy import ( - and_, - false, -) from webob.compat import cgi_FieldStorage from galaxy import ( @@ -22,41 +16,52 @@ ) from galaxy.exceptions import ( ActionInputError, - ConfigDoesNotAllowException, InsufficientPermissionsException, + MessageException, ObjectNotFound, RequestParameterInvalidException, RequestParameterMissingException, ) -from galaxy.util import checkers from galaxy.web import ( expose_api, expose_api_anonymous_and_sessionless, expose_api_raw_anonymous_and_sessionless, ) -from galaxy.webapps.base.controller import ( - BaseAPIController, - HTTPBadRequest, +from galaxy.webapps.base.controller import HTTPBadRequest +from tool_shed.managers.repositories import ( + can_update_repo, + check_updates, + create_repository, + get_install_info, + get_ordered_installable_revisions, + get_repository_metadata_dict, + get_value_mapper, + index_repositories, + index_tool_ids, + reset_metadata_on_repository, + search, + to_element_dict, + UpdatesRequest, + upload_tar_and_set_metadata, ) -from tool_shed.dependencies import attribute_handlers from tool_shed.metadata import repository_metadata_manager from tool_shed.repository_types import util as rt_util from tool_shed.util import ( - commit_util, - encoding_util, - hg_util, metadata_util, - repository_content_util, repository_util, tool_util, ) from tool_shed.webapp import model -from tool_shed.webapp.search.repo_search import RepoSearch +from tool_shed_client.schema import ( + CreateRepositoryRequest, + LegacyInstallInfoTuple, +) +from . import BaseShedAPIController log = logging.getLogger(__name__) -class RepositoriesController(BaseAPIController): +class RepositoriesController(BaseShedAPIController): """RESTful controller for interactions with repositories in the Tool Shed.""" @web.legacy_expose_api @@ -119,28 +124,12 @@ def get_ordered_installable_revisions(self, trans, name=None, owner=None, **kwd) if owner is None: owner = kwd.get("owner", None) tsr_id = kwd.get("tsr_id", None) - eagerload_columns = [model.Repository.downloadable_revisions] - if None not in [name, owner]: - # Get the repository information. - repository = repository_util.get_repository_by_name_and_owner( - self.app, name, owner, eagerload_columns=eagerload_columns - ) - if repository is None: - trans.response.status = 404 - return {"status": "error", "message": f"No repository named {name} found with owner {owner}"} - elif tsr_id is not None: - repository = repository_util.get_repository_in_tool_shed( - self.app, tsr_id, eagerload_columns=eagerload_columns - ) - else: - error_message = "Error in the Tool Shed repositories API in get_ordered_installable_revisions: " - error_message += "invalid parameters received." 
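The pattern running through these controller diffs is that query, permission, and serialization logic moves into tool_shed.managers.* functions, leaving the WSGI controllers (and the new FastAPI routes added later in this PR) as thin transport wrappers. get_ordered_installable_revisions is a representative case: the whole method body collapses to one shared call, as shown in the hunk that follows:

from tool_shed.managers.repositories import get_ordered_installable_revisions

# The repository is identified either by name/owner or by its encoded id,
# exactly as the inlined code being removed did.
revisions = get_ordered_installable_revisions(app, name, owner, tsr_id)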
- log.debug(error_message) - return [] - return [revision[1] for revision in repository.installable_revisions(self.app, sort_revisions=True)] + return get_ordered_installable_revisions(self.app, name, owner, tsr_id) @web.legacy_expose_api_anonymous - def get_repository_revision_install_info(self, trans, name, owner, changeset_revision, **kwd): + def get_repository_revision_install_info( + self, trans, name, owner, changeset_revision, **kwd + ) -> LegacyInstallInfoTuple: """ GET /api/repositories/get_repository_revision_install_info @@ -213,65 +202,7 @@ def get_repository_revision_install_info(self, trans, name, owner, changeset_rev } """ - # Example URL: - # http:///api/repositories/get_repository_revision_install_info?name=&owner=&changeset_revision= - if name and owner and changeset_revision: - # Get the repository information. - repository = repository_util.get_repository_by_name_and_owner( - self.app, name, owner, eagerload_columns=[model.Repository.downloadable_revisions] - ) - if repository is None: - log.debug(f"Cannot locate repository {name} owned by {owner}") - return {}, {}, {} - encoded_repository_id = trans.security.encode_id(repository.id) - repository_dict = repository.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - repository_dict["url"] = web.url_for(controller="repositories", action="show", id=encoded_repository_id) - # Get the repository_metadata information. - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, changeset_revision - ) - if repository_metadata is None: - # The changeset_revision column in the repository_metadata table has been updated with a new - # value value, so find the changeset_revision to which we need to update. - new_changeset_revision = metadata_util.get_next_downloadable_changeset_revision( - self.app, repository, changeset_revision - ) - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - self.app, encoded_repository_id, new_changeset_revision - ) - changeset_revision = new_changeset_revision - if repository_metadata is not None: - encoded_repository_metadata_id = trans.security.encode_id(repository_metadata.id) - repository_metadata_dict = repository_metadata.to_dict( - view="collection", value_mapper=self.__get_value_mapper(trans) - ) - repository_metadata_dict["url"] = web.url_for( - controller="repository_revisions", action="show", id=encoded_repository_metadata_id - ) - if "tools" in repository_metadata.metadata: - repository_metadata_dict["valid_tools"] = repository_metadata.metadata["tools"] - # Get the repo_info_dict for installing the repository. - ( - repo_info_dict, - includes_tools, - includes_tool_dependencies, - includes_tools_for_display_in_tool_panel, - has_repository_dependencies, - has_repository_dependencies_only_if_compiling_contained_td, - ) = repository_util.get_repo_info_dict(self.app, trans.user, encoded_repository_id, changeset_revision) - return repository_dict, repository_metadata_dict, repo_info_dict - else: - log.debug( - "Unable to locate repository_metadata record for repository id %s and changeset_revision %s", - repository.id, - changeset_revision, - ) - return repository_dict, {}, {} - else: - debug_msg = "Error in the Tool Shed repositories API in get_repository_revision_install_info: " - debug_msg += f"Invalid name {name} or owner {owner} or changeset_revision {changeset_revision} received." 
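get_install_info keeps the legacy contract documented in the docstring above: a triple of repository, metadata, and install dictionaries, with empty dicts in the positions the removed inline code used when a lookup failed. Callers therefore still unpack:

repository_dict, metadata_dict, repo_info_dict = get_install_info(
    trans, name, owner, changeset_revision
)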
- log.debug(debug_msg) - return {}, {}, {} + return get_install_info(trans, name, owner, changeset_revision) @web.legacy_expose_api_anonymous def get_installable_revisions(self, trans, **kwd): @@ -296,12 +227,7 @@ def get_installable_revisions(self, trans, **kwd): return repository.installable_revisions(self.app) def __get_value_mapper(self, trans) -> Dict[str, Callable]: - value_mapper = { - "id": trans.security.encode_id, - "repository_id": trans.security.encode_id, - "user_id": trans.security.encode_id, - } - return value_mapper + return get_value_mapper(self.app) @expose_api_raw_anonymous_and_sessionless def index(self, trans, deleted=False, owner=None, name=None, **kwd): @@ -356,7 +282,7 @@ def index(self, trans, deleted=False, owner=None, name=None, **kwd): raise RequestParameterInvalidException('The "page" and "page_size" parameters have to be integers.') return_jsonp = util.asbool(kwd.get("jsonp", False)) callback = kwd.get("callback", "callback") - search_results = self._search(trans, q, page, page_size) + search_results = search(trans, q, page, page_size) if return_jsonp: response = str(f"{callback}({json.dumps(search_results)});") else: @@ -365,134 +291,18 @@ def index(self, trans, deleted=False, owner=None, name=None, **kwd): tool_ids = kwd.get("tool_ids", None) if tool_ids is not None: tool_ids = util.listify(tool_ids) - repository_found = [] - all_metadata = dict() - for tool_id in tool_ids: - # A valid GUID looks like toolshed.g2.bx.psu.edu/repos/bgruening/deeptools/deeptools_computeMatrix/1.1.0 - shed, _, owner, name, tool, version = tool_id.split("/") - clause_list = [ - and_( - self.app.model.Repository.table.c.deprecated == false(), - self.app.model.Repository.table.c.deleted == false(), - self.app.model.Repository.table.c.name == name, - self.app.model.User.table.c.username == owner, - self.app.model.Repository.table.c.user_id == self.app.model.User.table.c.id, - ) + response = index_tool_ids(self.app, tool_ids) + return json.dumps(response) + else: + repositories = index_repositories(self.app, name, owner, deleted) + repository_dicts = [] + for repository in repositories: + repository_dict = repository.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) + repository_dict["category_ids"] = [ + trans.security.encode_id(x.category.id) for x in repository.categories ] - repository = trans.sa_session.query(self.app.model.Repository).filter(*clause_list).first() - if not repository: - log.warning(f"Repository {owner}/{name} does not exist, skipping") - continue - for changeset, changehash in repository.installable_revisions(self.app): - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - tools = metadata.metadata.get("tools") - if not tools: - log.warning(f"Repository {owner}/{name}/{changehash} does not contain valid tools, skipping") - continue - for tool in tools: - if tool["guid"] in tool_ids: - repository_found.append("%d:%s" % (int(changeset), changehash)) - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - if metadata is None: - continue - metadata_dict = metadata.to_dict( - value_mapper={"id": self.app.security.encode_id, "repository_id": self.app.security.encode_id} - ) - metadata_dict["repository"] = repository.to_dict(value_mapper={"id": self.app.security.encode_id}) - if metadata.has_repository_dependencies: - metadata_dict["repository_dependencies"] = metadata_util.get_all_dependencies( - self.app, 
metadata, processed_dependency_links=[] - ) - else: - metadata_dict["repository_dependencies"] = [] - if metadata.includes_tool_dependencies: - metadata_dict["tool_dependencies"] = repository.get_tool_dependencies(self.app, changehash) - else: - metadata_dict["tool_dependencies"] = {} - if metadata.includes_tools: - metadata_dict["tools"] = metadata.metadata["tools"] - all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict - if repository_found: - all_metadata["current_changeset"] = repository_found[0] - # all_metadata[ 'found_changesets' ] = repository_found - return json.dumps(all_metadata) - return "{}" - - clause_list = [ - and_( - self.app.model.Repository.table.c.deprecated == false(), - self.app.model.Repository.table.c.deleted == deleted, - ) - ] - if owner is not None: - clause_list.append( - and_( - self.app.model.User.table.c.username == owner, - self.app.model.Repository.table.c.user_id == self.app.model.User.table.c.id, - ) - ) - if name is not None: - clause_list.append(self.app.model.Repository.table.c.name == name) - for repository in ( - trans.sa_session.query(self.app.model.Repository) - .filter(*clause_list) - .order_by(self.app.model.Repository.table.c.name) - ): - repository_dict = repository.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) - repository_dict["category_ids"] = [trans.security.encode_id(x.category.id) for x in repository.categories] - repository_dicts.append(repository_dict) - return json.dumps(repository_dicts) - - def _search(self, trans, q, page=1, page_size=10): - """ - Perform the search over TS repositories. - Note that search works over the Whoosh index which you have - to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. - Also TS config option toolshed_search_on has to be True and - whoosh_index_dir has to be specified. - """ - conf = self.app.config - if not conf.toolshed_search_on: - raise ConfigDoesNotAllowException("Searching the TS through the API is turned off for this instance.") - if not conf.whoosh_index_dir: - raise ConfigDoesNotAllowException( - "There is no directory for the search index specified. Please contact the administrator." 
- ) - search_term = q.strip() - if len(search_term) < 1: - raise RequestParameterInvalidException("The search term has to be at least one character long.") - - repo_search = RepoSearch() - - Boosts = namedtuple( - "Boosts", - [ - "repo_name_boost", - "repo_description_boost", - "repo_long_description_boost", - "repo_homepage_url_boost", - "repo_remote_repository_url_boost", - "categories_boost", - "repo_owner_username_boost", - ], - ) - boosts = Boosts( - float(conf.get("repo_name_boost", 0.9)), - float(conf.get("repo_description_boost", 0.6)), - float(conf.get("repo_long_description_boost", 0.5)), - float(conf.get("repo_homepage_url_boost", 0.3)), - float(conf.get("repo_remote_repository_url_boost", 0.2)), - float(conf.get("categories_boost", 0.5)), - float(conf.get("repo_owner_username_boost", 0.3)), - ) - - results = repo_search.search(trans, search_term, page, page_size, boosts) - results["hostname"] = web.url_for("/", qualified=True) - return results + repository_dicts.append(repository_dict) + return json.dumps(repository_dicts) @web.legacy_expose_api def remove_repository_registry_entry(self, trans, payload, **kwd): @@ -568,8 +378,7 @@ def handle_repository(trans, repository, results): log.debug(f"Resetting metadata on repository {repository.name}") try: rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, + trans, resetting_all_metadata_on_repository=True, updating_installed_repository=False, repository=repository, @@ -613,8 +422,7 @@ def handle_repository(trans, repository, results): else: my_writable = True rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, + trans, resetting_all_metadata_on_repository=True, updating_installed_repository=False, persist=False, @@ -658,46 +466,8 @@ def reset_metadata_on_repository(self, trans, payload, **kwd): The following parameters must be included in the payload. :param repository_id: the encoded id of the repository on which metadata is to be reset. 
""" - - def handle_repository(trans, start_time, repository): - results = dict(start_time=start_time, repository_status=[]) - try: - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, - user=trans.user, - repository=repository, - resetting_all_metadata_on_repository=True, - updating_installed_repository=False, - persist=False, - ) - rmm.reset_all_metadata_on_repository_in_tool_shed() - rmm_invalid_file_tups = rmm.get_invalid_file_tups() - if rmm_invalid_file_tups: - message = tool_util.generate_message_for_invalid_tools( - self.app, rmm_invalid_file_tups, repository, None, as_html=False - ) - results["status"] = "warning" - else: - message = f"Successfully reset metadata on repository {repository.name} owned by {repository.user.username}" - results["status"] = "ok" - except Exception as e: - message = ( - f"Error resetting metadata on repository {repository.name} owned by {repository.user.username}: {e}" - ) - results["status"] = "error" - status = f"{repository.name} : {message}" - results["repository_status"].append(status) - return results - repository_id = payload.get("repository_id", None) - if repository_id is not None: - repository = repository_util.get_repository_in_tool_shed(self.app, repository_id) - start_time = strftime("%Y-%m-%d %H:%M:%S") - log.debug(f"{start_time}...resetting metadata on repository {repository.name}") - results = handle_repository(trans, start_time, repository) - stop_time = strftime("%Y-%m-%d %H:%M:%S") - results["stop_time"] = stop_time - return results + return reset_metadata_on_repository(trans, repository_id).dict() @expose_api_anonymous_and_sessionless def show(self, trans, id, **kwd): @@ -747,63 +517,13 @@ def updates(self, trans, **kwd): owner = kwd.get("owner", None) changeset_revision = kwd.get("changeset_revision", None) hexlify_this = util.asbool(kwd.get("hexlify", True)) - repository = repository_util.get_repository_by_name_and_owner( - trans.app, name, owner, eagerload_columns=[model.Repository.downloadable_revisions] + request = UpdatesRequest( + name=name, + owner=owner, + changeset_revision=changeset_revision, + hexlify=hexlify_this, ) - if repository and repository.downloadable_revisions: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - trans.app, trans.security.encode_id(repository.id), changeset_revision - ) - tool_shed_status_dict = {} - # Handle repository deprecation. - tool_shed_status_dict["repository_deprecated"] = str(repository.deprecated) - tip_revision = repository.downloadable_revisions[0] - # Handle latest installable revision. - if changeset_revision == tip_revision: - tool_shed_status_dict["latest_installable_revision"] = "True" - else: - next_installable_revision = metadata_util.get_next_downloadable_changeset_revision( - trans.app, repository, changeset_revision - ) - if repository_metadata is None: - if next_installable_revision and next_installable_revision != changeset_revision: - tool_shed_status_dict["latest_installable_revision"] = "True" - else: - tool_shed_status_dict["latest_installable_revision"] = "False" - else: - if next_installable_revision and next_installable_revision != changeset_revision: - tool_shed_status_dict["latest_installable_revision"] = "False" - else: - tool_shed_status_dict["latest_installable_revision"] = "True" - # Handle revision updates. 
- if changeset_revision == tip_revision: - tool_shed_status_dict["revision_update"] = "False" - else: - if repository_metadata is None: - tool_shed_status_dict["revision_update"] = "True" - else: - tool_shed_status_dict["revision_update"] = "False" - # Handle revision upgrades. - metadata_revisions = [ - revision[1] for revision in metadata_util.get_metadata_revisions(trans.app, repository) - ] - num_metadata_revisions = len(metadata_revisions) - for index, metadata_revision in enumerate(metadata_revisions): - if index == num_metadata_revisions: - tool_shed_status_dict["revision_upgrade"] = "False" - break - if metadata_revision == changeset_revision: - if num_metadata_revisions - index > 1: - tool_shed_status_dict["revision_upgrade"] = "True" - else: - tool_shed_status_dict["revision_upgrade"] = "False" - break - return ( - encoding_util.tool_shed_encode(tool_shed_status_dict) - if hexlify_this - else json.dumps(tool_shed_status_dict) - ) - return encoding_util.tool_shed_encode({}) if hexlify_this else json.dumps({}) + return check_updates(trans.app, request) @expose_api_anonymous_and_sessionless def show_tools(self, trans, id, changeset, **kwd): @@ -847,32 +567,7 @@ def metadata(self, trans, id, **kwd): """ recursive = util.asbool(kwd.get("recursive", "True")) downloadable_only = util.asbool(kwd.get("downloadable_only", "True")) - all_metadata = {} - repository = repository_util.get_repository_in_tool_shed( - self.app, id, eagerload_columns=[model.Repository.downloadable_revisions] - ) - for changeset, changehash in metadata_util.get_metadata_revisions( - self.app, repository, sort_revisions=True, downloadable=downloadable_only - ): - metadata = metadata_util.get_current_repository_metadata_for_changeset_revision( - self.app, repository, changehash - ) - if metadata is None: - continue - metadata_dict = metadata.to_dict( - value_mapper={"id": self.app.security.encode_id, "repository_id": self.app.security.encode_id} - ) - metadata_dict["repository"] = repository.to_dict(value_mapper={"id": self.app.security.encode_id}) - if metadata.has_repository_dependencies and recursive: - metadata_dict["repository_dependencies"] = metadata_util.get_all_dependencies( - self.app, metadata, processed_dependency_links=[] - ) - else: - metadata_dict["repository_dependencies"] = [] - if metadata.includes_tools: - metadata_dict["tools"] = metadata.metadata["tools"] - all_metadata[f"{int(changeset)}:{changehash}"] = metadata_dict - return all_metadata + return get_repository_metadata_dict(self.app, id, recursive, downloadable_only) @expose_api def update(self, trans, id, **kwd): @@ -970,31 +665,22 @@ def create(self, trans, **kwd): description = payload.get("description", "") remote_repository_url = payload.get("remote_repository_url", "") homepage_url = payload.get("homepage_url", "") - category_ids = util.listify(payload.get("category_ids[]", "")) repo_type = payload.get("type", rt_util.UNRESTRICTED) if repo_type not in rt_util.types: raise RequestParameterInvalidException("This repository type is not valid") - invalid_message = repository_util.validate_repository_name(self.app, name, trans.user) - if invalid_message: - raise RequestParameterInvalidException(invalid_message) - - repo, message = repository_util.create_repository( - app=self.app, + request = CreateRepositoryRequest( name=name, - type=repo_type, - description=synopsis, - long_description=description, - user_id=trans.user.id, - category_ids=category_ids, + synopsis=synopsis, + description=description, 
remote_repository_url=remote_repository_url, homepage_url=homepage_url, + category_ids=payload.get("category_ids[]", ""), + type_=repo_type, ) - - repository_dict = repo.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) - repository_dict["category_ids"] = [trans.security.encode_id(x.category.id) for x in repo.categories] - return repository_dict + repo = create_repository(trans, request) + return to_element_dict(self.app, repo, include_categories=True) @web.legacy_expose_api def create_changeset_revision(self, trans, id, payload, **kwd): @@ -1013,26 +699,14 @@ def create_changeset_revision(self, trans, id, payload, **kwd): """ # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135 - rdah = attribute_handlers.RepositoryDependencyAttributeHandler(self.app, unpopulate=False) - tdah = attribute_handlers.ToolDependencyAttributeHandler(self.app, unpopulate=False) - repository = repository_util.get_repository_in_tool_shed(self.app, id) - if not ( - trans.user_is_admin - or self.app.security_agent.user_can_administer_repository(trans.user, repository) - or self.app.security_agent.can_push(self.app, trans.user, repository) - ): + if not can_update_repo(trans, repository): trans.response.status = 400 return { "err_msg": "You do not have permission to update this repository.", } - repo_dir = repository.repo_path(self.app) - - upload_point = commit_util.get_upload_point(repository, **kwd) - tip = repository.tip() - file_data = payload.get("file") # Code stolen from gx's upload_common.py if isinstance(file_data, cgi_FieldStorage): @@ -1046,66 +720,19 @@ def create_changeset_revision(self, trans, id, payload, **kwd): commit_message = kwd.get("commit_message", "Uploaded") - uploaded_file = open(file_data["local_filename"], "rb") uploaded_file_name = file_data["local_filename"] - - isgzip = False - isbz2 = False - isgzip = checkers.is_gzip(uploaded_file_name) - if not isgzip: - isbz2 = checkers.is_bz2(uploaded_file_name) - if isgzip or isbz2: - # Open for reading with transparent compression. - tar = tarfile.open(uploaded_file_name, "r:*") - else: - tar = tarfile.open(uploaded_file_name) - - new_repo_alert = False - remove_repo_files_not_in_tar = True - - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = repository_content_util.upload_tar( - trans, - rdah, - tdah, - repository, - tar, - uploaded_file, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ) - if ok: - # Update the repository files for browsing. - hg_util.update_repository(repo_dir) - # Get the new repository tip. - if tip == repository.tip(): - trans.response.status = 400 - message = "No changes to repository." 
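can_update_repo consolidates the three-way permission test that create_changeset_revision previously inlined. A sketch of what it presumably amounts to, reconstructed from the removed checks above rather than from the manager's actual body:

def can_update_repo(trans, repository) -> bool:
    app = trans.app
    return (
        trans.user_is_admin
        or app.security_agent.user_can_administer_repository(trans.user, repository)
        or app.security_agent.can_push(app, trans.user, repository)
    )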
- ok = False - else: - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=self.app, user=trans.user, repository=repository - ) - status, error_message = rmm.set_repository_metadata_due_to_new_tip( - trans.request.host, content_alert_str=content_alert_str, **kwd - ) - if error_message: - ok = False - trans.response.status = 500 - message = error_message - else: - trans.response.status = 500 + try: + message = upload_tar_and_set_metadata( + trans, + trans.request.host, + repository, + uploaded_file_name, + commit_message, + ) + rval = {"message": message} + except MessageException as e: + trans.response.status = e.status_code + rval = {"err_msg": str(e)} if os.path.exists(uploaded_file_name): os.remove(uploaded_file_name) - if not ok: - return {"err_msg": message} - else: - return {"message": message} + return rval diff --git a/lib/tool_shed/webapp/api/repository_revisions.py b/lib/tool_shed/webapp/api/repository_revisions.py index 4c66fbeb3b40..1b413827aced 100644 --- a/lib/tool_shed/webapp/api/repository_revisions.py +++ b/lib/tool_shed/webapp/api/repository_revisions.py @@ -11,19 +11,17 @@ web, ) from galaxy.model.base import transaction -from galaxy.webapps.base.controller import ( - BaseAPIController, - HTTPBadRequest, -) +from galaxy.webapps.base.controller import HTTPBadRequest from tool_shed.util import ( metadata_util, repository_util, ) +from . import BaseShedAPIController log = logging.getLogger(__name__) -class RepositoryRevisionsController(BaseAPIController): +class RepositoryRevisionsController(BaseShedAPIController): """RESTful controller for interactions with tool shed repository revisions.""" def __get_value_mapper(self, trans) -> Dict[str, Callable]: diff --git a/lib/tool_shed/webapp/api/tools.py b/lib/tool_shed/webapp/api/tools.py index fdbdc1043a54..33099f5e2ada 100644 --- a/lib/tool_shed/webapp/api/tools.py +++ b/lib/tool_shed/webapp/api/tools.py @@ -1,25 +1,23 @@ import json import logging -from collections import namedtuple from galaxy import ( exceptions, util, - web, ) from galaxy.web import ( expose_api, expose_api_raw_anonymous_and_sessionless, require_admin, ) -from galaxy.webapps.base.controller import BaseAPIController +from tool_shed.managers.tools import search from tool_shed.util.shed_index import build_index -from tool_shed.webapp.search.tool_search import ToolSearch +from . import BaseShedAPIController log = logging.getLogger(__name__) -class ToolsController(BaseAPIController): +class ToolsController(BaseShedAPIController): """RESTful controller for interactions with tools in the Tool Shed.""" @expose_api @@ -85,46 +83,9 @@ def index(self, trans, **kwd): raise exceptions.RequestParameterInvalidException('The "page" and "page_size" have to be integers.') return_jsonp = util.asbool(kwd.get("jsonp", False)) callback = kwd.get("callback", "callback") - search_results = self._search(trans, q, page, page_size) + search_results = search(trans, q, page, page_size) if return_jsonp: response = str(f"{callback}({json.dumps(search_results)});") else: response = json.dumps(search_results) return response - - def _search(self, trans, q, page=1, page_size=10): - """ - Perform the search over TS tools index. - Note that search works over the Whoosh index which you have - to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh manually. - Also TS config option toolshed_search_on has to be True and - whoosh_index_dir has to be specified. 
- """ - conf = self.app.config - if not conf.toolshed_search_on: - raise exceptions.ConfigDoesNotAllowException( - "Searching the TS through the API is turned off for this instance." - ) - if not conf.whoosh_index_dir: - raise exceptions.ConfigDoesNotAllowException( - "There is no directory for the search index specified. Please contact the administrator." - ) - search_term = q.strip() - if len(search_term) < 1: - raise exceptions.RequestParameterInvalidException("The search term has to be at least one character long.") - - tool_search = ToolSearch() - - Boosts = namedtuple( - "Boosts", ["tool_name_boost", "tool_description_boost", "tool_help_boost", "tool_repo_owner_username_boost"] - ) - boosts = Boosts( - float(conf.get("tool_name_boost", 1.2)), - float(conf.get("tool_description_boost", 0.6)), - float(conf.get("tool_help_boost", 0.4)), - float(conf.get("tool_repo_owner_username_boost", 0.3)), - ) - - results = tool_search.search(trans, search_term, page, page_size, boosts) - results["hostname"] = web.url_for("/", qualified=True) - return results diff --git a/lib/tool_shed/webapp/api/users.py b/lib/tool_shed/webapp/api/users.py index 6371b29b92ae..54f1fba3ffe7 100644 --- a/lib/tool_shed/webapp/api/users.py +++ b/lib/tool_shed/webapp/api/users.py @@ -2,22 +2,20 @@ import tool_shed.util.shed_util_common as suc from galaxy import ( - exceptions, util, web, ) -from galaxy.model.base import transaction -from galaxy.security.validate_user_input import ( - validate_email, - validate_password, - validate_publicname, +from tool_shed.managers.users import ( + api_create_user, + index, ) -from galaxy.webapps.base.controller import BaseAPIController +from tool_shed_client.schema import CreateUserRequest +from . import BaseShedAPIController log = logging.getLogger(__name__) -class UsersController(BaseAPIController): +class UsersController(BaseShedAPIController): """RESTful controller for interactions with users in the Tool Shed.""" @web.expose_api @@ -38,31 +36,18 @@ def create(self, trans, payload, **kwd): email = payload.get("email", "") password = payload.get("password", "") username = payload.get("username", "") - message = self.__validate(trans, email=email, password=password, confirm=password, username=username) - if message: - raise exceptions.RequestParameterInvalidException(message) - # Create the user. - user = self.__create_user(trans, email, username, password) - user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) + request = CreateUserRequest( + email=email, + username=username, + password=password, + ) + user = api_create_user(trans, request) + user_dict = user.dict() user_dict["message"] = f"User '{str(user.username)}' has been created." user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) return user_dict - def __create_user(self, trans, email, username, password): - user = trans.app.model.User(email=email) - user.set_password_cleartext(password) - user.username = username - if trans.app.config.user_activation_on: - user.active = False - else: - user.active = True # Activation is off, every new user is active by default. 
- trans.sa_session.add(user) - with transaction(trans.sa_session): - trans.sa_session.commit() - trans.app.security_agent.create_private_user_role(user) - return user - def __get_value_mapper(self, trans): value_mapper = {"id": trans.security.encode_id} return value_mapper @@ -76,12 +61,8 @@ def index(self, trans, deleted=False, **kwd): # Example URL: http://localhost:9009/api/users user_dicts = [] deleted = util.asbool(deleted) - for user in ( - trans.sa_session.query(trans.app.model.User) - .filter(trans.app.model.User.table.c.deleted == deleted) - .order_by(trans.app.model.User.table.c.username) - ): - user_dict = user.to_dict(view="collection", value_mapper=self.__get_value_mapper(trans)) + for user in index(trans.app, deleted): + user_dict = user.dict() user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) user_dicts.append(user_dict) return user_dicts @@ -104,15 +85,3 @@ def show(self, trans, id, **kwd): user_dict = user.to_dict(view="element", value_mapper=self.__get_value_mapper(trans)) user_dict["url"] = web.url_for(controller="users", action="show", id=trans.security.encode_id(user.id)) return user_dict - - def __validate(self, trans, email, password, confirm, username): - if username in ["repos"]: - return f"The term '{username}' is a reserved word in the Tool Shed, so it cannot be used as a public user name." - message = "\n".join( - ( - validate_email(trans, email), - validate_password(trans, password, confirm), - validate_publicname(trans, username), - ) - ).rstrip() - return message diff --git a/lib/tool_shed/webapp/api2/__init__.py b/lib/tool_shed/webapp/api2/__init__.py new file mode 100644 index 000000000000..6961c8407b93 --- /dev/null +++ b/lib/tool_shed/webapp/api2/__init__.py @@ -0,0 +1,355 @@ +import logging +from json import JSONDecodeError +from typing import ( + AsyncGenerator, + cast, + List, + Optional, + Type, + TypeVar, +) + +from fastapi import ( + Depends, + HTTPException, + Path, + Query, + Request, + Response, + Security, +) +from fastapi.security import ( + APIKeyCookie, + APIKeyHeader, + APIKeyQuery, +) +from pydantic import BaseModel +from starlette_context import context as request_context + +from galaxy.exceptions import AdminRequiredException +from galaxy.managers.session import GalaxySessionManager +from galaxy.managers.users import UserManager +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.util import unicodify +from galaxy.web.framework.decorators import require_admin_message +from galaxy.webapps.base.webapp import create_new_session +from galaxy.webapps.galaxy.api import ( + depends as framework_depends, + FrameworkRouter, + GalaxyASGIRequest, + GalaxyASGIResponse, + T, + UrlBuilder, +) +from tool_shed.context import ( + SessionRequestContext, + SessionRequestContextImpl, +) +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp import app as tool_shed_app_mod +from tool_shed.webapp.model import ( + GalaxySession, + User, +) + +log = logging.getLogger(__name__) + + +def get_app() -> ToolShedApp: + if tool_shed_app_mod.app is None: + raise Exception("Failed to initialize the tool shed app correctly for FastAPI") + return cast(ToolShedApp, tool_shed_app_mod.app) + + +async def get_app_with_request_session() -> AsyncGenerator[ToolShedApp, None]: + app = get_app() + request_id = request_context.data["X-Request-ID"] + app.model.set_request_id(request_id) + try: + yield app + finally: + app.model.unset_request_id(request_id) + + +DependsOnApp = cast(ToolShedApp, 
Depends(get_app_with_request_session)) +AUTH_COOKIE_NAME = "galaxycommunitysession" + +api_key_query = APIKeyQuery(name="key", auto_error=False) +api_key_header = APIKeyHeader(name="x-api-key", auto_error=False) +api_key_cookie = APIKeyCookie(name=AUTH_COOKIE_NAME, auto_error=False) + + +def depends(dep_type: Type[T]) -> T: + return framework_depends(dep_type, get_app=get_app) + + +def get_api_user( + user_manager: UserManager = depends(UserManager), + key: str = Security(api_key_query), + x_api_key: str = Security(api_key_header), +) -> Optional[User]: + api_key = key or x_api_key + if not api_key: + return None + user = user_manager.by_api_key(api_key=api_key) + return user + + +def get_session_manager(app: ToolShedApp = DependsOnApp) -> GalaxySessionManager: + # TODO: find out how to adapt dependency for Galaxy/Report/TS + return GalaxySessionManager(app.model) + + +def get_session( + session_manager=cast(GalaxySessionManager, Depends(get_session_manager)), + security: IdEncodingHelper = depends(IdEncodingHelper), + galaxysession: str = Security(api_key_cookie), +) -> Optional[GalaxySession]: + if galaxysession: + session_key = security.decode_guid(galaxysession) + if session_key: + return session_manager.get_session_from_session_key(session_key) + # TODO: What should we do if there is no session? Since this is the API, maybe nothing is the right choice? + return None + + +def get_user( + galaxy_session=cast(Optional[GalaxySession], Depends(get_session)), + api_user=cast(Optional[User], Depends(get_api_user)), +) -> Optional[User]: + if galaxy_session: + return galaxy_session.user + return api_user + + +def get_trans( + request: Request, + response: Response, + app: ToolShedApp = DependsOnApp, + user=cast(Optional[User], Depends(get_user)), + galaxy_session=cast(Optional[GalaxySession], Depends(get_session)), +) -> SessionRequestContext: + url_builder = UrlBuilder(request) + galaxy_request = GalaxyASGIRequest(request) + galaxy_response = GalaxyASGIResponse(response) + return SessionRequestContextImpl( + app, + galaxy_request, + galaxy_response, + user=user, + galaxy_session=galaxy_session, + url_builder=url_builder, + ) + + +DependsOnTrans: SessionRequestContext = cast(SessionRequestContext, Depends(get_trans)) + + +def get_admin_user(trans: SessionRequestContext = DependsOnTrans): + if not trans.user_is_admin: + raise AdminRequiredException(require_admin_message(trans.app.config, trans.user)) + return trans.user + + +AdminUserRequired = Depends(get_admin_user) + + +class Router(FrameworkRouter): + admin_user_dependency = AdminUserRequired + + +B = TypeVar("B", bound=BaseModel) + + +# async def depend_on_either_json_or_form_data(model: Type[T]): +# return Depends(get_body) + + +def depend_on_either_json_or_form_data(model: Type[B]) -> B: + async def get_body(request: Request): + content_type = request.headers.get("Content-Type") + if content_type is None: + raise HTTPException(status_code=400, detail="No Content-Type provided!") + elif content_type == "application/json": + try: + return model(**await request.json()) + except JSONDecodeError: + raise HTTPException(status_code=400, detail="Invalid JSON data") + elif content_type == "application/x-www-form-urlencoded" or content_type.startswith("multipart/form-data"): + try: + return model(**await request.form()) + except Exception: + raise HTTPException(status_code=400, detail="Invalid Form data") + else: + raise HTTPException(status_code=400, detail="Content-Type not supported!") + + return Depends(get_body) + + +UserIdPathParam: str = 
Path(..., title="User ID", description="The encoded database identifier of the user.") + +RequiredRepoOwnerParam: str = Query( + title="owner", + description="Owner of the target repository.", +) + +RequiredRepoNameParam: str = Query( + title="Name", + description="Name of the target repository.", +) + +RequiredChangesetParam: str = Query( + title="changeset", + description="Changeset of the target repository.", +) + +RepositoryIdPathParam: str = Path( + ..., title="Repository ID", description="The encoded database identifier of the repository." +) + +ChangesetRevisionPathParam: str = Path( + ..., + title="Change Revision", + description="The changeset revision corresponding to the target revision of the target repository.", +) + +UsernameIdPathParam: str = Path(..., title="Username", description="The target username.") + +CommitMessageQueryParam: Optional[str] = Query( + default=None, + title="Commit Message", + description="Set commit message as a query parameter.", +) + +DownloadableQueryParam: bool = Query( + default=True, + title="downloadable_only", + description="Include only downloadable repositories.", +) + +CommitMessage: str = Query( + None, + title="Commit message", + description="A commit message to store with repository update.", +) + +RepositoryIndexQueryParam: Optional[str] = Query( + default=None, + title="Search Query", +) + +ToolsIndexQueryParam: str = Query( + default=..., + title="Search Query", +) + +RepositorySearchPageQueryParam: int = Query( + default=1, + title="Page", +) + +RepositorySearchPageSizeQueryParam: int = Query( + default=10, + title="Page Size", +) + +RepositoryIndexDeletedQueryParam: Optional[bool] = Query(False, title="Deleted?") + +RepositoryIndexOwnerQueryParam: Optional[str] = Query(None, title="Owner") + +RepositoryIndexNameQueryParam: Optional[str] = Query(None, title="Name") + +RepositoryIndexToolIdsQueryParam: Optional[List[str]] = Query( + None, title="Tool IDs", description="List of tool GUIDs to find the repository for" +) + + +OptionalRepositoryOwnerParam: Optional[str] = Query(None, title="Owner") +OptionalRepositoryNameParam: Optional[str] = Query(None, title="Name") +RequiredRepositoryChangesetRevisionParam: str = Query(..., title="Changeset Revision") +OptionalRepositoryIdParam: Optional[str] = Query(None, title="TSR ID") +OptionalHexlifyParam: Optional[bool] = Query(True, title="Hexlify response") + +CategoryIdPathParam: str = Path( + ..., title="Category ID", description="The encoded database identifier of the category." +) +CategoryRepositoriesInstallableQueryParam: bool = Query(False, title="Installable?") +CategoryRepositoriesSortKeyQueryParam: str = Query("name", title="Sort Key") +CategoryRepositoriesSortOrderQueryParam: str = Query("asc", title="Sort Order") +CategoryRepositoriesPageQueryParam: Optional[int] = Query(None, title="Page") + + +def ensure_valid_session(trans: SessionRequestContext) -> None: + """ + Ensure that a valid Galaxy session exists and is available as + trans.session (part of initialization) + """ + app = trans.app + mapping = app.model + session_manager = GalaxySessionManager(mapping) + sa_session = app.model.context + request = trans.request + # Try to load an existing session + secure_id = request.get_cookie(AUTH_COOKIE_NAME) + galaxy_session = None + prev_galaxy_session = None + user_for_new_session = None + invalidate_existing_session = False + # Track whether the session has changed so we can avoid calling flush + # in the most common case (session exists and is valid). 
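ensure_valid_session, which begins above, ports the WSGI webapp's session bootstrap to the FastAPI stack; the flag it is about to set gates the only flush, so a request arriving with a valid session performs no database write. The cookie round trip is symmetric with set_auth_cookie further down, roughly:

# write path: session_key -> encode_guid -> "galaxycommunitysession" cookie
# read path:  cookie -> decode_guid -> GalaxySessionManager lookup
secure_id = request.get_cookie(AUTH_COOKIE_NAME)
session_key = app.security.decode_guid(secure_id) if secure_id else None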
+ galaxy_session_requires_flush = False + if secure_id: + session_key: Optional[str] = app.security.decode_guid(secure_id) + if session_key: + # We do NOT catch exceptions here; if the database is down the request should fail, + # and we should not generate a new session. + galaxy_session = session_manager.get_session_from_session_key(session_key=session_key) + if not galaxy_session: + session_key = None + + if galaxy_session is not None and galaxy_session.user is not None and galaxy_session.user.deleted: + invalidate_existing_session = True + log.warning(f"User '{galaxy_session.user.email}' is marked deleted, invalidating session") + # Do we need to invalidate the session for some reason? + if invalidate_existing_session: + assert galaxy_session + prev_galaxy_session = galaxy_session + prev_galaxy_session.is_valid = False + galaxy_session = None + # No relevant cookies, or couldn't find, or invalid, so create a new session + if galaxy_session is None: + galaxy_session = create_new_session(trans, prev_galaxy_session, user_for_new_session) + galaxy_session_requires_flush = True + trans.set_galaxy_session(galaxy_session) + set_auth_cookie(trans, galaxy_session) + else: + trans.set_galaxy_session(galaxy_session) + # Do we need to flush the session? + if galaxy_session_requires_flush: + sa_session.add(galaxy_session) + # FIXME: If prev_session is a proper relation this would not + # be needed. + if prev_galaxy_session: + sa_session.add(prev_galaxy_session) + sa_session.flush() + + +def set_auth_cookie(trans: SessionRequestContext, session): + cookie_name = AUTH_COOKIE_NAME + set_cookie(trans, trans.app.security.encode_guid(session.session_key), cookie_name) + + +def set_cookie(trans: SessionRequestContext, value: str, key, path="/", age=90) -> None: + """Convenience method for setting a session cookie""" + # In wsgi we were setting both a max_age and an expires, but + # all browsers support max_age now. + domain: Optional[str] = trans.app.config.cookie_domain + trans.response.set_cookie( + key, + unicodify(value), + path=path, + max_age=3600 * 24 * age, # 90 days + httponly=True, + secure=trans.request.is_secure, + domain=domain, + ) diff --git a/lib/tool_shed/webapp/api2/authenticate.py b/lib/tool_shed/webapp/api2/authenticate.py new file mode 100644 index 000000000000..7e4ea1c4ee2d --- /dev/null +++ b/lib/tool_shed/webapp/api2/authenticate.py @@ -0,0 +1,27 @@ +from fastapi import Request + +from galaxy.webapps.galaxy.services.authenticate import ( + APIKeyResponse, + AuthenticationService, +) +from . 
import ( + depends, + Router, +) + +router = Router(tags=["authenticate"]) + + +@router.cbv +class FastAPIAuthenticate: + authentication_service: AuthenticationService = depends(AuthenticationService) + + @router.get( + "/api/authenticate/baseauth", + summary="Returns an API key for the authenticated user based on BaseAuth headers.", + operation_id="authenticate__baseauth", + ) + def get_api_key(self, request: Request) -> APIKeyResponse: + authorization = request.headers.get("Authorization") + auth = {"HTTP_AUTHORIZATION": authorization} + return self.authentication_service.get_api_key(auth, request) diff --git a/lib/tool_shed/webapp/api2/categories.py b/lib/tool_shed/webapp/api2/categories.py new file mode 100644 index 000000000000..023242469dd1 --- /dev/null +++ b/lib/tool_shed/webapp/api2/categories.py @@ -0,0 +1,80 @@ +from typing import ( + List, + Optional, +) + +from fastapi import Body + +from tool_shed.context import SessionRequestContext +from tool_shed.managers.categories import CategoryManager +from tool_shed.managers.repositories import repositories_by_category +from tool_shed_client.schema import ( + Category as CategoryResponse, + CreateCategoryRequest, + RepositoriesByCategory, +) +from . import ( + CategoryIdPathParam, + CategoryRepositoriesInstallableQueryParam, + CategoryRepositoriesPageQueryParam, + CategoryRepositoriesSortKeyQueryParam, + CategoryRepositoriesSortOrderQueryParam, + depends, + DependsOnTrans, + Router, +) + +router = Router(tags=["categories"]) + + +@router.cbv +class FastAPICategories: + category_manager: CategoryManager = depends(CategoryManager) + + @router.post( + "/api/categories", + description="create a category", + operation_id="categories__create", + require_admin=True, + ) + def create( + self, trans: SessionRequestContext = DependsOnTrans, request: CreateCategoryRequest = Body(...) + ) -> CategoryResponse: + category = self.category_manager.create(trans, request) + return self.category_manager.to_model(category) + + @router.get( + "/api/categories", + description="index categories", + operation_id="categories__index", + ) + def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[CategoryResponse]: + """ + Return a list of dictionaries that contain information about each Category. + """ + deleted = False + categories = self.category_manager.index_db(trans, deleted) + return [self.category_manager.to_model(c) for c in categories] + + @router.get( + "/api/categories/{encoded_category_id}/repositories", + description="display repositories by category", + operation_id="categories__repositories", + ) + def repositories( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_category_id: str = CategoryIdPathParam, + installable: bool = CategoryRepositoriesInstallableQueryParam, + sort_key: str = CategoryRepositoriesSortKeyQueryParam, + sort_order: str = CategoryRepositoriesSortOrderQueryParam, + page: Optional[int] = CategoryRepositoriesPageQueryParam, + ) -> RepositoriesByCategory: + return repositories_by_category( + trans.app, + encoded_category_id, + page=page, + sort_key=sort_key, + sort_order=sort_order, + installable=installable, + ) diff --git a/lib/tool_shed/webapp/api2/configuration.py b/lib/tool_shed/webapp/api2/configuration.py new file mode 100644 index 000000000000..815039150e81 --- /dev/null +++ b/lib/tool_shed/webapp/api2/configuration.py @@ -0,0 +1,24 @@ +from tool_shed.structured_app import ToolShedApp +from tool_shed_client.schema import Version +from . 
import ( + depends, + Router, +) + +router = Router(tags=["configuration"]) + + +@router.cbv +class FastAPIConfiguration: + app: ToolShedApp = depends(ToolShedApp) + + @router.get( + "/api/version", + operation_id="configuration__version", + ) + def version(self) -> Version: + return Version( + version_major=self.app.config.version_major, + version=self.app.config.version, + api_version="v2", + ) diff --git a/lib/tool_shed/webapp/api2/repositories.py b/lib/tool_shed/webapp/api2/repositories.py new file mode 100644 index 000000000000..776e4064a7ab --- /dev/null +++ b/lib/tool_shed/webapp/api2/repositories.py @@ -0,0 +1,509 @@ +import os +import shutil +import tempfile +from typing import ( + cast, + IO, + List, + Optional, + Union, +) + +from fastapi import ( + Body, + Depends, + Request, + Response, + status, + UploadFile, +) +from starlette.datastructures import UploadFile as StarletteUploadFile + +from galaxy.exceptions import InsufficientPermissionsException +from galaxy.webapps.galaxy.api import as_form +from tool_shed.context import SessionRequestContext +from tool_shed.managers.repositories import ( + can_manage_repo, + can_update_repo, + check_updates, + create_repository, + get_install_info, + get_ordered_installable_revisions, + get_repository_metadata_dict, + get_repository_metadata_for_management, + index_repositories, + readmes, + reset_metadata_on_repository, + search, + to_detailed_model, + to_model, + UpdatesRequest, + upload_tar_and_set_metadata, +) +from tool_shed.structured_app import ToolShedApp +from tool_shed.util.repository_util import get_repository_in_tool_shed +from tool_shed_client.schema import ( + CreateRepositoryRequest, + DetailedRepository, + from_legacy_install_info, + InstallInfo, + Repository, + RepositoryMetadata, + RepositoryPermissions, + RepositoryRevisionReadmes, + RepositorySearchResults, + RepositoryUpdate, + RepositoryUpdateRequest, + ResetMetadataOnRepositoryRequest, + ResetMetadataOnRepositoryResponse, + ValidRepostiroyUpdateMessage, +) +from . 
import (
+    ChangesetRevisionPathParam,
+    CommitMessageQueryParam,
+    depend_on_either_json_or_form_data,
+    depends,
+    DependsOnTrans,
+    DownloadableQueryParam,
+    OptionalHexlifyParam,
+    OptionalRepositoryIdParam,
+    OptionalRepositoryNameParam,
+    OptionalRepositoryOwnerParam,
+    RepositoryIdPathParam,
+    RepositoryIndexDeletedQueryParam,
+    RepositoryIndexNameQueryParam,
+    RepositoryIndexOwnerQueryParam,
+    RepositoryIndexQueryParam,
+    RepositorySearchPageQueryParam,
+    RepositorySearchPageSizeQueryParam,
+    RequiredChangesetParam,
+    RequiredRepoNameParam,
+    RequiredRepoOwnerParam,
+    RequiredRepositoryChangesetRevisionParam,
+    Router,
+    UsernameIdPathParam,
+)
+
+router = Router(tags=["repositories"])
+
+IndexResponse = Union[RepositorySearchResults, List[Repository]]
+
+
+@as_form
+class RepositoryUpdateRequestFormData(RepositoryUpdateRequest):
+    pass
+
+
+@router.cbv
+class FastAPIRepositories:
+    app: ToolShedApp = depends(ToolShedApp)
+
+    @router.get(
+        "/api/repositories",
+        description="Get a list of repositories or perform a search.",
+        operation_id="repositories__index",
+    )
+    def index(
+        self,
+        q: Optional[str] = RepositoryIndexQueryParam,
+        page: Optional[int] = RepositorySearchPageQueryParam,
+        page_size: Optional[int] = RepositorySearchPageSizeQueryParam,
+        deleted: Optional[bool] = RepositoryIndexDeletedQueryParam,
+        owner: Optional[str] = RepositoryIndexOwnerQueryParam,
+        name: Optional[str] = RepositoryIndexNameQueryParam,
+        trans: SessionRequestContext = DependsOnTrans,
+    ) -> IndexResponse:
+        if q:
+            assert page is not None
+            assert page_size is not None
+            search_results = search(trans, q, page, page_size)
+            return RepositorySearchResults(**search_results)
+        # See API notes - was added in https://github.com/galaxyproject/galaxy/pull/3626/files
+        # but I think it is currently unused. So probably we should just drop it until someone
+        # complains.
+        # elif params.tool_ids:
+        #     response = index_tool_ids(self.app, params.tool_ids)
+        #     return response
+        else:
+            repositories = index_repositories(self.app, name, owner, deleted or False)
+            return [to_model(self.app, r) for r in repositories]
+
+    @router.get(
+        "/api/repositories/get_repository_revision_install_info",
+        description="Get information used by the install client to install this repository.",
+        operation_id="repositories__legacy_install_info",
+    )
+    def legacy_install_info(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        name: str = RequiredRepoNameParam,
+        owner: str = RequiredRepoOwnerParam,
+        changeset_revision: str = RequiredChangesetParam,
+    ) -> list:
+        legacy_install_info = get_install_info(
+            trans,
+            name,
+            owner,
+            changeset_revision,
+        )
+        return list(legacy_install_info)
+
+    @router.get(
+        "/api/repositories/install_info",
+        description="Get information used by the install client to install this repository.",
+        operation_id="repositories__install_info",
+    )
+    def install_info(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        name: str = RequiredRepoNameParam,
+        owner: str = RequiredRepoOwnerParam,
+        changeset_revision: str = RequiredChangesetParam,
+    ) -> InstallInfo:
+        # A less problematic version of the above API, but I guess we
+        # need to maintain the older version for older Galaxy API clients
+        # for... sometime... or forever.
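+        # A rough sketch of how an install client might consume this new
+        # endpoint (hypothetical client code; `shed_url` and `rev` are
+        # assumed, not part of this diff):
+        #
+        #   import requests
+        #   info = requests.get(
+        #       f"{shed_url}/api/repositories/install_info",
+        #       params={"name": "bwa", "owner": "devteam", "changeset_revision": rev},
+        #   ).json()
+        #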
+ legacy_install_info = get_install_info( + trans, + name, + owner, + changeset_revision, + ) + return from_legacy_install_info(legacy_install_info) + + @router.get( + "/api/repositories/{encoded_repository_id}/metadata", + description="Get information about repository metadata", + operation_id="repositories__metadata", + # See comment below. + # response_model=RepositoryMetadata, + ) + def metadata( + self, + encoded_repository_id: str = RepositoryIdPathParam, + downloadable_only: bool = DownloadableQueryParam, + ) -> dict: + recursive = True + as_dict = get_repository_metadata_dict(self.app, encoded_repository_id, recursive, downloadable_only) + # fails 1020 if we try to use the model - I guess repository dependencies + # are getting lost + return as_dict + # return _hack_fastapi_4428(as_dict) + + @router.get( + "/api_internal/repositories/{encoded_repository_id}/metadata", + description="Get information about repository metadata", + operation_id="repositories__internal_metadata", + response_model=RepositoryMetadata, + ) + def metadata_internal( + self, + encoded_repository_id: str = RepositoryIdPathParam, + downloadable_only: bool = DownloadableQueryParam, + ) -> dict: + recursive = True + as_dict = get_repository_metadata_dict(self.app, encoded_repository_id, recursive, downloadable_only) + return _hack_fastapi_4428(as_dict) + + @router.get( + "/api/repositories/get_ordered_installable_revisions", + description="Get an ordered list of the repository changeset revisions that are installable", + operation_id="repositories__get_ordered_installable_revisions", + ) + def get_ordered_installable_revisions( + self, + owner: Optional[str] = OptionalRepositoryOwnerParam, + name: Optional[str] = OptionalRepositoryNameParam, + tsr_id: Optional[str] = OptionalRepositoryIdParam, + ) -> List[str]: + return get_ordered_installable_revisions(self.app, name, owner, tsr_id) + + @router.post( + "/api/repositories/reset_metadata_on_repository", + description="reset metadata on a repository", + operation_id="repositories__reset_legacy", + ) + def reset_metadata_on_repository_legacy( + self, + trans: SessionRequestContext = DependsOnTrans, + request: ResetMetadataOnRepositoryRequest = depend_on_either_json_or_form_data( + ResetMetadataOnRepositoryRequest + ), + ) -> ResetMetadataOnRepositoryResponse: + return reset_metadata_on_repository(trans, request.repository_id) + + @router.post( + "/api/repositories/{encoded_repository_id}/reset_metadata", + description="reset metadata on a repository", + operation_id="repositories__reset", + ) + def reset_metadata_on_repository( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + ) -> ResetMetadataOnRepositoryResponse: + return reset_metadata_on_repository(trans, encoded_repository_id) + + @router.get( + "/api/repositories/updates", + operation_id="repositories__update", + ) + @router.get( + "/api/repositories/updates/", + ) + def updates( + self, + owner: Optional[str] = OptionalRepositoryOwnerParam, + name: Optional[str] = OptionalRepositoryNameParam, + changeset_revision: str = RequiredRepositoryChangesetRevisionParam, + hexlify: Optional[bool] = OptionalHexlifyParam, + ): + request = UpdatesRequest( + name=name, + owner=owner, + changeset_revision=changeset_revision, + hexlify=hexlify, + ) + return Response(content=check_updates(self.app, request)) + + @router.post( + "/api/repositories", + description="create a new repository", + operation_id="repositories__create", + ) + def create( + self, + trans: 
SessionRequestContext = DependsOnTrans,
+        request: CreateRepositoryRequest = Body(...),
+    ) -> Repository:
+        db_repository = create_repository(
+            trans,
+            request,
+        )
+        return to_model(self.app, db_repository)
+
+    @router.get(
+        "/api/repositories/{encoded_repository_id}",
+        operation_id="repositories__show",
+    )
+    def show(
+        self,
+        encoded_repository_id: str = RepositoryIdPathParam,
+    ) -> DetailedRepository:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        return to_detailed_model(self.app, repository)
+
+    @router.get(
+        "/api/repositories/{encoded_repository_id}/permissions",
+        operation_id="repositories__permissions",
+    )
+    def permissions(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+    ) -> RepositoryPermissions:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_update_repo(trans, repository):
+            raise InsufficientPermissionsException(
+                "You do not have permission to inspect repository permissions."
+            )
+        return RepositoryPermissions(
+            allow_push=trans.app.security_agent.usernames_that_can_push(repository),
+            can_manage=can_manage_repo(trans, repository),
+            can_push=can_update_repo(trans, repository),
+        )
+
+    @router.get(
+        "/api/repositories/{encoded_repository_id}/allow_push",
+        operation_id="repositories__show_allow_push",
+    )
+    def show_allow_push(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+    ) -> List[str]:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_manage_repo(trans, repository):
+            raise InsufficientPermissionsException("You do not have permission to update this repository.")
+        return trans.app.security_agent.usernames_that_can_push(repository)
+
+    @router.post(
+        "/api/repositories/{encoded_repository_id}/allow_push/{username}",
+        operation_id="repositories__add_allow_push",
+    )
+    def add_allow_push(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        username: str = UsernameIdPathParam,
+    ) -> List[str]:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        if not can_manage_repo(trans, repository):
+            raise InsufficientPermissionsException("You do not have permission to update this repository.")
+        repository.set_allow_push([username])
+        return trans.app.security_agent.usernames_that_can_push(repository)
+
+    @router.put(
+        "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious",
+        operation_id="repositories__set_malicious",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def set_malicious(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        changeset_revision: str = ChangesetRevisionPathParam,
+    ):
+        repository_metadata = get_repository_metadata_for_management(trans, encoded_repository_id, changeset_revision)
+        repository_metadata.malicious = True
+        trans.sa_session.add(repository_metadata)
+        trans.sa_session.flush()
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.delete(
+        "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious",
+        operation_id="repositories__unset_malicious",
+        status_code=status.HTTP_204_NO_CONTENT,
+    )
+    def unset_malicious(
+        self,
+        trans: SessionRequestContext = DependsOnTrans,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        changeset_revision: str =
ChangesetRevisionPathParam, + ): + repository_metadata = get_repository_metadata_for_management(trans, encoded_repository_id, changeset_revision) + repository_metadata.malicious = False + trans.sa_session.add(repository_metadata) + trans.sa_session.flush() + return Response(status_code=status.HTTP_204_NO_CONTENT) + + @router.put( + "/api/repositories/{encoded_repository_id}/deprecated", + operation_id="repositories__set_deprecated", + status_code=status.HTTP_204_NO_CONTENT, + ) + def set_deprecated( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + ): + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.deprecated = True + trans.sa_session.add(repository) + trans.sa_session.flush() + return Response(status_code=status.HTTP_204_NO_CONTENT) + + @router.delete( + "/api/repositories/{encoded_repository_id}/deprecated", + operation_id="repositories__unset_deprecated", + status_code=status.HTTP_204_NO_CONTENT, + ) + def unset_deprecated( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + ): + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.deprecated = False + trans.sa_session.add(repository) + trans.sa_session.flush() + return Response(status_code=status.HTTP_204_NO_CONTENT) + + @router.delete( + "/api/repositories/{encoded_repository_id}/allow_push/{username}", + operation_id="repositories__remove_allow_push", + ) + def remove_allow_push( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_repository_id: str = RepositoryIdPathParam, + username: str = UsernameIdPathParam, + ) -> List[str]: + repository = get_repository_in_tool_shed(self.app, encoded_repository_id) + if not can_manage_repo(trans, repository): + raise InsufficientPermissionsException("You do not have permission to update this repository.") + repository.set_allow_push(None, remove_auth=username) + return trans.app.security_agent.usernames_that_can_push(repository) + + @router.post( + "/api/repositories/{encoded_repository_id}/changeset_revision", + description="upload new revision to the repository", + operation_id="repositories__create_revision", + ) + async def create_changeset_revision( + self, + request: Request, + encoded_repository_id: str = RepositoryIdPathParam, + commit_message: Optional[str] = CommitMessageQueryParam, + trans: SessionRequestContext = DependsOnTrans, + files: Optional[List[UploadFile]] = None, + revision_request: RepositoryUpdateRequest = Depends(RepositoryUpdateRequestFormData.as_form), # type: ignore[attr-defined] + ) -> RepositoryUpdate: + try: + # Code stolen from Marius' work in Galaxy's Tools API. 
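+            # A hedged sketch of the client side of this endpoint: a
+            # multipart POST of a tar archive (names like `shed_url`,
+            # `repo_id`, and `api_key` are assumed, not part of this diff):
+            #
+            #   import requests
+            #   requests.post(
+            #       f"{shed_url}/api/repositories/{repo_id}/changeset_revision",
+            #       params={"commit_message": "initial import"},
+            #       files={"files": open("repo.tar.gz", "rb")},
+            #       headers={"x-api-key": api_key},
+            #   )
+            #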
+
+            files2: List[StarletteUploadFile] = cast(List[StarletteUploadFile], files or [])
+            # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile
+            if not files2:
+                data = await request.form()
+                for value in data.values():
+                    if isinstance(value, StarletteUploadFile):
+                        files2.append(value)
+
+            repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+
+            if not can_update_repo(trans, repository):
+                raise InsufficientPermissionsException("You do not have permission to update this repository.")
+
+            assert trans.user
+            assert files2
+            the_file = files2[0]
+            with tempfile.NamedTemporaryFile(
+                dir=trans.app.config.new_file_path, prefix="upload_file_data_", delete=False
+            ) as dest:
+                upload_file_like: IO[bytes] = the_file.file
+                shutil.copyfileobj(upload_file_like, dest)  # type: ignore[misc]  # https://github.com/python/mypy/issues/15031
+            the_file.file.close()
+            filename = dest.name
+            try:
+                message = upload_tar_and_set_metadata(
+                    trans,
+                    trans.request.host,
+                    repository,
+                    filename,
+                    commit_message or revision_request.commit_message or "Uploaded",
+                )
+                return RepositoryUpdate(__root__=ValidRepostiroyUpdateMessage(message=message))
+            finally:
+                if os.path.exists(filename):
+                    os.remove(filename)
+        except Exception:
+            import logging
+
+            log = logging.getLogger(__name__)
+            log.exception("Problem uploading new changeset revision")
+            raise
+
+    @router.get(
+        "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/readmes",
+        description="fetch readmes for repository revision",
+        operation_id="repositories__readmes",
+        response_model=RepositoryRevisionReadmes,
+    )
+    def get_readmes(
+        self,
+        encoded_repository_id: str = RepositoryIdPathParam,
+        changeset_revision: str = ChangesetRevisionPathParam,
+    ) -> dict:
+        repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
+        return readmes(self.app, repository, changeset_revision)
+
+
+def _hack_fastapi_4428(as_dict) -> dict:
+    # https://github.com/tiangolo/fastapi/pull/4428#issuecomment-1145429263
+    # after pydantic2 swap to really returning the object
+    return RepositoryMetadata(__root__=as_dict).dict()["__root__"]
diff --git a/lib/tool_shed/webapp/api2/tools.py b/lib/tool_shed/webapp/api2/tools.py
new file mode 100644
index 000000000000..0d8c2f2d5524
--- /dev/null
+++ b/lib/tool_shed/webapp/api2/tools.py
@@ -0,0 +1,123 @@
+import logging
+from typing import List
+
+from fastapi import (
+    Path,
+    Request,
+)
+
+from tool_shed.context import SessionRequestContext
+from tool_shed.managers.tools import search
+from tool_shed.managers.trs import (
+    get_tool,
+    service_info,
+    tool_classes,
+)
+from tool_shed.structured_app import ToolShedApp
+from tool_shed.util.shed_index import build_index
+from tool_shed_client.schema import BuildSearchIndexResponse
+from tool_shed_client.schema.trs import (
+    Tool,
+    ToolClass,
+    ToolVersion,
+)
+from tool_shed_client.schema.trs_service_info import Service
+from .
import ( + depends, + DependsOnTrans, + RepositorySearchPageQueryParam, + RepositorySearchPageSizeQueryParam, + Router, + ToolsIndexQueryParam, +) + +log = logging.getLogger(__name__) + +router = Router(tags=["tools"]) + +TOOL_ID_PATH_PARAM: str = Path( + ..., + title="GA4GH TRS Tool ID", + description="See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids", +) + + +@router.cbv +class FastAPITools: + app: ToolShedApp = depends(ToolShedApp) + + @router.get( + "/api/tools", + operation_id="tools__index", + ) + def index( + self, + q: str = ToolsIndexQueryParam, + page: int = RepositorySearchPageQueryParam, + page_size: int = RepositorySearchPageSizeQueryParam, + trans: SessionRequestContext = DependsOnTrans, + ): + search_results = search(trans, q, page, page_size) + return search_results + + @router.put( + "/api/tools/build_search_index", + operation_id="tools__build_search_index", + require_admin=True, + ) + def build_search_index(self) -> BuildSearchIndexResponse: + """Not part of the stable API, just something to simplify + bootstrapping tool sheds, scripting, testing, etc... + """ + config = self.app.config + repos_indexed, tools_indexed = build_index( + config.whoosh_index_dir, + config.file_path, + config.hgweb_config_dir, + config.database_connection, + ) + return BuildSearchIndexResponse( + repositories_indexed=repos_indexed, + tools_indexed=tools_indexed, + ) + + @router.get("/api/ga4gh/trs/v2/service-info", operation_id="tools_trs_service_info") + def service_info(self, request: Request) -> Service: + return service_info(self.app, request.url) + + @router.get("/api/ga4gh/trs/v2/toolClasses", operation_id="tools__trs_tool_classes") + def tool_classes(self) -> List[ToolClass]: + return tool_classes() + + @router.get( + "/api/ga4gh/trs/v2/tools", + operation_id="tools__trs_index", + ) + def trs_index( + self, + ): + # we probably want to be able to query the database at the + # tool level and such to do this right? 
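+        # For reference, a GA4GH TRS client would enumerate this listing with
+        # something like the following (hypothetical client code, not part of
+        # this diff):
+        #
+        #   import requests
+        #   tools = requests.get(f"{shed_url}/api/ga4gh/trs/v2/tools").json()
+        #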
+ return [] + + @router.get( + "/api/ga4gh/trs/v2/tools/{tool_id}", + operation_id="tools__trs_get", + ) + def trs_get( + self, + trans: SessionRequestContext = DependsOnTrans, + tool_id: str = TOOL_ID_PATH_PARAM, + ) -> Tool: + return get_tool(trans, tool_id) + + @router.get( + "/api/ga4gh/trs/v2/tools/{tool_id}/versions", + operation_id="tools__trs_get_versions", + ) + def trs_get_versions( + self, + trans: SessionRequestContext = DependsOnTrans, + tool_id: str = TOOL_ID_PATH_PARAM, + ) -> List[ToolVersion]: + return get_tool(trans, tool_id).versions diff --git a/lib/tool_shed/webapp/api2/users.py b/lib/tool_shed/webapp/api2/users.py new file mode 100644 index 000000000000..8873d8b9800f --- /dev/null +++ b/lib/tool_shed/webapp/api2/users.py @@ -0,0 +1,340 @@ +import logging +from typing import ( + List, + Optional, +) + +from fastapi import ( + Body, + Response, + status, +) +from pydantic import BaseModel +from sqlalchemy import ( + and_, + true, +) + +import tool_shed.util.shed_util_common as suc +from galaxy.exceptions import ( + InsufficientPermissionsException, + ObjectNotFound, + RequestParameterInvalidException, +) +from galaxy.managers.api_keys import ApiKeyManager +from galaxy.managers.users import UserManager +from galaxy.webapps.base.webapp import create_new_session +from tool_shed.context import SessionRequestContext +from tool_shed.managers.users import ( + api_create_user, + get_api_user, + index, +) +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.model import User as SaUser +from tool_shed_client.schema import ( + CreateUserRequest, + User, +) +from . import ( + depends, + DependsOnTrans, + ensure_valid_session, + Router, + set_auth_cookie, + UserIdPathParam, +) + +router = Router(tags=["users"]) + +log = logging.getLogger(__name__) + + +class UiRegisterRequest(BaseModel): + email: str + username: str + password: str + bear_field: str + + +class HasCsrfToken(BaseModel): + session_csrf_token: str + + +class UiLoginRequest(HasCsrfToken): + login: str + password: str + + +class UiLogoutRequest(HasCsrfToken): + logout_all: bool = False + + +class UiLoginResponse(BaseModel): + pass + + +class UiLogoutResponse(BaseModel): + pass + + +class UiRegisterResponse(BaseModel): + email: str + activation_sent: bool = False + activation_error: bool = False + contact_email: Optional[str] = None + + +class UiChangePasswordRequest(BaseModel): + current: str + password: str + + +INVALID_LOGIN_OR_PASSWORD = "Invalid login or password" + + +@router.cbv +class FastAPIUsers: + app: ToolShedApp = depends(ToolShedApp) + user_manager: UserManager = depends(UserManager) + api_key_manager: ApiKeyManager = depends(ApiKeyManager) + + @router.get( + "/api/users", + description="index users", + operation_id="users__index", + ) + def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[User]: + deleted = False + return index(trans.app, deleted) + + @router.post( + "/api/users", + description="create a user", + operation_id="users__create", + require_admin=True, + ) + def create(self, trans: SessionRequestContext = DependsOnTrans, request: CreateUserRequest = Body(...)) -> User: + return api_create_user(trans, request) + + @router.get( + "/api/users/current", + description="show current user", + operation_id="users__current", + ) + def current(self, trans: SessionRequestContext = DependsOnTrans) -> User: + user = trans.user + if not user: + raise ObjectNotFound() + + return get_api_user(trans.app, user) + + @router.get( + "/api/users/{encoded_user_id}", + 
description="show a user", + operation_id="users__show", + ) + def show(self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam) -> User: + user = suc.get_user(trans.app, encoded_user_id) + if user is None: + raise ObjectNotFound() + return get_api_user(trans.app, user) + + @router.get( + "/api/users/{encoded_user_id}/api_key", + name="get_or_create_api_key", + summary="Return the user's API key", + operation_id="users__get_or_create_api_key", + ) + def get_or_create_api_key( + self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam + ) -> str: + user = self._get_user(trans, encoded_user_id) + return self.api_key_manager.get_or_create_api_key(user) + + @router.post( + "/api/users/{encoded_user_id}/api_key", + summary="Creates a new API key for the user", + operation_id="users__create_api_key", + ) + def create_api_key( + self, trans: SessionRequestContext = DependsOnTrans, encoded_user_id: str = UserIdPathParam + ) -> str: + user = self._get_user(trans, encoded_user_id) + return self.api_key_manager.create_api_key(user).key + + @router.delete( + "/api/users/{encoded_user_id}/api_key", + summary="Delete the current API key of the user", + status_code=status.HTTP_204_NO_CONTENT, + operation_id="users__delete_api_key", + ) + def delete_api_key( + self, + trans: SessionRequestContext = DependsOnTrans, + encoded_user_id: str = UserIdPathParam, + ): + user = self._get_user(trans, encoded_user_id) + self.api_key_manager.delete_api_key(user) + return Response(status_code=status.HTTP_204_NO_CONTENT) + + def _get_user(self, trans: SessionRequestContext, encoded_user_id: str): + if encoded_user_id == "current": + user = trans.user + else: + user = suc.get_user(trans.app, encoded_user_id) + if user is None: + raise ObjectNotFound() + if not (trans.user_is_admin or trans.user == user): + raise InsufficientPermissionsException() + return user + + @router.post( + "/api_internal/register", + description="register a user", + operation_id="users__internal_register", + ) + def register( + self, trans: SessionRequestContext = DependsOnTrans, request: UiRegisterRequest = Body(...) + ) -> UiRegisterResponse: + honeypot_field = request.bear_field + if honeypot_field != "": + message = "You've been flagged as a possible bot. If you are not, please try registering again and fill the form out carefully." + raise RequestParameterInvalidException(message) + + username = request.username + if username == "repos": + raise RequestParameterInvalidException("Cannot create a user with the username 'repos'") + self.user_manager.create(email=request.email, username=username, password=request.password) + if self.app.config.user_activation_on: + is_activation_sent = self.user_manager.send_activation_email(trans, request.email, username) + if is_activation_sent: + return UiRegisterResponse(email=request.email, activation_sent=True) + else: + return UiRegisterResponse( + email=request.email, + activation_sent=False, + activation_error=True, + contact_email=self.app.config.error_email_to, + ) + else: + return UiRegisterResponse(email=request.email) + + @router.put( + "/api_internal/change_password", + description="reset a user", + operation_id="users__internal_change_password", + status_code=status.HTTP_204_NO_CONTENT, + ) + def change_password( + self, trans: SessionRequestContext = DependsOnTrans, request: UiChangePasswordRequest = Body(...) 
+    ):
+        password = request.password
+        current = request.current
+        if trans.user is None:
+            raise InsufficientPermissionsException("Must be logged in to use this functionality")
+        user_id = trans.user.id
+        token = None
+        user, message = self.user_manager.change_password(
+            trans, password=password, current=current, token=token, confirm=password, id=user_id
+        )
+        if not user:
+            raise RequestParameterInvalidException(message)
+        return Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    @router.put(
+        "/api_internal/login",
+        description="login to web UI",
+        operation_id="users__internal_login",
+    )
+    def internal_login(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiLoginRequest = Body(...)
+    ) -> UiLoginResponse:
+        log.info(f"top of internal_login {trans.session_csrf_token}")
+        ensure_csrf_token(trans, request)
+        login = request.login
+        password = request.password
+        user = self.user_manager.get_user_by_identity(login)
+        if user is None:
+            raise InsufficientPermissionsException(INVALID_LOGIN_OR_PASSWORD)
+        elif user.deleted:
+            message = (
+                "This account has been marked deleted, contact your local Galaxy administrator to restore the account."
+            )
+            if trans.app.config.error_email_to is not None:
+                message += f" Contact: {trans.app.config.error_email_to}."
+            raise InsufficientPermissionsException(message)
+        elif not trans.app.auth_manager.check_password(user, password, trans.request):
+            raise InsufficientPermissionsException(INVALID_LOGIN_OR_PASSWORD)
+        else:
+            handle_user_login(trans, user)
+        return UiLoginResponse()
+
+    @router.put(
+        "/api_internal/logout",
+        description="logout of web UI",
+        operation_id="users__internal_logout",
+    )
+    def internal_logout(
+        self, trans: SessionRequestContext = DependsOnTrans, request: UiLogoutRequest = Body(...)
+    ) -> UiLogoutResponse:
+        ensure_csrf_token(trans, request)
+        handle_user_logout(trans, logout_all=request.logout_all)
+        return UiLogoutResponse()
+
+
+def ensure_csrf_token(trans: SessionRequestContext, request: HasCsrfToken):
+    session_csrf_token = request.session_csrf_token
+    if not trans.session_csrf_token:
+        ensure_valid_session(trans)
+    message = None
+    if not session_csrf_token:
+        message = "No session token set, denying request."
+    elif session_csrf_token != trans.session_csrf_token:
+        log.info(f"{session_csrf_token} != {trans.session_csrf_token}")
+        message = "Wrong session token found, denying request."
+ if message: + raise InsufficientPermissionsException(message) + + +def handle_user_login(trans: SessionRequestContext, user: SaUser) -> None: + trans.app.security_agent.create_user_role(user, trans.app) + # Set the previous session + prev_galaxy_session = trans.get_galaxy_session() + if prev_galaxy_session: + prev_galaxy_session.is_valid = False + # Define a new current_session + new_session = create_new_session(trans, prev_galaxy_session, user) + trans.set_galaxy_session(new_session) + trans.sa_session.add_all((prev_galaxy_session, new_session)) + trans.sa_session.flush() + set_auth_cookie(trans, new_session) + + +def handle_user_logout(trans, logout_all=False): + """ + Logout the current user: + - invalidate the current session + - create a new session with no user associated + """ + prev_galaxy_session = trans.get_galaxy_session() + if prev_galaxy_session: + prev_galaxy_session.is_valid = False + new_session = create_new_session(trans, prev_galaxy_session, None) + trans.set_galaxy_session(new_session) + trans.sa_session.add_all((prev_galaxy_session, new_session)) + trans.sa_session.flush() + + galaxy_user_id = prev_galaxy_session.user_id + if logout_all and galaxy_user_id is not None: + for other_galaxy_session in trans.sa_session.query(trans.app.model.GalaxySession).filter( + and_( + trans.app.model.GalaxySession.table.c.user_id == galaxy_user_id, + trans.app.model.GalaxySession.table.c.is_valid == true(), + trans.app.model.GalaxySession.table.c.id != prev_galaxy_session.id, + ) + ): + other_galaxy_session.is_valid = False + trans.sa_session.add(other_galaxy_session) + trans.sa_session.flush() + set_auth_cookie(trans, new_session) diff --git a/lib/tool_shed/webapp/app.py b/lib/tool_shed/webapp/app.py index bdd32ef95d86..c71ad3938c68 100644 --- a/lib/tool_shed/webapp/app.py +++ b/lib/tool_shed/webapp/app.py @@ -1,7 +1,10 @@ import logging import sys import time -from typing import Any +from typing import ( + Any, + Optional, +) from sqlalchemy.orm.scoping import scoped_session @@ -108,3 +111,7 @@ def __init__(self, **kwd) -> None: # used for cachebusting -- refactor this into a *SINGLE* UniverseApplication base. self.server_starttime = int(time.time()) log.debug("Tool shed hgweb.config file is: %s", self.hgweb_config_manager.hgweb_config) + + +# Global instance of the universe app. 
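+# Other Galaxy webapps follow a similar pattern of stashing the application
+# singleton at module scope; consumers should treat it as optional, e.g.
+# (illustrative usage only, not part of this diff):
+#
+#   import tool_shed.webapp.app as shed_app
+#   if shed_app.app is not None:
+#       config = shed_app.app.config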
+app: Optional[ToolShedApp] = None diff --git a/lib/tool_shed/webapp/buildapp.py b/lib/tool_shed/webapp/buildapp.py index 7565a7a2d98c..e25859c33be6 100644 --- a/lib/tool_shed/webapp/buildapp.py +++ b/lib/tool_shed/webapp/buildapp.py @@ -13,20 +13,36 @@ import galaxy.webapps.base.webapp from galaxy import util +from galaxy.structured_app import BasicSharedApp from galaxy.util import asbool from galaxy.util.properties import load_app_properties +from galaxy.web import url_for from galaxy.web.framework.middleware.error import ErrorMiddleware from galaxy.web.framework.middleware.request_id import RequestIDMiddleware from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware -from galaxy.webapps.base.webapp import build_url_map +from galaxy.webapps.base.webapp import ( + build_url_map, + GalaxyWebTransaction, +) from galaxy.webapps.util import wrap_if_allowed +SHED_API_VERSION = os.environ.get("TOOL_SHED_API_VERSION", "v1") + log = logging.getLogger(__name__) +class ToolShedGalaxyWebTransaction(GalaxyWebTransaction): + @property + def repositories_hostname(self) -> str: + return url_for("/", qualified=True).rstrip("/") + + class CommunityWebApplication(galaxy.webapps.base.webapp.WebApplication): injection_aware: bool = True + def transaction_chooser(self, environ, galaxy_app: BasicSharedApp, session_cookie: str): + return ToolShedGalaxyWebTransaction(environ, galaxy_app, self, session_cookie) + def add_ui_controllers(webapp, app): """ @@ -65,11 +81,15 @@ def app_pair(global_conf, load_app_kwds=None, **kwargs): kwargs = load_app_properties(kwds=kwargs, config_prefix="TOOL_SHED_CONFIG_", **load_app_kwds) if "app" in kwargs: app = kwargs.pop("app") + import tool_shed.webapp.app + + tool_shed.webapp.app.app = app else: try: - from tool_shed.webapp.app import UniverseApplication + import tool_shed.webapp.app - app = UniverseApplication(global_conf=global_conf, **kwargs) + app = tool_shed.webapp.app.UniverseApplication(global_conf=global_conf, **kwargs) + tool_shed.webapp.app.app = app except Exception: import sys import traceback @@ -98,123 +118,128 @@ def app_pair(global_conf, load_app_kwds=None, **kwargs): # Enable 'hg clone' functionality on repos by letting hgwebapp handle the request webapp.add_route("/repos/*path_info", controller="hg", action="handle_request", path_info="/") # Add the web API. 
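+    # Registration of the legacy (v1) routes below is gated on the
+    # TOOL_SHED_API_VERSION environment variable (read into SHED_API_VERSION
+    # at import time); the new v2 FastAPI routes live in
+    # lib/tool_shed/webapp/api2. Illustrative shell usage (assumed invocation,
+    # not part of this diff):
+    #
+    #   TOOL_SHED_API_VERSION=v2 ./run_tool_shed.sh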
# A good resource for RESTful services - https://routes.readthedocs.io/en/latest/restful.html - webapp.add_api_controllers("tool_shed.webapp.api", app) - webapp.mapper.connect( - "api_key_retrieval", - "/api/authenticate/baseauth/", - controller="authenticate", - action="get_tool_shed_api_key", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect("group", "/api/groups/", controller="groups", action="index", conditions=dict(method=["GET"])) - webapp.mapper.connect( - "group", "/api/groups/", controller="groups", action="create", conditions=dict(method=["POST"]) - ) - webapp.mapper.connect( - "group", "/api/groups/{encoded_id}", controller="groups", action="show", conditions=dict(method=["GET"]) - ) - webapp.mapper.resource( - "category", - "categories", - controller="categories", - name_prefix="category_", - path_prefix="/api", - parent_resources=dict(member_name="category", collection_name="categories"), - ) - webapp.mapper.connect( - "repositories_in_category", - "/api/categories/{category_id}/repositories", - controller="categories", - action="get_repositories", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "show_updates_for_repository", - "/api/repositories/updates", - controller="repositories", - action="updates", - conditions=dict(method=["GET"]), - ) - webapp.mapper.resource( - "repository", - "repositories", - controller="repositories", - collection={ - "add_repository_registry_entry": "POST", - "get_repository_revision_install_info": "GET", - "get_ordered_installable_revisions": "GET", - "get_installable_revisions": "GET", - "remove_repository_registry_entry": "POST", - "reset_metadata_on_repositories": "POST", - "reset_metadata_on_repository": "POST", - }, - name_prefix="repository_", - path_prefix="/api", - parent_resources=dict(member_name="repository", collection_name="repositories"), - ) - webapp.mapper.resource( - "repository_revision", - "repository_revisions", - member={"repository_dependencies": "GET", "export": "POST"}, - controller="repository_revisions", - name_prefix="repository_revision_", - path_prefix="/api", - parent_resources=dict(member_name="repository_revision", collection_name="repository_revisions"), - ) - webapp.mapper.resource( - "user", - "users", - controller="users", - name_prefix="user_", - path_prefix="/api", - parent_resources=dict(member_name="user", collection_name="users"), - ) - webapp.mapper.connect( - "update_repository", - "/api/repositories/{id}", - controller="repositories", - action="update", - conditions=dict(method=["PATCH", "PUT"]), - ) - webapp.mapper.connect( - "repository_create_changeset_revision", - "/api/repositories/{id}/changeset_revision", - controller="repositories", - action="create_changeset_revision", - conditions=dict(method=["POST"]), - ) - webapp.mapper.connect( - "repository_get_metadata", - "/api/repositories/{id}/metadata", - controller="repositories", - action="metadata", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "repository_show_tools", - "/api/repositories/{id}/{changeset}/show_tools", - controller="repositories", - action="show_tools", - conditions=dict(method=["GET"]), - ) - webapp.mapper.connect( - "create_repository", - "/api/repositories", - controller="repositories", - action="create", - conditions=dict(method=["POST"]), - ) - webapp.mapper.connect( - "tools", - "/api/tools/build_search_index", - controller="tools", - action="build_search_index", - conditions=dict(method=["PUT"]), - ) - webapp.mapper.connect("tools", "/api/tools", controller="tools", 
action="index", conditions=dict(method=["GET"])) - webapp.mapper.connect( - "version", "/api/version", controller="configuration", action="version", conditions=dict(method=["GET"]) - ) + if SHED_API_VERSION == "v1": + webapp.add_api_controllers("tool_shed.webapp.api", app) + webapp.mapper.connect( + "api_key_retrieval", + "/api/authenticate/baseauth/", + controller="authenticate", + action="get_tool_shed_api_key", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "group", "/api/groups/", controller="groups", action="index", conditions=dict(method=["GET"]) + ) + webapp.mapper.connect( + "group", "/api/groups/", controller="groups", action="create", conditions=dict(method=["POST"]) + ) + webapp.mapper.connect( + "group", "/api/groups/{encoded_id}", controller="groups", action="show", conditions=dict(method=["GET"]) + ) + webapp.mapper.resource( + "category", + "categories", + controller="categories", + name_prefix="category_", + path_prefix="/api", + parent_resources=dict(member_name="category", collection_name="categories"), + ) + webapp.mapper.connect( + "repositories_in_category", + "/api/categories/{category_id}/repositories", + controller="categories", + action="get_repositories", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "show_updates_for_repository", + "/api/repositories/updates", + controller="repositories", + action="updates", + conditions=dict(method=["GET"]), + ) + webapp.mapper.resource( + "repository", + "repositories", + controller="repositories", + collection={ + "add_repository_registry_entry": "POST", + "get_repository_revision_install_info": "GET", + "get_ordered_installable_revisions": "GET", + "get_installable_revisions": "GET", + "remove_repository_registry_entry": "POST", + "reset_metadata_on_repositories": "POST", + "reset_metadata_on_repository": "POST", + }, + name_prefix="repository_", + path_prefix="/api", + parent_resources=dict(member_name="repository", collection_name="repositories"), + ) + webapp.mapper.resource( + "repository_revision", + "repository_revisions", + member={"repository_dependencies": "GET", "export": "POST"}, + controller="repository_revisions", + name_prefix="repository_revision_", + path_prefix="/api", + parent_resources=dict(member_name="repository_revision", collection_name="repository_revisions"), + ) + webapp.mapper.resource( + "user", + "users", + controller="users", + name_prefix="user_", + path_prefix="/api", + parent_resources=dict(member_name="user", collection_name="users"), + ) + webapp.mapper.connect( + "update_repository", + "/api/repositories/{id}", + controller="repositories", + action="update", + conditions=dict(method=["PATCH", "PUT"]), + ) + webapp.mapper.connect( + "repository_create_changeset_revision", + "/api/repositories/{id}/changeset_revision", + controller="repositories", + action="create_changeset_revision", + conditions=dict(method=["POST"]), + ) + webapp.mapper.connect( + "repository_get_metadata", + "/api/repositories/{id}/metadata", + controller="repositories", + action="metadata", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "repository_show_tools", + "/api/repositories/{id}/{changeset}/show_tools", + controller="repositories", + action="show_tools", + conditions=dict(method=["GET"]), + ) + webapp.mapper.connect( + "create_repository", + "/api/repositories", + controller="repositories", + action="create", + conditions=dict(method=["POST"]), + ) + webapp.mapper.connect( + "tools", + "/api/tools/build_search_index", + controller="tools", + 
action="build_search_index", + conditions=dict(method=["PUT"]), + ) + webapp.mapper.connect( + "tools", "/api/tools", controller="tools", action="index", conditions=dict(method=["GET"]) + ) + webapp.mapper.connect( + "version", "/api/version", controller="configuration", action="version", conditions=dict(method=["GET"]) + ) webapp.finalize_config() # Wrap the webapp in some useful middleware diff --git a/lib/tool_shed/webapp/controllers/admin.py b/lib/tool_shed/webapp/controllers/admin.py index c1e4c17286b8..aec70e430121 100644 --- a/lib/tool_shed/webapp/controllers/admin.py +++ b/lib/tool_shed/webapp/controllers/admin.py @@ -362,7 +362,7 @@ def manage_role_associations(self, trans, **kwd): @web.expose @web.require_admin def reset_metadata_on_selected_repositories_in_tool_shed(self, trans, **kwd): - rmm = repository_metadata_manager.RepositoryMetadataManager(trans.app, trans.user) + rmm = repository_metadata_manager.RepositoryMetadataManager(trans) if "reset_metadata_on_selected_repositories_button" in kwd: message, status = rmm.reset_metadata_on_selected_repositories(**kwd) else: diff --git a/lib/tool_shed/webapp/controllers/repository.py b/lib/tool_shed/webapp/controllers/repository.py index 03ee7e2a6671..b3c1725e8507 100644 --- a/lib/tool_shed/webapp/controllers/repository.py +++ b/lib/tool_shed/webapp/controllers/repository.py @@ -35,6 +35,7 @@ from galaxy.web.legacy_framework import grids from galaxy.webapps.base.controller import BaseUIController from tool_shed.dependencies.repository import relation_builder +from tool_shed.managers.repositories import readmes from tool_shed.metadata import repository_metadata_manager from tool_shed.tools import ( tool_validator, @@ -645,26 +646,6 @@ def browse_valid_repositories(self, trans, **kwd): self.valid_repository_grid.title = title return self.valid_repository_grid(trans, **kwd) - @web.expose - def contact_owner(self, trans, id, **kwd): - message = escape(kwd.get("message", "")) - status = kwd.get("status", "done") - repository = repository_util.get_repository_in_tool_shed(trans.app, id) - metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( - trans.app, id, repository.tip(), metadata_only=True - ) - if trans.user and trans.user.email: - return trans.fill_template( - "/webapps/tool_shed/repository/contact_owner.mako", - repository=repository, - metadata=metadata, - message=message, - status=status, - ) - else: - # Do all we can to eliminate spam. 
- return trans.show_error_message("You must be logged in to contact the owner of a repository.") - @web.expose def create_galaxy_docker_image(self, trans, **kwd): message = escape(kwd.get("message", "")) @@ -1202,16 +1183,7 @@ def get_readme_files(self, trans, **kwd): changeset_revision = kwd.get("changeset_revision", None) if repository_name is not None and repository_owner is not None and changeset_revision is not None: repository = repository_util.get_repository_by_name_and_owner(trans.app, repository_name, repository_owner) - if repository: - repository_metadata = metadata_util.get_repository_metadata_by_changeset_revision( - trans.app, trans.security.encode_id(repository.id), changeset_revision - ) - if repository_metadata: - metadata = repository_metadata.metadata - if metadata: - return readme_util.build_readme_files_dict( - trans.app, repository, changeset_revision, repository_metadata.metadata - ) + return readmes(trans.app, repository, changeset_revision) return {} @web.json @@ -1263,7 +1235,7 @@ def get_repository_information(self, trans, repository_ids, changeset_revisions, cur_includes_tools_for_display_in_tool_panel, cur_has_repository_dependencies, cur_has_repository_dependencies_only_if_compiling_contained_td, - ) = repository_util.get_repo_info_dict(trans.app, trans.user, repository_id, changeset_revision) + ) = repository_util.get_repo_info_dict(trans, repository_id, changeset_revision) if cur_has_repository_dependencies and not has_repository_dependencies: has_repository_dependencies = True if ( @@ -2147,7 +2119,7 @@ def reset_all_metadata(self, trans, id, **kwd): # This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template. repository = repository_util.get_repository_in_tool_shed(trans.app, id) rmm = repository_metadata_manager.RepositoryMetadataManager( - app=trans.app, user=trans.user, repository=repository, resetting_all_metadata_on_repository=True + trans, repository=repository, resetting_all_metadata_on_repository=True ) rmm.reset_all_metadata_on_repository_in_tool_shed() rmm_metadata_dict = rmm.get_metadata_dict() @@ -2166,9 +2138,7 @@ def reset_all_metadata(self, trans, id, **kwd): @web.expose def reset_metadata_on_my_writable_repositories_in_tool_shed(self, trans, **kwd): - rmm = repository_metadata_manager.RepositoryMetadataManager( - trans.app, trans.user, resetting_all_metadata_on_repository=True - ) + rmm = repository_metadata_manager.RepositoryMetadataManager(trans, resetting_all_metadata_on_repository=True) if "reset_metadata_on_selected_repositories_button" in kwd: message, status = rmm.reset_metadata_on_selected_repositories(**kwd) else: @@ -2184,44 +2154,6 @@ def reset_metadata_on_my_writable_repositories_in_tool_shed(self, trans, **kwd): status=status, ) - @web.expose - def send_to_owner(self, trans, id, message=""): - repository = repository_util.get_repository_in_tool_shed(trans.app, id) - if not message: - message = "Enter a message" - status = "error" - elif trans.user and trans.user.email: - smtp_server = trans.app.config.smtp_server - from_address = trans.app.config.email_from - if smtp_server is None or from_address is None: - return trans.show_error_message("Mail is not configured for this Galaxy tool shed instance") - to_address = repository.user.email - # Get the name of the server hosting the tool shed instance. 
- host = trans.request.host - # Build the email message - body = string.Template(suc.contact_owner_template).safe_substitute( - username=trans.user.username, - repository_name=repository.name, - email=trans.user.email, - message=message, - host=host, - ) - subject = f"Regarding your tool shed repository named {repository.name}" - # Send it - try: - util.send_mail(from_address, to_address, subject, body, trans.app.config) - message = "Your message has been sent" - status = "done" - except Exception as e: - message = f"An error occurred sending your message by email: {util.unicodify(e)}" - status = "error" - else: - # Do all we can to eliminate spam. - return trans.show_error_message("You must be logged in to contact the owner of a repository.") - return trans.response.send_redirect( - web.url_for(controller="repository", action="contact_owner", id=id, message=message, status=status) - ) - @web.expose @require_login("set email alerts") def set_email_alerts(self, trans, **kwd): diff --git a/lib/tool_shed/webapp/controllers/upload.py b/lib/tool_shed/webapp/controllers/upload.py deleted file mode 100644 index ef3ef730dd78..000000000000 --- a/lib/tool_shed/webapp/controllers/upload.py +++ /dev/null @@ -1,447 +0,0 @@ -import logging -import os -import shutil -import tarfile -import tempfile - -import requests - -from galaxy import ( - util, - web, -) -from galaxy.tool_shed.util import dependency_display -from galaxy.util import checkers -from galaxy.webapps.base.controller import BaseUIController -from tool_shed.dependencies import attribute_handlers -from tool_shed.metadata import repository_metadata_manager -from tool_shed.repository_types import util as rt_util -from tool_shed.tools.data_table_manager import ShedToolDataTableManager -from tool_shed.util import ( - basic_util, - commit_util, - hg_util, - repository_content_util, - repository_util, - shed_util_common as suc, - xml_util, -) -from tool_shed.util.web_util import escape -from tool_shed.webapp.framework.decorators import require_login - -log = logging.getLogger(__name__) - - -class UploadController(BaseUIController): - @web.expose - @require_login("upload", use_panels=True) - def upload(self, trans, **kwd): - message = escape(kwd.get("message", "")) - status = kwd.get("status", "done") - commit_message = escape(kwd.get("commit_message", "Uploaded")) - repository_id = kwd.get("repository_id", "") - repository = repository_util.get_repository_in_tool_shed(trans.app, repository_id) - repo_dir = repository.repo_path(trans.app) - uncompress_file = util.string_as_bool(kwd.get("uncompress_file", "true")) - remove_repo_files_not_in_tar = util.string_as_bool(kwd.get("remove_repo_files_not_in_tar", "true")) - uploaded_file = None - upload_point = commit_util.get_upload_point(repository, **kwd) - tip = repository.tip() - file_data = kwd.get("file_data", "") - url = kwd.get("url", "") - # Part of the upload process is sending email notification to those that have registered to - # receive them. One scenario occurs when the first change set is produced for the repository. - # See the suc.handle_email_alerts() method for the definition of the scenarios. - new_repo_alert = repository.is_new() - uploaded_directory = None - if kwd.get("upload_button", False): - if file_data == "" and url == "": - message = "No files were entered on the upload form." - status = "error" - uploaded_file = None - elif url and url.startswith("hg"): - # Use mercurial clone to fetch repository, contents will then be copied over. 
- uploaded_directory = tempfile.mkdtemp() - repo_url = f"http{url[len('hg'):]}" - cloned_ok, error_message = hg_util.clone_repository(repo_url, uploaded_directory) - if not cloned_ok: - message = f"Error uploading via mercurial clone: {error_message}" - status = "error" - basic_util.remove_dir(uploaded_directory) - uploaded_directory = None - elif url: - valid_url = True - try: - stream = requests.get(url, stream=True, timeout=util.DEFAULT_SOCKET_TIMEOUT) - except Exception as e: - valid_url = False - message = f"Error uploading file via http: {util.unicodify(e)}" - status = "error" - uploaded_file = None - if valid_url: - with tempfile.NamedTemporaryFile(mode="wb", delete=False) as uploaded_file: - uploaded_file_name = uploaded_file.name - for chunk in stream.iter_content(chunk_size=util.CHUNK_SIZE): - if chunk: - uploaded_file.write(chunk) - uploaded_file.flush() - uploaded_file_filename = url.split("/")[-1] - isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0 - elif file_data not in ("", None): - uploaded_file = file_data.file - uploaded_file_name = uploaded_file.name - uploaded_file_filename = os.path.split(file_data.filename)[-1] - isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0 - if uploaded_file or uploaded_directory: - rdah = attribute_handlers.RepositoryDependencyAttributeHandler(trans.app, unpopulate=False) - tdah = attribute_handlers.ToolDependencyAttributeHandler(trans.app, unpopulate=False) - stdtm = ShedToolDataTableManager(trans.app) - ok = True - isgzip = False - isbz2 = False - if uploaded_file: - if uncompress_file: - isgzip = checkers.is_gzip(uploaded_file_name) - if not isgzip: - isbz2 = checkers.is_bz2(uploaded_file_name) - if isempty: - tar = None - istar = False - else: - # Determine what we have - a single file or an archive - try: - if (isgzip or isbz2) and uncompress_file: - # Open for reading with transparent compression. - tar = tarfile.open(uploaded_file_name, "r:*") - else: - tar = tarfile.open(uploaded_file_name) - istar = True - except tarfile.ReadError: - tar = None - istar = False - else: - # Uploaded directory - istar = False - if istar: - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = repository_content_util.upload_tar( - trans, - rdah, - tdah, - repository, - tar, - uploaded_file, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ) - elif uploaded_directory: - ( - ok, - message, - files_to_remove, - content_alert_str, - undesirable_dirs_removed, - undesirable_files_removed, - ) = self.upload_directory( - trans, - rdah, - tdah, - repository, - uploaded_directory, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ) - else: - if (isgzip or isbz2) and uncompress_file: - uploaded_file_filename = commit_util.uncompress( - repository, uploaded_file_name, uploaded_file_filename, isgzip=isgzip, isbz2=isbz2 - ) - if ( - repository.type == rt_util.REPOSITORY_SUITE_DEFINITION - and uploaded_file_filename != rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME - ): - ok = False - message = "Repositories of type Repository suite definition can only contain a single file named " - message += "repository_dependencies.xml." 
- elif ( - repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION - and uploaded_file_filename != rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME - ): - ok = False - message = "Repositories of type Tool dependency definition can only contain a single file named " - message += "tool_dependencies.xml." - if ok: - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point, uploaded_file_filename)) - else: - full_path = os.path.abspath(os.path.join(repo_dir, uploaded_file_filename)) - # Move some version of the uploaded file to the load_point within the repository hierarchy. - if uploaded_file_filename in [rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME]: - # Inspect the contents of the file to see if toolshed or changeset_revision attributes - # are missing and if so, set them appropriately. - altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) - if error_message: - ok = False - message = error_message - status = "error" - elif altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, full_path) - else: - shutil.move(uploaded_file_name, full_path) - elif uploaded_file_filename in [rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME]: - # Inspect the contents of the file to see if changeset_revision values are - # missing and if so, set them appropriately. - altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) - if error_message: - ok = False - message = error_message - status = "error" - if ok: - if altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, full_path) - else: - shutil.move(uploaded_file_name, full_path) - else: - shutil.move(uploaded_file_name, full_path) - if ok: - # See if any admin users have chosen to receive email alerts when a repository is updated. - # If so, check every uploaded file to ensure content is appropriate. - check_contents = commit_util.check_file_contents_for_email_alerts(trans.app) - if check_contents and os.path.isfile(full_path): - content_alert_str = commit_util.check_file_content_for_html_and_images(full_path) - else: - content_alert_str = "" - hg_util.add_changeset(repo_dir, full_path) - hg_util.commit_changeset( - repo_dir, - full_path_to_changeset=full_path, - username=trans.user.username, - message=commit_message, - ) - if full_path.endswith("tool_data_table_conf.xml.sample"): - # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded - # by parsing the file and adding new entries to the in-memory trans.app.tool_data_tables - # dictionary. - error, error_message = stdtm.handle_sample_tool_data_table_conf_file( - full_path, persist=False - ) - if error: - message = f"{message}
{error_message}" - # See if the content of the change set was valid. - admin_only = len(repository.downloadable_revisions) != 1 - suc.handle_email_alerts( - trans.app, - trans.request.host, - repository, - content_alert_str=content_alert_str, - new_repo_alert=new_repo_alert, - admin_only=admin_only, - ) - if ok: - # Update the repository files for browsing. - hg_util.update_repository(repo_dir) - # Get the new repository tip. - if tip == repository.tip(): - message = "No changes to repository. " - status = "warning" - else: - if (isgzip or isbz2) and uncompress_file: - uncompress_str = " uncompressed and " - else: - uncompress_str = " " - if uploaded_directory: - source_type = "repository" - source = url - else: - source_type = "file" - source = uploaded_file_filename - message = f"The {source_type} {escape(source)} has been successfully{uncompress_str}uploaded to the repository. " - if istar and (undesirable_dirs_removed or undesirable_files_removed): - items_removed = undesirable_dirs_removed + undesirable_files_removed - message += ( - " %d undesirable items (.hg .svn .git directories, .DS_Store, hgrc files, etc) " - % items_removed - ) - message += "were removed from the archive. " - if istar and remove_repo_files_not_in_tar and files_to_remove: - if upload_point is not None: - message += ( - " %d files were removed from the repository relative to the selected upload point '%s'. " - % (len(files_to_remove), upload_point) - ) - else: - message += " %d files were removed from the repository root. " % len(files_to_remove) - rmm = repository_metadata_manager.RepositoryMetadataManager( - app=trans.app, user=trans.user, repository=repository - ) - status, error_message = rmm.set_repository_metadata_due_to_new_tip( - trans.request.host, content_alert_str=content_alert_str, **kwd - ) - if error_message: - message = error_message - kwd["message"] = message - if repository.metadata_revisions: - # A repository's metadata revisions are order descending by update_time, so the zeroth revision - # will be the tip just after an upload. - metadata_dict = repository.metadata_revisions[0].metadata - else: - metadata_dict = {} - dd = dependency_display.DependencyDisplayer(trans.app) - if str(repository.type) not in [ - rt_util.REPOSITORY_SUITE_DEFINITION, - rt_util.TOOL_DEPENDENCY_DEFINITION, - ]: - change_repository_type_message = rt_util.generate_message_for_repository_type_change( - trans.app, repository - ) - if change_repository_type_message: - message += change_repository_type_message - status = "warning" - else: - # Provide a warning message if a tool_dependencies.xml file is provided, but tool dependencies - # weren't loaded due to a requirement tag mismatch or some other problem. Tool dependency - # definitions can define orphan tool dependencies (no relationship to any tools contained in the - # repository), so warning messages are important because orphans are always valid. The repository - # owner must be warned in case they did not intend to define an orphan dependency, but simply - # provided incorrect information (tool shed, name owner, changeset_revision) for the definition. - orphan_message = dd.generate_message_for_orphan_tool_dependencies(repository, metadata_dict) - if orphan_message: - message += orphan_message - status = "warning" - # Handle messaging for invalid tool dependencies. 
- invalid_tool_dependencies_message = dd.generate_message_for_invalid_tool_dependencies(metadata_dict) - if invalid_tool_dependencies_message: - message += invalid_tool_dependencies_message - status = "error" - # Handle messaging for invalid repository dependencies. - invalid_repository_dependencies_message = dd.generate_message_for_invalid_repository_dependencies( - metadata_dict, error_from_tuple=True - ) - if invalid_repository_dependencies_message: - message += invalid_repository_dependencies_message - status = "error" - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - stdtm.reset_tool_data_tables() - if uploaded_directory: - basic_util.remove_dir(uploaded_directory) - trans.response.send_redirect( - web.url_for( - controller="repository", - action="browse_repository", - id=repository_id, - commit_message="Deleted selected files", - message=message, - status=status, - ) - ) - else: - if uploaded_directory: - basic_util.remove_dir(uploaded_directory) - status = "error" - # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file. - stdtm.reset_tool_data_tables() - return trans.fill_template( - "/webapps/tool_shed/repository/upload.mako", - repository=repository, - changeset_revision=tip, - url=url, - commit_message=commit_message, - uncompress_file=uncompress_file, - remove_repo_files_not_in_tar=remove_repo_files_not_in_tar, - message=message, - status=status, - ) - - def upload_directory( - self, - trans, - rdah, - tdah, - repository, - uploaded_directory, - upload_point, - remove_repo_files_not_in_tar, - commit_message, - new_repo_alert, - ): - repo_dir = repository.repo_path(trans.app) - undesirable_dirs_removed = 0 - undesirable_files_removed = 0 - if upload_point is not None: - full_path = os.path.abspath(os.path.join(repo_dir, upload_point)) - else: - full_path = os.path.abspath(repo_dir) - filenames_in_archive = [] - for root, _dirs, files in os.walk(uploaded_directory): - for uploaded_file in files: - relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file)) - if repository.type == rt_util.REPOSITORY_SUITE_DEFINITION: - ok = os.path.basename(uploaded_file) == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME - elif repository.type == rt_util.TOOL_DEPENDENCY_DEFINITION: - ok = os.path.basename(uploaded_file) == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME - else: - ok = os.path.basename(uploaded_file) not in commit_util.UNDESIRABLE_FILES - if ok: - for file_path_item in relative_path.split("/"): - if file_path_item in commit_util.UNDESIRABLE_DIRS: - undesirable_dirs_removed += 1 - ok = False - break - else: - undesirable_files_removed += 1 - if ok: - uploaded_file_name = os.path.abspath(os.path.join(root, uploaded_file)) - if os.path.split(uploaded_file_name)[-1] == rt_util.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME: - # Inspect the contents of the file to see if toolshed or changeset_revision - # attributes are missing and if so, set them appropriately. - altered, root_elem, error_message = rdah.handle_tag_attributes(uploaded_file_name) - if error_message: - return False, error_message, [], "", [], [] - elif altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, uploaded_file_name) - elif os.path.split(uploaded_file_name)[-1] == rt_util.TOOL_DEPENDENCY_DEFINITION_FILENAME: - # Inspect the contents of the file to see if toolshed or changeset_revision - # attributes are missing and if so, set them appropriately. 
- altered, root_elem, error_message = tdah.handle_tag_attributes(uploaded_file_name) - if error_message: - return False, error_message, [], "", [], [] - if altered: - tmp_filename = xml_util.create_and_write_tmp_file(root_elem) - shutil.move(tmp_filename, uploaded_file_name) - repo_path = os.path.join(full_path, relative_path) - repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir)) - if not os.path.exists(repo_basedir): - os.makedirs(repo_basedir) - if os.path.exists(repo_path): - if os.path.isdir(repo_path): - shutil.rmtree(repo_path) - else: - os.remove(repo_path) - shutil.move(os.path.join(uploaded_directory, relative_path), repo_path) - filenames_in_archive.append(relative_path) - return commit_util.handle_directory_changes( - trans.app, - trans.request.host, - trans.user.username, - repository, - full_path, - filenames_in_archive, - remove_repo_files_not_in_tar, - new_repo_alert, - commit_message, - undesirable_dirs_removed, - undesirable_files_removed, - ) diff --git a/lib/tool_shed/webapp/fast_app.py b/lib/tool_shed/webapp/fast_app.py index 707dba00041d..18af5dcb7708 100644 --- a/lib/tool_shed/webapp/fast_app.py +++ b/lib/tool_shed/webapp/fast_app.py @@ -1,30 +1,212 @@ +import logging +import os +from pathlib import Path +from typing import ( + Any, + cast, + Dict, + Optional, +) + from a2wsgi import WSGIMiddleware -from fastapi import FastAPI +from fastapi import ( + Depends, + FastAPI, +) +from fastapi.responses import ( + HTMLResponse, + RedirectResponse, +) +from fastapi.staticfiles import StaticFiles +from starlette_graphene3 import ( + GraphQLApp, + make_graphiql_handler, +) from galaxy.webapps.base.api import ( add_exception_handler, add_request_id_middleware, include_all_package_routers, ) +from galaxy.webapps.openapi.utils import get_openapi +from tool_shed.structured_app import ToolShedApp +from tool_shed.webapp.api2 import ( + ensure_valid_session, + get_trans, +) +from tool_shed.webapp.graphql.schema import schema + +log = logging.getLogger(__name__) + +api_tags_metadata = [ + { + "name": "authenticate", + "description": "Authentication-related endpoints.", + }, + { + "name": "categories", + "description": "Category-related endpoints.", + }, + { + "name": "repositories", + "description": "Repository-related endpoints.", + }, + { + "name": "users", + "description": "User-related endpoints.", + }, + {"name": "undocumented", "description": "API routes that have not yet been ported to FastAPI."}, +] + +# Set this if asset handling should be sent to vite. 
+# Run vite with: +# yarn dev +# Start tool shed with: +# TOOL_SHED_VITE_PORT=4040 TOOL_SHED_API_VERSION=v2 ./run_tool_shed.sh +TOOL_SHED_VITE_PORT: Optional[str] = os.environ.get("TOOL_SHED_VITE_PORT", None) +TOOL_SHED_USE_HMR: bool = TOOL_SHED_VITE_PORT is not None +FRONTEND = Path(__file__).parent.resolve() / "frontend" +FRONTEND_DIST = FRONTEND / "dist" + + +def frontend_controller(app): + shed_entry_point = "main.ts" + vite_runtime = "@vite/client" + + def index(trans=Depends(get_trans)): + if TOOL_SHED_USE_HMR: + index = FRONTEND / "index.html" + index_html = index.read_text() + index_html = index_html.replace( + f"""<script type="module" src="/{shed_entry_point}"></script>""", + f"""<script type="module" src="http://localhost:{TOOL_SHED_VITE_PORT}/{vite_runtime}"></script> <script type="module" src="http://localhost:{TOOL_SHED_VITE_PORT}/{shed_entry_point}"></script>""", + ) + else: + index = FRONTEND_DIST / "index.html" + index_html = index.read_text() + ensure_valid_session(trans) + cookie = trans.session_csrf_token + r: HTMLResponse = cast(HTMLResponse, trans.response) + r.set_cookie("session_csrf_token", cookie) + return index_html + + return app, index + + +def redirect_route(app, from_url: str, to_url: str): + @app.get(from_url) + def redirect(): + return RedirectResponse(to_url) + + +def frontend_route(controller, path): + app, index = controller + app.get(path, response_class=HTMLResponse)(index) + + +def mount_graphql(app: FastAPI, tool_shed_app: ToolShedApp): + context = { + "session": tool_shed_app.model.context, + "security": tool_shed_app.security, + } + g_app = GraphQLApp(schema, on_get=make_graphiql_handler(), context_value=context, root_value=context) + app.mount("/graphql", g_app) + app.mount("/api/graphql", g_app) + + +FRONT_END_ROUTES = [ + "/", + "/admin", + "/login", + "/register", + "/logout_success", + "/login_success", + "/registration_success", + "/help", + "/repositories_by_search", + "/repositories_by_category", + "/repositories_by_category/{category_id}", + "/repositories_by_owner", + "/repositories_by_owner/{username}", + "/repositories/{repository_id}", + "/repositories_search", + "/_component_showcase", + "/user/api_key", + "/user/change_password", + "/view/{username}", + "/view/{username}/{repository_name}", + "/view/{username}/{repository_name}/{changeset_revision}", +] +LEGACY_ROUTES = { + "/user/create": "/register", # for twilltestcase + "/user/login": "/login", # for twilltestcase +} def initialize_fast_app(gx_webapp, tool_shed_app): - app = FastAPI( - title="Galaxy Tool Shed API", - description=("This API allows you to manage the Tool Shed repositories."), - docs_url="/api/docs", - redoc_url="/api/redoc", - ) + app = get_fastapi_instance() add_exception_handler(app) add_request_id_middleware(app) - include_all_package_routers(app, "tool_shed.webapp.api") + from .buildapp import SHED_API_VERSION + + def mount_static(directory: Path): + name = directory.name + if directory.exists(): + app.mount(f"/{name}", StaticFiles(directory=directory), name=name) + + if SHED_API_VERSION == "v2": + controller = frontend_controller(app) + for route in FRONT_END_ROUTES: + frontend_route(controller, route) + + for from_route, to_route in LEGACY_ROUTES.items(): + redirect_route(app, from_route, to_route) + + mount_graphql(app, tool_shed_app) + + mount_static(FRONTEND / "static") + if TOOL_SHED_USE_HMR: + mount_static(FRONTEND / "node_modules") + else: + mount_static(FRONTEND_DIST / "assets") + + routes_package = "tool_shed.webapp.api" if SHED_API_VERSION == "v1" else "tool_shed.webapp.api2" + include_all_package_routers(app, routes_package) wsgi_handler = WSGIMiddleware(gx_webapp) tool_shed_app.haltables.append(("WSGI Middleware threadpool", wsgi_handler.executor.shutdown)) app.mount("/", wsgi_handler) return app
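Every path in FRONT_END_ROUTES above is answered with the same index document; resolving a path to an actual page is left entirely to the client-side router shipped with the SPA. A minimal sketch of the matching vue-router table follows — the component imports and parameter names are illustrative assumptions, not taken from this change:

    import { createRouter, createWebHistory } from "vue-router"
    import LandingPage from "@/components/pages/LandingPage.vue"
    import RepositoryPage from "@/components/pages/RepositoryPage.vue"
    import RepositoriesByOwner from "@/components/pages/RepositoriesByOwner.vue"

    // Each server-side entry in FRONT_END_ROUTES needs a client-side twin, or a
    // hard refresh on a deep link would serve the SPA shell with no matching view.
    export const router = createRouter({
        history: createWebHistory(),
        routes: [
            { path: "/", component: LandingPage },
            { path: "/repositories/:repositoryId", component: RepositoryPage },
            { path: "/repositories_by_owner/:username", component: RepositoriesByOwner },
        ],
    })

The redirect_route helper covers the inverse case: legacy URLs such as /user/login answer with an HTTP redirect to their SPA equivalents rather than rendering anything themselves.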
+ +def get_fastapi_instance() -> FastAPI: + return FastAPI( + title="Galaxy Tool Shed API", + description=("This API allows you to manage the Tool Shed repositories."), + docs_url="/api/docs", + redoc_url="/api/redoc", + tags=api_tags_metadata, + license_info={"name": "MIT", "url": "https://github.com/galaxyproject/galaxy/blob/dev/LICENSE.txt"}, + ) + + +def get_openapi_schema() -> Dict[str, Any]: + """ + Dump the OpenAPI schema without starting a full app and webserver. + """ + app = get_fastapi_instance() + include_all_package_routers(app, "tool_shed.webapp.api2") + return get_openapi( + title=app.title, + version=app.version, + openapi_version="3.1.0", + description=app.description, + routes=app.routes, + license_info=app.license_info, + ) + + __all__ = ( "add_request_id_middleware", + "get_openapi_schema", "initialize_fast_app", ) diff --git a/lib/tool_shed/webapp/frontend/.eslintignore b/lib/tool_shed/webapp/frontend/.eslintignore new file mode 100644 index 000000000000..b22c816bfd5e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.eslintignore @@ -0,0 +1,7 @@ +# don't ever lint node_modules +node_modules +# don't lint build output (make sure it's set to your correct build folder name) +dist + +# Ignore codegen artifacts +src/gql/*.ts diff --git a/lib/tool_shed/webapp/frontend/.eslintrc.js b/lib/tool_shed/webapp/frontend/.eslintrc.js new file mode 100644 index 000000000000..7343e0cc14e1 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.eslintrc.js @@ -0,0 +1,29 @@ +module.exports = { + root: true, + parser: "vue-eslint-parser", + parserOptions: { + parser: "@typescript-eslint/parser", + // project: ['./tsconfig.json'], + }, + extends: [ + "plugin:vue/strongly-recommended", + "eslint:recommended", + "@vue/typescript/recommended", + "prettier", + "plugin:vuejs-accessibility/recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:@typescript-eslint/recommended", + // More goodies.. + // "plugin:@typescript-eslint/recommended-requiring-type-checking", + ], + plugins: ["@typescript-eslint", "prettier", "vuejs-accessibility"], + rules: { + "prettier/prettier": "error", + // not needed for vue 3 + "vue/no-multiple-template-root": "off", + // upgrade warnings for commonly encountered problems + "@typescript-eslint/no-unused-vars": "error", + "vue/require-default-prop": "error", + "vue/v-slot-style": "error", + }, +} diff --git a/lib/tool_shed/webapp/frontend/.prettierrc b/lib/tool_shed/webapp/frontend/.prettierrc new file mode 100644 index 000000000000..0fe7f46213c9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/.prettierrc @@ -0,0 +1,5 @@ +{ + "tabWidth": 4, + "printWidth": 120, + "semi": false +} diff --git a/lib/tool_shed/webapp/frontend/Makefile b/lib/tool_shed/webapp/frontend/Makefile new file mode 100644 index 000000000000..b520b0be6844 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/Makefile @@ -0,0 +1,22 @@ +GALAXY_ROOT=../../../.. + +client: + yarn build + +dev: + yarn dev-all + +format: + yarn format + +lint: + yarn typecheck && yarn lint + +# These next two tasks don't really belong here, but they do demonstrate +# how to get a test server running and populated with some initial data +# for the new tool shed frontend. +run_test_backend: + cd $(GALAXY_ROOT); TOOL_SHED_CONFIG_OVERRIDE_BOOTSTRAP_ADMIN_API_KEY=tsadminkey TOOL_SHED_VITE_PORT=4040 TOOL_SHED_API_VERSION=v2 ./run_tool_shed.sh + +bootstrap_test_backend: + cd $(GALAXY_ROOT); . 
.venv/bin/activate; python scripts/bootstrap_test_shed.py diff --git a/lib/tool_shed/webapp/frontend/codegen.ts b/lib/tool_shed/webapp/frontend/codegen.ts new file mode 100644 index 000000000000..116794231c76 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/codegen.ts @@ -0,0 +1,16 @@ +import type { CodegenConfig } from '@graphql-codegen/cli' + +const config: CodegenConfig = { + schema: 'http://localhost:9009/graphql/', + documents: ['src/**/*.vue', 'src/**/*.ts'], + generates: { + './src/gql/': { + preset: 'client', + plugins: [], + config: { + useTypeImport: true + } + } + } +} +export default config diff --git a/lib/tool_shed/webapp/frontend/index.html b/lib/tool_shed/webapp/frontend/index.html new file mode 100644 index 000000000000..ab7c301efc7d --- /dev/null +++ b/lib/tool_shed/webapp/frontend/index.html @@ -0,0 +1,13 @@ + + + + + + + Galaxy Tool Shed + + +
+ + + diff --git a/lib/tool_shed/webapp/frontend/package.json b/lib/tool_shed/webapp/frontend/package.json new file mode 100644 index 000000000000..ab8b86cc6b26 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/package.json @@ -0,0 +1,54 @@ +{ + "name": "galaxy-tool-shed", + "license": "MIT", + "version": "0.2.0", + "scripts": { + "dev": "vite --port 4040 --strict-port", + "build": "vue-tsc --noEmit && vite build", + "graphql": "graphql-codegen --watch", + "dev-all": "concurrently --kill-others \"npm run dev\" \"npm run graphql\"", + "format": "prettier --write src", + "typecheck": "vue-tsc --noEmit", + "lint": "eslint src --ext .ts,.vue" + }, + "devDependencies": { + "@graphql-codegen/cli": "^2.16.1", + "@graphql-codegen/client-preset": "^1.2.3", + "@quasar/vite-plugin": "^1.0.4", + "@types/node": "^16.6.1", + "@typescript-eslint/eslint-plugin": "^5.47.1", + "@typescript-eslint/parser": "^5.47.1", + "@vitejs/plugin-vue": "^1.6.0", + "@vue/compiler-sfc": "^3.2.6", + "@vue/eslint-config-typescript": "^11.0.2", + "concurrently": "^7.6.0", + "eslint": "^8.30.0", + "eslint-config-prettier": "^8.5.0", + "eslint-plugin-prettier": "^4.2.1", + "eslint-plugin-vue": "^9.8.0", + "eslint-plugin-vuejs-accessibility": "^2.0.0", + "prettier": "^2.8.1", + "sass": "^1.32.0", + "typescript": "^4.3.2", + "vite": "^4.4.9", + "vue-eslint-parser": "^9.1.0", + "vue-tsc": "^1.0.16" + }, + "dependencies": { + "@apollo/client": "^3.7.3", + "@quasar/extras": "^1.12.4", + "@vue/apollo-composable": "^4.0.0-beta.1", + "@vue/apollo-option": "^4.0.0-alpha.20", + "axios": "^1.2.1", + "date-fns": "^2.29.3", + "date-fns-tz": "^1.3.7", + "e": "^0.2.2", + "graphql": "^16.6.0", + "graphql-tag": "^2.12.6", + "openapi-typescript-fetch": "^1.1.3", + "pinia": "^2.0.28", + "quasar": "^2.5.0", + "vue": "^3.2.6", + "vue-router": "4" + } +} diff --git a/lib/tool_shed/webapp/frontend/src/App.vue b/lib/tool_shed/webapp/frontend/src/App.vue new file mode 100644 index 000000000000..a11ce54f175e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/App.vue @@ -0,0 +1,51 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/apiUtil.ts b/lib/tool_shed/webapp/frontend/src/apiUtil.ts new file mode 100644 index 000000000000..f14774767f97 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/apiUtil.ts @@ -0,0 +1,19 @@ +import axios from "axios" +import { RawAxiosRequestConfig } from "axios" +import { components } from "@/schema" + +type User = components["schemas"]["User"] + +export async function getCurrentUser(): Promise<User | null> { + const conf: RawAxiosRequestConfig = {} + conf.validateStatus = (status: number) => { + const valid = status == 200 || status == 404 + return valid + } + const { data: user, status } = await axios.get("/api/users/current", conf) + if (status == 404) { + return null + } else { + return user as User + } +} diff --git a/lib/tool_shed/webapp/frontend/src/apollo.ts b/lib/tool_shed/webapp/frontend/src/apollo.ts new file mode 100644 index 000000000000..572617f52fc8 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/apollo.ts @@ -0,0 +1,25 @@ +import { createApolloProvider } from "@vue/apollo-option" +import { ApolloClient, InMemoryCache, DefaultOptions } from "@apollo/client/core" + +const defaultOptions: DefaultOptions = { + watchQuery: { + fetchPolicy: "no-cache", + errorPolicy: "ignore", + }, + query: { + fetchPolicy: "no-cache", + errorPolicy: "all", + }, +} + +export const apolloClient = new ApolloClient({ + uri: "/api/graphql/", + cache: new InMemoryCache(), + defaultOptions: defaultOptions, +}) + +export const 
apolloClientProvider = createApolloProvider({ + defaultClient: apolloClient, +}) + +// npx apollo schema:download --endpoint=http://localhost:9009/graphql/ graphql-schema.json diff --git a/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue new file mode 100644 index 000000000000..4d07d9cbe897 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcase.vue @@ -0,0 +1,15 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue new file mode 100644 index 000000000000..d57104208e6c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ComponentShowcaseExample.vue @@ -0,0 +1,21 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue b/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue new file mode 100644 index 000000000000..1720ccf24208 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ConfigFileContents.vue @@ -0,0 +1,36 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue b/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue new file mode 100644 index 000000000000..7dfa1aff16e1 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ErrorBanner.vue @@ -0,0 +1,38 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue b/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue new file mode 100644 index 000000000000..907c40210c82 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoadingDiv.vue @@ -0,0 +1,32 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue b/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue new file mode 100644 index 000000000000..bc1342c5418e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoginForm.vue @@ -0,0 +1,38 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue b/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue new file mode 100644 index 000000000000..65e7403bde93 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/LoginPage.vue @@ -0,0 +1,17 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue b/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue new file mode 100644 index 000000000000..538c8aeb32f9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ManagePushAccess.vue @@ -0,0 +1,42 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue b/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue new file mode 100644 index 000000000000..77a34dc8d324 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ModalForm.vue @@ -0,0 +1,23 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue b/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue new file mode 100644 index 000000000000..ae392f503046 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/PageContainer.vue @@ -0,0 +1,14 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue b/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue new file mode 100644 index 000000000000..934f4e385985 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RecentlyCreatedRepositories.vue @@ -0,0 +1,39 @@ + + + diff --git 
a/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue b/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue new file mode 100644 index 000000000000..44454558a79b --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RecentlyUpdatedRepositories.vue @@ -0,0 +1,39 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue b/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue new file mode 100644 index 000000000000..a66cf394f529 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RegisterPage.vue @@ -0,0 +1,82 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue b/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue new file mode 100644 index 000000000000..2d6eaac69728 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RegistrationSuccess.vue @@ -0,0 +1,22 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue new file mode 100644 index 000000000000..28fe8e1f8b35 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesForOwner.vue @@ -0,0 +1,67 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue new file mode 100644 index 000000000000..7cba807d69db --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGrid.vue @@ -0,0 +1,160 @@ + + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts new file mode 100644 index 000000000000..5b9ca505d151 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoriesGridInterface.ts @@ -0,0 +1,36 @@ +import { useFragment } from "@/gql/fragment-masking" +import { RepositoryListItemFragment } from "@/gqlFragements" + +export interface RepositoryGridItem { + id: string + name: string + owner: string + index: number + update_time: string + description: string | null + homepage_url: string | null | undefined + remote_repository_url: string | null | undefined +} + +export type OnScroll = () => Promise<void> + +/* eslint-disable @typescript-eslint/no-explicit-any */ +export function nodeToRow(node: any, index: number): RepositoryGridItem { + /* Adapt GQL results to the RepositoryGridItem interface consumed by the + component. 
*/ + if (node == null) { + throw Error("Problem with server response") + } + + const fragment = useFragment(RepositoryListItemFragment, node) + return { + id: fragment.encodedId, + index: index, + name: fragment.name as string, // TODO: fix schema.py so this is nonnull + owner: fragment.user.username, + description: fragment.description || null, + homepage_url: fragment.homepageUrl || null, + remote_repository_url: fragment.remoteRepositoryUrl || null, + update_time: fragment.updateTime, + } +} diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue new file mode 100644 index 000000000000..ccd55676efbd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryActions.vue @@ -0,0 +1,45 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue new file mode 100644 index 000000000000..8e41a409c1bd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryCreation.vue @@ -0,0 +1,41 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue new file mode 100644 index 000000000000..994ca74089af --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryExplore.vue @@ -0,0 +1,71 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue new file mode 100644 index 000000000000..0d3f0924f9b5 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryHealth.vue @@ -0,0 +1,28 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue new file mode 100644 index 000000000000..706ee2143cc2 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryLink.vue @@ -0,0 +1,29 @@ + + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue new file mode 100644 index 000000000000..6a69ebe9e4fa --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryLinks.vue @@ -0,0 +1,41 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue new file mode 100644 index 000000000000..996c4400f28d --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryTool.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue b/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue new file mode 100644 index 000000000000..e479176d30d6 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RepositoryUpdate.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue b/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue new file mode 100644 index 000000000000..525934331e1e --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/RevisionActions.vue @@ -0,0 +1,62 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue b/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue new file mode 100644 index 000000000000..eb559931236d --- /dev/null +++ 
b/lib/tool_shed/webapp/frontend/src/components/RevisionSelect.vue @@ -0,0 +1,56 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue b/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue new file mode 100644 index 000000000000..351418bc895c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/SelectUser.vue @@ -0,0 +1,62 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue b/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue new file mode 100644 index 000000000000..631d2349f8ac --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/ShedToolbar.vue @@ -0,0 +1,117 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue b/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue new file mode 100644 index 000000000000..03a3a0443bed --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/UtcDate.vue @@ -0,0 +1,32 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue b/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue new file mode 100644 index 000000000000..14744ddb64d8 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/AdminControls.vue @@ -0,0 +1,23 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue new file mode 100644 index 000000000000..de69706dec34 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ChangePassword.vue @@ -0,0 +1,50 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue new file mode 100644 index 000000000000..bbf717fecde4 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/CitableRepositoryPage.vue @@ -0,0 +1,44 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue new file mode 100644 index 000000000000..65786575671a --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ComponentsShowcase.vue @@ -0,0 +1,60 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue new file mode 100644 index 000000000000..adb927f6331c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/HelpPage.vue @@ -0,0 +1,20 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue new file mode 100644 index 000000000000..5e04f26eeb58 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/LandingPage.vue @@ -0,0 +1,25 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue b/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue new file mode 100644 index 000000000000..74d7c55980f9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/ManageApiKey.vue @@ -0,0 +1,84 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue new file mode 100644 index 000000000000..54e7addc4e53 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategories.vue @@ -0,0 +1,44 @@ + + diff 
--git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue new file mode 100644 index 000000000000..b57b82ecc98a --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByCategory.vue @@ -0,0 +1,85 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue new file mode 100644 index 000000000000..765e7e64d7ec --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwner.vue @@ -0,0 +1,15 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue new file mode 100644 index 000000000000..8b91b25fce02 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesByOwners.vue @@ -0,0 +1,36 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue new file mode 100644 index 000000000000..a2a87875cc5f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoriesBySearch.vue @@ -0,0 +1,88 @@ + + diff --git a/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue new file mode 100644 index 000000000000..6e31d63fc2d0 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/components/pages/RepositoryPage.vue @@ -0,0 +1,277 @@ + + + diff --git a/lib/tool_shed/webapp/frontend/src/constants.ts b/lib/tool_shed/webapp/frontend/src/constants.ts new file mode 100644 index 000000000000..b3a88733c8c6 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/constants.ts @@ -0,0 +1,13 @@ +export const UPDATING_WITH_PLANEMO_URL = + "https://planemo.readthedocs.io/en/latest/publishing.html#updating-a-repository" + +export const EPHEMERIS_TRAINING = + "https://training.galaxyproject.org/training-material/topics/admin/tutorials/tool-management/tutorial.html" + +export const AUTH_FORM_INPUT_PROPS = { + square: true, + clearable: false, + // choose filled or outlined or neither I think? + outlined: true, + filled: false, +} diff --git a/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts b/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts new file mode 100644 index 000000000000..a1f5d6e8ef75 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/fragment-masking.ts @@ -0,0 +1,50 @@ +import { ResultOf, TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" + +export type FragmentType> = TDocumentType extends DocumentNode< + infer TType, + any +> + ? TType extends { " $fragmentName"?: infer TKey } + ? TKey extends string + ? 
{ " $fragmentRefs"?: { [key in TKey]: TType } } + : never + : never + : never + +// return non-nullable if `fragmentType` is non-nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: FragmentType> +): TType +// return nullable if `fragmentType` is nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: FragmentType> | null | undefined +): TType | null | undefined +// return array of non-nullable if `fragmentType` is array of non-nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: ReadonlyArray>> +): ReadonlyArray +// return array of nullable if `fragmentType` is array of nullable +export function useFragment( + _documentNode: DocumentNode, + fragmentType: ReadonlyArray>> | null | undefined +): ReadonlyArray | null | undefined +export function useFragment( + _documentNode: DocumentNode, + fragmentType: + | FragmentType> + | ReadonlyArray>> + | null + | undefined +): TType | ReadonlyArray | null | undefined { + return fragmentType as any +} + +export function makeFragmentData>( + data: FT, + _fragment: F +): FragmentType { + return data as FragmentType +} diff --git a/lib/tool_shed/webapp/frontend/src/gql/gql.ts b/lib/tool_shed/webapp/frontend/src/gql/gql.ts new file mode 100644 index 000000000000..0793f6959859 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/gql.ts @@ -0,0 +1,98 @@ +/* eslint-disable */ +import * as types from "./graphql" +import { TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" + +/** + * Map of all GraphQL operations in the project. + * + * This map has several performance disadvantages: + * 1. It is not tree-shakeable, so it will include all operations in the project. + * 2. It is not minifiable, so the string of a GraphQL query will be multiple times inside the bundle. + * 3. It does not support dead code elimination, so it will add unused operations. + * + * Therefore it is highly recommended to use the babel-plugin for production. 
+ */ +const documents = { + "\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n": + types.RecentlyCreatedRepositoriesDocument, + "\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n": + types.RecentRepositoryUpdatesDocument, + "\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n": + types.RepositoriesByOwnerDocument, + "\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n": + types.RepositoryCreationItemFragmentDoc, + "\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n": + types.RepositoriesByCategoryDocument, + "\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n": + types.RepositoryListItemFragmentFragmentDoc, + "\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n": + types.RepositoryUpdateItemFragmentDoc, +} + +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + * + * + * @example + * ```ts + * const query = gql(`query GetUser($id: ID!) { user(id: $id) { name } }`); + * ``` + * + * The query argument is unknown! + * Please regenerate the types. + */ +export function graphql(source: string): unknown + +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n" +): (typeof documents)["\n query recentlyCreatedRepositories {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryCreationItem\n }\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n" +): (typeof documents)["\n query recentRepositoryUpdates {\n relayRepositories(first: 10, sort: UPDATE_TIME_DESC) {\n edges {\n node {\n ...RepositoryUpdateItem\n }\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. 
+ */ +export function graphql( + source: "\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n" +): (typeof documents)["\n query repositoriesByOwner($username: String, $cursor: String) {\n relayRepositoriesForOwner(username: $username, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n" +): (typeof documents)["\n fragment RepositoryCreationItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n createTime\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n" +): (typeof documents)["\n query repositoriesByCategory($categoryId: String, $cursor: String) {\n relayRepositoriesForCategory(encodedId: $categoryId, sort: UPDATE_TIME_DESC, first: 10, after: $cursor) {\n edges {\n cursor\n node {\n ...RepositoryListItemFragment\n }\n }\n pageInfo {\n endCursor\n hasNextPage\n }\n }\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n" +): (typeof documents)["\n fragment RepositoryListItemFragment on RelayRepository {\n encodedId\n name\n user {\n username\n }\n description\n type\n updateTime\n homepageUrl\n remoteRepositoryUrl\n }\n"] +/** + * The graphql function is used to parse GraphQL queries into a document that can be used by GraphQL clients. + */ +export function graphql( + source: "\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n" +): (typeof documents)["\n fragment RepositoryUpdateItem on RelayRepository {\n encodedId\n name\n user {\n username\n }\n updateTime\n }\n"] + +export function graphql(source: string) { + return (documents as any)[source] ?? {} +} + +export type DocumentType> = TDocumentNode extends DocumentNode< + infer TType, + any +> + ? 
TType + : never diff --git a/lib/tool_shed/webapp/frontend/src/gql/graphql.ts b/lib/tool_shed/webapp/frontend/src/gql/graphql.ts new file mode 100644 index 000000000000..f6c3433eb96b --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/graphql.ts @@ -0,0 +1,821 @@ +/* eslint-disable */ +import { TypedDocumentNode as DocumentNode } from "@graphql-typed-document-node/core" +export type Maybe = T | null +export type InputMaybe = Maybe +export type Exact = { [K in keyof T]: T[K] } +export type MakeOptional = Omit & { [SubKey in K]?: Maybe } +export type MakeMaybe = Omit & { [SubKey in K]: Maybe } +/** All built-in and custom scalars, mapped to their actual values */ +export type Scalars = { + ID: string + String: string + Boolean: boolean + Int: number + Float: number + /** + * The `DateTime` scalar type represents a DateTime + * value as specified by + * [iso8601](https://en.wikipedia.org/wiki/ISO_8601). + */ + DateTime: any +} + +/** An object with an ID */ +export type Node = { + /** The ID of the object */ + id: Scalars["ID"] +} + +/** The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. */ +export type PageInfo = { + __typename?: "PageInfo" + /** When paginating forwards, the cursor to continue. */ + endCursor?: Maybe + /** When paginating forwards, are there more items? */ + hasNextPage: Scalars["Boolean"] + /** When paginating backwards, are there more items? */ + hasPreviousPage: Scalars["Boolean"] + /** When paginating backwards, the cursor to continue. */ + startCursor?: Maybe +} + +export type Query = { + __typename?: "Query" + categories?: Maybe>> + node?: Maybe + relayCategories?: Maybe + relayRepositories?: Maybe + relayRepositoriesForCategory?: Maybe + relayRepositoriesForOwner?: Maybe + relayRevisions?: Maybe + relayUsers?: Maybe + repositories?: Maybe>> + revisions?: Maybe>> + users?: Maybe>> +} + +export type QueryNodeArgs = { + id: Scalars["ID"] +} + +export type QueryRelayCategoriesArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesForCategoryArgs = { + after?: InputMaybe + before?: InputMaybe + encodedId?: InputMaybe + first?: InputMaybe + id?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayRepositoriesForOwnerArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> + username?: InputMaybe +} + +export type QueryRelayRevisionsArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type QueryRelayUsersArgs = { + after?: InputMaybe + before?: InputMaybe + first?: InputMaybe + last?: InputMaybe + sort?: InputMaybe>> +} + +export type RelayCategory = Node & { + __typename?: "RelayCategory" + createTime?: Maybe + deleted?: Maybe + description?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + name: Scalars["String"] + repositories?: Maybe>> + updateTime?: Maybe +} + +export type RelayCategoryConnection = { + __typename?: "RelayCategoryConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayCategory` and its cursor. 
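+ * Edges pair each node with an opaque pagination cursor. An illustrative
+ * consumer (editor's sketch, not generated code) collecting category names from
+ * a query result:
+ * // const names = data.relayCategories?.edges.map((edge) => edge?.node?.name)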
*/ +export type RelayCategoryEdge = { + __typename?: "RelayCategoryEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayCategorySortEnum { + CreateTimeAsc = "CREATE_TIME_ASC", + CreateTimeDesc = "CREATE_TIME_DESC", + DeletedAsc = "DELETED_ASC", + DeletedDesc = "DELETED_DESC", + DescriptionAsc = "DESCRIPTION_ASC", + DescriptionDesc = "DESCRIPTION_DESC", + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + NameAsc = "NAME_ASC", + NameDesc = "NAME_DESC", + UpdateTimeAsc = "UPDATE_TIME_ASC", + UpdateTimeDesc = "UPDATE_TIME_DESC", +} + +export type RelayRepository = Node & { + __typename?: "RelayRepository" + categories?: Maybe>> + createTime?: Maybe + description?: Maybe + encodedId: Scalars["String"] + homepageUrl?: Maybe + id: Scalars["ID"] + longDescription?: Maybe + name: Scalars["String"] + remoteRepositoryUrl?: Maybe + type?: Maybe + updateTime?: Maybe + user: SimpleUser +} + +export type RelayRepositoryConnection = { + __typename?: "RelayRepositoryConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayRepository` and its cursor. */ +export type RelayRepositoryEdge = { + __typename?: "RelayRepositoryEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +export type RelayRepositoryMetadata = Node & { + __typename?: "RelayRepositoryMetadata" + changesetRevision: Scalars["String"] + createTime?: Maybe + downloadable?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + malicious?: Maybe + numericRevision?: Maybe + repository: SimpleRepository + updateTime?: Maybe +} + +export type RelayRepositoryMetadataConnection = { + __typename?: "RelayRepositoryMetadataConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. */ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayRepositoryMetadata` and its cursor. */ +export type RelayRepositoryMetadataEdge = { + __typename?: "RelayRepositoryMetadataEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayRepositoryMetadataSortEnum { + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", +} + +/** An enumeration. */ +export enum RelayRepositorySortEnum { + CreateTimeAsc = "CREATE_TIME_ASC", + CreateTimeDesc = "CREATE_TIME_DESC", + DescriptionAsc = "DESCRIPTION_ASC", + DescriptionDesc = "DESCRIPTION_DESC", + HomepageUrlAsc = "HOMEPAGE_URL_ASC", + HomepageUrlDesc = "HOMEPAGE_URL_DESC", + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + LongDescriptionAsc = "LONG_DESCRIPTION_ASC", + LongDescriptionDesc = "LONG_DESCRIPTION_DESC", + NameAsc = "NAME_ASC", + NameDesc = "NAME_DESC", + RemoteRepositoryUrlAsc = "REMOTE_REPOSITORY_URL_ASC", + RemoteRepositoryUrlDesc = "REMOTE_REPOSITORY_URL_DESC", + TypeAsc = "TYPE_ASC", + TypeDesc = "TYPE_DESC", + UpdateTimeAsc = "UPDATE_TIME_ASC", + UpdateTimeDesc = "UPDATE_TIME_DESC", +} + +export type RelayUser = Node & { + __typename?: "RelayUser" + encodedId: Scalars["String"] + id: Scalars["ID"] + username: Scalars["String"] +} + +export type RelayUserConnection = { + __typename?: "RelayUserConnection" + /** Contains the nodes in this connection. */ + edges: Array> + /** Pagination data for this connection. 
*/ + pageInfo: PageInfo +} + +/** A Relay edge containing a `RelayUser` and its cursor. */ +export type RelayUserEdge = { + __typename?: "RelayUserEdge" + /** A cursor for use in pagination */ + cursor: Scalars["String"] + /** The item at the end of the edge */ + node?: Maybe +} + +/** An enumeration. */ +export enum RelayUserSortEnum { + IdAsc = "ID_ASC", + IdDesc = "ID_DESC", + UsernameAsc = "USERNAME_ASC", + UsernameDesc = "USERNAME_DESC", +} + +export type SimpleCategory = { + __typename?: "SimpleCategory" + createTime?: Maybe + deleted?: Maybe + description?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + name: Scalars["String"] + repositories?: Maybe>> + updateTime?: Maybe +} + +export type SimpleRepository = { + __typename?: "SimpleRepository" + categories?: Maybe>> + createTime?: Maybe + description?: Maybe + downloadableRevisions?: Maybe>> + encodedId: Scalars["String"] + homepageUrl?: Maybe + id: Scalars["ID"] + longDescription?: Maybe + metadataRevisions?: Maybe>> + name: Scalars["String"] + remoteRepositoryUrl?: Maybe + type?: Maybe + updateTime?: Maybe + user: SimpleUser +} + +export type SimpleRepositoryMetadata = { + __typename?: "SimpleRepositoryMetadata" + changesetRevision: Scalars["String"] + createTime?: Maybe + downloadable?: Maybe + encodedId: Scalars["String"] + id: Scalars["ID"] + malicious?: Maybe + numericRevision?: Maybe + repository: SimpleRepository + updateTime?: Maybe +} + +export type SimpleUser = { + __typename?: "SimpleUser" + encodedId: Scalars["String"] + id: Scalars["ID"] + username: Scalars["String"] +} + +export type RecentlyCreatedRepositoriesQueryVariables = Exact<{ [key: string]: never }> + +export type RecentlyCreatedRepositoriesQuery = { + __typename?: "Query" + relayRepositories?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryCreationItemFragment: RepositoryCreationItemFragment } + }) + | null + } | null> + } | null +} + +export type RecentRepositoryUpdatesQueryVariables = Exact<{ [key: string]: never }> + +export type RecentRepositoryUpdatesQuery = { + __typename?: "Query" + relayRepositories?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryUpdateItemFragment: RepositoryUpdateItemFragment } + }) + | null + } | null> + } | null +} + +export type RepositoriesByOwnerQueryVariables = Exact<{ + username?: InputMaybe + cursor?: InputMaybe +}> + +export type RepositoriesByOwnerQuery = { + __typename?: "Query" + relayRepositoriesForOwner?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + cursor: string + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryListItemFragmentFragment: RepositoryListItemFragmentFragment } + }) + | null + } | null> + pageInfo: { __typename?: "PageInfo"; endCursor?: string | null; hasNextPage: boolean } + } | null +} + +export type RepositoryCreationItemFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + createTime?: any | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryCreationItemFragment" } + +export type RepositoriesByCategoryQueryVariables = Exact<{ + categoryId?: InputMaybe + cursor?: InputMaybe +}> + +export type RepositoriesByCategoryQuery = { + __typename?: "Query" + 
relayRepositoriesForCategory?: { + __typename?: "RelayRepositoryConnection" + edges: Array<{ + __typename?: "RelayRepositoryEdge" + cursor: string + node?: + | ({ __typename?: "RelayRepository" } & { + " $fragmentRefs"?: { RepositoryListItemFragmentFragment: RepositoryListItemFragmentFragment } + }) + | null + } | null> + pageInfo: { __typename?: "PageInfo"; endCursor?: string | null; hasNextPage: boolean } + } | null +} + +export type RepositoryListItemFragmentFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + description?: string | null + type?: string | null + updateTime?: any | null + homepageUrl?: string | null + remoteRepositoryUrl?: string | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryListItemFragmentFragment" } + +export type RepositoryUpdateItemFragment = { + __typename?: "RelayRepository" + encodedId: string + name: string + updateTime?: any | null + user: { __typename?: "SimpleUser"; username: string } +} & { " $fragmentName"?: "RepositoryUpdateItemFragment" } + +export const RepositoryCreationItemFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryCreationItem" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: "Name", value: "createTime" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RepositoryListItemFragmentFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: "Name", value: "description" } }, + { kind: "Field", name: { kind: "Name", value: "type" } }, + { kind: "Field", name: { kind: "Name", value: "updateTime" } }, + { kind: "Field", name: { kind: "Name", value: "homepageUrl" } }, + { kind: "Field", name: { kind: "Name", value: "remoteRepositoryUrl" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RepositoryUpdateItemFragmentDoc = { + kind: "Document", + definitions: [ + { + kind: "FragmentDefinition", + name: { kind: "Name", value: "RepositoryUpdateItem" }, + typeCondition: { kind: "NamedType", name: { kind: "Name", value: "RelayRepository" } }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "encodedId" } }, + { kind: "Field", name: { kind: "Name", value: "name" } }, + { + kind: "Field", + name: { kind: "Name", value: "user" }, + selectionSet: { + kind: "SelectionSet", + selections: [{ kind: "Field", name: { kind: "Name", value: "username" } }], + }, + }, + { kind: "Field", name: { kind: 
"Name", value: "updateTime" } }, + ], + }, + }, + ], +} as unknown as DocumentNode +export const RecentlyCreatedRepositoriesDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "recentlyCreatedRepositories" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositories" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryCreationItem" }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryCreationItemFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RecentRepositoryUpdatesDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "recentRepositoryUpdates" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositories" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryUpdateItem" }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryUpdateItemFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RepositoriesByOwnerDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "repositoriesByOwner" }, + variableDefinitions: [ + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "username" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositoriesForOwner" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "username" }, + value: { kind: "Variable", name: { kind: "Name", value: "username" } }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { 
kind: "Name", value: "after" }, + value: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "cursor" } }, + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + }, + ], + }, + }, + ], + }, + }, + { + kind: "Field", + name: { kind: "Name", value: "pageInfo" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "endCursor" } }, + { kind: "Field", name: { kind: "Name", value: "hasNextPage" } }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryListItemFragmentFragmentDoc.definitions, + ], +} as unknown as DocumentNode +export const RepositoriesByCategoryDocument = { + kind: "Document", + definitions: [ + { + kind: "OperationDefinition", + operation: "query", + name: { kind: "Name", value: "repositoriesByCategory" }, + variableDefinitions: [ + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "categoryId" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + { + kind: "VariableDefinition", + variable: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + type: { kind: "NamedType", name: { kind: "Name", value: "String" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "relayRepositoriesForCategory" }, + arguments: [ + { + kind: "Argument", + name: { kind: "Name", value: "encodedId" }, + value: { kind: "Variable", name: { kind: "Name", value: "categoryId" } }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "sort" }, + value: { kind: "EnumValue", value: "UPDATE_TIME_DESC" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "first" }, + value: { kind: "IntValue", value: "10" }, + }, + { + kind: "Argument", + name: { kind: "Name", value: "after" }, + value: { kind: "Variable", name: { kind: "Name", value: "cursor" } }, + }, + ], + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "Field", + name: { kind: "Name", value: "edges" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "cursor" } }, + { + kind: "Field", + name: { kind: "Name", value: "node" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { + kind: "FragmentSpread", + name: { kind: "Name", value: "RepositoryListItemFragment" }, + }, + ], + }, + }, + ], + }, + }, + { + kind: "Field", + name: { kind: "Name", value: "pageInfo" }, + selectionSet: { + kind: "SelectionSet", + selections: [ + { kind: "Field", name: { kind: "Name", value: "endCursor" } }, + { kind: "Field", name: { kind: "Name", value: "hasNextPage" } }, + ], + }, + }, + ], + }, + }, + ], + }, + }, + ...RepositoryListItemFragmentFragmentDoc.definitions, + ], +} as unknown as DocumentNode diff --git a/lib/tool_shed/webapp/frontend/src/gql/index.ts b/lib/tool_shed/webapp/frontend/src/gql/index.ts new file mode 100644 index 000000000000..f2b0e1a9ae69 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gql/index.ts @@ -0,0 +1,2 @@ +export * from "./fragment-masking" +export * from "./gql" diff --git a/lib/tool_shed/webapp/frontend/src/gqlFragements.ts 
b/lib/tool_shed/webapp/frontend/src/gqlFragements.ts new file mode 100644 index 000000000000..4eca9379d0da --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/gqlFragements.ts @@ -0,0 +1,27 @@ +import { graphql } from "@/gql" + +export const RepositoryListItemFragment = graphql(/* GraphQL */ ` + fragment RepositoryListItemFragment on RelayRepository { + encodedId + name + user { + username + } + description + type + updateTime + homepageUrl + remoteRepositoryUrl + } +`) + +export const UpdateFragment = graphql(/* GraphQL */ ` + fragment RepositoryUpdateItem on RelayRepository { + encodedId + name + user { + username + } + updateTime + } +`) diff --git a/lib/tool_shed/webapp/frontend/src/main.ts b/lib/tool_shed/webapp/frontend/src/main.ts new file mode 100644 index 000000000000..4130acc25b72 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/main.ts @@ -0,0 +1,24 @@ +import { createApp } from "vue" +import { Quasar, Notify, Cookies } from "quasar" +import App from "./App.vue" +// ( + props: TProps, + emit: (event: string, value: TProps[TKey]) => void, + name: TKey = "modelValue" as TKey +): WritableComputedRef<TProps[TKey]> { + return computed({ + get: () => props[name], + set: (value: TProps[TKey]) => { + emit("update:modelValue", value) + }, + }) +} diff --git a/lib/tool_shed/webapp/frontend/src/quasar-variables.sass b/lib/tool_shed/webapp/frontend/src/quasar-variables.sass new file mode 100644 index 000000000000..42043a4e6e4f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/quasar-variables.sass @@ -0,0 +1,15 @@ +$primary : #2c3143 +$secondary : #dee2e6 + +// really struggling to create contrast in visually +// appealing ways... some failed experiments +// #ffdb58 // #a6c9e1 +$accent : #63a0ca + +$dark : #a3aac4 +$dark-page : #121212 + +$positive : #66cc66 +$negative : #e31a1e +$info : #2077b3 +$warning : #fe7f02 diff --git a/lib/tool_shed/webapp/frontend/src/router.ts b/lib/tool_shed/webapp/frontend/src/router.ts new file mode 100644 index 000000000000..61518786abd9 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/router.ts @@ -0,0 +1,13 @@ +import { createRouter, createWebHistory } from "vue-router" +import routes from "@/routes" + +const router = createRouter({ + history: createWebHistory(), + routes: routes, +}) + +export function goToRepository(id: string) { + router.push(`/repositories/${id}`) +} + +export default router diff --git a/lib/tool_shed/webapp/frontend/src/routes.ts b/lib/tool_shed/webapp/frontend/src/routes.ts new file mode 100644 index 000000000000..5dc76636fd81 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/routes.ts @@ -0,0 +1,113 @@ +import AdminControls from "@/components/pages/AdminControls.vue" +import LandingPage from "@/components/pages/LandingPage.vue" +import LoginPage from "@/components/LoginPage.vue" +import RegisterPage from "@/components/RegisterPage.vue" +import RegistrationSuccess from "@/components/RegistrationSuccess.vue" +import HelpPage from "@/components/pages/HelpPage.vue" +import RepositoriesByCategories from "@/components/pages/RepositoriesByCategories.vue" +import RepositoriesByOwners from "@/components/pages/RepositoriesByOwners.vue" +import RepositoriesByOwner from "@/components/pages/RepositoriesByOwner.vue" +import RepositoriesBySearch from "@/components/pages/RepositoriesBySearch.vue" +import RepositoriesByCategory from "@/components/pages/RepositoriesByCategory.vue" +import ComponentsShowcase from "@/components/pages/ComponentsShowcase.vue" +import RepositoryPage from "@/components/pages/RepositoryPage.vue" +import ManageApiKey
from "@/components/pages/ManageApiKey.vue" +import ChangePassword from "@/components/pages/ChangePassword.vue" +import CitableRepositoryPage from "@/components/pages/CitableRepositoryPage.vue" + +import type { RouteRecordRaw } from "vue-router" + +const routes: Array = [ + { + path: "/", + component: LandingPage, + }, + { + path: "/register", + component: RegisterPage, + }, + { + path: "/login", + component: LoginPage, + }, + { + path: "/registration_success", + component: RegistrationSuccess, + }, + { + path: "/login_success", + component: LandingPage, + props: { message: "Login successful!" }, + }, + { + path: "/logout_success", + component: LandingPage, + props: { message: "Logout successful!" }, + }, + { + path: "/help", + component: HelpPage, + }, + { + path: "/admin", + component: AdminControls, + }, + { + path: "/_component_showcase", + component: ComponentsShowcase, + }, + { + path: "/repositories_by_search", + component: RepositoriesBySearch, + }, + { + path: "/repositories_by_category", + component: RepositoriesByCategories, + }, + { + path: "/repositories_by_owner", + component: RepositoriesByOwners, + }, + { + path: "/repositories_by_owner/:username", + component: RepositoriesByOwner, + props: true, + }, + { + path: "/repositories_by_category/:categoryId", + component: RepositoriesByCategory, + props: true, + }, + { + path: "/repositories/:repositoryId", + component: RepositoryPage, + props: true, + }, + { + path: "/user/api_key", + component: ManageApiKey, + }, + { + path: "/user/change_password", + component: ChangePassword, + }, + // legacy style access - was thought of as a citable URL + // so lets keep this path. + { + path: "/view/:username", + component: RepositoriesByOwner, + props: true, + }, + { + path: "/view/:username/:repositoryName", + component: CitableRepositoryPage, + props: true, + }, + { + path: "/view/:username/:repositoryName/:changesetRevision", + component: CitableRepositoryPage, + props: true, + }, +] + +export default routes diff --git a/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts b/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts new file mode 100644 index 000000000000..bb88f1cd5cbd --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/fetcher.ts @@ -0,0 +1,20 @@ +import { Fetcher } from "openapi-typescript-fetch" +import type { paths } from "./schema" + +/* +import type { Middleware } from "openapi-typescript-fetch"; +import { rethrowSimple } from "@/utils/simple-error"; +const rethrowSimpleMiddleware: Middleware = async (url, init, next) => { + try { + const response = await next(url, init); + return response; + } catch (e) { + rethrowSimple(e); + } +}; + +use: [rethrowSimpleMiddleware] +*/ + +export const fetcher = Fetcher.for() +fetcher.configure({ baseUrl: "" }) diff --git a/lib/tool_shed/webapp/frontend/src/schema/index.ts b/lib/tool_shed/webapp/frontend/src/schema/index.ts new file mode 100644 index 000000000000..f334fdb0d2a2 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/index.ts @@ -0,0 +1,3 @@ +export type { components, operations, paths } from "./schema" +export { fetcher } from "./fetcher" +export type { RepositoryTool, RevisionMetadata } from "./types" diff --git a/lib/tool_shed/webapp/frontend/src/schema/schema.ts b/lib/tool_shed/webapp/frontend/src/schema/schema.ts new file mode 100644 index 000000000000..5b2011c4db4f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/schema.ts @@ -0,0 +1,2061 @@ +/** + * This file was auto-generated by openapi-typescript. 
+ * Do not make direct changes to the file. + */ + +export interface paths { + "/api/authenticate/baseauth": { + /** Returns an API key for the authenticated user based on BaseAuth headers. */ + get: operations["authenticate__baseauth"] + } + "/api/categories": { + /** + * Index + * @description index category + */ + get: operations["categories__index"] + /** + * Create + * @description create a category + */ + post: operations["categories__create"] + } + "/api/categories/{encoded_category_id}/repositories": { + /** + * Repositories + * @description display repositories by category + */ + get: operations["categories__repositories"] + } + "/api/ga4gh/trs/v2/service-info": { + /** Service Info */ + get: operations["tools_trs_service_info"] + } + "/api/ga4gh/trs/v2/toolClasses": { + /** Tool Classes */ + get: operations["tools__trs_tool_classes"] + } + "/api/ga4gh/trs/v2/tools": { + /** Trs Index */ + get: operations["tools__trs_index"] + } + "/api/ga4gh/trs/v2/tools/{tool_id}": { + /** Trs Get */ + get: operations["tools__trs_get"] + } + "/api/ga4gh/trs/v2/tools/{tool_id}/versions": { + /** Trs Get Versions */ + get: operations["tools__trs_get_versions"] + } + "/api/repositories": { + /** + * Index + * @description Get a list of repositories or perform a search. + */ + get: operations["repositories__index"] + /** + * Create + * @description create a new repository + */ + post: operations["repositories__create"] + } + "/api/repositories/get_ordered_installable_revisions": { + /** + * Get Ordered Installable Revisions + * @description Get an ordered list of the repository changeset revisions that are installable + */ + get: operations["repositories__get_ordered_installable_revisions"] + } + "/api/repositories/get_repository_revision_install_info": { + /** + * Legacy Install Info + * @description Get information used by the install client to install this repository. + */ + get: operations["repositories__legacy_install_info"] + } + "/api/repositories/install_info": { + /** + * Install Info + * @description Get information used by the install client to install this repository.
+ */ + get: operations["repositories__install_info"] + } + "/api/repositories/reset_metadata_on_repository": { + /** + * Reset Metadata On Repository Legacy + * @description reset metadata on a repository + */ + post: operations["repositories__reset_legacy"] + } + "/api/repositories/updates": { + /** Updates */ + get: operations["repositories__update"] + } + "/api/repositories/{encoded_repository_id}": { + /** Show */ + get: operations["repositories__show"] + } + "/api/repositories/{encoded_repository_id}/allow_push": { + /** Show Allow Push */ + get: operations["repositories__show_allow_push"] + } + "/api/repositories/{encoded_repository_id}/allow_push/{username}": { + /** Add Allow Push */ + post: operations["repositories__add_allow_push"] + /** Remove Allow Push */ + delete: operations["repositories__remove_allow_push"] + } + "/api/repositories/{encoded_repository_id}/changeset_revision": { + /** + * Create Changeset Revision + * @description upload new revision to the repository + */ + post: operations["repositories__create_revision"] + } + "/api/repositories/{encoded_repository_id}/deprecated": { + /** Set Deprecated */ + put: operations["repositories__set_deprecated"] + /** Unset Deprecated */ + delete: operations["repositories__unset_deprecated"] + } + "/api/repositories/{encoded_repository_id}/metadata": { + /** + * Metadata + * @description Get information about repository metadata + */ + get: operations["repositories__metadata"] + } + "/api/repositories/{encoded_repository_id}/permissions": { + /** Permissions */ + get: operations["repositories__permissions"] + } + "/api/repositories/{encoded_repository_id}/reset_metadata": { + /** + * Reset Metadata On Repository + * @description reset metadata on a repository + */ + post: operations["repositories__reset"] + } + "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/malicious": { + /** Set Malicious */ + put: operations["repositories__set_malicious"] + /** Unset Malicious */ + delete: operations["repositories__unset_malicious"] + } + "/api/repositories/{encoded_repository_id}/revisions/{changeset_revision}/readmes": { + /** + * Get Readmes + * @description fetch readmes for repository revision + */ + get: operations["repositories__readmes"] + } + "/api/tools": { + /** Index */ + get: operations["tools__index"] + } + "/api/tools/build_search_index": { + /** + * Build Search Index + * @description Not part of the stable API, just something to simplify + * bootstrapping tool sheds, scripting, testing, etc... 
+ */ + put: operations["tools__build_search_index"] + } + "/api/users": { + /** + * Index + * @description index users + */ + get: operations["users__index"] + /** + * Create + * @description create a user + */ + post: operations["users__create"] + } + "/api/users/current": { + /** + * Current + * @description show current user + */ + get: operations["users__current"] + } + "/api/users/{encoded_user_id}": { + /** + * Show + * @description show a user + */ + get: operations["users__show"] + } + "/api/users/{encoded_user_id}/api_key": { + /** Return the user's API key */ + get: operations["users__get_or_create_api_key"] + /** Creates a new API key for the user */ + post: operations["users__create_api_key"] + /** Delete the current API key of the user */ + delete: operations["users__delete_api_key"] + } + "/api/version": { + /** Version */ + get: operations["configuration__version"] + } + "/api_internal/change_password": { + /** + * Change Password + * @description reset a user + */ + put: operations["users__internal_change_password"] + } + "/api_internal/login": { + /** + * Internal Login + * @description login to web UI + */ + put: operations["users__internal_login"] + } + "/api_internal/logout": { + /** + * Internal Logout + * @description logout of web UI + */ + put: operations["users__internal_logout"] + } + "/api_internal/register": { + /** + * Register + * @description register a user + */ + post: operations["users__internal_register"] + } + "/api_internal/repositories/{encoded_repository_id}/metadata": { + /** + * Metadata Internal + * @description Get information about repository metadata + */ + get: operations["repositories__internal_metadata"] + } +} + +export type webhooks = Record<string, never> + +export interface components { + schemas: { + /** APIKeyResponse */ + APIKeyResponse: { + /** Api Key */ + api_key: string + } + /** Body_repositories__create_revision */ + Body_repositories__create_revision: { + /** Commit Message */ + commit_message?: Record<string, never> + /** Files */ + files?: string[] + } + /** BuildSearchIndexResponse */ + BuildSearchIndexResponse: { + /** Repositories Indexed */ + repositories_indexed: number + /** Tools Indexed */ + tools_indexed: number + } + /** Category */ + Category: { + /** Description */ + description: string + /** Id */ + id: string + /** Name */ + name: string + /** Repositories */ + repositories: number + } + /** Checksum */ + Checksum: { + /** + * Checksum + * @description The hex-string encoded checksum for the data. + */ + checksum: string + /** + * Type + * @description The digest method used to create the checksum. + * The value (e.g. `sha-256`) SHOULD be listed as `Hash Name String` in the https://github.com/ga4gh-discovery/ga4gh-checksum/blob/master/hash-alg.csv[GA4GH Checksum Hash Algorithm Registry]. + * Other values MAY be used, as long as implementors are aware of the issues discussed in https://tools.ietf.org/html/rfc6920#section-9.4[RFC6920]. + * GA4GH may provide more explicit guidance for use of non-IANA-registered algorithms in the future.
+ */ + type: string + } + /** CreateCategoryRequest */ + CreateCategoryRequest: { + /** Description */ + description?: string + /** Name */ + name: string + } + /** CreateRepositoryRequest */ + CreateRepositoryRequest: { + /** Category IDs */ + "category_ids[]": string + /** Description */ + description?: string + /** Homepage Url */ + homepage_url?: string + /** Name */ + name: string + /** Remote Repository Url */ + remote_repository_url?: string + /** Synopsis */ + synopsis: string + /** + * Type + * @default unrestricted + * @enum {string} + */ + type?: "repository_suite_definition" | "tool_dependency_definition" | "unrestricted" + } + /** CreateUserRequest */ + CreateUserRequest: { + /** Email */ + email: string + /** Password */ + password: string + /** Username */ + username: string + } + /** + * DescriptorType + * @description An enumeration. + * @enum {unknown} + */ + DescriptorType: "CWL" | "WDL" | "NFL" | "GALAXY" | "SMK" + /** + * DescriptorTypeVersion + * @description The language version for a given descriptor type. The version should correspond to the actual declared version of the descriptor. For example, tools defined in CWL could have a version of `v1.0.2` whereas WDL tools may have a version of `1.0` or `draft-2` + */ + DescriptorTypeVersion: string + /** DetailedRepository */ + DetailedRepository: { + /** Create Time */ + create_time: string + /** Deleted */ + deleted: boolean + /** Deprecated */ + deprecated: boolean + /** Description */ + description: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Long Description */ + long_description?: string + /** Name */ + name: string + /** Owner */ + owner: string + /** Private */ + private: boolean + /** Remote Repository Url */ + remote_repository_url?: string + /** Times Downloaded */ + times_downloaded: number + /** Type */ + type: string + /** Update Time */ + update_time: string + /** User Id */ + user_id: string + } + /** FailedRepositoryUpdateMessage */ + FailedRepositoryUpdateMessage: { + /** Err Msg */ + err_msg: string + } + /** HTTPValidationError */ + HTTPValidationError: { + /** Detail */ + detail?: components["schemas"]["ValidationError"][] + } + /** ImageData */ + ImageData: { + /** + * Checksum + * @description A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. This exposes the hashcode for specific image versions to verify that the container version pulled is actually the version that was indexed by the registry. + * @example [ + * { + * "checksum": "77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182", + * "type": "sha256" + * } + * ] + */ + checksum?: components["schemas"]["Checksum"][] + /** + * Image Name + * @description Used in conjunction with a registry_url if provided to locate images. + * @example [ + * "quay.io/seqware/seqware_full/1.1", + * "ubuntu:latest" + * ] + */ + image_name?: string + image_type?: components["schemas"]["ImageType"] + /** + * Registry Host + * @description A docker registry or a URL to a Singularity registry. Used along with image_name to locate a specific image. + * @example [ + * "registry.hub.docker.com" + * ] + */ + registry_host?: string + /** + * Size + * @description Size of the container in bytes. + */ + size?: number + /** + * Updated + * @description Last time the container was updated. + */ + updated?: string + } + /** + * ImageType + * @description An enumeration. 
+ * @enum {unknown} + */ + ImageType: "Docker" | "Singularity" | "Conda" + /** InstallInfo */ + InstallInfo: { + metadata_info?: components["schemas"]["RepositoryMetadataInstallInfo"] + repo_info?: components["schemas"]["RepositoryExtraInstallInfo"] + } + /** Organization */ + Organization: { + /** + * Name + * @description Name of the organization responsible for the service + * @example My organization + */ + name: string + /** + * Url + * Format: uri + * @description URL of the website of the organization (RFC 3986 format) + * @example https://example.com + */ + url: string + } + /** RepositoriesByCategory */ + RepositoriesByCategory: { + /** Description */ + description: string + /** Id */ + id: string + /** Name */ + name: string + /** Repositories */ + repositories: components["schemas"]["Repository"][] + /** Repository Count */ + repository_count: number + } + /** Repository */ + Repository: { + /** Create Time */ + create_time: string + /** Deleted */ + deleted: boolean + /** Deprecated */ + deprecated: boolean + /** Description */ + description: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Name */ + name: string + /** Owner */ + owner: string + /** Private */ + private: boolean + /** Remote Repository Url */ + remote_repository_url?: string + /** Times Downloaded */ + times_downloaded: number + /** Type */ + type: string + /** Update Time */ + update_time: string + /** User Id */ + user_id: string + } + /** RepositoryDependency */ + RepositoryDependency: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Datatypes */ + includes_datatypes?: boolean + /** Includes Tool Dependencies */ + includes_tool_dependencies?: boolean + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /** Includes Workflows */ + includes_workflows?: boolean + /** Invalid Tools */ + invalid_tools: string[] + /** Malicious */ + malicious: boolean + /** Missing Test Components */ + missing_test_components: boolean + /** Numeric Revision */ + numeric_revision: number + repository: components["schemas"]["Repository"] + /** Repository Dependencies */ + repository_dependencies: components["schemas"]["RepositoryDependency"][] + /** Repository Id */ + repository_id: string + /** Tools */ + tools?: components["schemas"]["RepositoryTool"][] + } + /** RepositoryExtraInstallInfo */ + RepositoryExtraInstallInfo: { + /** Changeset Revision */ + changeset_revision: string + /** Ctx Rev */ + ctx_rev: string + /** Description */ + description: string + /** Name */ + name: string + /** Repository Clone Url */ + repository_clone_url: string + /** Repository Dependencies */ + repository_dependencies?: Record<string, never> + /** Repository Owner */ + repository_owner: string + } + /** RepositoryMetadata */ + RepositoryMetadata: { + [key: string]: components["schemas"]["RepositoryRevisionMetadata"] | undefined + } + /** RepositoryMetadataInstallInfo */ + RepositoryMetadataInstallInfo: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /**
Malicious */ + malicious: boolean + /** Repository Id */ + repository_id: string + /** Url */ + url: string + /** Valid Tools */ + valid_tools: components["schemas"]["ValidToolDict"][] + } + /** RepositoryPermissions */ + RepositoryPermissions: { + /** Allow Push */ + allow_push: string[] + /** Can Manage */ + can_manage: boolean + /** Can Push */ + can_push: boolean + } + /** RepositoryRevisionMetadata */ + RepositoryRevisionMetadata: { + /** Changeset Revision */ + changeset_revision: string + /** Downloadable */ + downloadable: boolean + /** Has Repository Dependencies */ + has_repository_dependencies: boolean + /** Id */ + id: string + /** Includes Datatypes */ + includes_datatypes?: boolean + /** Includes Tool Dependencies */ + includes_tool_dependencies?: boolean + /** Includes Tools */ + includes_tools: boolean + /** Includes Tools For Display In Tool Panel */ + includes_tools_for_display_in_tool_panel: boolean + /** Includes Workflows */ + includes_workflows?: boolean + /** Invalid Tools */ + invalid_tools: string[] + /** Malicious */ + malicious: boolean + /** Missing Test Components */ + missing_test_components: boolean + /** Numeric Revision */ + numeric_revision: number + repository: components["schemas"]["Repository"] + /** Repository Dependencies */ + repository_dependencies: components["schemas"]["RepositoryDependency"][] + /** Repository Id */ + repository_id: string + /** Tools */ + tools?: components["schemas"]["RepositoryTool"][] + } + /** RepositoryRevisionReadmes */ + RepositoryRevisionReadmes: { + [key: string]: string | undefined + } + /** RepositorySearchHit */ + RepositorySearchHit: { + repository: components["schemas"]["RepositorySearchResult"] + /** Score */ + score: number + } + /** RepositorySearchResult */ + RepositorySearchResult: { + /** Approved */ + approved: boolean + /** Categories */ + categories: string + /** Description */ + description: string + /** Full Last Updated */ + full_last_updated: string + /** Homepage Url */ + homepage_url?: string + /** Id */ + id: string + /** Last Update */ + last_update?: string + /** Long Description */ + long_description?: string + /** Name */ + name: string + /** Remote Repository Url */ + remote_repository_url?: string + /** Repo Lineage */ + repo_lineage: string + /** Repo Owner Username */ + repo_owner_username: string + /** Times Downloaded */ + times_downloaded: number + } + /** RepositorySearchResults */ + RepositorySearchResults: { + /** Hits */ + hits: components["schemas"]["RepositorySearchHit"][] + /** Hostname */ + hostname: string + /** Page */ + page: string + /** Page Size */ + page_size: string + /** Total Results */ + total_results: string + } + /** RepositoryTool */ + RepositoryTool: { + /** Description */ + description: string + /** Guid */ + guid: string + /** Id */ + id: string + /** Name */ + name: string + /** Requirements */ + requirements: Record<string, never>[] + /** Tool Config */ + tool_config: string + /** Tool Type */ + tool_type: string + /** Version */ + version: string + } + /** RepositoryUpdate */ + RepositoryUpdate: + | components["schemas"]["ValidRepostiroyUpdateMessage"] + | components["schemas"]["FailedRepositoryUpdateMessage"] + /** ResetMetadataOnRepositoryResponse */ + ResetMetadataOnRepositoryResponse: { + /** Repository Status */ + repository_status: string[] + /** Start Time */ + start_time: string + /** Status */ + status: string + /** Stop Time */ + stop_time: string + } + /** Service */ + Service: { + /** + * Contacturl + * Format: uri + * @description URL of the contact for the
provider of this service, e.g. a link to a contact form (RFC 3986 format), or an email (RFC 2368 format). + * @example mailto:support@example.com + */ + contactUrl?: string + /** + * Createdat + * Format: date-time + * @description Timestamp describing when the service was first deployed and available (RFC 3339 format) + * @example 2019-06-04T12:58:19Z + */ + createdAt?: string + /** + * Description + * @description Description of the service. Should be human readable and provide information about the service. + * @example This service provides... + */ + description?: string + /** + * Documentationurl + * Format: uri + * @description URL of the documentation of this service (RFC 3986 format). This should help someone learn how to use your service, including any specifics required to access data, e.g. authentication. + * @example https://docs.myservice.example.com + */ + documentationUrl?: string + /** + * Environment + * @description Environment the service is running in. Use this to distinguish between production, development and testing/staging deployments. Suggested values are prod, test, dev, staging. However this is advised and not enforced. + * @example test + */ + environment?: string + /** + * Id + * @description Unique ID of this service. Reverse domain name notation is recommended, though not required. The identifier should attempt to be globally unique so it can be used in downstream aggregator services e.g. Service Registry. + * @example org.ga4gh.myservice + */ + id: string + /** + * Name + * @description Name of this service. Should be human readable. + * @example My project + */ + name: string + /** + * Organization + * @description Organization providing the service + */ + organization: components["schemas"]["Organization"] + type: components["schemas"]["ServiceType"] + /** + * Updatedat + * Format: date-time + * @description Timestamp describing when the service was last updated (RFC 3339 format) + * @example 2019-06-04T12:58:19Z + */ + updatedAt?: string + /** + * Version + * @description Version of the service being described. Semantic versioning is recommended, but other identifiers, such as dates or commit hashes, are also allowed. The version should be changed whenever the service is updated. + * @example 1.0.0 + */ + version: string + } + /** ServiceType */ + ServiceType: { + /** + * Artifact + * @description Name of the API or GA4GH specification implemented. Official GA4GH types should be assigned as part of standards approval process. Custom artifacts are supported. + * @example beacon + */ + artifact: string + /** + * Group + * @description Namespace in reverse domain name format. Use `org.ga4gh` for implementations compliant with official GA4GH specifications. For services with custom APIs not standardized by GA4GH, or implementations diverging from official GA4GH specifications, use a different namespace (e.g. your organization's reverse domain name). + * @example org.ga4gh + */ + group: string + /** + * Version + * @description Version of the API or specification. GA4GH specifications use semantic versioning. + * @example 1.0.0 + */ + version: string + } + /** Tool */ + Tool: { + /** + * Aliases + * @description Support for this parameter is optional for tool registries that support aliases. + * A list of strings that can be used to identify this tool which could be straight up URLs. + * This can be used to expose alternative ids (such as GUIDs) for a tool + * for registries. Can be used to match tools across registries. 
+ */ + aliases?: string[] + /** + * Checker Url + * @description Optional url to the checker tool that will exit successfully if this tool produced the expected result given test data. + */ + checker_url?: string + /** + * Description + * @description The description of the tool. + */ + description?: string + /** + * Has Checker + * @description Whether this tool has a checker tool associated with it. + */ + has_checker?: boolean + /** + * Id + * @description A unique identifier of the tool, scoped to this registry. + * @example 123456 + */ + id: string + /** + * Meta Version + * @description The version of this tool in the registry. Iterates when fields like the description, author, etc. are updated. + */ + meta_version?: string + /** + * Name + * @description The name of the tool. + */ + name?: string + /** + * Organization + * @description The organization that published the image. + */ + organization: string + toolclass: components["schemas"]["ToolClass"] + /** + * Url + * @description The URL for this tool in this registry. + * @example http://agora.broadinstitute.org/tools/123456 + */ + url: string + /** + * Versions + * @description A list of versions for this tool. + */ + versions: components["schemas"]["ToolVersion"][] + } + /** ToolClass */ + ToolClass: { + /** + * Description + * @description A longer explanation of what this class is and what it can accomplish. + */ + description?: string + /** + * Id + * @description The unique identifier for the class. + */ + id?: string + /** + * Name + * @description A short friendly name for the class. + */ + name?: string + } + /** ToolVersion */ + ToolVersion: { + /** + * Author + * @description Contact information for the author of this version of the tool in the registry. (More complex authorship information is handled by the descriptor). + */ + author?: string[] + /** + * Containerfile + * @description Reports if this tool has a containerfile available. (For Docker-based tools, this would indicate the presence of a Dockerfile) + */ + containerfile?: boolean + /** @description The type (or types) of descriptors available. */ + descriptor_type?: components["schemas"]["DescriptorType"][] + /** + * Descriptor Type Version + * @description A map providing information about the language versions used in this tool. The keys should be the same values used in the `descriptor_type` field, and the value should be an array of all the language versions used for the given `descriptor_type`. Depending on the `descriptor_type` (e.g. CWL) multiple version values may be used in a single tool. + * @example { + * "WDL": ["1.0", "1.0"], + * "CWL": ["v1.0.2"], + * "NFL": ["DSL2"] + * } + */ + descriptor_type_version?: { + [key: string]: components["schemas"]["DescriptorTypeVersion"][] | undefined + } + /** + * Id + * @description An identifier of the version of this tool for this particular tool registry. + * @example v1 + */ + id: string + /** + * Images + * @description All known docker images (and versions/hashes) used by this tool. If the tool has to evaluate any of the docker images strings at runtime, those ones cannot be reported here. + */ + images?: components["schemas"]["ImageData"][] + /** + * Included Apps + * @description An array of IDs for the applications that are stored inside this tool. 
+ * @example [ + * "https://bio.tools/tool/mytum.de/SNAP2/1", + * "https://bio.tools/bioexcel_seqqc" + * ] + */ + included_apps?: string[] + /** + * Is Production + * @description This version of a tool is guaranteed to not change over time (for example, a tool built from a tag in git as opposed to a branch). A production quality tool is required to have a checksum + */ + is_production?: boolean + /** + * Meta Version + * @description The version of this tool version in the registry. Iterates when fields like the description, author, etc. are updated. + */ + meta_version?: string + /** + * Name + * @description The name of the version. + */ + name?: string + /** + * Signed + * @description Reports whether this version of the tool has been signed. + */ + signed?: boolean + /** + * Url + * @description The URL for this tool version in this registry. + * @example http://agora.broadinstitute.org/tools/123456/versions/1 + */ + url: string + /** + * Verified + * @description Reports whether this tool has been verified by a specific organization or individual. + */ + verified?: boolean + /** + * Verified Source + * @description Source of metadata that can support a verified tool, such as an email or URL. + */ + verified_source?: string[] + } + /** UiChangePasswordRequest */ + UiChangePasswordRequest: { + /** Current */ + current: string + /** Password */ + password: string + } + /** UiLoginRequest */ + UiLoginRequest: { + /** Login */ + login: string + /** Password */ + password: string + /** Session Csrf Token */ + session_csrf_token: string + } + /** UiLoginResponse */ + UiLoginResponse: Record<string, never> + /** UiLogoutRequest */ + UiLogoutRequest: { + /** + * Logout All + * @default false + */ + logout_all?: boolean + /** Session Csrf Token */ + session_csrf_token: string + } + /** UiLogoutResponse */ + UiLogoutResponse: Record<string, never> + /** UiRegisterRequest */ + UiRegisterRequest: { + /** Bear Field */ + bear_field: string + /** Email */ + email: string + /** Password */ + password: string + /** Username */ + username: string + } + /** UiRegisterResponse */ + UiRegisterResponse: { + /** + * Activation Error + * @default false + */ + activation_error?: boolean + /** + * Activation Sent + * @default false + */ + activation_sent?: boolean + /** Contact Email */ + contact_email?: string + /** Email */ + email: string + } + /** User */ + User: { + /** Id */ + id: string + /** Username */ + username: string + } + /** ValidRepostiroyUpdateMessage */ + ValidRepostiroyUpdateMessage: { + /** Message */ + message: string + } + /** ValidToolDict */ + ValidToolDict: { + /** Add To Tool Panel */ + add_to_tool_panel: boolean + /** Description */ + description: string + /** Guid */ + guid: string + /** Id */ + id: string + /** Name */ + name: string + /** Requirements */ + requirements: Record<string, never>[] + /** Tests */ + tests: Record<string, never>[] + /** Tool Config */ + tool_config: string + /** Tool Type */ + tool_type: string + /** Version */ + version: string + /** Version String Cmd */ + version_string_cmd: string + } + /** ValidationError */ + ValidationError: { + /** Location */ + loc: string[] + /** Message */ + msg: string + /** Error Type */ + type: string + } + /** Version */ + Version: { + /** + * Api Version + * @default v1 + */ + api_version?: string + /** Version */ + version: string + /** Version Major */ + version_major: string + } + } + responses: never + parameters: never + requestBodies: never + headers: never + pathItems: never +} + +export type external = Record<string, never> + +export interface operations { + authenticate__baseauth: {
/** Returns an API key for the authenticated user based on BaseAuth headers. */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["APIKeyResponse"] + } + } + } + } + categories__index: { + /** + * Index + * @description index category + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Category"][] + } + } + } + } + categories__create: { + /** + * Create + * @description create a category + */ + requestBody: { + content: { + "application/json": components["schemas"]["CreateCategoryRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Category"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + categories__repositories: { + /** + * Repositories + * @description display repositories by category + */ + parameters: { + query?: { + installable?: boolean + sort_key?: string + sort_order?: string + page?: number + } + /** @description The encoded database identifier of the category. */ + path: { + encoded_category_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoriesByCategory"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools_trs_service_info: { + /** Service Info */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Service"] + } + } + } + } + tools__trs_tool_classes: { + /** Tool Classes */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ToolClass"][] + } + } + } + } + tools__trs_index: { + /** Trs Index */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record<string, never> + } + } + } + } + tools__trs_get: { + /** Trs Get */ + parameters: { + /** @description See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids */ + path: { + tool_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Tool"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools__trs_get_versions: { + /** Trs Get Versions */ + parameters: { + /** @description See also https://ga4gh.github.io/tool-registry-service-schemas/DataModel/#trs-tool-and-trs-tool-version-ids */ + path: { + tool_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ToolVersion"][] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__index: { + /** + * Index + * @description Get a list of repositories or perform a search.
+ */ + parameters?: { + query?: { + q?: string + page?: number + page_size?: number + deleted?: boolean + owner?: string + name?: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": + | components["schemas"]["RepositorySearchResults"] + | components["schemas"]["Repository"][] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__create: { + /** + * Create + * @description create a new repository + */ + requestBody: { + content: { + "application/json": components["schemas"]["CreateRepositoryRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Repository"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__get_ordered_installable_revisions: { + /** + * Get Ordered Installable Revisions + * @description Get an ordered list of the repository changeset revisions that are installable + */ + parameters?: { + query?: { + owner?: string + name?: string + tsr_id?: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__legacy_install_info: { + /** + * Legacy Install Info + * @description Get information used by the install client to install this repository. + */ + parameters: { + /** @description Name of the target repository. */ + /** @description Owner of the target repository. */ + /** @description Changeset of the target repository. */ + query: { + name: string + owner: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record<string, never>[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__install_info: { + /** + * Install Info + * @description Get information used by the install client to install this repository. + */ + parameters: { + /** @description Name of the target repository. */ + /** @description Owner of the target repository. */ + /** @description Changeset of the target repository.
*/ + query: { + name: string + owner: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["InstallInfo"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__reset_legacy: { + /** + * Reset Metadata On Repository Legacy + * @description reset metadata on a repository + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ResetMetadataOnRepositoryResponse"] + } + } + } + } + repositories__update: { + /** Updates */ + parameters: { + query: { + owner?: string + name?: string + changeset_revision: string + hexlify?: boolean + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record<string, never> + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__show: { + /** Show */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["DetailedRepository"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__show_allow_push: { + /** Show Allow Push */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__add_allow_push: { + /** Add Allow Push */ + parameters: { + /** @description The encoded database identifier of the repository. */ + /** @description The target username. */ + path: { + encoded_repository_id: string + username: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__remove_allow_push: { + /** Remove Allow Push */ + parameters: { + /** @description The encoded database identifier of the repository. */ + /** @description The target username. */ + path: { + encoded_repository_id: string + username: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string[] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__create_revision: { + /** + * Create Changeset Revision + * @description upload new revision to the repository + */ + parameters: { + /** @description Set commit message as a query parameter. */ + query?: { + commit_message?: string + } + /** @description The encoded database identifier of the repository.
*/ + path: { + encoded_repository_id: string + } + } + requestBody?: { + content: { + "multipart/form-data": components["schemas"]["Body_repositories__create_revision"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoryUpdate"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__set_deprecated: { + /** Set Deprecated */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__unset_deprecated: { + /** Unset Deprecated */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__metadata: { + /** + * Metadata + * @description Get information about repository metadata + */ + parameters: { + /** @description Include only downloadable repositories. */ + query?: { + downloadable_only?: boolean + } + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record<string, never> + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__permissions: { + /** Permissions */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoryPermissions"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__reset: { + /** + * Reset Metadata On Repository + * @description reset metadata on a repository + */ + parameters: { + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["ResetMetadataOnRepositoryResponse"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__set_malicious: { + /** Set Malicious */ + parameters: { + /** @description The encoded database identifier of the repository. */ + /** @description The changeset revision corresponding to the target revision of the target repository.
*/ + path: { + encoded_repository_id: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__unset_malicious: { + /** Unset Malicious */ + parameters: { + /** @description The encoded database identifier of the repository. */ + /** @description The changeset revision corresponding to the target revision of the target repository. */ + path: { + encoded_repository_id: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__readmes: { + /** + * Get Readmes + * @description fetch readmes for repository revision + */ + parameters: { + /** @description The encoded database identifier of the repository. */ + /** @description The changeset revision corresponding to the target revision of the target repository. */ + path: { + encoded_repository_id: string + changeset_revision: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoryRevisionReadmes"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools__index: { + /** Index */ + parameters: { + query: { + q: string + page?: number + page_size?: number + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": Record<string, never> + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + tools__build_search_index: { + /** + * Build Search Index + * @description Not part of the stable API, just something to simplify + * bootstrapping tool sheds, scripting, testing, etc... + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["BuildSearchIndexResponse"] + } + } + } + } + users__index: { + /** + * Index + * @description index users + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["User"][] + } + } + } + } + users__create: { + /** + * Create + * @description create a user + */ + requestBody: { + content: { + "application/json": components["schemas"]["CreateUserRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["User"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__current: { + /** + * Current + * @description show current user + */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["User"] + } + } + } + } + users__show: { + /** + * Show + * @description show a user + */ + parameters: { + /** @description The encoded database identifier of the user.
*/ + path: { + encoded_user_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["User"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__get_or_create_api_key: { + /** Return the user's API key */ + parameters: { + /** @description The encoded database identifier of the user. */ + path: { + encoded_user_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__create_api_key: { + /** Creates a new API key for the user */ + parameters: { + /** @description The encoded database identifier of the user. */ + path: { + encoded_user_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": string + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__delete_api_key: { + /** Delete the current API key of the user */ + parameters: { + /** @description The encoded database identifier of the user. */ + path: { + encoded_user_id: string + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + configuration__version: { + /** Version */ + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["Version"] + } + } + } + } + users__internal_change_password: { + /** + * Change Password + * @description reset a user + */ + requestBody: { + content: { + "application/json": components["schemas"]["UiChangePasswordRequest"] + } + } + responses: { + /** @description Successful Response */ + 204: never + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__internal_login: { + /** + * Internal Login + * @description login to web UI + */ + requestBody: { + content: { + "application/json": components["schemas"]["UiLoginRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["UiLoginResponse"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__internal_logout: { + /** + * Internal Logout + * @description logout of web UI + */ + requestBody: { + content: { + "application/json": components["schemas"]["UiLogoutRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["UiLogoutResponse"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + users__internal_register: { + /** + * Register + * @description register a user + */ + requestBody: { + content: { + "application/json": components["schemas"]["UiRegisterRequest"] + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": 
components["schemas"]["UiRegisterResponse"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } + repositories__internal_metadata: { + /** + * Metadata Internal + * @description Get information about repository metadata + */ + parameters: { + /** @description Include only downloadable repositories. */ + query?: { + downloadable_only?: boolean + } + /** @description The encoded database identifier of the repository. */ + path: { + encoded_repository_id: string + } + } + responses: { + /** @description Successful Response */ + 200: { + content: { + "application/json": components["schemas"]["RepositoryMetadata"] + } + } + /** @description Validation Error */ + 422: { + content: { + "application/json": components["schemas"]["HTTPValidationError"] + } + } + } + } +} diff --git a/lib/tool_shed/webapp/frontend/src/schema/types.ts b/lib/tool_shed/webapp/frontend/src/schema/types.ts new file mode 100644 index 000000000000..2e328c9cb591 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/schema/types.ts @@ -0,0 +1,5 @@ +import type { components } from "./schema" + +export type Repository = components["schemas"]["Repository"] +export type RevisionMetadata = components["schemas"]["RepositoryRevisionMetadata"] +export type RepositoryTool = components["schemas"]["RepositoryTool"] diff --git a/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts b/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts new file mode 100644 index 000000000000..bae47cae845d --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/shims-vue.d.ts @@ -0,0 +1,6 @@ +declare module "*.vue" { + import { DefineComponent } from "vue" + // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/ban-types + const component: DefineComponent<{}, {}, any> + export default component +} diff --git a/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts b/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts new file mode 100644 index 000000000000..9392f9d60ba4 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/stores/auth.store.ts @@ -0,0 +1,53 @@ +import { defineStore } from "pinia" +import { ensureCookie, notifyOnCatch } from "@/util" +import { getCurrentUser } from "@/apiUtil" + +import { fetcher } from "@/schema" + +const loginFetcher = fetcher.path("/api_internal/login").method("put").create() +const logoutFetcher = fetcher.path("/api_internal/logout").method("put").create() + +export const useAuthStore = defineStore({ + id: "auth", + state: () => ({ + // initialize state from local storage to enable user to stay logged in + user: JSON.parse(localStorage.getItem("user") || "null"), + returnUrl: null, + }), + actions: { + async setup() { + const user = await getCurrentUser() + this.user = user + // store user details and jwt in local storage to keep user logged in between page refreshes + localStorage.setItem("user", user ? JSON.stringify(user) : "null") + }, + async login(username: string, password: string) { + const token = ensureCookie("session_csrf_token") + loginFetcher({ + login: username, + password: password, + session_csrf_token: token, + }) + .then(async () => { + // We need to do this outside the router to get updated + // cookies and hence csrf token.
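The Pinia stores in this diff all consume the generated schema through typed fetchers, as `loginFetcher` above illustrates. A minimal sketch of the pattern, assuming only the generated `fetcher` export from `@/schema` (the wrapper function and id value are illustrative, not part of the diff):

```ts
import { fetcher } from "@/schema"

// Each operation becomes a typed function keyed by path and method; path
// parameters and the response body are checked against the generated types.
const repoFetcher = fetcher.path("/api/repositories/{encoded_repository_id}").method("get").create()

async function loadRepository(encodedId: string) {
    const { data } = await repoFetcher({ encoded_repository_id: encodedId })
    return data // typed as components["schemas"]["DetailedRepository"]
}
```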
+ window.location.href = "/login_success" + }) + .catch(notifyOnCatch) + }, + async logout() { + const token = ensureCookie("session_csrf_token") + logoutFetcher({ + session_csrf_token: token, + }) + .then(async () => { + this.user = null + localStorage.removeItem("user") + // We need to do this outside the router to get updated + // cookies and hence csrf token. + window.location.href = "/logout_success" + }) + .catch(notifyOnCatch) + }, + }, +}) diff --git a/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts b/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts new file mode 100644 index 000000000000..4038ad835f67 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/stores/categories.store.ts @@ -0,0 +1,33 @@ +import { defineStore } from "pinia" + +import { fetcher, components } from "@/schema" +const categoriesFetcher = fetcher.path("/api/categories").method("get").create() +type Category = components["schemas"]["Category"] + +export const useCategoriesStore = defineStore({ + id: "categories", + state: () => ({ + categories: [] as Category[], + loading: true, + }), + actions: { + async getAll() { + this.loading = true + const { data: categories } = await categoriesFetcher({}) + this.categories = categories + this.loading = false + }, + }, + getters: { + byId(state) { + return (categoryId: string) => { + for (const category of state.categories) { + if (category.id == categoryId) { + return category + } + } + return null + } + }, + }, +}) diff --git a/lib/tool_shed/webapp/frontend/src/stores/index.ts b/lib/tool_shed/webapp/frontend/src/stores/index.ts new file mode 100644 index 000000000000..bb94b71fd9fb --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/stores/index.ts @@ -0,0 +1,4 @@ +export { useAuthStore } from "./auth.store" +export { useCategoriesStore } from "./categories.store" +export { useRepositoryStore } from "./repository.store" +export { useUsersStore } from "./users.store" diff --git a/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts b/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts new file mode 100644 index 000000000000..7d5f7223c12c --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/stores/repository.store.ts @@ -0,0 +1,111 @@ +import { defineStore } from "pinia" + +import { fetcher, components } from "@/schema" +const repositoryFetcher = fetcher.path("/api/repositories/{encoded_repository_id}").method("get").create() +const repositoryMetadataFetcher = fetcher + .path("/api_internal/repositories/{encoded_repository_id}/metadata") + .method("get") + .create() +const repositoryPermissionsFetcher = fetcher + .path("/api/repositories/{encoded_repository_id}/permissions") + .method("get") + .create() +const repositoryPermissionsAdder = fetcher + .path("/api/repositories/{encoded_repository_id}/allow_push/{username}") + .method("post") + .create() +const repositoryPermissionsRemover = fetcher + .path("/api/repositories/{encoded_repository_id}/allow_push/{username}") + .method("delete") + .create() +const repositoryInstallInfoFetcher = fetcher.path("/api/repositories/install_info").method("get").create() + +type DetailedRepository = components["schemas"]["DetailedRepository"] +type InstallInfo = components["schemas"]["InstallInfo"] +type RepositoryMetadata = components["schemas"]["RepositoryMetadata"] +type RepositoryPermissions = components["schemas"]["RepositoryPermissions"] + +export const useRepositoryStore = defineStore({ + id: "repository", + state: () => ({ + repositoryId: null as string | null, + repository: null as 
DetailedRepository | null, + repositoryMetadata: null as RepositoryMetadata | null, + repositoryInstallInfo: null as InstallInfo | null, + repositoryPermissions: null as RepositoryPermissions | null, + loading: true as boolean, + empty: false as boolean, + }), + actions: { + async allowPush(username: string) { + if (this.repositoryId == null) { + throw Error("Logic problem in repository store") + } + const params = { + encoded_repository_id: this.repositoryId, + username: username, + } + await repositoryPermissionsAdder(params) + const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params) + this.repositoryPermissions = _repositoryPermissions + }, + async disallowPush(username: string) { + if (this.repositoryId == null) { + throw Error("Logic problem in repository store") + } + const params = { + encoded_repository_id: this.repositoryId, + username: username, + } + await repositoryPermissionsRemover(params) + const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params) + this.repositoryPermissions = _repositoryPermissions + }, + async setId(repositoryId: string) { + this.repositoryId = repositoryId + this.refresh() + }, + async refresh() { + if (!this.repositoryId) { + return + } + this.loading = true + const params = { encoded_repository_id: this.repositoryId } + const metadataParams = { encoded_repository_id: this.repositoryId, downloadable_only: false } + const [{ data: repository }, { data: repositoryMetadata }] = await Promise.all([ + repositoryFetcher(params), + repositoryMetadataFetcher(metadataParams), + ]) + this.repository = repository + this.repositoryMetadata = repositoryMetadata + let repositoryPermissions = { + can_manage: false, + can_push: false, + allow_push: [] as string[], + } + try { + const { data: _repositoryPermissions } = await repositoryPermissionsFetcher(params) + repositoryPermissions = _repositoryPermissions + this.repositoryPermissions = repositoryPermissions + } catch (e) { + // console.log(e) + } + const latestMetadata = Object.values(repositoryMetadata)[0] + if (!latestMetadata) { + this.empty = true + } else { + if (this.empty) { + this.empty = false + } + const installParams = { + name: repository.name, + owner: repository.owner, + changeset_revision: latestMetadata.changeset_revision, + } + const { data: repositoryInstallInfo } = await repositoryInstallInfoFetcher(installParams) + this.repositoryInstallInfo = repositoryInstallInfo + } + this.loading = false + }, + }, +}) diff --git a/lib/tool_shed/webapp/frontend/src/stores/users.store.ts b/lib/tool_shed/webapp/frontend/src/stores/users.store.ts new file mode 100644 index 000000000000..13cb403f801f --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/stores/users.store.ts @@ -0,0 +1,22 @@ +import { defineStore } from "pinia" + +import { fetcher, components } from "@/schema" +const usersFetcher = fetcher.path("/api/users").method("get").create() + +type User = components["schemas"]["User"] + +export const useUsersStore = defineStore({ + id: "users", + state: () => ({ + users: [] as User[], + loading: true, + }), + actions: { + async getAll() { + this.loading = true + const { data: users } = await usersFetcher({}) + this.users = users + this.loading = false + }, + }, +}) diff --git a/lib/tool_shed/webapp/frontend/src/util.ts b/lib/tool_shed/webapp/frontend/src/util.ts new file mode 100644 index 000000000000..fec1486bd948 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/util.ts @@ -0,0 +1,48 @@ +import { copyToClipboard, Notify, Cookies } from "quasar" 
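A hedged sketch of how a page component might drive the repository store defined above (the component wiring and the id value are hypothetical; `setId`, `loading`, and `empty` are the members shown in repository.store.ts):

```ts
import { useRepositoryStore } from "@/stores"

const store = useRepositoryStore()
// setId() records the id and fires refresh() without awaiting it; refresh()
// loads the repository and its metadata in parallel, treats a permissions
// failure as non-fatal, and sets `empty` when no revision metadata exists.
store.setId("hypothetical-encoded-id")
// Consumers therefore watch store.loading / store.empty rather than the call.
```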
+import type { QNotifyCreateOptions } from "quasar" +import { type LocationQueryValue } from "vue-router" +import { ApiError } from "openapi-typescript-fetch" + +export function getCookie(name: string): string | null { + return Cookies.get(name) +} + +export function ensureCookie(name: string): string { + const cookie = getCookie(name) + if (cookie == null) { + notify("An important cookie was not set by the tool shed server, this may result in serious problems.") + throw Error(`Cookie ${name} not set`) + } + return cookie +} + +export function notify(notification: string, type: string | null = null) { + const opts: QNotifyCreateOptions = { + message: notification, + } + if (type) { + opts.type = type + } + Notify.create(opts) +} + +export async function copyAndNotify(value: string, notification: string) { + await copyToClipboard(value) + notify(notification) +} + +export function errorMessage(e: Error): string { + if (e instanceof ApiError) { + return e.data.err_msg + } else { + return JSON.stringify(e) + } +} + +export function queryParamToString(param: LocationQueryValue | LocationQueryValue[]): string | null { + return Array.isArray(param) ? param[0] : param +} + +export function notifyOnCatch(e: Error) { + notify(errorMessage(e)) +} diff --git a/lib/tool_shed/webapp/frontend/src/vite-env.d.ts b/lib/tool_shed/webapp/frontend/src/vite-env.d.ts new file mode 100644 index 000000000000..11f02fe2a006 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/src/vite-env.d.ts @@ -0,0 +1 @@ +/// <reference types="vite/client" /> diff --git a/lib/tool_shed/webapp/frontend/static/favicon.ico b/lib/tool_shed/webapp/frontend/static/favicon.ico new file mode 100644 index 000000000000..cf52fdcad290 Binary files /dev/null and b/lib/tool_shed/webapp/frontend/static/favicon.ico differ diff --git a/lib/tool_shed/webapp/frontend/tsconfig.json b/lib/tool_shed/webapp/frontend/tsconfig.json new file mode 100644 index 000000000000..7dad262e0c00 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "target": "esnext", + "module": "esnext", + "strict": true, + "jsx": "preserve", + "moduleResolution": "node", + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "forceConsistentCasingInFileNames": true, + "useDefineForClassFields": true, + "sourceMap": true, + "baseUrl": ".", + "paths": { + "@/*": ["src/*"] + }, + "lib": ["esnext", "dom", "dom.iterable", "scripthost"] + }, + "include": ["src/**/*.ts", "src/**/*.tsx", "src/**/*.vue", "tests/**/*.ts", "tests/**/*.tsx"], + "exclude": ["node_modules"] +} diff --git a/lib/tool_shed/webapp/frontend/vite.config.ts b/lib/tool_shed/webapp/frontend/vite.config.ts new file mode 100644 index 000000000000..ccf3705b8553 --- /dev/null +++ b/lib/tool_shed/webapp/frontend/vite.config.ts @@ -0,0 +1,23 @@ +import { fileURLToPath } from 'url' +import { defineConfig } from 'vite' +import vue from '@vitejs/plugin-vue' +import { quasar, transformAssetUrls } from '@quasar/vite-plugin' + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [ + vue({ + template: { transformAssetUrls }, + }), + + quasar({ + sassVariables: 'src/quasar-variables.sass', + }), + ], + build: {}, + resolve: { + alias: { + '@': fileURLToPath(new URL('./src', import.meta.url)), + }, + }, +}) diff --git a/lib/tool_shed/webapp/graphql-schema.json b/lib/tool_shed/webapp/graphql-schema.json new file mode 100644 index 000000000000..d51019220a41 --- /dev/null +++ b/lib/tool_shed/webapp/graphql-schema.json @@ -0,0 +1,2990 @@ +{ + "__schema": { +
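The rest of the diff is `graphql-schema.json`, an introspection dump of the shed's GraphQL schema; its root `Query` fields (`users`, `repositories`, `categories`, `revisions`, plus Relay variants) appear below. A hedged sketch of querying those fields over HTTP, noting that the `/api/graphql` endpoint path is an assumption this diff does not confirm:

```ts
// Field names (repositories, name, user.username) come from the introspection
// dump below; the endpoint URL is hypothetical.
async function listRepositories(base: string): Promise<unknown> {
    const res = await fetch(`${base}/api/graphql`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ query: "{ repositories { name user { username } } }" }),
    })
    return (await res.json()).data
}
```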
"queryType": { + "name": "Query" + }, + "mutationType": null, + "subscriptionType": null, + "types": [ + { + "kind": "OBJECT", + "name": "Query", + "description": null, + "fields": [ + { + "name": "users", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "revisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "node", + "description": null, + "args": [ + { + "name": "id", + "description": "The ID of the object", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null + } + ], + "type": { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayUsers", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayUserSortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayUserConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRepositoriesForCategory", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": 
"OBJECT", + "name": "RelayRepositoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRepositories", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayCategories", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayCategorySortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayCategoryConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "relayRevisions", + "description": null, + "args": [ + { + "name": "sort", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "RelayRepositoryMetadataSortEnum", + "ofType": null + } + }, + "defaultValue": "[ID_ASC]" + }, + { + "name": "before", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "after", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "first", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + }, + { + "name": "last", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null + } + ], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleUser", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + 
"name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "username", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "ID", + "description": "The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `\"4\"`) or integer (such as `4`) input value will be accepted as an ID.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "String", + "description": "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleRepository", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "remoteRepositoryUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "homepageUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "longDescription", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": 
{ + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "metadataRevisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadableRevisions", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "DateTime", + "description": "The `DateTime` scalar type represents a DateTime\nvalue as specified by\n[iso8601](https://en.wikipedia.org/wiki/ISO_8601).", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleCategory", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deleted", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Boolean", + "description": "The `Boolean` scalar type represents `true` or `false`.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "SimpleRepositoryMetadata", + "description": null, + "fields": [ + { + "name": "id", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "changesetRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "numericRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "malicious", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadable", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "SCALAR", + "name": "Int", + "description": "The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INTERFACE", + "name": "Node", + "description": "An object with an ID", + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": null, + "enumValues": null, + "possibleTypes": [ + { + "kind": "OBJECT", + "name": "RelayUser", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayRepository", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayCategory", + "ofType": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "ofType": null + } + ] + }, + { + "kind": "ENUM", + "name": "RelayUserSortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "USERNAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "USERNAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUserConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": 
"NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayUserEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "PageInfo", + "description": "The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.", + "fields": [ + { + "name": "hasNextPage", + "description": "When paginating forwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "hasPreviousPage", + "description": "When paginating backwards, are there more items?", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "startCursor", + "description": "When paginating backwards, the cursor to continue.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "endCursor", + "description": "When paginating forwards, the cursor to continue.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUserEdge", + "description": "A Relay edge containing a `RelayUser` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayUser", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayUser", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "username", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RelayRepositorySortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_ASC", + 
"description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TYPE_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "TYPE_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "REMOTE_REPOSITORY_URL_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "REMOTE_REPOSITORY_URL_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HOMEPAGE_URL_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "HOMEPAGE_URL_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LONG_DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LONG_DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayRepositoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryEdge", + "description": "A Relay edge containing a `RelayRepository` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayRepository", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepository", + "description": null, + "fields": [ + { + "name": "id", + "description": "The 
ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "remoteRepositoryUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "homepageUrl", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "longDescription", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "categories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleCategory", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleUser", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RelayCategorySortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "CREATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UPDATE_TIME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NAME_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"DESCRIPTION_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DESCRIPTION_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DELETED_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "DELETED_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategoryConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayCategoryEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategoryEdge", + "description": "A Relay edge containing a `RelayCategory` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayCategory", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayCategory", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deleted", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repositories", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", 
+ "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "RelayRepositoryMetadataSortEnum", + "description": "An enumeration.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "ID_ASC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ID_DESC", + "description": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataConnection", + "description": null, + "fields": [ + { + "name": "pageInfo", + "description": "Pagination data for this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "edges", + "description": "Contains the nodes in this connection.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataEdge", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadataEdge", + "description": "A Relay edge containing a `RelayRepositoryMetadata` and its cursor.", + "fields": [ + { + "name": "node", + "description": "The item at the end of the edge", + "args": [], + "type": { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "cursor", + "description": "A cursor for use in pagination", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "RelayRepositoryMetadata", + "description": null, + "fields": [ + { + "name": "id", + "description": "The ID of the object", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "createTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updateTime", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "DateTime", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "repository", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "SimpleRepository", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "changesetRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": 
"numericRevision", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "malicious", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "downloadable", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + "kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Schema", + "description": "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations.", + "fields": [ + { + "name": "types", + "description": "A list of all types supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "queryType", + "description": "The type that query operations will be rooted at.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "mutationType", + "description": "If this server supports mutation, the type that mutation operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "subscriptionType", + "description": "If this server support subscription, the type that subscription operations will be rooted at.", + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "directives", + "description": "A list of all directives supported by this server.", + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Directive", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Type", + "description": "The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum.\n\nDepending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name and description, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. 
List and NonNull types compose other types.", + "fields": [ + { + "name": "kind", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__TypeKind", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fields", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Field", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "interfaces", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "possibleTypes", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "enumValues", + "description": null, + "args": [ + { + "name": "includeDeprecated", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": "false" + } + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__EnumValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "inputFields", + "description": null, + "args": [], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ofType", + "description": null, + "args": [], + "type": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__TypeKind", + "description": "An enum describing what kind of type a given `__Type` is.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "SCALAR", + "description": "Indicates this type is a scalar.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "OBJECT", + "description": "Indicates this type is an object. `fields` and `interfaces` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Indicates this type is an interface. 
`fields` and `possibleTypes` are valid fields.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Indicates this type is a union. `possibleTypes` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Indicates this type is an enum. `enumValues` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Indicates this type is an input object. `inputFields` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "LIST", + "description": "Indicates this type is a list. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "NON_NULL", + "description": "Indicates this type is a non-null. `ofType` is a valid field.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Field", + "description": "Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__InputValue", + "description": "Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + 
"args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__Type", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "defaultValue", + "description": "A GraphQL-formatted string representing the default value for this input value.", + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__EnumValue", + "description": "One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "isDeprecated", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deprecationReason", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "__Directive", + "description": "A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document.\n\nIn some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. 
Directives provide this by describing additional information to the executor.", + "fields": [ + { + "name": "name", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "args": [], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "locations", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "ENUM", + "name": "__DirectiveLocation", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "args", + "description": null, + "args": [], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "__InputValue", + "ofType": null + } + } + } + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "ENUM", + "name": "__DirectiveLocation", + "description": "A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies.", + "fields": null, + "inputFields": null, + "interfaces": null, + "enumValues": [ + { + "name": "QUERY", + "description": "Location adjacent to a query operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "MUTATION", + "description": "Location adjacent to a mutation operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SUBSCRIPTION", + "description": "Location adjacent to a subscription operation.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD", + "description": "Location adjacent to a field.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_DEFINITION", + "description": "Location adjacent to a fragment definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FRAGMENT_SPREAD", + "description": "Location adjacent to a fragment spread.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INLINE_FRAGMENT", + "description": "Location adjacent to an inline fragment.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "VARIABLE_DEFINITION", + "description": "Location adjacent to a variable definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCHEMA", + "description": "Location adjacent to a schema definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "SCALAR", + "description": "Location adjacent to a scalar definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "OBJECT", + "description": "Location adjacent to an object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "FIELD_DEFINITION", + "description": "Location adjacent to a field definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ARGUMENT_DEFINITION", + "description": "Location adjacent to an argument definition.", + 
"isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INTERFACE", + "description": "Location adjacent to an interface definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "UNION", + "description": "Location adjacent to a union definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM", + "description": "Location adjacent to an enum definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "ENUM_VALUE", + "description": "Location adjacent to an enum value definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_OBJECT", + "description": "Location adjacent to an input object type definition.", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "INPUT_FIELD_DEFINITION", + "description": "Location adjacent to an input object field definition.", + "isDeprecated": false, + "deprecationReason": null + } + ], + "possibleTypes": null + } + ], + "directives": [ + { + "name": "include", + "description": "Directs the executor to include this field or fragment only when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Included when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "skip", + "description": "Directs the executor to skip this field or fragment when the `if` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Skipped when true.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + } + }, + "defaultValue": null + } + ] + }, + { + "name": "deprecated", + "description": "Marks an element of a GraphQL schema as no longer supported.", + "locations": [ + "FIELD_DEFINITION", + "ARGUMENT_DEFINITION", + "INPUT_FIELD_DEFINITION", + "ENUM_VALUE" + ], + "args": [ + { + "name": "reason", + "description": "Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. 
Formatted using the Markdown syntax, as specified by [CommonMark](https://commonmark.org/).", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": "\"No longer supported\"" + } + ] + }, + { + "name": "specifiedBy", + "description": "Exposes a URL that specifies the behaviour of this scalar.", + "locations": [ + "SCALAR" + ], + "args": [ + { + "name": "url", + "description": "The URL that specifies the behaviour of this scalar.", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null + } + ] + } + ] + } +} \ No newline at end of file diff --git a/lib/tool_shed/webapp/graphql/__init__.py b/lib/tool_shed/webapp/graphql/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/lib/tool_shed/webapp/graphql/schema.py b/lib/tool_shed/webapp/graphql/schema.py new file mode 100644 index 000000000000..af07fb37ebc9 --- /dev/null +++ b/lib/tool_shed/webapp/graphql/schema.py @@ -0,0 +1,244 @@ +import graphene +from graphene import relay +from graphene_sqlalchemy import ( + SQLAlchemyConnectionField, + SQLAlchemyObjectType, +) +from graphene_sqlalchemy.converter import ( + convert_sqlalchemy_hybrid_property_type, + convert_sqlalchemy_type, +) +from graphql import GraphQLResolveInfo +from sqlalchemy.orm import scoped_session +from typing_extensions import TypedDict + +from galaxy.model.custom_types import TrimmedString +from galaxy.security.idencoding import IdEncodingHelper +from tool_shed.webapp.model import ( + Category as SaCategory, + Repository as SaRepository, + RepositoryCategoryAssociation, + RepositoryMetadata as SaRepositoryMetadata, + User as SaUser, +) + +USER_FIELDS = ( + "id", + "username", +) + +CATEGORY_FIELDS = ( + "id", + "create_time", + "update_time", + "name", + "description", + "deleted", +) + +REPOSITORY_FIELDS = ( + "id", + "create_time", + "update_time", + "name", + "type", + "remote_repository_url", + "homepage_url", + "description", + "long_description", +) + +REPOSITORY_METADATA_FIELDS = ( + "id", + "create_time" + "update_time" + "changeset_revision" + "numeric_revision" + "metadata" + "tool_versions" + "malicious" + "downloadable", +) + + +class InfoDict(TypedDict): + session: scoped_session + security: IdEncodingHelper + + +# Map these Galaxy-ism to Graphene for cleaner interfaces. 
+@convert_sqlalchemy_type.register(TrimmedString) +def convert_sqlalchemy_type_trimmed_string(*args, **kwd): + return graphene.String + + +@convert_sqlalchemy_hybrid_property_type.register(lambda t: t == TrimmedString) +def convert_sqlalchemy_hybrid_property_type_trimmed_string(arg): + return graphene.String + + +class HasIdMixin: + id = graphene.NonNull(graphene.ID) + encoded_id = graphene.NonNull(graphene.String) + + def resolve_encoded_id(self: SQLAlchemyObjectType, info): + return info.context["security"].encode_id(self.id) + + +class UserMixin(HasIdMixin): + username = graphene.NonNull(graphene.String) + + +class RelayUser(SQLAlchemyObjectType, UserMixin): + class Meta: + model = SaUser + only_fields = USER_FIELDS + interfaces = (relay.Node,) + + +class SimpleUser(SQLAlchemyObjectType, UserMixin): + class Meta: + model = SaUser + only_fields = USER_FIELDS + + +class CategoryQueryMixin(HasIdMixin): + name = graphene.NonNull(graphene.String) + repositories = graphene.List(lambda: SimpleRepository) + + def resolve_repositories(self, info: InfoDict): + return [a.repository for a in self.repositories] + + +class SimpleCategory(SQLAlchemyObjectType, CategoryQueryMixin): + class Meta: + model = SaCategory + only_fields = CATEGORY_FIELDS + + +class RelayCategory(SQLAlchemyObjectType, CategoryQueryMixin): + class Meta: + model = SaCategory + only_fields = CATEGORY_FIELDS + interfaces = (relay.Node,) + + +class RepositoryMixin(HasIdMixin): + name = graphene.NonNull(graphene.String) + + +class RelayRepository(SQLAlchemyObjectType, RepositoryMixin): + class Meta: + model = SaRepository + only_fields = REPOSITORY_FIELDS + interfaces = (relay.Node,) + + categories = graphene.List(SimpleCategory) + user = graphene.NonNull(SimpleUser) + + +class RevisionQueryMixin(HasIdMixin): + # Likely because this model is imperatively mapped, the fields are not + # auto-populated here, so we need to be a bit more explicit + create_time = graphene.DateTime() + update_time = graphene.DateTime() + repository = graphene.NonNull(lambda: SimpleRepository) + changeset_revision = graphene.NonNull(graphene.String) + numeric_revision = graphene.Int() + malicious = graphene.Boolean() + downloadable = graphene.Boolean() + + +class SimpleRepositoryMetadata(SQLAlchemyObjectType, RevisionQueryMixin): + class Meta: + model = SaRepositoryMetadata + only_fields = REPOSITORY_METADATA_FIELDS + + +class SimpleRepository(SQLAlchemyObjectType, RepositoryMixin): + class Meta: + model = SaRepository + only_fields = REPOSITORY_FIELDS + + categories = graphene.List(SimpleCategory) + user = graphene.NonNull(SimpleUser) + metadata_revisions = graphene.List(lambda: SimpleRepositoryMetadata) + downloadable_revisions = graphene.List(lambda: SimpleRepositoryMetadata) + + +class RelayRepositoryMetadata(SQLAlchemyObjectType, RevisionQueryMixin): + class Meta: + model = SaRepositoryMetadata + only_fields = REPOSITORY_METADATA_FIELDS + interfaces = (relay.Node,) + + +class RepositoriesForCategoryField(SQLAlchemyConnectionField): + def __init__(self): + super().__init__(RelayRepository.connection, id=graphene.Int(), encoded_id=graphene.String()) + + @classmethod + def get_query(cls, model, info: GraphQLResolveInfo, sort=None, **args): + repository_query = super().get_query(model, info, sort=sort, **args) + context: InfoDict = info.root_value + query_id = args.get("id") + if not query_id: + encoded_id = args.get("encoded_id") + assert encoded_id, f"Expected an id or encoded_id argument, got {args}" + query_id =
context["security"].decode_id(encoded_id) + if query_id: + rval = repository_query.join( + RepositoryCategoryAssociation, + SaRepository.id == RepositoryCategoryAssociation.repository_id, + ).filter(RepositoryCategoryAssociation.category_id == query_id) + return rval + else: + return repository_query + + +class RepositoriesForOwnerField(SQLAlchemyConnectionField): + def __init__(self): + super().__init__(RelayRepository.connection, username=graphene.String()) + + @classmethod + def get_query(cls, model, info: GraphQLResolveInfo, sort=None, **args): + repository_query = super().get_query(model, info, sort=sort, **args) + username = args.get("username") + rval = repository_query.join( + SaUser, + ).filter(SaUser.username == username) + return rval + + +class Query(graphene.ObjectType): + users = graphene.List(SimpleUser) + repositories = graphene.List(SimpleRepository) + categories = graphene.List(SimpleCategory) + revisions = graphene.List(SimpleRepositoryMetadata) + + node = relay.Node.Field() + relay_users = SQLAlchemyConnectionField(RelayUser.connection) + relay_repositories_for_category = RepositoriesForCategoryField() + relay_repositories_for_owner = RepositoriesForOwnerField() + relay_repositories = SQLAlchemyConnectionField(RelayRepository.connection) + relay_categories = SQLAlchemyConnectionField(RelayCategory.connection) + relay_revisions = SQLAlchemyConnectionField(RelayRepositoryMetadata.connection) + + def resolve_users(self, info: InfoDict): + query = SimpleUser.get_query(info) + return query.all() + + def resolve_repositories(self, info: InfoDict): + query = SimpleRepository.get_query(info) + return query.all() + + def resolve_categories(self, info: InfoDict): + query = SimpleCategory.get_query(info) + return query.all() + + def resolve_revisions(self, info: InfoDict): + query = SimpleRepositoryMetadata.get_query(info) + return query.all() + + +schema = graphene.Schema(query=Query, types=[SimpleCategory]) diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py index 1a6761156dd6..f748f7938735 100644 --- a/lib/tool_shed/webapp/model/__init__.py +++ b/lib/tool_shed/webapp/model/__init__.py @@ -399,6 +399,7 @@ class Repository(Base, Dictifiable): "times_downloaded", "deprecated", "create_time", + "update_time", ] dict_element_visible_keys = [ "id", @@ -414,6 +415,7 @@ class Repository(Base, Dictifiable): "times_downloaded", "deprecated", "create_time", + "update_time", ] file_states = Bunch(NORMAL="n", NEEDS_MERGING="m", MARKED_FOR_REMOVAL="r", MARKED_FOR_ADDITION="a", NOT_TRACKED="?") diff --git a/lib/tool_shed/webapp/search/tool_search.py b/lib/tool_shed/webapp/search/tool_search.py index bbe53d7df1d5..bf3617c4cd38 100644 --- a/lib/tool_shed/webapp/search/tool_search.py +++ b/lib/tool_shed/webapp/search/tool_search.py @@ -31,7 +31,7 @@ class ToolSearch: - def search(self, trans, search_term, page, page_size, boosts): + def search(self, app, search_term, page, page_size, boosts): """ Perform the search on the given search_term @@ -39,7 +39,7 @@ def search(self, trans, search_term, page, page_size, boosts): :returns results: dictionary containing number of hits, hits themselves and matched terms for each """ - tool_index_dir = os.path.join(trans.app.config.whoosh_index_dir, "tools") + tool_index_dir = os.path.join(app.config.whoosh_index_dir, "tools") index_exists = whoosh.index.exists_in(tool_index_dir) if index_exists: index = whoosh.index.open_dir(tool_index_dir) diff --git a/lib/tool_shed/webapp/security/__init__.py 
b/lib/tool_shed/webapp/security/__init__.py index b8bbbd30cc6e..a2cea3ade3bf 100644 --- a/lib/tool_shed/webapp/security/__init__.py +++ b/lib/tool_shed/webapp/security/__init__.py @@ -1,5 +1,6 @@ """Tool Shed Security""" import logging +from typing import List from sqlalchemy import ( and_, @@ -239,9 +240,12 @@ def set_entity_user_associations(self, users=None, roles=None, groups=None, dele for group in groups: self.associate_components(user=user, group=group) + def usernames_that_can_push(self, repository) -> List[str]: + return listify(repository.allow_push()) + def can_push(self, app, user, repository): if user: - return user.username in listify(repository.allow_push()) + return user.username in self.usernames_that_can_push(repository) return False def user_can_administer_repository(self, user, repository): diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako index 945b0a65f1e2..978837fb4fcc 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/common/repository_actions_menu.mako @@ -33,11 +33,6 @@ can_browse_contents = not is_new - if trans.user and trans.user != repository.user: - can_contact_owner = True - else: - can_contact_owner = False - if not is_new and trans.user and ( is_admin or repository.user == trans.user ) and not is_deprecated: can_deprecate = True else: @@ -58,11 +53,6 @@ else: can_reset_all_metadata = False - if can_push and not is_deprecated: - can_upload = True - else: - can_upload = False - if not is_new and not is_deprecated and trans.user and repository.user != trans.user: can_rate = True else: @@ -97,18 +87,12 @@

    %if is_new: - %if can_upload: - Upload files to repository - %endif %if can_undeprecate: Mark repository as not deprecated %endif %else:
  • Repository Actions
  • - %if can_upload: - Upload files to repository - %endif %if can_administer: Manage repository %else: @@ -123,9 +107,6 @@ %if can_rate: Rate repository %endif - %if can_contact_owner: - Contact repository owner - %endif %if can_reset_all_metadata: Reset all repository metadata %endif diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako index 79c69e562e97..1da62ed987fc 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/common.mako @@ -35,10 +35,6 @@ var selKeys = $.map(selNodes, function(node) { return node.data.key; }); - // The following is used only in ~/templates/webapps/tool_shed/repository/upload.mako. - if (document.forms["upload_form"]) { - document.upload_form.upload_point.value = selKeys.slice(-1); - } }, onActivate: function(dtnode) { var cell = $("#file_contents"); diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako deleted file mode 100644 index 3b2d5b8a50ba..000000000000 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/contact_owner.mako +++ /dev/null @@ -1,38 +0,0 @@ -<%inherit file="/base.mako"/> -<%namespace file="/message.mako" import="render_msg" /> -<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" /> - -<%! - def inherit(context): - if context.get('use_panels'): - return '/webapps/tool_shed/base_panels.mako' - else: - return '/base.mako' -%> -<%inherit file="${inherit(context)}"/> - -${render_tool_shed_repository_actions( repository, metadata=metadata )} - -%if message: - ${render_msg( message, status )} -%endif - -
    -
    Contact the owner of the repository named '${repository.name | h}'
    -
    -
    - This feature is intended to streamline appropriate communication between - Galaxy tool developers and those in the Galaxy community that use them. - Please don't send messages unnecessarily. -
    -
    -
    - - -
    -
    - -
    -
    -
    -
    diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako index 6a49ea0a0414..a9ea1825c304 100644 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako +++ b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/find_tools.mako @@ -55,7 +55,7 @@
    - +
    diff --git a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako b/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako deleted file mode 100644 index 634f957817ed..000000000000 --- a/lib/tool_shed/webapp/templates/webapps/tool_shed/repository/upload.mako +++ /dev/null @@ -1,160 +0,0 @@ -<%namespace file="/message.mako" import="render_msg" /> -<%namespace file="/webapps/tool_shed/repository/common.mako" import="*" /> -<%namespace file="/webapps/tool_shed/common/repository_actions_menu.mako" import="render_tool_shed_repository_actions" /> - -<% - is_new = repository.is_new() -%> - -<%! - def inherit(context): - if context.get('use_panels'): - return '/webapps/tool_shed/base_panels.mako' - else: - return '/base.mako' -%> - -<%inherit file="${inherit(context)}"/> - -<%def name="stylesheets()"> - ${parent.stylesheets()} - ${h.css( "dynatree_skin/ui.dynatree" )} - - -<%def name="javascripts()"> - ${parent.javascripts()} - ${common_javascripts(repository)} - - - -%if message: - ${render_msg( message, status )} -%endif - -${render_tool_shed_repository_actions( repository=repository)} - -
    -
    -
    -
    - Upload a single file or tarball. Uploading may take a while, depending upon the size of the file. - Wait until a message is displayed in your browser after clicking the Upload button below. -
    -
    -
    -
    -
    - -
    -
    Repository '${repository.name | h}'
    -
    -
    -
    - -
    - -
    -
    -
    -
    - -
    - -
    -
    - Enter a url to upload your files. In addition to http and ftp urls, urls that point to mercurial repositories (urls that start - with hg:// or hgs://) are allowed. This mechanism results in the tip revision of an external mercurial repository being added - to the Tool Shed repository as a single new changeset. The revision history of the originating external mercurial repository is - not uploaded to the Tool Shed repository. -
    -
    -
    -
    - <% - if uncompress_file: - yes_selected = 'selected' - no_selected = '' - else: - yes_selected = '' - no_selected = 'selected' - %> - -
    - -
    -
    - Supported compression types are gz and bz2. If Yes is selected, the uploaded file will be uncompressed. However, - if the uploaded file is an archive that contains compressed files, the contained files will not be uncompressed. For - example, if the uploaded compressed file is some_file.tar.gz, some_file.tar will be uncompressed and extracted, but if - some_file.tar contains some_contained_file.gz, the contained file will not be uncompressed. -
    -
    - %if not is_new: -
    - <% - if remove_repo_files_not_in_tar: - yes_selected = 'selected' - no_selected = '' - else: - yes_selected = '' - no_selected = 'selected' - %> - -
    - -
    -
    - This selection pertains only to uploaded tar archives, not to single file uploads. If Yes is selected, files - that exist in the repository (relative to the root or selected upload point) but that are not in the uploaded archive - will be removed from the repository. Otherwise, all existing repository files will remain and the uploaded archive - files will be added to the repository. -
    -
    - %endif -
    - -
    - %if commit_message: -
    - %else: - - %endif -
    -
    - This is the commit message for the mercurial change set that will be created by this upload. -
    -
    -
    - %if not repository.is_new(): -
    - -
    - Loading... -
    - -
    - Select a location within the repository to upload your files by clicking a check box next to the location. The - selected location is considered the upload point. If a location is not selected, the upload point will be the - repository root. -
    -
    -
    - %endif -
    - -
    -
    -
    -
    diff --git a/lib/tool_shed_client/py.typed b/lib/tool_shed_client/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py index 870bacd5ea90..2e7e1d7a9014 100644 --- a/lib/tool_shed_client/schema/__init__.py +++ b/lib/tool_shed_client/schema/__init__.py @@ -33,6 +33,21 @@ class Repository(BaseModel): times_downloaded: int deprecated: bool create_time: str + update_time: str + + +class DetailedRepository(Repository): + long_description: Optional[str] + + +class RepositoryPermissions(BaseModel): + allow_push: List[str] + can_manage: bool # can the requesting user manage the repository + can_push: bool + + +class RepositoryRevisionReadmes(BaseModel): + __root__: Dict[str, str] class CreateUserRequest(BaseModel): @@ -49,6 +64,8 @@ class User(BaseModel): class Category(BaseModel): id: str name: str + description: str + repositories: int class CreateCategoryRequest(BaseModel): @@ -113,19 +130,28 @@ def is_ok(self): return isinstance(self.__root__, ValidRepostiroyUpdateMessage) -class RepositoryDependency(BaseModel): - pass - - class RepositoryTool(BaseModel): - pass + # Added back in post v2 in order for the frontend to render + # tool descriptions on the repository page. + description: str + guid: str + id: str + name: str + requirements: list + tool_config: str + tool_type: str + version: str + # add_to_tool_panel: bool + # tests: list + # version_string_cmd: Optional[str] class RepositoryRevisionMetadata(BaseModel): id: str repository: Repository - repository_dependencies: List[RepositoryDependency] - tools: Optional[List[RepositoryTool]] + repository_dependencies: List["RepositoryDependency"] + tools: Optional[List["RepositoryTool"]] + invalid_tools: List[str] # added for rendering list of invalid tools in 2.0 frontend repository_id: str numeric_revision: int changeset_revision: str @@ -141,6 +167,15 @@ class RepositoryRevisionMetadata(BaseModel): includes_workflows: Optional[bool] +class RepositoryDependency(RepositoryRevisionMetadata): + # This only needs properties for tests it seems? + # e.g. test_0550_metadata_updated_dependencies.py + pass + + +RepositoryRevisionMetadata.update_forward_refs() + + class RepositoryMetadata(BaseModel): __root__: Dict[str, RepositoryRevisionMetadata] @@ -218,7 +253,10 @@ class RepositoryIndexRequest(BaseModel): deleted: str = "false" -class RepositoriesByCategory(Category): +class RepositoriesByCategory(BaseModel): + id: str + name: str + description: str repository_count: int repositories: List[Repository] @@ -402,7 +440,7 @@ def from_legacy_dict(as_dict: RepositoryMetadataInstallInfoDict) -> "RepositoryM malicious=as_dict["malicious"], repository_id=as_dict["repository_id"], url=as_dict["url"], - valid_tools=ValidTool.from_legacy_list(as_dict["valid_tools"]), + valid_tools=ValidTool.from_legacy_list(as_dict.get("valid_tools", [])), ) @@ -429,3 +467,14 @@ def from_legacy_install_info(legacy_install_info: LegacyInstallInfoTuple) -> Ins metadata_info=metadata_info, repo_info=repo_info, ) + + +class BuildSearchIndexResponse(BaseModel): + repositories_indexed: int + tools_indexed: int + + +class Version(BaseModel): + version_major: str + version: str + api_version: str = "v1" diff --git a/lib/tool_shed_client/schema/gen.sh b/lib/tool_shed_client/schema/gen.sh new file mode 100755 index 000000000000..d6ee47360c55 --- /dev/null +++ b/lib/tool_shed_client/schema/gen.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# must be run from a virtualenv with... 
+# https://github.com/koxudaxi/datamodel-code-generator +#for model in AccessMethod Checksum DrsObject Error AccessURL ContentsObject DrsService +#do +# datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/ga4gh-tool-discovery.yaml" --output "$model.py" +#one + +#datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/components/schemas/Service" --output Service.py + +datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/paths/~1service-info" --output trs_service_info.py +datamodel-codegen --url "https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/openapi.yaml" --output trs.py diff --git a/lib/tool_shed_client/schema/trs.py b/lib/tool_shed_client/schema/trs.py new file mode 100644 index 000000000000..fceff90e76f7 --- /dev/null +++ b/lib/tool_shed_client/schema/trs.py @@ -0,0 +1,216 @@ +# generated by datamodel-codegen: +# filename: https://raw.githubusercontent.com/ga4gh/tool-registry-service-schemas/develop/openapi/openapi.yaml +# timestamp: 2022-12-20T21:01:58+00:00 + +from __future__ import annotations + +from enum import Enum +from typing import ( + Dict, + List, + Optional, + Union, +) + +from pydantic import ( + BaseModel, + Field, +) + + +class Checksum(BaseModel): + checksum: str = Field(..., description="The hex-string encoded checksum for the data. ") + type: str = Field( + ..., + description="The digest method used to create the checksum.\nThe value (e.g. `sha-256`) SHOULD be listed as `Hash Name String` in the https://github.com/ga4gh-discovery/ga4gh-checksum/blob/master/hash-alg.csv[GA4GH Checksum Hash Algorithm Registry].\nOther values MAY be used, as long as implementors are aware of the issues discussed in https://tools.ietf.org/html/rfc6920#section-9.4[RFC6920].\nGA4GH may provide more explicit guidance for use of non-IANA-registered algorithms in the future.", + ) + + +class FileType(Enum): + TEST_FILE = "TEST_FILE" + PRIMARY_DESCRIPTOR = "PRIMARY_DESCRIPTOR" + SECONDARY_DESCRIPTOR = "SECONDARY_DESCRIPTOR" + CONTAINERFILE = "CONTAINERFILE" + OTHER = "OTHER" + + +class ToolFile(BaseModel): + path: Optional[str] = Field( + None, + description="Relative path of the file. A descriptor's path can be used with the GA4GH .../{type}/descriptor/{relative_path} endpoint.", + ) + file_type: Optional[FileType] = None + checksum: Optional[Checksum] = None + + +class ToolClass(BaseModel): + id: Optional[str] = Field(None, description="The unique identifier for the class.") + name: Optional[str] = Field(None, description="A short friendly name for the class.") + description: Optional[str] = Field( + None, description="A longer explanation of what this class is and what it can accomplish." + ) + + +class ImageType(Enum): + Docker = "Docker" + Singularity = "Singularity" + Conda = "Conda" + + +class DescriptorType(Enum): + CWL = "CWL" + WDL = "WDL" + NFL = "NFL" + GALAXY = "GALAXY" + SMK = "SMK" + + +class DescriptorTypeVersion(BaseModel): + __root__: str = Field( + ..., + description="The language version for a given descriptor type. The version should correspond to the actual declared version of the descriptor. 
For example, tools defined in CWL could have a version of `v1.0.2` whereas WDL tools may have a version of `1.0` or `draft-2`", + ) + + +class DescriptorTypeWithPlain(Enum): + CWL = "CWL" + WDL = "WDL" + NFL = "NFL" + GALAXY = "GALAXY" + SMK = "SMK" + PLAIN_CWL = "PLAIN_CWL" + PLAIN_WDL = "PLAIN_WDL" + PLAIN_NFL = "PLAIN_NFL" + PLAIN_GALAXY = "PLAIN_GALAXY" + PLAIN_SMK = "PLAIN_SMK" + + +class FileWrapper(BaseModel): + content: Optional[str] = Field( + None, description="The content of the file itself. One of url or content is required." + ) + checksum: Optional[List[Checksum]] = Field( + None, + description="A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. ", + example=[{"checksum": "ea2a5db69bd20a42976838790bc29294df3af02b", "type": "sha1"}], + ) + image_type: Optional[Union[ImageType, DescriptorType]] = Field( + None, description="Optionally return additional information on the type of file this is" + ) + url: Optional[str] = Field( + None, + description="Optional url to the underlying content, should include version information, and can include a git hash. Note that this URL should resolve to the raw unwrapped content that would otherwise be available in content. One of url or content is required.", + example={ + "descriptorfile": { + "url": "https://raw.githubusercontent.com/ICGC-TCGA-PanCancer/pcawg_delly_workflow/ea2a5db69bd20a42976838790bc29294df3af02b/delly_docker/Delly.cwl" + }, + "containerfile": { + "url": "https://raw.githubusercontent.com/ICGC-TCGA-PanCancer/pcawg_delly_workflow/c83478829802b4d36374870843821abe1b625a71/delly_docker/Dockerfile" + }, + }, + ) + + +class Error(BaseModel): + code: int + message: Optional[str] = "Internal Server Error" + + +class ImageData(BaseModel): + registry_host: Optional[str] = Field( + None, + description="A docker registry or a URL to a Singularity registry. Used along with image_name to locate a specific image.", + example=["registry.hub.docker.com"], + ) + image_name: Optional[str] = Field( + None, + description="Used in conjunction with a registry_url if provided to locate images.", + example=["quay.io/seqware/seqware_full/1.1", "ubuntu:latest"], + ) + size: Optional[int] = Field(None, description="Size of the container in bytes.") + updated: Optional[str] = Field(None, description="Last time the container was updated.") + checksum: Optional[List[Checksum]] = Field( + None, + description="A production (immutable) tool version is required to have a hashcode. Not required otherwise, but might be useful to detect changes. This exposes the hashcode for specific image versions to verify that the container version pulled is actually the version that was indexed by the registry.", + example=[{"checksum": "77af4d6b9913e693e8d0b4b294fa62ade6054e6b2f1ffb617ac955dd63fb0182", "type": "sha256"}], + ) + image_type: Optional[ImageType] = None + + +class ToolVersion(BaseModel): + author: Optional[List[str]] = Field( + None, + description="Contact information for the author of this version of the tool in the registry. 
(More complex authorship information is handled by the descriptor).", + ) + name: Optional[str] = Field(None, description="The name of the version.") + url: str = Field( + ..., + description="The URL for this tool version in this registry.", + example="http://agora.broadinstitute.org/tools/123456/versions/1", + ) + id: str = Field( + ..., description="An identifier of the version of this tool for this particular tool registry.", example="v1" + ) + is_production: Optional[bool] = Field( + None, + description="This version of a tool is guaranteed to not change over time (for example, a tool built from a tag in git as opposed to a branch). A production quality tool is required to have a checksum", + ) + images: Optional[List[ImageData]] = Field( + None, + description="All known docker images (and versions/hashes) used by this tool. If the tool has to evaluate any of the docker images strings at runtime, those ones cannot be reported here.", + ) + descriptor_type: Optional[List[DescriptorType]] = Field( + None, description="The type (or types) of descriptors available." + ) + descriptor_type_version: Optional[Dict[str, List[DescriptorTypeVersion]]] = Field( + None, + description="A map providing information about the language versions used in this tool. The keys should be the same values used in the `descriptor_type` field, and the value should be an array of all the language versions used for the given `descriptor_type`. Depending on the `descriptor_type` (e.g. CWL) multiple version values may be used in a single tool.", + example='{\n "WDL": ["1.0", "1.0"],\n "CWL": ["v1.0.2"],\n "NFL": ["DSL2"]\n}\n', + ) + containerfile: Optional[bool] = Field( + None, + description="Reports if this tool has a containerfile available. (For Docker-based tools, this would indicate the presence of a Dockerfile)", + ) + meta_version: Optional[str] = Field( + None, + description="The version of this tool version in the registry. Iterates when fields like the description, author, etc. are updated.", + ) + verified: Optional[bool] = Field( + None, description="Reports whether this tool has been verified by a specific organization or individual." + ) + verified_source: Optional[List[str]] = Field( + None, description="Source of metadata that can support a verified tool, such as an email or URL." + ) + signed: Optional[bool] = Field(None, description="Reports whether this version of the tool has been signed.") + included_apps: Optional[List[str]] = Field( + None, + description="An array of IDs for the applications that are stored inside this tool.", + example=["https://bio.tools/tool/mytum.de/SNAP2/1", "https://bio.tools/bioexcel_seqqc"], + ) + + +class Tool(BaseModel): + url: str = Field( + ..., + description="The URL for this tool in this registry.", + example="http://agora.broadinstitute.org/tools/123456", + ) + id: str = Field(..., description="A unique identifier of the tool, scoped to this registry.", example=123456) + aliases: Optional[List[str]] = Field( + None, + description="Support for this parameter is optional for tool registries that support aliases.\nA list of strings that can be used to identify this tool which could be straight up URLs. \nThis can be used to expose alternative ids (such as GUIDs) for a tool\nfor registries. 
Can be used to match tools across registries.", + ) + organization: str = Field(..., description="The organization that published the image.") + name: Optional[str] = Field(None, description="The name of the tool.") + toolclass: ToolClass + description: Optional[str] = Field(None, description="The description of the tool.") + meta_version: Optional[str] = Field( + None, + description="The version of this tool in the registry. Iterates when fields like the description, author, etc. are updated.", + ) + has_checker: Optional[bool] = Field(None, description="Whether this tool has a checker tool associated with it.") + checker_url: Optional[str] = Field( + None, + description="Optional url to the checker tool that will exit successfully if this tool produced the expected result given test data.", + ) + versions: List[ToolVersion] = Field(..., description="A list of versions for this tool.") diff --git a/lib/tool_shed_client/schema/trs_service_info.py b/lib/tool_shed_client/schema/trs_service_info.py new file mode 100644 index 000000000000..68b2f04287b6 --- /dev/null +++ b/lib/tool_shed_client/schema/trs_service_info.py @@ -0,0 +1,87 @@ +# generated by datamodel-codegen: +# filename: https://raw.githubusercontent.com/ga4gh-discovery/ga4gh-service-info/v1.0.0/service-info.yaml#/paths/~1service-info +# timestamp: 2022-12-20T21:01:57+00:00 + +from __future__ import annotations + +from datetime import datetime +from typing import Optional + +from pydantic import ( + AnyUrl, + BaseModel, + Field, +) + + +class Organization(BaseModel): + name: str = Field( + ..., description="Name of the organization responsible for the service", example="My organization" + ) + url: AnyUrl = Field( + ..., description="URL of the website of the organization (RFC 3986 format)", example="https://example.com" + ) + + +class ServiceType(BaseModel): + group: str = Field( + ..., + description="Namespace in reverse domain name format. Use `org.ga4gh` for implementations compliant with official GA4GH specifications. For services with custom APIs not standardized by GA4GH, or implementations diverging from official GA4GH specifications, use a different namespace (e.g. your organization's reverse domain name).", + example="org.ga4gh", + ) + artifact: str = Field( + ..., + description="Name of the API or GA4GH specification implemented. Official GA4GH types should be assigned as part of standards approval process. Custom artifacts are supported.", + example="beacon", + ) + version: str = Field( + ..., + description="Version of the API or specification. GA4GH specifications use semantic versioning.", + example="1.0.0", + ) + + +class Service(BaseModel): + id: str = Field( + ..., + description="Unique ID of this service. Reverse domain name notation is recommended, though not required. The identifier should attempt to be globally unique so it can be used in downstream aggregator services e.g. Service Registry.", + example="org.ga4gh.myservice", + ) + name: str = Field(..., description="Name of this service. Should be human readable.", example="My project") + type: ServiceType + description: Optional[str] = Field( + None, + description="Description of the service. Should be human readable and provide information about the service.", + example="This service provides...", + ) + organization: Organization = Field(..., description="Organization providing the service") + contactUrl: Optional[AnyUrl] = Field( + None, + description="URL of the contact for the provider of this service, e.g. 
a link to a contact form (RFC 3986 format), or an email (RFC 2368 format).", + example="mailto:support@example.com", + ) + documentationUrl: Optional[AnyUrl] = Field( + None, + description="URL of the documentation of this service (RFC 3986 format). This should help someone learn how to use your service, including any specifics required to access data, e.g. authentication.", + example="https://docs.myservice.example.com", + ) + createdAt: Optional[datetime] = Field( + None, + description="Timestamp describing when the service was first deployed and available (RFC 3339 format)", + example="2019-06-04T12:58:19Z", + ) + updatedAt: Optional[datetime] = Field( + None, + description="Timestamp describing when the service was last updated (RFC 3339 format)", + example="2019-06-04T12:58:19Z", + ) + environment: Optional[str] = Field( + None, + description="Environment the service is running in. Use this to distinguish between production, development and testing/staging deployments. Suggested values are prod, test, dev, staging. However this is advised and not enforced.", + example="test", + ) + version: str = Field( + ..., + description="Version of the service being described. Semantic versioning is recommended, but other identifiers, such as dates or commit hashes, are also allowed. The version should be changed whenever the service is updated.", + example="1.0.0", + ) diff --git a/lib/tool_shed_client/trs_util.py b/lib/tool_shed_client/trs_util.py new file mode 100644 index 000000000000..a9d46238b96a --- /dev/null +++ b/lib/tool_shed_client/trs_util.py @@ -0,0 +1,24 @@ +from typing import NamedTuple + + +class EncodedIdentifier(NamedTuple): + tool_shed_base: str + encoded_id: str + + +# TRS specifies encoding/decoding according to +# https://datatracker.ietf.org/doc/html/rfc3986#section-2.4 +# Failed to get whole tool shed IDs working with FastAPI: +# - https://github.com/tiangolo/fastapi/issues/791#issuecomment-742799299 +# - urllib.parse.quote(identifier, safe='') will produce the URL fragments, +# but FastAPI eats them.
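+# +# Illustrative round trip (hypothetical values): +# encode_identifier("https://toolshed.example.org/repos/owner/repo") +# -> EncodedIdentifier("https://toolshed.example.org", "owner~repo") +# decode_identifier("https://toolshed.example.org", "owner~repo") +# -> "https://toolshed.example.org/repos/owner/repo"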
+ + +def decode_identifier(tool_shed_base: str, quoted_tool_id: str) -> str: + suffix = "/".join(quoted_tool_id.split("~")) + return f"{tool_shed_base}/repos/{suffix}" + + +def encode_identifier(identifier: str) -> EncodedIdentifier: + base, rest = identifier.split("/repos/", 1) + return EncodedIdentifier(base, "~".join(rest.split("/"))) diff --git a/mypy.ini b/mypy.ini index 99ab2d8a9073..fe479abe7da6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -4,7 +4,7 @@ plugins = pydantic.mypy show_error_codes = True ignore_missing_imports = True check_untyped_defs = True -exclude = lib/galaxy/tools/bundled|test/functional +exclude = lib/galaxy/tools/bundled|test/functional|.*tool_shed/test/test_data/repos pretty = True no_implicit_reexport = True no_implicit_optional = True @@ -479,8 +479,6 @@ check_untyped_defs = False check_untyped_defs = False [mypy-galaxy.jobs] check_untyped_defs = False -[mypy-galaxy.tool_shed.metadata.metadata_generator] -check_untyped_defs = False [mypy-galaxy.jobs.handler] check_untyped_defs = False [mypy-galaxy.workflow.scheduling_manager] diff --git a/packages/data/galaxy/schema b/packages/data/galaxy/schema deleted file mode 120000 index 14df7cabc56f..000000000000 --- a/packages/data/galaxy/schema +++ /dev/null @@ -1 +0,0 @@ -../../../lib/galaxy/schema \ No newline at end of file diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index a0e4228e33f8..02f74386b9d5 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -36,6 +36,7 @@ install_requires = galaxy-objectstore galaxy-util[template] alembic + alembic-utils bdbag bx-python dnspython diff --git a/packages/packages_by_dep_dag.txt b/packages/packages_by_dep_dag.txt index 2975c52c5150..3d9bc1f307aa 100644 --- a/packages/packages_by_dep_dag.txt +++ b/packages/packages_by_dep_dag.txt @@ -1,4 +1,5 @@ util +schema config files job_metrics @@ -17,3 +18,4 @@ web_apps test_base test_driver test_api +tool_shed \ No newline at end of file diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst new file mode 100644 index 000000000000..e947a90a461d --- /dev/null +++ b/packages/schema/HISTORY.rst @@ -0,0 +1,10 @@ +History +------- + +.. to_doc + +--------------------- +23.1.0.dev0 +--------------------- + +* First release. diff --git a/packages/schema/LICENSE b/packages/schema/LICENSE new file mode 120000 index 000000000000..1ef648f64b34 --- /dev/null +++ b/packages/schema/LICENSE @@ -0,0 +1 @@ +../../LICENSE.txt \ No newline at end of file diff --git a/packages/schema/MANIFEST.in b/packages/schema/MANIFEST.in new file mode 100644 index 000000000000..12302eb8dff0 --- /dev/null +++ b/packages/schema/MANIFEST.in @@ -0,0 +1 @@ +include *.rst *.txt LICENSE */py.typed diff --git a/packages/schema/Makefile b/packages/schema/Makefile new file mode 120000 index 000000000000..37af8bae5baa --- /dev/null +++ b/packages/schema/Makefile @@ -0,0 +1 @@ +../package.Makefile \ No newline at end of file diff --git a/packages/schema/README.rst b/packages/schema/README.rst new file mode 100644 index 000000000000..aed8193c9a85 --- /dev/null +++ b/packages/schema/README.rst @@ -0,0 +1,14 @@ + +.. image:: https://badge.fury.io/py/galaxy-schema.svg + :target: https://pypi.org/project/galaxy-schema/ + + + +Overview +-------- + +The Galaxy_ API schema objects. + +* Code: https://github.com/galaxyproject/galaxy + +..
_Galaxy: http://galaxyproject.org/ diff --git a/packages/schema/dev-requirements.txt b/packages/schema/dev-requirements.txt new file mode 120000 index 000000000000..467b90d7a232 --- /dev/null +++ b/packages/schema/dev-requirements.txt @@ -0,0 +1 @@ +../package-dev-requirements.txt \ No newline at end of file diff --git a/packages/schema/galaxy/__init__.py b/packages/schema/galaxy/__init__.py new file mode 100644 index 000000000000..2e50d9cce896 --- /dev/null +++ b/packages/schema/galaxy/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore[has-type] diff --git a/packages/schema/galaxy/py.typed b/packages/schema/galaxy/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/schema/galaxy/schema b/packages/schema/galaxy/schema new file mode 120000 index 000000000000..baebb232dbd6 --- /dev/null +++ b/packages/schema/galaxy/schema @@ -0,0 +1 @@ +../../../lib/galaxy/schema/ \ No newline at end of file diff --git a/packages/schema/mypy.ini b/packages/schema/mypy.ini new file mode 120000 index 000000000000..141a30f41afc --- /dev/null +++ b/packages/schema/mypy.ini @@ -0,0 +1 @@ +../../mypy.ini \ No newline at end of file diff --git a/packages/schema/pyproject.toml b/packages/schema/pyproject.toml new file mode 120000 index 000000000000..01a3b08b8872 --- /dev/null +++ b/packages/schema/pyproject.toml @@ -0,0 +1 @@ +../package-pyproject.toml \ No newline at end of file diff --git a/packages/schema/scripts b/packages/schema/scripts new file mode 120000 index 000000000000..9aec9dc5a067 --- /dev/null +++ b/packages/schema/scripts @@ -0,0 +1 @@ +../build_scripts \ No newline at end of file diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg new file mode 100644 index 000000000000..507f563909ea --- /dev/null +++ b/packages/schema/setup.cfg @@ -0,0 +1,41 @@ +[metadata] +author = Galaxy Project and Community +author_email = galaxy-committers@lists.galaxyproject.org +classifiers = + Development Status :: 5 - Production/Stable + Environment :: Console + Intended Audience :: Developers + License :: OSI Approved :: Academic Free License (AFL) + Natural Language :: English + Operating System :: POSIX + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Topic :: Software Development + Topic :: Software Development :: Code Generators + Topic :: Software Development :: Testing +description = Galaxy API schema objects +keywords = + Galaxy +license = AFL +license_files = + LICENSE +long_description = file: README.rst, HISTORY.rst +long_description_content_type = text/x-rst +name = galaxy-schema +url = https://github.com/galaxyproject/galaxy +version = 23.1.0.dev0 + +[options] +include_package_data = True +install_requires = + galaxy-util + pydantic[email] +packages = find: +python_requires = >=3.7 + +[options.packages.find] +exclude = + tests* diff --git a/packages/schema/test-requirements.txt b/packages/schema/test-requirements.txt new file mode 100644 index 000000000000..e079f8a6038d --- /dev/null +++ b/packages/schema/test-requirements.txt @@ -0,0 +1 @@ +pytest diff --git a/packages/schema/tests/__init__.py b/packages/schema/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/schema/tests/schema b/packages/schema/tests/schema new file mode 120000 index 000000000000..f2bc0c0ba5cd --- /dev/null +++
b/packages/schema/tests/schema @@ -0,0 +1 @@ +../../../test/unit/schema/ \ No newline at end of file diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index 699fd1706699..cd64afd37a8c 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -40,6 +40,8 @@ install_requires = galaxy-util galaxy-web-apps pytest + graphene-sqlalchemy==3.0.0b3 # these are only needed by tool shed - which we've split out but the test driver loads + starlette-graphene3 packages = find: python_requires = >=3.7 diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst new file mode 100644 index 000000000000..7fccc8db62eb --- /dev/null +++ b/packages/tool_shed/HISTORY.rst @@ -0,0 +1,10 @@ +History +------- + +.. to_doc + +--------------------- +23.1.0.dev0 +--------------------- + +* First release. diff --git a/packages/tool_shed/LICENSE b/packages/tool_shed/LICENSE new file mode 120000 index 000000000000..1ef648f64b34 --- /dev/null +++ b/packages/tool_shed/LICENSE @@ -0,0 +1 @@ +../../LICENSE.txt \ No newline at end of file diff --git a/packages/tool_shed/MANIFEST.in b/packages/tool_shed/MANIFEST.in new file mode 100644 index 000000000000..12302eb8dff0 --- /dev/null +++ b/packages/tool_shed/MANIFEST.in @@ -0,0 +1 @@ +include *.rst *.txt LICENSE */py.typed diff --git a/packages/tool_shed/Makefile b/packages/tool_shed/Makefile new file mode 120000 index 000000000000..37af8bae5baa --- /dev/null +++ b/packages/tool_shed/Makefile @@ -0,0 +1 @@ +../package.Makefile \ No newline at end of file diff --git a/packages/tool_shed/README.rst b/packages/tool_shed/README.rst new file mode 100644 index 000000000000..6ae09c66ae5b --- /dev/null +++ b/packages/tool_shed/README.rst @@ -0,0 +1,14 @@ + +.. image:: https://badge.fury.io/py/galaxy-tool-shed.svg + :target: https://pypi.org/project/galaxy-tool-shed/ + + + +Overview +-------- + +The Galaxy_ tool shed server. + +* Code: https://github.com/galaxyproject/galaxy + +.. 
_Galaxy: http://galaxyproject.org/
diff --git a/packages/tool_shed/dev-requirements.txt b/packages/tool_shed/dev-requirements.txt
new file mode 120000
index 000000000000..467b90d7a232
--- /dev/null
+++ b/packages/tool_shed/dev-requirements.txt
@@ -0,0 +1 @@
+../package-dev-requirements.txt
\ No newline at end of file
diff --git a/packages/tool_shed/mypy.ini b/packages/tool_shed/mypy.ini
new file mode 120000
index 000000000000..141a30f41afc
--- /dev/null
+++ b/packages/tool_shed/mypy.ini
@@ -0,0 +1 @@
+../../mypy.ini
\ No newline at end of file
diff --git a/packages/tool_shed/pyproject.toml b/packages/tool_shed/pyproject.toml
new file mode 120000
index 000000000000..01a3b08b8872
--- /dev/null
+++ b/packages/tool_shed/pyproject.toml
@@ -0,0 +1 @@
+../package-pyproject.toml
\ No newline at end of file
diff --git a/packages/tool_shed/scripts b/packages/tool_shed/scripts
new file mode 120000
index 000000000000..9aec9dc5a067
--- /dev/null
+++ b/packages/tool_shed/scripts
@@ -0,0 +1 @@
+../build_scripts
\ No newline at end of file
diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg
new file mode 100644
index 000000000000..4c5cc82377d2
--- /dev/null
+++ b/packages/tool_shed/setup.cfg
@@ -0,0 +1,40 @@
+[metadata]
+author = Galaxy Project and Community
+author_email = galaxy-committers@lists.galaxyproject.org
+classifiers =
+    Development Status :: 5 - Production/Stable
+    Environment :: Console
+    Intended Audience :: Developers
+    License :: OSI Approved :: Academic Free License (AFL)
+    Natural Language :: English
+    Operating System :: POSIX
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.7
+    Programming Language :: Python :: 3.8
+    Programming Language :: Python :: 3.9
+    Programming Language :: Python :: 3.10
+    Topic :: Software Development
+    Topic :: Software Development :: Code Generators
+    Topic :: Software Development :: Testing
+description = The Galaxy Tool Shed server
+keywords =
+    Galaxy
+license = AFL
+license_files =
+    LICENSE
+long_description = file: README.rst, HISTORY.rst
+long_description_content_type = text/x-rst
+name = galaxy-tool-shed
+url = https://github.com/galaxyproject/galaxy
+version = 23.1.0.dev0
+
+[options]
+include_package_data = True
+install_requires =
+    galaxy-webapps
+packages = find:
+python_requires = >=3.7
+
+[options.packages.find]
+exclude =
+    tests*
diff --git a/packages/tool_shed/test-requirements.txt b/packages/tool_shed/test-requirements.txt
new file mode 100644
index 000000000000..e079f8a6038d
--- /dev/null
+++ b/packages/tool_shed/test-requirements.txt
@@ -0,0 +1 @@
+pytest
diff --git a/packages/tool_shed/tests/__init__.py b/packages/tool_shed/tests/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/tool_shed/tests/tool_shed b/packages/tool_shed/tests/tool_shed
new file mode 120000
index 000000000000..8f2e9fcd98e2
--- /dev/null
+++ b/packages/tool_shed/tests/tool_shed
@@ -0,0 +1 @@
+../../../test/unit/tool_shed
\ No newline at end of file
diff --git a/packages/web_apps/tool_shed b/packages/tool_shed/tool_shed
similarity index 100%
rename from packages/web_apps/tool_shed
rename to packages/tool_shed/tool_shed
diff --git a/pyproject.toml b/pyproject.toml
index 3a74fe4a38ca..4794d781629d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,6 +5,7 @@ include = '\.pyi?$'
 extend-exclude = '''
 ^/(
   | packages
+  | lib/tool_shed/test/test_data/repos
 )/
 '''
@@ -61,6 +62,7 @@ fastapi-utils = "*"
 fs = "*"
 future = "*"
 galaxy_sequence_utils = "*"
+graphene-sqlalchemy = "3.0.0b3"  # need a beta release to be compatible with starlette plugin
"3.0.0b3" # need a beta release to be compat. with starlette plugin gravity = ">=1.0" gunicorn = "*" gxformat2 = "*" @@ -111,6 +113,7 @@ sqlitedict = "*" sqlparse = "*" starlette = "*" starlette-context = "*" +starlette-graphene3 = "*" svgwrite = "*" tifffile = "*" tuswsgi = "*" @@ -146,6 +149,7 @@ pytest-httpserver = "*" python-irodsclient = "!=1.1.2" # https://github.com/irods/python-irodsclient/issues/356 pytest-json-report = "*" pytest-mock = "*" +pytest-playwright = "*" pytest-postgresql = "!=3.0.0" # https://github.com/ClearcodeHQ/pytest-postgresql/issues/426 pytest-shard = "*" responses = "*" @@ -201,3 +205,5 @@ relative-imports-order = "closest-to-furthest" # Don't check some pyupgrade rules on generated files "lib/galaxy/schema/bco/*" = ["UP006", "UP007"] "lib/galaxy/schema/drs/*" = ["UP006", "UP007"] +"lib/tool_shed_client/schema/trs.py" = ["UP006", "UP007"] +"lib/tool_shed_client/schema/trs_service_info.py" = ["UP006", "UP007"] diff --git a/run_tool_shed.sh b/run_tool_shed.sh index ff88729db125..adff4ba9cdb6 100755 --- a/run_tool_shed.sh +++ b/run_tool_shed.sh @@ -3,6 +3,7 @@ cd "$(dirname "$0")" +export GALAXY_SKIP_CLIENT_BUILD=1 TOOL_SHED_PID=${TOOL_SHED_PID:-tool_shed_webapp.pid} TOOL_SHED_LOG=${TOOL_SHED_LOG:-tool_shed_webapp.log} PID_FILE=$TOOL_SHED_PID diff --git a/scripts/bootstrap_test_shed.py b/scripts/bootstrap_test_shed.py new file mode 100644 index 000000000000..58c7c475b033 --- /dev/null +++ b/scripts/bootstrap_test_shed.py @@ -0,0 +1,253 @@ +"""Script to bootstrap a tool shed server for development. + +- Create categories. +- Create some users. +- Create some repositories +""" + +import argparse +import os +import subprocess +import sys +import tempfile +from typing import ( + Any, + Dict, + List, + Optional, +) + +import requests + +sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, "lib"))) + + +from galaxy.tool_shed.util.hg_util import clone_repository +from tool_shed.test.base.api import ensure_user_with_email +from tool_shed.test.base.api_util import ( + create_user, + ShedApiInteractor, +) +from tool_shed.test.base.populators import ToolShedPopulator +from tool_shed_client.schema import ( + Category, + CreateRepositoryRequest, +) + +DESCRIPTION = "Script to bootstrap a tool shed server for development" +DEFAULT_USER = "jmchilton@gmail.com" +DEFAULT_USER_PASSWORD = "password123" # it is safe because of the 123 + +TEST_CATEGORY_NAME = "Testing Category" +TEST_CATEGORY_DESCRIPTION = "A longer description of the testing category" + +MAIN_SHED_URL = "https://toolshed.g2.bx.psu.edu/" +MAIN_SHED_API = f"{MAIN_SHED_URL}/api" + +CATEGORIES_TO_COPY = ["Data Export", "Climate Analysis", "Materials science"] + + +def main(argv: List[str]) -> None: + arg_parser = _arg_parser() + namespace = arg_parser.parse_args(argv) + populator = init_populator(namespace) + + category = populator.new_category_if_needed( + {"name": "Invalid Test Tools", "description": "A contains a repository with invalid tools."} + ) + populator.setup_bismark_repo(category_id=category.id) + populator.setup_test_data_repo("0010", category_id=category.id, assert_ok=False) + + category = populator.new_category_if_needed({"name": "Test Category", "description": "A longer test description."}) + mirror_main_categories(populator) + mirror_main_users(populator) + + populator.new_user_if_needed({"email": "bob@bobsdomain.com"}) + populator.new_user_if_needed({"email": "alice@alicesdomain.com"}) + populator.new_user_if_needed({"email": "thirduser@threeis.com"}) + + 
populator.setup_test_data_repo("column_maker_with_readme", category_id=category.id) + populator.setup_column_maker_repo(prefix="bootstrap", category_id=category.id) + populator.setup_column_maker_repo(prefix="bootstrap2", category_id=category.id) + + main_categories = get_main_categories() + for category in main_categories: + category_id = category["id"] + category_name = category["name"] + if category_name in CATEGORIES_TO_COPY: + local_category = populator.get_category_with_name(category_name) + repos = get_main_repositories_for_category(category_id) + for repo in repos: + mirror_main_repository(populator, repo, local_category.id) + + +def get_main_categories() -> List[Dict[str, Any]]: + main_categories_endpoint = f"{MAIN_SHED_API}/categories" + main_categories = requests.get(main_categories_endpoint).json() + return main_categories + + +def get_main_users() -> List[Dict[str, Any]]: + main_users_endpoint = f"{MAIN_SHED_API}/users" + main_users = requests.get(main_users_endpoint).json() + return main_users + + +def get_main_repositories_for_category(category_id) -> List[Dict[str, Any]]: + main_category_repos_endpoint = f"{MAIN_SHED_API}/categories/{category_id}/repositories" + main_repos_for_category_response = requests.get(main_category_repos_endpoint) + main_repos_for_category = main_repos_for_category_response.json() + assert "repositories" in main_repos_for_category + return main_repos_for_category["repositories"] + + +class RemoteToolShedPopulator(ToolShedPopulator): + """Extend the tool shed populator with some state tracking... + + ... tailored toward bootstrapping dev instances instead of + for tests. + """ + + _categories_by_name: Optional[Dict[str, Category]] = None + _users_by_username: Optional[Dict[str, Dict[str, Any]]] = None + _populators_by_username: Dict[str, "RemoteToolShedPopulator"] = {} + + def __init__(self, admin_interactor: ShedApiInteractor, user_interactor: ShedApiInteractor): + super().__init__(admin_interactor, user_interactor) + + def populator_for_user(self, username): + if username not in self._populators_by_username: + user = self.users_by_username[username] + assert user + mock_email = f"{username}@galaxyproject.org" + password = "testpass" + api_key = self._admin_api_interactor.create_api_key(mock_email, password) + user_interactor = ShedApiInteractor(self._admin_api_interactor.url, api_key) + self._populators_by_username[username] = RemoteToolShedPopulator( + self._admin_api_interactor, user_interactor + ) + return self._populators_by_username[username] + + @property + def categories_by_name(self) -> Dict[str, Category]: + if self._categories_by_name is None: + categories = self.get_categories() + self._categories_by_name = {c.name: c for c in categories} + return self._categories_by_name + + @property + def users_by_username(self) -> Dict[str, Dict[str, Any]]: + if self._users_by_username is None: + users_response = self._api_interactor.get("users") + if users_response.status_code == 400: + error_response = users_response.json() + raise Exception(str(error_response)) + users_response.raise_for_status() + users = users_response.json() + self._users_by_username = {u["username"]: u for u in users} + return self._users_by_username + + def new_category_if_needed(self, as_json: Dict[str, Any]) -> Category: + name = as_json["name"] + description = as_json["description"] + if name in self.categories_by_name: + return self.categories_by_name[name] + return self.new_category(name, description) + + def new_user_if_needed(self, as_json: Dict[str, Any]) -> Dict[str, 
Any]:
+        if "username" not in as_json:
+            email = as_json["email"]
+            as_json["username"] = email.split("@", 1)[0]
+        username = as_json["username"]
+        if username in self.users_by_username:
+            return self.users_by_username[username]
+        if "email" not in as_json:
+            mock_email = f"{username}@galaxyproject.org"
+            as_json["email"] = mock_email
+        request = {"username": username, "email": as_json["email"]}
+        print(f"creating user: {username}")
+        user = create_user(self._admin_api_interactor, request)
+        self.users_by_username[username] = user
+        return user
+
+
+def mirror_main_categories(populator: RemoteToolShedPopulator):
+    main_categories = get_main_categories()
+    for category in main_categories:
+        populator.new_category_if_needed(category)
+
+
+def mirror_main_users(populator: RemoteToolShedPopulator):
+    main_users = get_main_users()
+    for user in main_users:
+        assert isinstance(user, dict)
+        populator.new_user_if_needed(user)
+
+
+def mirror_main_repository(populator: RemoteToolShedPopulator, repository: Dict[str, Any], category_id: str):
+    # TODO: mirror the user
+    as_dict = repository.copy()
+    as_dict["category_ids"] = category_id
+    as_dict["synopsis"] = repository["description"]
+    request = CreateRepositoryRequest(**as_dict)
+    username = repository["owner"]
+    user_populator = populator.populator_for_user(username)
+    new_repository = None
+    try:
+        new_repository = user_populator.create_repository(request)
+    except AssertionError as e:
+        # if the problem is just that the repository already
+        # exists, continue
+        err_msg = str(e)
+        if "already own" not in err_msg:
+            raise
+    if new_repository:
+        name = repository["name"]
+        clone_url = f"{MAIN_SHED_URL}/repos/{username}/{name}"
+        temp_dir = tempfile.mkdtemp()
+        clone_repository(clone_url, temp_dir)
+        url_base = populator._api_interactor.hg_url_base
+        prefix, rest = url_base.split("://", 1)
+        target = f"{prefix}://{username}@{rest}/repos/{username}/{name}"
+        try:
+            _push_to(temp_dir, target)
+        except Exception as e:
+            print(f"Problem pushing repository: {e}, continuing bootstrap though...")
+        populator.reset_metadata(new_repository)
+
+
+def _push_to(repo_path: str, repo_target: str) -> None:
+    subprocess.check_output(["hg", "push", repo_target], cwd=repo_path)
+
+
+def init_populator(namespace) -> RemoteToolShedPopulator:
+    admin_interactor = ShedApiInteractor(
+        namespace.shed_url,
+        namespace.admin_key,
+    )
+    if namespace.user_key is None:
+        ensure_user_with_email(admin_interactor, DEFAULT_USER, DEFAULT_USER_PASSWORD)
+        user_key = admin_interactor.create_api_key(DEFAULT_USER, DEFAULT_USER_PASSWORD)
+    else:
+        user_key = namespace.user_key
+
+    user_interactor = ShedApiInteractor(namespace.shed_url, user_key)
+    return RemoteToolShedPopulator(
+        admin_interactor,
+        user_interactor,
+    )
+
+
+def _arg_parser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description=DESCRIPTION)
+    parser.add_argument("-u", "--shed-url", default="http://localhost:9009", help="Tool Shed URL")
+    parser.add_argument("-a", "--admin-key", default="tsadminkey", help="Tool Shed Admin API Key")
+    parser.add_argument(
+        "-k", "--user-key", default=None, help="Tool Shed User API Key (will create a new user if unspecified)"
+    )
+    return parser
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/scripts/dump_openapi_schema.py b/scripts/dump_openapi_schema.py
index 3215a9b73e51..c112733513fa 100644
--- a/scripts/dump_openapi_schema.py
+++ b/scripts/dump_openapi_schema.py
@@ -28,8 +28,17 @@ class YamlDumper(yaml.SafeDumper):
 @click.command("Write openapi 
schema to path") @click.argument("schema_path", type=click.Path(dir_okay=False, writable=True), required=False) -def write_open_api_schema(schema_path): - openapi_schema = get_openapi_schema() +@click.option("--app", type=click.Choice(["gx", "shed"]), required=False, default="gx") +def write_open_api_schema(schema_path, app: str): + if app == "shed": + # Importing this causes the Galaxy schema to generate + # in a different fashion and causes a diff in downstream + # typescript generation for instance. So delay this. + from tool_shed.webapp.fast_app import get_openapi_schema as get_openapi_schema_shed + + openapi_schema = get_openapi_schema_shed() + else: + openapi_schema = get_openapi_schema() if schema_path: if schema_path.endswith((".yml", ".yaml")): with open(schema_path, "w") as f: diff --git a/templates/webapps/tool_shed/repository/common.mako b/templates/webapps/tool_shed/repository/common.mako index 677fcdfb08a7..88761598d423 100644 --- a/templates/webapps/tool_shed/repository/common.mako +++ b/templates/webapps/tool_shed/repository/common.mako @@ -230,11 +230,6 @@ ${ sharable_link } -<%def name="render_clone_str( repository )"><% - from tool_shed.util.common_util import generate_clone_url_for_repository_in_tool_shed - clone_str = generate_clone_url_for_repository_in_tool_shed( trans.user, repository ) - %>hg clone ${ clone_str } - <%def name="render_folder( folder, folder_pad, parent=None, row_counter=None, is_root_folder=False, render_repository_actions_for='tool_shed' )"> <% encoded_id = trans.security.encode_id( folder.id ) diff --git a/test/unit/app/jobs/test_job_wrapper.py b/test/unit/app/jobs/test_job_wrapper.py index 43d958bc5400..40862720e24a 100644 --- a/test/unit/app/jobs/test_job_wrapper.py +++ b/test/unit/app/jobs/test_job_wrapper.py @@ -53,7 +53,7 @@ def setUp(self): self.model_objects: Dict[Type[Base], Dict[int, Base]] = {Job: {345: job}} self.app.model.session = MockContext(self.model_objects) - self.app.toolbox = cast(ToolBox, MockToolbox(MockTool(self))) + self.app._toolbox = cast(ToolBox, MockToolbox(MockTool(self))) self.working_directory = os.path.join(self.test_directory, "working") self.app.object_store = cast(BaseObjectStore, MockObjectStore(self.working_directory)) diff --git a/test/unit/app/test_galaxy_install.py b/test/unit/app/test_galaxy_install.py new file mode 100644 index 000000000000..f0898a79d348 --- /dev/null +++ b/test/unit/app/test_galaxy_install.py @@ -0,0 +1,60 @@ +"""Test installation using galaxy.tool_shed package. + +It should be able to quickly test installing things from the real tool shed +and from bootstrapped tool sheds. 
+""" +from pathlib import Path +from typing import ( + Any, + Dict, +) + +from galaxy.model.tool_shed_install import ToolShedRepository +from galaxy.tool_shed.galaxy_install.client import InstallationTarget +from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager +from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager +from galaxy.tool_shed.unittest_utils import StandaloneInstallationTarget +from galaxy.tool_shed.util.repository_util import check_for_updates +from galaxy.util.tool_shed.tool_shed_registry import DEFAULT_TOOL_SHED_URL + + +def test_against_production_shed(tmp_path: Path): + repo_owner = "iuc" + repo_name = "collection_column_join" + repo_revision = "dfde09461b1e" + + install_target: InstallationTarget = StandaloneInstallationTarget(tmp_path) + install_manager = InstallRepositoryManager(install_target) + install_options: Dict[str, Any] = {} + install_manager.install( + DEFAULT_TOOL_SHED_URL, + repo_name, + repo_owner, + repo_revision, # revision 2, a known installable revision + install_options, + ) + with open(tmp_path / "shed_conf.xml") as f: + assert "toolshed.g2.bx.psu.edu/repos/iuc/collection_column_join/collection_column_join/0.0.2" in f.read() + repo_path = tmp_path / "tools" / "toolshed.g2.bx.psu.edu" / "repos" / repo_owner / repo_name / repo_revision + assert repo_path.exists() + + install_model_context = install_target.install_model.context + query = install_model_context.query(ToolShedRepository).where(ToolShedRepository.name == repo_name) + tsr = query.first() + assert tsr + message, status = check_for_updates( + install_target.tool_shed_registry, + install_model_context, + tsr.id, + ) + assert status + + irm = InstalledRepositoryManager(install_target) + errors = irm.uninstall_repository(repository=tsr, remove_from_disk=True) + assert not errors + + with open(tmp_path / "shed_conf.xml") as f: + assert "toolshed.g2.bx.psu.edu/repos/iuc/collection_column_join/collection_column_join/0.0.2" not in f.read() + + repo_path = tmp_path / "tools" / "toolshed.g2.bx.psu.edu" / "repos" / repo_owner / repo_name / repo_revision + assert not repo_path.exists() diff --git a/test/unit/app/tools/test_toolbox.py b/test/unit/app/tools/test_toolbox.py index 9b0e80ce4c23..f003fb519271 100644 --- a/test/unit/app/tools/test_toolbox.py +++ b/test/unit/app/tools/test_toolbox.py @@ -1,201 +1,21 @@ -import collections -import json import logging -import os -import string import time -from typing import Optional import pytest import routes from galaxy import model -from galaxy.app_unittest_utils.tools_support import UsesTools -from galaxy.config_watchers import ConfigWatchers -from galaxy.model import tool_shed_install +from galaxy.app_unittest_utils.toolbox_support import BaseToolBoxTestCase from galaxy.model.base import transaction -from galaxy.model.tool_shed_install import mapping from galaxy.tool_util.unittest_utils import mock_trans from galaxy.tool_util.unittest_utils.sample_data import ( SIMPLE_MACRO, SIMPLE_TOOL_WITH_MACRO, ) -from galaxy.tools import ToolBox -from galaxy.tools.cache import ToolCache -from galaxy.util.unittest import TestCase log = logging.getLogger(__name__) -CONFIG_TEST_TOOL_VERSION_TEMPLATE = string.Template( - """ - github.com - example - galaxyproject - ${version} - github.com/galaxyproject/example/test_tool/0.${version} - 0.${version} - - """ -) -CONFIG_TEST_TOOL_VERSION_1 = CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="1")) -CONFIG_TEST_TOOL_VERSION_2 = 
CONFIG_TEST_TOOL_VERSION_TEMPLATE.safe_substitute(dict(version="2")) - -REPO_TYPE = collections.namedtuple( - "REPO_TYPE", - "tool_shed owner name changeset_revision installed_changeset_revision description status", -) -DEFAULT_TEST_REPO = REPO_TYPE("github.com", "galaxyproject", "example", "1", "1", "description", "OK") - - -class SimplifiedToolBox(ToolBox): - def __init__(self, test_case: "BaseToolBoxTestCase"): - app = test_case.app - app.watchers.tool_config_watcher.reload_callback = lambda: reload_callback(test_case) - # Handle app/config stuff needed by toolbox but not by tools. - app.tool_cache = ToolCache() if not hasattr(app, "tool_cache") else app.tool_cache - config_files = test_case.config_files - tool_root_dir = test_case.test_directory - super().__init__( - config_files, - tool_root_dir, - app, - ) - # Need to start thread now for new reload callback to take effect - self.app.watchers.start() - - -class BaseToolBoxTestCase(TestCase, UsesTools): - _toolbox: Optional[SimplifiedToolBox] = None - - @property - def integrated_tool_panel_path(self): - return os.path.join(self.test_directory, "integrated_tool_panel.xml") - - def assert_integerated_tool_panel(self, exists=True): - does_exist = os.path.exists(self.integrated_tool_panel_path) - if exists: - assert does_exist - else: - assert not does_exist - - @property - def toolbox(self): - if self._toolbox is None: - self.app.toolbox = self._toolbox = SimplifiedToolBox(self) - return self._toolbox - - def setUp(self): - self.reindexed = False - self.setup_app() - install_model = mapping.init("sqlite:///:memory:", create_tables=True) - self.app.tool_cache = ToolCache() - self.app.install_model = install_model - self.app.reindex_tool_search = self.__reindex # type: ignore[assignment] - itp_config = os.path.join(self.test_directory, "integrated_tool_panel.xml") - self.app.config.integrated_tool_panel_config = itp_config - self.app.watchers = ConfigWatchers(self.app) - self._toolbox = None - self.config_files = [] - - def tearDown(self): - self.app.watchers.shutdown() - - def _repo_install(self, changeset, config_filename=None): - metadata = { - "tools": [ - { - "add_to_tool_panel": False, # to have repository.includes_tools_for_display_in_tool_panel=False in InstalledRepositoryManager.activate_repository() - "guid": f"github.com/galaxyproject/example/test_tool/0.{changeset}", - "tool_config": "tool.xml", - } - ], - } - if config_filename: - metadata["shed_config_filename"] = config_filename - repository = tool_shed_install.ToolShedRepository(metadata_=metadata) - repository.tool_shed = DEFAULT_TEST_REPO.tool_shed - repository.owner = DEFAULT_TEST_REPO.owner - repository.name = DEFAULT_TEST_REPO.name - repository.changeset_revision = changeset - repository.installed_changeset_revision = changeset - repository.deleted = False - repository.uninstalled = False - self.app.install_model.context.add(repository) - session = self.app.install_model.context - with transaction(session): - session.commit() - return repository - - def _setup_two_versions(self): - self._repo_install(changeset="1") - version1 = tool_shed_install.ToolVersion() - version1.tool_id = "github.com/galaxyproject/example/test_tool/0.1" - self.app.install_model.context.add(version1) - session = self.app.install_model.context - with transaction(session): - session.commit() - - self._repo_install(changeset="2") - version2 = tool_shed_install.ToolVersion() - version2.tool_id = "github.com/galaxyproject/example/test_tool/0.2" - self.app.install_model.context.add(version2) - 
session = self.app.install_model.context
-        with transaction(session):
-            session.commit()
-
-        version_association = tool_shed_install.ToolVersionAssociation()
-        version_association.parent_id = version1.id
-        version_association.tool_id = version2.id
-
-        self.app.install_model.context.add(version_association)
-        session = self.app.install_model.context
-        with transaction(session):
-            session.commit()
-
-    def _setup_two_versions_in_config(self, section=False):
-        if section:
-            template = """<toolbox tool_path="%s">
-<section id="tid" name="TID" version="">
-    %s
-</section>
-<section id="tid2" name="TID2" version="">
-    %s
-</section>
-</toolbox>
    """ - else: - template = """ - %s - %s -""" - self._add_config(template % (self.test_directory, CONFIG_TEST_TOOL_VERSION_1, CONFIG_TEST_TOOL_VERSION_2)) - - def _add_config(self, content, name="tool_conf.xml"): - is_json = name.endswith(".json") - path = self._tool_conf_path(name=name) - with open(path, "w") as f: - if not is_json or isinstance(content, str): - f.write(content) - else: - json.dump(content, f) - self.config_files.append(path) - - def _init_dynamic_tool_conf(self): - # Add a dynamic tool conf (such as a ToolShed managed one) to list of configs. - self._add_config(f"""""") - - def _tool_conf_path(self, name="tool_conf.xml"): - path = os.path.join(self.test_directory, name) - return path - - def _tool_path(self, name="tool.xml"): - path = os.path.join(self.test_directory, name) - return path - - def __reindex(self): - self.reindexed = True - - class TestToolBox(BaseToolBoxTestCase): def test_load_file(self): self._init_tool() @@ -609,18 +429,3 @@ def __verify_get_tool_for_default_lineage(self): default_tool = self.toolbox.get_tool("test_tool") assert default_tool.id == "test_tool" assert default_tool.version == "0.2" - - def __setup_shed_tool_conf(self): - self._add_config("""""") - - self.toolbox # noqa: B018 create toolbox - assert not self.reindexed - - os.remove(self.integrated_tool_panel_path) - - -def reload_callback(test_case): - test_case.app.tool_cache.cleanup() - log.debug("Reload callback called, toolbox contains %s", test_case._toolbox._tool_versions_by_id) - test_case._toolbox = test_case.app.toolbox = SimplifiedToolBox(test_case) - log.debug("After callback toolbox contains %s", test_case._toolbox._tool_versions_by_id) diff --git a/test/unit/schema/__init__.py b/test/unit/schema/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/unit/data/test_schema.py b/test/unit/schema/test_schema.py similarity index 100% rename from test/unit/data/test_schema.py rename to test/unit/schema/test_schema.py diff --git a/test/unit/shed_unit/test_installed_repository_manager.py b/test/unit/shed_unit/test_installed_repository_manager.py deleted file mode 100644 index 27b21ef9eea3..000000000000 --- a/test/unit/shed_unit/test_installed_repository_manager.py +++ /dev/null @@ -1,141 +0,0 @@ -import os -from typing import ( - Any, - Dict, -) -from unittest.mock import MagicMock - -from galaxy.tool_shed.galaxy_install.install_manager import InstallRepositoryManager -from galaxy.tool_shed.galaxy_install.installed_repository_manager import InstalledRepositoryManager -from galaxy.tool_shed.galaxy_install.update_repository_manager import UpdateRepositoryManager -from galaxy.tool_shed.util import ( - hg_util, - repository_util, -) -from galaxy.util.tool_shed import common_util -from ..app.tools.test_toolbox import ( - BaseToolBoxTestCase, - DEFAULT_TEST_REPO, -) - - -class ToolShedRepoBaseTestCase(BaseToolBoxTestCase): - def setUp(self): - super().setUp() - self._init_dynamic_tool_conf() - self.app.config.tool_configs = self.config_files - self.app.config.manage_dependency_relationships = False - self.app.toolbox = self.toolbox - - def _setup_repository(self): - return self._repo_install(changeset="1", config_filename=self.config_files[0]) - - -class TestInstallRepositoryManager(ToolShedRepoBaseTestCase): - def setUp(self): - super().setUp() - self.irm = InstallRepositoryManager(self.app) - self.app.config.enable_tool_shed_check = False - self.app.update_repository_manager = UpdateRepositoryManager(self.app) - - def test_tool_shed_repository_install(self): 
- hg_util.clone_repository = MagicMock(return_value=(True, None)) - repository_util.get_tool_shed_status_for = MagicMock(return_value={"revision_update": "true"}) - self._install_tool_shed_repository(start_status="New", end_status="Installed", changeset_revision="1") - hg_util.clone_repository.assert_called_with( - "github.com/repos/galaxyproject/example", - os.path.abspath(os.path.join("../shed_tools", "github.com/repos/galaxyproject/example/1/example")), - "1", - ) - - def test_tool_shed_repository_update(self): - common_util.get_tool_shed_url_from_tool_shed_registry = MagicMock(return_value="https://github.com") - repository_util.get_tool_shed_status_for = MagicMock(return_value={"revision_update": "false"}) - hg_util.pull_repository = MagicMock() - hg_util.update_repository = MagicMock(return_value=(True, None)) - self._install_tool_shed_repository(start_status="Installed", end_status="Installed", changeset_revision="2") - assert hg_util.pull_repository.call_args[0][0].endswith("github.com/repos/galaxyproject/example/1/example") - assert hg_util.pull_repository.call_args[0][1] == "https://github.com/repos/galaxyproject/example" - assert hg_util.pull_repository.call_args[0][2] == "2" - assert hg_util.update_repository.call_args[0][0].endswith("github.com/repos/galaxyproject/example/1/example") - assert hg_util.update_repository.call_args[0][1] == "2" - - def _install_tool_shed_repository(self, start_status, end_status, changeset_revision): - repository = self._setup_repository() - repository.status = start_status - repo_info_dict: Dict[str, Any] = { - "example": ( - "description", - "github.com/repos/galaxyproject/example", - changeset_revision, - changeset_revision, - "galaxyproject", - [], - [], - ) - } - self.irm.install_tool_shed_repository( - repository, - repo_info_dict, - "section_key", - self.app.config.tool_configs[0], - "../shed_tools", - False, - False, - reinstalling=False, - ) - assert repository.status == end_status - assert repository.changeset_revision == changeset_revision - - -class TestInstalledRepositoryManager(ToolShedRepoBaseTestCase): - def setUp(self): - super().setUp() - self.irm = InstalledRepositoryManager(self.app) - - def test_uninstall_repository(self): - repository = self._setup_repository() - assert repository.uninstalled is False - self.irm.uninstall_repository(repository=repository, remove_from_disk=True) - assert repository.uninstalled is True - - def test_deactivate_repository(self): - self._deactivate_repository() - - def test_activate_repository(self): - repository = self._deactivate_repository() - self.irm.activate_repository(repository) - assert repository.status == self.app.install_model.ToolShedRepository.installation_status.INSTALLED - - def test_create_or_update_tool_shed_repository_update(self): - repository = self._setup_repository() - self._create_or_update_tool_shed_repository(repository=repository, changeset_revision="2") - - def test_create_or_update_tool_shed_repository_create(self): - self._create_or_update_tool_shed_repository(repository=None, changeset_revision="2") - - def _create_or_update_tool_shed_repository(self, repository=None, changeset_revision="2"): - if repository is None: - repository = DEFAULT_TEST_REPO - new_repository = repository_util.create_or_update_tool_shed_repository( - app=self.app, - name=repository.name, - description=repository.description, - installed_changeset_revision=repository.installed_changeset_revision, - ctx_rev=repository.changeset_revision, - 
repository_clone_url=f"https://github.com/galaxyproject/example/test_tool/0.{repository.installed_changeset_revision}", # not needed if owner is given - status=repository.status, - metadata_dict=None, - current_changeset_revision=str(int(repository.changeset_revision) + 1), - owner=repository.owner, - dist_to_shed=False, - ) - assert new_repository.changeset_revision == changeset_revision - - def _deactivate_repository(self): - repository = self._setup_repository() - assert repository.uninstalled is False - self.irm.uninstall_repository(repository=repository, remove_from_disk=False) - assert repository.uninstalled is False - assert repository.status == self.app.install_model.ToolShedRepository.installation_status.DEACTIVATED - return repository diff --git a/test/unit/shed_unit/__init__.py b/test/unit/tool_shed/__init__.py similarity index 100% rename from test/unit/shed_unit/__init__.py rename to test/unit/tool_shed/__init__.py diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py new file mode 100644 index 000000000000..d59991bca0f1 --- /dev/null +++ b/test/unit/tool_shed/_util.py @@ -0,0 +1,193 @@ +import os +import random +import string +import tarfile +from pathlib import Path +from tempfile import ( + mkdtemp, + NamedTemporaryFile, +) +from typing import ( + Any, + Dict, + Optional, +) + +import tool_shed.repository_registry +from galaxy.security.idencoding import IdEncodingHelper +from galaxy.util import safe_makedirs +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.managers.repositories import upload_tar_and_set_metadata +from tool_shed.managers.users import create_user +from tool_shed.repository_types import util as rt_util +from tool_shed.repository_types.registry import Registry as RepositoryTypesRegistry +from tool_shed.structured_app import ToolShedApp +from tool_shed.test.base.populators import ( + repo_tars, + TEST_DATA_REPO_FILES, +) +from tool_shed.util.hgweb_config import hgweb_config_manager +from tool_shed.util.repository_util import create_repository +from tool_shed.webapp.model import ( + Category, + mapping, + Repository, + User, +) +from tool_shed_client.schema import CreateCategoryRequest + +TEST_DATA_FILES = TEST_DATA_REPO_FILES +TEST_HOST = "localhost" +TEST_COMMIT_MESSAGE = "Test Commit Message" + + +class TestToolShedConfig: + user_activation_on = False + file_path: str + id_secret: str = "thisistheshedunittestsecret" + smtp_server: Optional[str] = None + config_hg_for_dev = False + + def __init__(self, temp_directory): + files_path = os.path.join(temp_directory, "files") + safe_makedirs(files_path) + self.file_path = files_path + + def get(self, key, default): + assert key == "admin_users" + return "admin@galaxyproject.org" + + +class TestToolShedApp(ToolShedApp): + repository_types_registry = RepositoryTypesRegistry() + config: TestToolShedConfig + hgweb_config_manager = hgweb_config_manager + repository_registry: tool_shed.repository_registry.Registry + security: IdEncodingHelper + name: str = "ToolShed" + + def __init__(self, temp_directory=None): + self.model = mapping.init( + "sqlite:///:memory:", + create_tables=True, + ) + temp_directory = temp_directory or mkdtemp() + hgweb_config_dir = os.path.join(temp_directory, "hgweb") + safe_makedirs(hgweb_config_dir) + self.hgweb_config_manager.hgweb_config_dir = hgweb_config_dir + self.config = TestToolShedConfig(temp_directory) + self.security = IdEncodingHelper(id_secret=self.config.id_secret) + self.repository_registry = tool_shed.repository_registry.Registry(self) 
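A minimal usage sketch for this in-memory application, assuming only the helpers defined in this `_util.py` module; it mirrors what `test_create_repository` in `test_repository_utils.py` later exercises:

```python
# Sketch: an isolated shed app backed by a temp directory and an in-memory
# database; creating a repository should register an hgweb config entry.
def test_repository_registration_sketch():
    app = TestToolShedApp()
    user = user_fixture(app, "sketchuser")
    repository = repository_fixture(app, user, "sketchrepo")
    entry_name = f"repos/{user.username}/{repository.name}"
    assert app.hgweb_config_manager.get_entry(entry_name)
```
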
+ + @property + def security_agent(self): + return self.model.security_agent + + +def user_fixture(app: ToolShedApp, username: str, password: str = "testpassword", email: Optional[str] = None) -> User: + email = email or f"{username}@galaxyproject.org" + return create_user( + app, + email, + username, + password, + ) + + +class ProvidesRepositoriesImpl(ProvidesRepositoriesContext): + def __init__(self, app: TestToolShedApp, user: User): + self._app = app + self._user = user + + @property + def app(self) -> ToolShedApp: + return self._app + + @property + def user(self) -> User: + return self._user + + @property + def repositories_hostname(self) -> str: + return "shed_unit_test://localhost" + + +def provides_repositories_fixture( + app: TestToolShedApp, + user: User, +): + return ProvidesRepositoriesImpl(app, user) + + +def repository_fixture(app: ToolShedApp, user: User, name: str, category: Optional[Category] = None) -> Repository: + type = rt_util.UNRESTRICTED + description = f"test repo named {name}" + long_description = f"test repo named {name} a longer description" + category_ids = [] + if category: + category_ids.append(app.security.encode_id(category.id)) + repository, message = create_repository( + app, + name, + type, + description, + long_description, + user.id, + category_ids=category_ids, + remote_repository_url=None, + homepage_url=None, + ) + assert "created" in message + return repository + + +def _mock_url_for(x, qualified: bool = False): + return "shed_unit_test://localhost/" + + +from unittest import mock + +patch_url_for = mock.patch("galaxy.util.tool_shed.common_util.url_for", _mock_url_for) + + +def upload( + provides_repositories: ProvidesRepositoriesContext, + repository: Repository, + path: Path, + arcname: Optional[str] = None, +): + if path.is_dir(): + tf = NamedTemporaryFile(delete=False) + with tarfile.open(tf.name, "w:gz") as tar: + print(path.name) + print(str(path)) + tar.add(str(path), arcname=arcname or path.name) + tar_path = tf.name + else: + tar_path = str(path) + return upload_tar_and_set_metadata( + provides_repositories, + TEST_HOST, + repository, + tar_path, + commit_message=TEST_COMMIT_MESSAGE, + ) + + +def upload_directories_to_repository( + provides_repositories: ProvidesRepositoriesContext, repository: Repository, test_data_path: str +): + paths = repo_tars(test_data_path) + for path in paths: + upload(provides_repositories, repository, Path(path), arcname=test_data_path) + + +def random_name(len: int = 10) -> str: + return "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)) + + +def create_category(provides_repositories: ProvidesRepositoriesContext, create: Dict[str, Any]) -> Category: + from tool_shed.managers.categories import CategoryManager + + request = CreateCategoryRequest(**create) + return CategoryManager(provides_repositories.app).create(provides_repositories, request) diff --git a/test/unit/tool_shed/conftest.py b/test/unit/tool_shed/conftest.py new file mode 100644 index 000000000000..44bdbd58be00 --- /dev/null +++ b/test/unit/tool_shed/conftest.py @@ -0,0 +1,34 @@ +import pytest + +from tool_shed.webapp.model import ( + Repository, + User, +) +from ._util import ( + provides_repositories_fixture, + random_name, + repository_fixture, + TestToolShedApp, + user_fixture, +) + + +@pytest.fixture +def shed_app(): + app = TestToolShedApp() + yield app + + +@pytest.fixture +def new_user(shed_app: TestToolShedApp) -> User: + return user_fixture(shed_app, random_name()) + + +@pytest.fixture +def 
new_repository(shed_app: TestToolShedApp, new_user: User) -> Repository: + return repository_fixture(shed_app, new_user, random_name()) + + +@pytest.fixture +def provides_repositories(shed_app: TestToolShedApp, new_user: User) -> User: + return provides_repositories_fixture(shed_app, new_user) diff --git a/test/unit/tool_shed/model/__init__.py b/test/unit/tool_shed/model/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/unit/shed_unit/test_dbscript.py b/test/unit/tool_shed/test_dbscript.py similarity index 100% rename from test/unit/shed_unit/test_dbscript.py rename to test/unit/tool_shed/test_dbscript.py diff --git a/test/unit/tool_shed/test_graphql.py b/test/unit/tool_shed/test_graphql.py new file mode 100644 index 000000000000..111dbf6fccd8 --- /dev/null +++ b/test/unit/tool_shed/test_graphql.py @@ -0,0 +1,331 @@ +from typing import ( + Callable, + List, + Optional, + Tuple, +) + +from graphql.execution import ExecutionResult + +from tool_shed.context import ( + ProvidesRepositoriesContext, + ProvidesUserContext, +) +from tool_shed.webapp.graphql.schema import schema +from tool_shed.webapp.model import ( + Category, + Repository, + RepositoryCategoryAssociation, +) +from ._util import ( + create_category, + repository_fixture, + upload_directories_to_repository, + user_fixture, +) + + +def relay_query(query_name: str, params: Optional[str], node_def: str) -> str: + params_call = f"({params})" if params else "" + return f""" +query {{ + {query_name}{params_call} {{ + edges {{ + cursor + node {{ + {node_def} + }} + }} + pageInfo {{ + endCursor + hasNextPage + }} + }} +}} +""" + + +class PageInfo: + def __init__(self, result: dict): + assert "pageInfo" in result + self.info = result["pageInfo"] + + @property + def end_cursor(self) -> str: + return self.info["endCursor"] + + @property + def has_next_page(self) -> bool: + return self.info["hasNextPage"] + + +def relay_result(result: ExecutionResult) -> Tuple[list, PageInfo]: + data = result.data + assert data + data_values = data.values() + query_result = list(data_values)[0] + return query_result["edges"], PageInfo(query_result) + + +QueryExecutor = Callable[[str], ExecutionResult] + + +def query_execution_builder_for_trans(trans: ProvidesRepositoriesContext) -> QueryExecutor: + cv = context_value(trans) + + def e(query: str) -> ExecutionResult: + return schema.execute(query, context_value=cv, root_value=cv) + + return e + + +def context_value(trans: ProvidesUserContext): + return { + "session": trans.app.model.context, + "security": trans.security, + } + + +def test_simple_repositories(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + e = query_execution_builder_for_trans(provides_repositories) + repositories_query = """ + query { + repositories { + id + encodedId + name + categories { + name + } + user { + username + } + } + } + """ + result = e(repositories_query) + _assert_no_errors(result) + repos = _assert_result_data_has_key(result, "repositories") + repository_names = [r["name"] for r in repos] + assert new_repository.name in repository_names + + +def attach_category(provides_repositories: ProvidesRepositoriesContext, repository: Repository, category: Category): + assoc = RepositoryCategoryAssociation( + repository=repository, + category=category, + ) + provides_repositories.sa_session.add(assoc) + provides_repositories.sa_session.flush() + + +def test_relay_repos_by_category(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + name = 
new_repository.name + category = create_category(provides_repositories, {"name": "test_graphql_relay_categories_1"}) + user = provides_repositories.user + assert user + uc1 = repository_fixture(provides_repositories.app, user, "uc1") + uc2 = repository_fixture(provides_repositories.app, user, "uc2") + + other_user = user_fixture(provides_repositories.app, "otherusernamec") + ouc1 = repository_fixture(provides_repositories.app, other_user, "ouc1") + ouc2 = repository_fixture(provides_repositories.app, other_user, "ouc2") + + category_id = category.id + e = query_execution_builder_for_trans(provides_repositories) + + names = repository_names(e, "relayRepositoriesForCategory", f"id: {category_id}") + assert len(names) == 0 + + encoded_id = provides_repositories.security.encode_id(category_id) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 0 + attach_category(provides_repositories, new_repository, category) + + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 1 + assert name in names + + names = repository_names(e, "relayRepositoriesForCategory", f"id: {category_id}") + assert len(names) == 1 + assert name in names + + attach_category(provides_repositories, uc1, category) + attach_category(provides_repositories, ouc1, category) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 3, names + assert "uc1" in names, names + assert "ouc1" in names, names + + category2 = create_category(provides_repositories, {"name": "test_graphql_relay_categories_2"}) + attach_category(provides_repositories, uc2, category2) + attach_category(provides_repositories, ouc2, category2) + + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id}"') + assert len(names) == 3, names + assert "uc1" in names, names + assert "ouc1" in names, names + + encoded_id_2 = provides_repositories.security.encode_id(category2.id) + names = repository_names(e, "relayRepositoriesForCategory", f'encodedId: "{encoded_id_2}"') + assert len(names) == 2, names + assert "uc2" in names, names + assert "ouc2" in names, names + + +def test_simple_categories(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + assert provides_repositories.user + + category = create_category(provides_repositories, {"name": "test_graphql"}) + e = query_execution_builder_for_trans(provides_repositories) + result = e( + """ + query { + categories { + name + encodedId + } + } +""" + ) + _assert_no_errors(result) + categories = _assert_result_data_has_key(result, "categories") + category_names = [c["name"] for c in categories] + assert "test_graphql" in category_names + encoded_id = [c["encodedId"] for c in categories if c["name"] == "test_graphql"][0] + assert encoded_id == provides_repositories.security.encode_id(category.id) + + repository_fixture(provides_repositories.app, provides_repositories.user, "foo1", category=category) + result = e( + """ + query { + categories { + name + repositories { + name + } + } + } +""" + ) + _assert_no_errors(result) + categories = _assert_result_data_has_key(result, "categories") + repositories = [c["repositories"] for c in categories if c["name"] == "test_graphql"][0] + assert repositories + repository_names = [r["name"] for r in repositories] + assert "foo1" in repository_names + + +def test_simple_revisions(provides_repositories: ProvidesRepositoriesContext, new_repository: 
Repository):
+    upload_directories_to_repository(provides_repositories, new_repository, "column_maker_with_download_gaps")
+    e = query_execution_builder_for_trans(provides_repositories)
+    # (id: "1")
+    query = """
+    query {
+        revisions {
+            id
+            encodedId
+            createTime
+            repository {
+                name
+            }
+            changesetRevision
+            numericRevision
+            downloadable
+        }
+    }
+"""
+
+    result = e(query)
+    _assert_no_errors(result)
+
+
+def test_relay(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository):
+    assert provides_repositories.user
+    repository_fixture(provides_repositories.app, provides_repositories.user, "foo1")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "f002")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "cow")
+    repository_fixture(provides_repositories.app, provides_repositories.user, "u3")
+
+    e = query_execution_builder_for_trans(provides_repositories)
+    q1 = relay_query("relayRepositories", "sort: NAME_ASC first: 2", "encodedId, name, type, createTime")
+    result = e(q1)
+    _assert_no_errors(result)
+    edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+    assert has_next_page
+
+    last_cursor = edges[-1]["cursor"]
+    q2 = relay_query("relayRepositories", f'sort: NAME_ASC first: 2 after: "{last_cursor}"', "name, type, createTime")
+    result = e(q2)
+    _assert_no_errors(result)
+    edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+
+
+def test_relay_by_owner(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository):
+    user = provides_repositories.user
+    assert user
+    repository_fixture(provides_repositories.app, user, "u1")
+    repository_fixture(provides_repositories.app, user, "u2")
+    repository_fixture(provides_repositories.app, user, "u3")
+    repository_fixture(provides_repositories.app, user, "u4")
+    other_user = user_fixture(provides_repositories.app, "otherusername")
+    repository_fixture(provides_repositories.app, other_user, "ou1")
+    repository_fixture(provides_repositories.app, other_user, "ou2")
+    repository_fixture(provides_repositories.app, other_user, "ou3")
+    repository_fixture(provides_repositories.app, other_user, "ou4")
+
+    e = query_execution_builder_for_trans(provides_repositories)
+    names = repository_names(e, "relayRepositoriesForOwner", f'username: "{user.username}"')
+    assert "u1" in names
+    assert "ou1" not in names
+
+    names = repository_names(e, "relayRepositoriesForOwner", 'username: "otherusername"')
+    assert "ou4" in names
+    assert "u4" not in names
+
+
+def repository_names(e: QueryExecutor, field: str, base_variables: str) -> List[str]:
+    edges = walk_relay(e, field, base_variables, "name")
+    return [edge["node"]["name"] for edge in edges]
+
+
+def walk_relay(e: QueryExecutor, field: str, base_variables: str, fragment: str):
+    variables = f"{base_variables} first: 2"
+    query = relay_query(field, variables, fragment)
+    result: ExecutionResult = e(query)
+    _assert_no_errors(result, query)
+    all_edges, page_info = relay_result(result)
+    has_next_page = page_info.has_next_page
+    while has_next_page:
+        variables = f'{base_variables} first: 2 after: "{page_info.end_cursor}"'
+        query = relay_query(field, variables, fragment)
+        result = e(query)
+        _assert_no_errors(result, query)
+        these_edges, page_info = relay_result(result)
+        if len(these_edges) == 0:
+            # I was using .options instead of .join and such with the queries
+            # and this would break. 
The queries are better now anyway, but + # be careful with new queries - there seem to be bugs around this + # potentially + assert not page_info.has_next_page + break + all_edges.extend(these_edges) + has_next_page = page_info.has_next_page + return all_edges + + +def _assert_result_data_has_key(result: ExecutionResult, key: str): + data = result.data + assert data + assert key in data + return data[key] + + +def _assert_no_errors(result: ExecutionResult, query=None): + if result.errors is not None: + message = f"Found unexpected GraphQL errors {str(result.errors)}" + if query is not None: + message = f"{message} in query {query}" + raise AssertionError(message) diff --git a/test/unit/shed_unit/test_hg_util.py b/test/unit/tool_shed/test_hg_util.py similarity index 100% rename from test/unit/shed_unit/test_hg_util.py rename to test/unit/tool_shed/test_hg_util.py diff --git a/test/unit/tool_shed/test_repository_metadata_manager.py b/test/unit/tool_shed/test_repository_metadata_manager.py new file mode 100644 index 000000000000..da24e64ff158 --- /dev/null +++ b/test/unit/tool_shed/test_repository_metadata_manager.py @@ -0,0 +1,59 @@ +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.metadata import repository_metadata_manager +from tool_shed.webapp.model import Repository +from ._util import upload_directories_to_repository + + +def test_reset_simple(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + shed_app = provides_repositories.app + upload_directories_to_repository(provides_repositories, new_repository, "column_maker") + assert len(new_repository.downloadable_revisions) == 3 + assert "2:" in new_repository.revision() + rmm = repository_metadata_manager.RepositoryMetadataManager( + provides_repositories, + repository=new_repository, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False, + persist=False, + ) + repo_path = new_repository.repo_path(app=shed_app) + rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path) + assert len(new_repository.downloadable_revisions) == 3 + + +def test_reset_on_repo_with_uninstallable_revisions( + provides_repositories: ProvidesRepositoriesContext, new_repository: Repository +): + shed_app = provides_repositories.app + upload_directories_to_repository(provides_repositories, new_repository, "column_maker_with_download_gaps") + assert len(new_repository.downloadable_revisions) == 3 + assert "3:" in new_repository.revision() + rmm = repository_metadata_manager.RepositoryMetadataManager( + provides_repositories, + repository=new_repository, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False, + persist=False, + ) + repo_path = new_repository.repo_path(app=shed_app) + rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path) + assert len(new_repository.downloadable_revisions) == 3 + + +def test_reset_dm_with_uninstallable_revisions( + provides_repositories: ProvidesRepositoriesContext, new_repository: Repository +): + shed_app = provides_repositories.app + upload_directories_to_repository(provides_repositories, new_repository, "data_manager_gaps") + assert len(new_repository.downloadable_revisions) == 1 + assert "2:" in new_repository.revision() + rmm = repository_metadata_manager.RepositoryMetadataManager( + provides_repositories, + repository=new_repository, + resetting_all_metadata_on_repository=True, + updating_installed_repository=False, + persist=False, + ) + repo_path = 
new_repository.repo_path(app=shed_app) + rmm.reset_all_metadata_on_repository_in_tool_shed(repository_clone_url=repo_path) + assert len(new_repository.downloadable_revisions) == 2 diff --git a/test/unit/tool_shed/test_repository_utils.py b/test/unit/tool_shed/test_repository_utils.py new file mode 100644 index 000000000000..d23388b8a897 --- /dev/null +++ b/test/unit/tool_shed/test_repository_utils.py @@ -0,0 +1,97 @@ +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.util.repository_content_util import upload_tar +from tool_shed.webapp.model import ( + Repository, + User, +) +from ._util import ( + repository_fixture, + TEST_DATA_FILES, + TestToolShedApp, +) + + +def test_create_repository(shed_app: TestToolShedApp, new_user: User): + name = "testname" + manager = shed_app.hgweb_config_manager + entry = None + entry_name = f"repos/{new_user.username}/{name}" + try: + entry = manager.get_entry(entry_name) + except Exception: + pass + assert not entry + repository_fixture(shed_app, new_user, name) + entry = manager.get_entry(entry_name) + assert entry + + +def test_upload_tar(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") + old_tip = new_repository.tip() + upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar( + provides_repositories, + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + ) + assert upload_ok + assert alert == "" + assert dirs_removed == 0 + assert files_removed == 0 + new_tip = new_repository.tip() + assert old_tip != new_tip + changesets = new_repository.get_changesets_for_setting_metadata(provides_repositories.app) + assert len(changesets) == 1 + for change in changesets: + ctx = new_repository.hg_repo[change] + assert str(ctx) == new_tip + + +def test_upload_fails_if_contains_symlink( + provides_repositories: ProvidesRepositoriesContext, new_repository: Repository +): + tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") + upload_ok, message, _, _, _, _ = upload_tar( + provides_repositories, + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + ) + assert not upload_ok + assert "Invalid paths" in message + + +def test_upload_dry_run_ok(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("column_maker/column_maker.tar") + old_tip = new_repository.tip() + upload_ok, _, _, alert, dirs_removed, files_removed = upload_tar( + provides_repositories, + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + dry_run=True, + ) + assert upload_ok + assert alert == "" + assert dirs_removed == 0 + assert files_removed == 0 + new_tip = new_repository.tip() + assert old_tip == new_tip + + +def test_upload_dry_run_failed(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + tar_resource = TEST_DATA_FILES.joinpath("safetar_with_symlink.tar") + upload_ok, message, _, _, _, _ = upload_tar( + provides_repositories, + new_repository.user.username, + new_repository, + tar_resource, + commit_message="Commit Message", + dry_run=True, + ) + assert not upload_ok + assert "Invalid paths" in message diff --git a/test/unit/shed_unit/test_shed_index.py b/test/unit/tool_shed/test_shed_index.py similarity index 100% rename from test/unit/shed_unit/test_shed_index.py rename to 
test/unit/tool_shed/test_shed_index.py diff --git a/test/unit/shed_unit/test_tool_panel_manager.py b/test/unit/tool_shed/test_tool_panel_manager.py similarity index 99% rename from test/unit/shed_unit/test_tool_panel_manager.py rename to test/unit/tool_shed/test_tool_panel_manager.py index a206bfff60d0..751a7c5f5c4d 100644 --- a/test/unit/shed_unit/test_tool_panel_manager.py +++ b/test/unit/tool_shed/test_tool_panel_manager.py @@ -1,12 +1,12 @@ import os -from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager -from galaxy.util import parse_xml -from tool_shed.tools import tool_version_manager -from ..app.tools.test_toolbox import ( +from galaxy.app_unittest_utils.toolbox_support import ( BaseToolBoxTestCase, SimplifiedToolBox, ) +from galaxy.tool_shed.galaxy_install.tools import tool_panel_manager +from galaxy.util import parse_xml +from tool_shed.tools import tool_version_manager DEFAULT_GUID = "123456" diff --git a/test/unit/tool_shed/test_trs_tool.py b/test/unit/tool_shed/test_trs_tool.py new file mode 100644 index 000000000000..79aef77bca4f --- /dev/null +++ b/test/unit/tool_shed/test_trs_tool.py @@ -0,0 +1,21 @@ +from tool_shed.context import ProvidesRepositoriesContext +from tool_shed.managers.trs import get_tool +from tool_shed.webapp.model import Repository +from tool_shed_client.schema.trs import Tool +from ._util import upload_directories_to_repository + + +def test_get_tool(provides_repositories: ProvidesRepositoriesContext, new_repository: Repository): + upload_directories_to_repository(provides_repositories, new_repository, "column_maker") + owner = new_repository.user.username + name = new_repository.name + encoded_id = f"{owner}~{name}~Add_a_column1" + tool: Tool = get_tool(provides_repositories, encoded_id) + assert tool + assert tool.organization == owner + assert tool.id == encoded_id + assert tool.aliases + assert tool.aliases[0] == f"localhost/repos/{owner}/{name}/Add_a_column1" + + tool_versions = tool.versions + assert len(tool_versions) == 3 diff --git a/test/unit/webapps/test_tool_validation.py b/test/unit/webapps/test_tool_validation.py index 02326c67e397..a1f421120ecc 100644 --- a/test/unit/webapps/test_tool_validation.py +++ b/test/unit/webapps/test_tool_validation.py @@ -1,7 +1,5 @@ import os import shutil -import tarfile -import tempfile from contextlib import contextmanager from galaxy.app_unittest_utils.galaxy_mock import MockApp @@ -9,14 +7,15 @@ from galaxy.util import galaxy_directory from tool_shed.tools.tool_validator import ToolValidator -BISMARK_TAR = os.path.join(galaxy_directory(), "lib/tool_shed/test/test_data/bismark/bismark.tar") +BISMARK_DIR = os.path.join(galaxy_directory(), "lib/tool_shed/test/test_data/repos/bismark/0") BOWTIE2_INDICES = os.path.join( galaxy_directory(), "lib/tool_shed/test/test_data/bowtie2_loc_sample/bowtie2_indices.loc.sample" ) def test_validate_valid_tool(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_methylation_extractor.xml") tool, valid, message = tv.load_tool_from_config(repository_id=None, full_path=full_path) assert tool.name == "Bismark" @@ -27,14 +26,16 @@ def test_validate_valid_tool(): def test_tool_validation_denies_allow_codefile(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_methylation_extractor.xml") tool, valid, message = 
tv.load_tool_from_config(repository_id=None, full_path=full_path) assert tool._allow_code_files is False def test_validate_tool_without_index(): - with get_tool_validator() as tv, setup_bismark() as repo_dir: + repo_dir = BISMARK_DIR + with get_tool_validator() as tv: full_path = os.path.join(repo_dir, "bismark_bowtie2_wrapper.xml") tool, valid, message = tv.load_tool_from_config(repository_id=None, full_path=full_path) assert valid is True @@ -63,15 +64,6 @@ def test_validate_tool_without_index(): assert not tool.params_with_missing_index_file -@contextmanager -def setup_bismark(): - repo_dir = tempfile.mkdtemp() - with tarfile.open(BISMARK_TAR) as archive: - archive.extractall(repo_dir) - yield repo_dir - shutil.rmtree(repo_dir, ignore_errors=True) - - @contextmanager def get_tool_validator(): app = MockApp() diff --git a/test/unit/workflows/workflow_support.py b/test/unit/workflows/workflow_support.py index f0b6fd064020..05064e722fac 100644 --- a/test/unit/workflows/workflow_support.py +++ b/test/unit/workflows/workflow_support.py @@ -35,7 +35,7 @@ def user(self): class MockApp(galaxy_mock.MockApp): def __init__(self): super().__init__() - self.toolbox = MockToolbox() + self._toolbox = MockToolbox() self.workflow_manager = WorkflowsManager(self)
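The two hunks above switch test mocks from assigning `app.toolbox` to assigning `app._toolbox`, which suggests the application now exposes `toolbox` as a read-only property over a private backing attribute. A hedged sketch of that pattern; the class below is illustrative, not Galaxy's actual implementation:

```python
class AppWithManagedToolbox:
    """Illustrative only: why mocks now seed _toolbox instead of toolbox."""

    def __init__(self):
        self._toolbox = None

    @property
    def toolbox(self):
        # Read-only view; with no setter, test doubles must write to the
        # private backing attribute directly.
        return self._toolbox


app = AppWithManagedToolbox()
app._toolbox = "mock toolbox"  # what the MockApp-based tests now do
assert app.toolbox == "mock toolbox"
```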