From 2b87a186d67c9b766dda0ba48aceb820cfd96d1d Mon Sep 17 00:00:00 2001
From: Bruno Rocha
Date: Thu, 26 Sep 2024 18:06:19 +0100
Subject: [PATCH] The Big Cleanup (#2275)

The Big Cleanup

Removed:
- Everything related to the pulp plugin_template (except the scripts reused by `ci_full`)
- Everything related to the legacy `./compose` stack
- Useless Makefile targets
- References to the old compose stack and plugin template in docs and other files

Skipped:
- Package_install test on dab_jwt because DAB is not released yet
---
 .ci/ansible/Containerfile.j2 | 48 --
 .ci/ansible/ansible.cfg | 7 -
 .ci/ansible/build_container.yaml | 27 -
 .ci/ansible/filter/repr.py | 31 --
 .ci/ansible/inventory.yaml | 12 -
 .ci/ansible/settings.py.j2 | 60 ---
 .ci/ansible/smash-config.json | 34 --
 .ci/ansible/start_container.yaml | 109 -----
 .ci/assets/.gitkeep | 0
 .ci/assets/httpie/config.json | 7 -
 .ci/assets/release_requirements.txt | 3 -
 .ci/scripts/calc_deps_lowerbounds.py | 34 --
 .ci/scripts/check_gettext.sh | 7 -
 .ci/scripts/check_pulpcore_imports.sh | 7 -
 .ci/scripts/check_release.py | 121 -----
 .ci/scripts/check_requirements.py | 66 ---
 .ci/scripts/collect_changes.py | 110 -----
 .ci/scripts/schema.py | 23 -
 .ci/scripts/validate_commit_message.py | 62 ---
 .ci/scripts/validate_commit_message_custom.py | 169 -------
 .flake8 | 6 -
 .github/pre-job-template.yml.j2 | 30 --
 .github/stale.yml | 59 ---
 .github/workflows/build.yml | 45 --
 .github/workflows/changelog.yml | 58 ---
 .github/workflows/ci.yml | 81 ---
 .../ci_automation_hub_collection.yml | 9 -
 .github/workflows/ci_full.yml | 27 -
 .github/workflows/codeql-analysis.yml | 42 --
 .github/workflows/create-branch.yml | 106 ----
 .github/workflows/deactivated/ci_insights.yml | 65 ---
 .../ci_standalone-certified-sync.yml | 73 ---
 .../deactivated/ci_standalone-community.yml | 70 ---
 .../ci_standalone-iqe-rbac-tests.yml | 61 ---
 .../deactivated/ci_standalone-ldap.yml | 64 ---
 .../ci_standalone-rbac-on-repos.yml | 58 ---
 .../deactivated/ci_standalone-rbac-roles.yml | 61 ---
 .../ci_standalone-x-repo-search.yml | 58 ---
 .../workflows/deactivated/ci_standalone.yml | 58 ---
 .github/workflows/lint.yml | 52 --
 .github/workflows/nightly.yml | 96 ----
 .github/workflows/nightly_latest.yml | 155 ------
 .github/workflows/pr_checks.yml | 62 ---
 .github/workflows/publish.yml | 136 ------
 .github/workflows/pulp_constraints.yml | 14 -
 .github/workflows/release.yml | 61 ---
 .github/workflows/scripts/before_install.sh | 75 ---
 .github/workflows/scripts/before_script.sh | 50 --
 .github/workflows/scripts/check_commit.sh | 22 -
 .github/workflows/scripts/docs-publisher.py | 262 ----------
 .github/workflows/scripts/install.sh | 168 -------
 .../scripts/install_python_client.sh | 69 ---
 .../workflows/scripts/install_ruby_client.sh | 42 --
 .../workflows/scripts/post_before_script.sh | 1 -
 .../workflows/scripts/publish_client_gem.sh | 35 --
 .../workflows/scripts/publish_client_pypi.sh | 31 --
 .github/workflows/scripts/publish_docs.sh | 48 --
 .../workflows/scripts/publish_plugin_pypi.sh | 33 --
 .../scripts/push_branch_and_tag_to_github.sh | 16 -
 .github/workflows/scripts/script.sh | 159 ------
 .../stage-changelog-for-default-branch.py | 70 ---
 .../scripts/update_backport_labels.py | 59 ---
 .github/workflows/scripts/utils.sh | 30 --
 .github/workflows/test.yml | 139 ------
 .github/workflows/update-labels.yml | 39 --
 .github/workflows/update_ci.yml | 68 ---
 .gitleaks.toml | 1 -
 CHANGES/.TEMPLATE.md | 39 --
 CHANGES/.gitignore | 1 -
 Makefile | 133 -----
 README.md | 2 -
 ROADMAP.rst | 6 -
 compose | 73 ---
dev/common/check_pulp_template.sh | 35 -- dev/common/poll.py | 90 ---- docs/dev/docker_environment.md | 461 ------------------ docs/dev/getting_started.md | 24 +- docs/dev/vagrant.md | 442 ----------------- .../package/test_package_install.py | 6 +- lint_requirements.txt | 7 - setup.py | 24 - template_config.yml | 88 ---- 82 files changed, 7 insertions(+), 5385 deletions(-) delete mode 100644 .ci/ansible/Containerfile.j2 delete mode 100644 .ci/ansible/ansible.cfg delete mode 100644 .ci/ansible/build_container.yaml delete mode 100644 .ci/ansible/filter/repr.py delete mode 100644 .ci/ansible/inventory.yaml delete mode 100644 .ci/ansible/settings.py.j2 delete mode 100644 .ci/ansible/smash-config.json delete mode 100644 .ci/ansible/start_container.yaml delete mode 100644 .ci/assets/.gitkeep delete mode 100644 .ci/assets/httpie/config.json delete mode 100644 .ci/assets/release_requirements.txt delete mode 100755 .ci/scripts/calc_deps_lowerbounds.py delete mode 100755 .ci/scripts/check_release.py delete mode 100755 .ci/scripts/check_requirements.py delete mode 100755 .ci/scripts/collect_changes.py delete mode 100644 .ci/scripts/schema.py delete mode 100755 .ci/scripts/validate_commit_message.py delete mode 100644 .ci/scripts/validate_commit_message_custom.py delete mode 100644 .github/pre-job-template.yml.j2 delete mode 100644 .github/stale.yml delete mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/changelog.yml delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/codeql-analysis.yml delete mode 100644 .github/workflows/create-branch.yml delete mode 100644 .github/workflows/deactivated/ci_insights.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-certified-sync.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-community.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-iqe-rbac-tests.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-ldap.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-rbac-on-repos.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-rbac-roles.yml delete mode 100644 .github/workflows/deactivated/ci_standalone-x-repo-search.yml delete mode 100644 .github/workflows/deactivated/ci_standalone.yml delete mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/nightly.yml delete mode 100644 .github/workflows/nightly_latest.yml delete mode 100644 .github/workflows/pr_checks.yml delete mode 100644 .github/workflows/publish.yml delete mode 100644 .github/workflows/pulp_constraints.yml delete mode 100644 .github/workflows/release.yml delete mode 100755 .github/workflows/scripts/before_install.sh delete mode 100755 .github/workflows/scripts/before_script.sh delete mode 100755 .github/workflows/scripts/check_commit.sh delete mode 100755 .github/workflows/scripts/docs-publisher.py delete mode 100755 .github/workflows/scripts/install.sh delete mode 100755 .github/workflows/scripts/install_python_client.sh delete mode 100755 .github/workflows/scripts/install_ruby_client.sh delete mode 100755 .github/workflows/scripts/publish_client_gem.sh delete mode 100755 .github/workflows/scripts/publish_client_pypi.sh delete mode 100755 .github/workflows/scripts/publish_docs.sh delete mode 100755 .github/workflows/scripts/publish_plugin_pypi.sh delete mode 100755 .github/workflows/scripts/push_branch_and_tag_to_github.sh delete mode 100755 .github/workflows/scripts/script.sh delete mode 100755 
.github/workflows/scripts/stage-changelog-for-default-branch.py delete mode 100755 .github/workflows/scripts/update_backport_labels.py delete mode 100755 .github/workflows/scripts/utils.sh delete mode 100644 .github/workflows/test.yml delete mode 100644 .github/workflows/update-labels.yml delete mode 100644 .github/workflows/update_ci.yml delete mode 100644 CHANGES/.TEMPLATE.md delete mode 100644 CHANGES/.gitignore delete mode 100644 ROADMAP.rst delete mode 100755 compose delete mode 100755 dev/common/check_pulp_template.sh delete mode 100644 dev/common/poll.py delete mode 100644 docs/dev/docker_environment.md delete mode 100644 docs/dev/vagrant.md delete mode 100644 template_config.yml diff --git a/.ci/ansible/Containerfile.j2 b/.ci/ansible/Containerfile.j2 deleted file mode 100644 index 338ff69741..0000000000 --- a/.ci/ansible/Containerfile.j2 +++ /dev/null @@ -1,48 +0,0 @@ -FROM {{ ci_base | default(pulp_default_container) }} - -# Add source directories to container -{% for item in plugins %} -ADD ./{{ item.name }} ./{{ item.name }} -{% endfor %} - -# Install python packages -# S3 botocore needs to be patched to handle responses from minio during 0-byte uploads -# Hacking botocore (https://github.com/boto/botocore/pull/1990) - -RUN pip3 install -{%- if s3_test | default(false) -%} -{{ " " }}git+https://github.com/gerrod3/botocore.git@fix-100-continue -{%- endif -%} -{%- for item in plugins -%} -{{ " " }}{{ item.source }} -{%- if item.lowerbounds | default(false) -%} -{{ " " }}-c ./{{ item.name }}/lowerbounds_constraints.txt -{%- endif -%} -{%- if item.ci_requirements | default(false) -%} -{{ " " }}-r ./{{ item.name }}/ci_requirements.txt -{%- endif -%} -{%- endfor %} - -{% if pulp_env is defined and pulp_env %} -{% for key, value in pulp_env.items() %} -ENV {{ key | upper }}={{ value }} -{% endfor %} -{% endif %} - -{% if pulp_scenario_env is defined and pulp_scenario_env %} -{% for key, value in pulp_scenario_env.items() %} -ENV {{ key | upper }}={{ value }} -{% endfor %} -{% endif %} - -USER pulp:pulp -RUN PULP_STATIC_ROOT=/var/lib/operator/static/ PULP_CONTENT_ORIGIN=localhost \ - /usr/local/bin/pulpcore-manager collectstatic --clear --noinput --link -USER root:root - -{% for item in plugins %} -RUN export plugin_path="$(pip3 show {{ item.name }} | sed -n -e 's/Location: //p')/{{ item.name }}" && \ - ln $plugin_path/app/webserver_snippets/nginx.conf /etc/nginx/pulp/{{ item.name }}.conf || true -{% endfor %} - -ENTRYPOINT ["/init"] diff --git a/.ci/ansible/ansible.cfg b/.ci/ansible/ansible.cfg deleted file mode 100644 index 7acf1d9dfe..0000000000 --- a/.ci/ansible/ansible.cfg +++ /dev/null @@ -1,7 +0,0 @@ -[defaults] -inventory = inventory.yaml -filter_plugins = filter -retry_files_enabled = False -transport = local -nocows = 1 -stdout_callback = yaml diff --git a/.ci/ansible/build_container.yaml b/.ci/ansible/build_container.yaml deleted file mode 100644 index c380b430a5..0000000000 --- a/.ci/ansible/build_container.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Ansible playbook to create the pulp service containers image ---- -- hosts: localhost - gather_facts: false - vars_files: - - vars/main.yaml - tasks: - - name: "Generate Containerfile from template" - template: - src: Containerfile.j2 - dest: Containerfile - - - name: "Build pulp image" - # We build from the ../.. (parent dir of pulpcore git repo) Docker build - # "context" so that repos like pulp-smash are accessible to Docker - # build. So that PR branches can be used via relative paths. 
- # - # We default to using the docker build / podman buildah cache, for - # 1-off-builds and CI purposes (which has no cache across CI runs.) - # Run build.yaml with -e cache=false if your builds are using outdated - # layers. - command: "docker build --network host --no-cache={{ not cache | default(true) | bool }} -t {{ image.name }}:{{ image.tag }} -f {{ playbook_dir }}/Containerfile ../../.." - - - name: "Clean image cache" - docker_prune: - images : true -... diff --git a/.ci/ansible/filter/repr.py b/.ci/ansible/filter/repr.py deleted file mode 100644 index 8455c3442f..0000000000 --- a/.ci/ansible/filter/repr.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import absolute_import, division, print_function -from packaging.version import parse as parse_version - -__metaclass__ = type - - -ANSIBLE_METADATA = { - "metadata_version": "1.1", - "status": ["preview"], - "supported_by": "community", -} - - -def _repr_filter(value): - return repr(value) - - -def _canonical_semver_filter(value): - return str(parse_version(value)) - - -# ---- Ansible filters ---- -class FilterModule(object): - """Repr filter.""" - - def filters(self): - """Filter associations.""" - return { - "repr": _repr_filter, - "canonical_semver": _canonical_semver_filter, - } diff --git a/.ci/ansible/inventory.yaml b/.ci/ansible/inventory.yaml deleted file mode 100644 index 3b85c5f405..0000000000 --- a/.ci/ansible/inventory.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -all: - children: - containers: - hosts: - pulp: - pulp-fixtures: - minio: - ci-sftp: - vars: - ansible_connection: docker -... diff --git a/.ci/ansible/settings.py.j2 b/.ci/ansible/settings.py.j2 deleted file mode 100644 index 024df4e4cf..0000000000 --- a/.ci/ansible/settings.py.j2 +++ /dev/null @@ -1,60 +0,0 @@ -CONTENT_ORIGIN = "{{ pulp_scheme }}://pulp:{{ 443 if pulp_scheme == 'https' else 80 }}" -ANSIBLE_API_HOSTNAME = "{{ pulp_scheme }}://pulp:{{ 443 if pulp_scheme == 'https' else 80 }}" -ANSIBLE_CONTENT_HOSTNAME = "{{ pulp_scheme }}://pulp:{{ 443 if pulp_scheme == 'https' else 80 }}/pulp/content" -PRIVATE_KEY_PATH = "/etc/pulp/certs/token_private_key.pem" -PUBLIC_KEY_PATH = "/etc/pulp/certs/token_public_key.pem" -TOKEN_SERVER = "{{ pulp_scheme }}://pulp:{{ 443 if pulp_scheme == 'https' else 80 }}/token/" -TOKEN_SIGNATURE_ALGORITHM = "ES256" -CACHE_ENABLED = True -REDIS_HOST = "localhost" -REDIS_PORT = 6379 -ANALYTICS = False - -{% if api_root is defined %} -API_ROOT = {{ api_root | repr }} -{% endif %} - -{% if pulp_settings %} -{% for key, value in pulp_settings.items() %} -{{ key | upper }} = {{ value | repr }} -{% endfor %} -{% endif %} - -{% if pulp_scenario_settings is defined and pulp_scenario_settings %} -{% for key, value in pulp_scenario_settings.items() %} -{{ key | upper }} = {{ value | repr }} -{% endfor %} -{% endif %} - -{% if s3_test | default(false) %} -DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" -MEDIA_ROOT = "" -AWS_ACCESS_KEY_ID = "{{ minio_access_key }}" -AWS_SECRET_ACCESS_KEY = "{{ minio_secret_key }}" -AWS_S3_REGION_NAME = "eu-central-1" -AWS_S3_ADDRESSING_STYLE = "path" -S3_USE_SIGV4 = True -AWS_S3_SIGNATURE_VERSION = "s3v4" -AWS_STORAGE_BUCKET_NAME = "pulp3" -AWS_S3_ENDPOINT_URL = "http://minio:9000" -AWS_DEFAULT_ACL = "@none None" -{% endif %} - -{% if azure_test | default(false) %} -DEFAULT_FILE_STORAGE = "storages.backends.azure_storage.AzureStorage" -MEDIA_ROOT = "" -AZURE_ACCOUNT_KEY = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==" -AZURE_ACCOUNT_NAME = 
"devstoreaccount1" -AZURE_CONTAINER = "pulp-test" -AZURE_LOCATION = "pulp3" -AZURE_OVERWRITE_FILES = True -AZURE_URL_EXPIRATION_SECS = 120 -AZURE_CONNECTION_STRING = 'DefaultEndpointsProtocol={{ pulp_scheme }};AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint={{ pulp_scheme }}://ci-azurite:10000/devstoreaccount1;' -{% endif %} - -{% if gcp_test | default(false) %} -DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" -MEDIA_ROOT = "" -GS_BUCKET_NAME = "gcppulp" -GS_CUSTOM_ENDPOINT = "http://ci-gcp:4443" -{% endif %} diff --git a/.ci/ansible/smash-config.json b/.ci/ansible/smash-config.json deleted file mode 100644 index 941122debe..0000000000 --- a/.ci/ansible/smash-config.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "pulp": { - "auth": [ - "admin", - "password" - ], - "selinux enabled": false, - "version": "3", - "aiohttp_fixtures_origin": "127.0.0.1" - }, - "hosts": [ - { - "hostname": "pulp", - "roles": { - "api": { - "port": 443, - "scheme": "https", - "service": "nginx" - }, - "content": { - "port": 443, - "scheme": "https", - "service": "pulp_content_app" - }, - "pulp resource manager": {}, - "pulp workers": {}, - "redis": {}, - "shell": { - "transport": "local" - } - } - } - ] -} diff --git a/.ci/ansible/start_container.yaml b/.ci/ansible/start_container.yaml deleted file mode 100644 index 47e5221e58..0000000000 --- a/.ci/ansible/start_container.yaml +++ /dev/null @@ -1,109 +0,0 @@ -# Ansible playbook to start the pulp service container and its supporting services ---- -- hosts: localhost - gather_facts: false - vars_files: - - vars/main.yaml - tasks: - - name: "Create Settings Directories" - file: - path: "{{ item }}" - state: directory - mode: "0755" - loop: - - settings - - ssh - - ~/.config/pulp_smash - - - name: "Generate Pulp Settings" - template: - src: settings.py.j2 - dest: settings/settings.py - - - name: "Configure pulp-smash" - copy: - src: smash-config.json - dest: ~/.config/pulp_smash/settings.json - - - name: "Setup docker networking" - docker_network: - name: pulp_ci_bridge - - - name: "Start Service Containers" - docker_container: - name: "{{ item.name }}" - image: "{{ item.image }}" - auto_remove: true - recreate: true - privileged: true - networks: - - name: pulp_ci_bridge - aliases: "{{ item.name }}" - volumes: "{{ item.volumes | default(omit) }}" - env: "{{ item.env | default(omit) }}" - command: "{{ item.command | default(omit) }}" - state: started - loop: "{{ services | default([]) }}" - - - name: "Retrieve Docker Network Info" - docker_network_info: - name: pulp_ci_bridge - register: pulp_ci_bridge_info - - - name: "Update /etc/hosts" - lineinfile: - path: /etc/hosts - regexp: "\\s{{ item.value.Name }}\\s*$" - line: "{{ item.value.IPv4Address | ipaddr('address') }}\t{{ item.value.Name }}" - loop: "{{ pulp_ci_bridge_info.network.Containers | dict2items }}" - become: true - - - name: "Create Pulp Bucket" - amazon.aws.s3_bucket: - aws_access_key: "{{ minio_access_key }}" - aws_secret_key: "{{ minio_secret_key }}" - s3_url: "http://minio:9000" - region: eu-central-1 - name: pulp3 - state: present - when: s3_test | default(false) - - - block: - - name: "Wait for Pulp" - uri: - url: "http://pulp{{ lookup('env', 'PULP_API_ROOT') | default('\/pulp\/', True) }}api/v3/status/" - follow_redirects: all - validate_certs: no - register: result - until: result.status == 200 - retries: 12 - delay: 5 - rescue: - - name: "Output pulp container log" - command: "docker logs 
pulp" - failed_when: true - - - name: "Check version of component being tested" - assert: - that: - - (result.json.versions | items2dict(key_name="component", value_name="version"))[item.app_label] | canonical_semver == (component_version | canonical_semver) - fail_msg: | - Component {{ item.app_label }} was expected to be installed in version {{ component_version }}. - Instead it is reported as version {{ (result.json.versions | items2dict(key_name="component", value_name="version"))[item.app_label] }}. - loop: "{{ 'plugins' | ansible.builtin.extract(lookup('ansible.builtin.file', '../../template_config.yml') | from_yaml) }}" - - - name: "Set pulp password in .netrc" - copy: - dest: "~/.netrc" - content: | - machine pulp - login admin - password password - -- hosts: pulp - gather_facts: false - tasks: - - name: "Set pulp admin password" - command: - cmd: "pulpcore-manager reset-admin-password --password password" -... diff --git a/.ci/assets/.gitkeep b/.ci/assets/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/.ci/assets/httpie/config.json b/.ci/assets/httpie/config.json deleted file mode 100644 index 0ed96f0f2f..0000000000 --- a/.ci/assets/httpie/config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "default_options": [ - "--ignore-stdin", - "--pretty=format", - "--traceback" - ] -} diff --git a/.ci/assets/release_requirements.txt b/.ci/assets/release_requirements.txt deleted file mode 100644 index c064e94779..0000000000 --- a/.ci/assets/release_requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -bump2version -gitpython -towncrier diff --git a/.ci/scripts/calc_deps_lowerbounds.py b/.ci/scripts/calc_deps_lowerbounds.py deleted file mode 100755 index 2565a1f2d4..0000000000 --- a/.ci/scripts/calc_deps_lowerbounds.py +++ /dev/null @@ -1,34 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -from packaging.requirements import Requirement - - -def main(): - """Calculate the lower bound of dependencies where possible.""" - with open("requirements.txt") as req_file: - for line in req_file: - try: - requirement = Requirement(line) - except ValueError: - print(line.strip()) - else: - for spec in requirement.specifier: - if spec.operator == ">=": - if requirement.name == "pulpcore": - operator = "~=" - else: - operator = "==" - min_version = str(spec)[2:] - print(f"{requirement.name}{operator}{min_version}") - break - else: - print(line.strip()) - - -if __name__ == "__main__": - main() diff --git a/.ci/scripts/check_gettext.sh b/.ci/scripts/check_gettext.sh index 99d9daf757..9b01f86417 100755 --- a/.ci/scripts/check_gettext.sh +++ b/.ci/scripts/check_gettext.sh @@ -1,12 +1,5 @@ #!/bin/bash -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - # make sure this script runs at the repo root cd "$(dirname "$(realpath -e "$0")")"/../.. diff --git a/.ci/scripts/check_pulpcore_imports.sh b/.ci/scripts/check_pulpcore_imports.sh index af50a1b65d..daac8966e6 100755 --- a/.ci/scripts/check_pulpcore_imports.sh +++ b/.ci/scripts/check_pulpcore_imports.sh @@ -1,12 +1,5 @@ #!/bin/bash -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. 
Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - # make sure this script runs at the repo root cd "$(dirname "$(realpath -e "$0")")"/../.. diff --git a/.ci/scripts/check_release.py b/.ci/scripts/check_release.py deleted file mode 100755 index 9d0ed5f51a..0000000000 --- a/.ci/scripts/check_release.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import argparse -import re -import os -import yaml -from tempfile import TemporaryDirectory -from packaging.version import Version -from git import Repo - -UPSTREAM_REMOTE = "https://github.com/ansible/galaxy_ng.git" -DEFAULT_BRANCH = "master" -RELEASE_BRANCH_REGEX = r"^([0-9]+)\.([0-9]+)$" -Y_CHANGELOG_EXTS = [".feature", ".removal", ".deprecation"] -Z_CHANGELOG_EXTS = [".bugfix", ".doc", ".misc"] - - -def main(): - """Check which branches need a release.""" - parser = argparse.ArgumentParser() - parser.add_argument( - "--branches", - default="supported", - help="A comma separated list of branches to check for releases. Can also use keyword: " - "'supported'. Defaults to 'supported', see `supported_release_branches` in " - "`plugin_template.yml`.", - ) - opts = parser.parse_args() - - with TemporaryDirectory() as d: - # Clone from upstream to ensure we have updated branches & main - repo = Repo.clone_from(UPSTREAM_REMOTE, d, filter="blob:none") - heads = [h.split("/")[-1] for h in repo.git.ls_remote("--heads").split("\n")] - available_branches = [h for h in heads if re.search(RELEASE_BRANCH_REGEX, h)] - available_branches.sort(key=lambda ver: Version(ver)) - available_branches.append(DEFAULT_BRANCH) - - branches = opts.branches - if branches == "supported": - with open(f"{d}/template_config.yml", mode="r") as f: - tc = yaml.safe_load(f) - branches = set(tc["supported_release_branches"]) - latest_release_branch = tc["latest_release_branch"] - if latest_release_branch is not None: - branches.add(latest_release_branch) - branches.add(DEFAULT_BRANCH) - else: - branches = set(branches.split(",")) - - if diff := branches - set(available_branches): - print(f"Supplied branches contains non-existent branches! {diff}") - exit(1) - - print(f"Checking for releases on branches: {branches}") - - releases = [] - for branch in branches: - if branch != DEFAULT_BRANCH: - # Check if a Z release is needed - changes = repo.git.ls_tree("-r", "--name-only", f"origin/{branch}", "CHANGES/") - z_changelog = False - for change in changes.split("\n"): - # Check each changelog file to make sure everything checks out - _, ext = os.path.splitext(change) - if ext in Y_CHANGELOG_EXTS: - print( - f"Warning: A non-backported changelog ({change}) is present in the " - f"{branch} release branch!" - ) - elif ext in Z_CHANGELOG_EXTS: - z_changelog = True - - last_tag = repo.git.describe("--tags", "--abbrev=0", f"origin/{branch}") - req_txt_diff = repo.git.diff( - f"{last_tag}", f"origin/{branch}", "--name-only", "--", "requirements.txt" - ) - if z_changelog or req_txt_diff: - curr_version = Version(last_tag) - assert curr_version.base_version.startswith( - branch - ), "Current-version has to belong to the current branch!" 
- next_version = Version(f"{branch}.{curr_version.micro + 1}") - reason = "CHANGES" if z_changelog else "requirements.txt" - print( - f"A Z-release is needed for {branch}, " - f"Prev: {last_tag}, " - f"Next: {next_version.base_version}, " - f"Reason: {reason}" - ) - releases.append(next_version) - else: - # Check if a Y release is needed - changes = repo.git.ls_tree("-r", "--name-only", DEFAULT_BRANCH, "CHANGES/") - for change in changes.split("\n"): - _, ext = os.path.splitext(change) - if ext in Y_CHANGELOG_EXTS: - # We don't put Y release bumps in the commit message, check file instead - # The 'current_version' is always the next version to release - next_version = repo.git.grep( - "current_version", DEFAULT_BRANCH, "--", ".bumpversion.cfg" - ).split("=")[-1] - next_version = Version(next_version) - print( - f"A new Y-release is needed! New Version: {next_version.base_version}" - ) - releases.append(next_version) - break - - if len(releases) == 0: - print("No new releases to perform.") - - -if __name__ == "__main__": - main() diff --git a/.ci/scripts/check_requirements.py b/.ci/scripts/check_requirements.py deleted file mode 100755 index 3826add61f..0000000000 --- a/.ci/scripts/check_requirements.py +++ /dev/null @@ -1,66 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import warnings -from pkg_resources import Requirement - - -CHECK_MATRIX = [ - ("requirements.txt", True, True, True), - ("dev_requirements.txt", False, True, False), - ("ci_requirements.txt", False, True, True), - ("doc_requirements.txt", False, True, False), - ("lint_requirements.txt", False, True, True), - ("unittest_requirements.txt", False, True, True), - ("functest_requirements.txt", False, True, True), - ("clitest_requirements.txt", False, True, True), -] - -errors = [] - -for filename, check_upperbound, check_prereleases, check_r in CHECK_MATRIX: - try: - with open(filename, "r") as fd: - for nr, line in enumerate(fd.readlines()): - line = line.strip() - if not line or line.startswith("#"): - continue - try: - req = Requirement.parse(line) - except ValueError: - if line.startswith("git+"): - # The single exception... - if "pulp-smash" not in line: - errors.append(f"{filename}:{nr}: Invalid source requirement: {line}") - elif line.startswith("-r "): - if check_r: - errors.append(f"{filename}:{nr}: Invalid deferred requirement: {line}") - else: - errors.append(f"{filename}:{nr}: Unreadable requirement {line}") - else: - if check_prereleases and req.specifier.prereleases: - # Do not even think about begging for more exceptions! 
- if ( - not req.name.startswith("opentelemetry") - and req.name != "galaxy-ng-client" - ): - errors.append(f"{filename}:{nr}: Prerelease versions found in {line}.") - ops = [op for op, ver in req.specs] - spec = str(req.specs) - if "~=" in ops: - warnings.warn(f"{filename}:{nr}: Please avoid using ~= on {req.name}!") - elif "<" not in ops and "<=" not in ops and "==" not in ops: - if check_upperbound: - errors.append(f"{filename}:{nr}: Upper bound missing in {line}.") - except FileNotFoundError: - # skip this test for plugins that don't use this requirements.txt - pass - -if errors: - print("Dependency issues found:") - print("\n".join(errors)) - exit(1) diff --git a/.ci/scripts/collect_changes.py b/.ci/scripts/collect_changes.py deleted file mode 100755 index 18422fe63c..0000000000 --- a/.ci/scripts/collect_changes.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/bin/env python3 -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import itertools -import os -import re -import tomllib - -from git import GitCommandError, Repo -from packaging.version import parse as parse_version - -# Read Towncrier settings -with open("pyproject.toml", "rb") as fp: - tc_settings = tomllib.load(fp)["tool"]["towncrier"] - -CHANGELOG_FILE = tc_settings.get("filename", "NEWS.rst") -START_STRING = tc_settings.get( - "start_string", - ( - "\n" - if CHANGELOG_FILE.endswith(".md") - else ".. towncrier release notes start\n" - ), -) -TITLE_FORMAT = tc_settings.get("title_format", "{name} {version} ({project_date})") - - -# Build a regex to find the header of a changelog section. -# It must have a single capture group to single out the version. -# see help(re.split) for more info. 
-NAME_REGEX = r".*" -VERSION_REGEX = r"[0-9]+\.[0-9]+\.[0-9][0-9ab]*" -VERSION_CAPTURE_REGEX = rf"({VERSION_REGEX})" -DATE_REGEX = r"[0-9]{4}-[0-9]{2}-[0-9]{2}" -TITLE_REGEX = ( - "(" - + re.escape( - TITLE_FORMAT.format(name="NAME_REGEX", version="VERSION_REGEX", project_date="DATE_REGEX") - ) - .replace("NAME_REGEX", NAME_REGEX) - .replace("VERSION_REGEX", VERSION_CAPTURE_REGEX, 1) - .replace("VERSION_REGEX", VERSION_REGEX) - .replace("DATE_REGEX", DATE_REGEX) - + ")" -) - - -def get_changelog(repo, branch): - return repo.git.show(f"{branch}:{CHANGELOG_FILE}") + "\n" - - -def _tokenize_changes(splits): - assert len(splits) % 3 == 0 - for i in range(len(splits) // 3): - title = splits[3 * i] - version = parse_version(splits[3 * i + 1]) - yield [version, title + splits[3 * i + 2]] - - -def split_changelog(changelog): - preamble, rest = changelog.split(START_STRING, maxsplit=1) - split_rest = re.split(TITLE_REGEX, rest) - return preamble + START_STRING + split_rest[0], list(_tokenize_changes(split_rest[1:])) - - -def main(): - repo = Repo(os.getcwd()) - remote = repo.remotes[0] - branches = [ref for ref in remote.refs if re.match(r"^([0-9]+)\.([0-9]+)$", ref.remote_head)] - branches.sort(key=lambda ref: parse_version(ref.remote_head), reverse=True) - branches = [ref.name for ref in branches] - - with open(CHANGELOG_FILE, "r") as f: - main_changelog = f.read() - preamble, main_changes = split_changelog(main_changelog) - old_length = len(main_changes) - - for branch in branches: - print(f"Looking for './{CHANGELOG_FILE}' at branch {branch}") - try: - changelog = get_changelog(repo, branch) - except GitCommandError: - print("No changelog found on this branch.") - continue - dummy, changes = split_changelog(changelog) - new_changes = sorted(main_changes + changes, key=lambda x: x[0], reverse=True) - # Now remove duplicates (retain the first one) - main_changes = [new_changes[0]] - for left, right in itertools.pairwise(new_changes): - if left[0] != right[0]: - main_changes.append(right) - - new_length = len(main_changes) - if old_length < new_length: - print(f"{new_length - old_length} new versions have been added.") - with open(CHANGELOG_FILE, "w") as fp: - fp.write(preamble) - for change in main_changes: - fp.write(change[1]) - - repo.git.commit("-m", "Update Changelog", "-m" "No-Issue", CHANGELOG_FILE) - - -if __name__ == "__main__": - main() diff --git a/.ci/scripts/schema.py b/.ci/scripts/schema.py deleted file mode 100644 index 9f56caa669..0000000000 --- a/.ci/scripts/schema.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Customizing OpenAPI validation. - -OpenAPI requires paths to start with slashes: -https://spec.openapis.org/oas/v3.0.3#patterned-fields - -But some pulp paths start with curly brackets e.g. {artifact_href} -This script modifies drf-spectacular schema validation to accept slashes and curly brackets. 
-""" -import json -from drf_spectacular.validation import JSON_SCHEMA_SPEC_PATH - -with open(JSON_SCHEMA_SPEC_PATH) as fh: - openapi3_schema_spec = json.load(fh) - -properties = openapi3_schema_spec["definitions"]["Paths"]["patternProperties"] -# Making OpenAPI validation to accept paths starting with / and { -if "^\\/|{" not in properties: - properties["^\\/|{"] = properties["^\\/"] - del properties["^\\/"] - -with open(JSON_SCHEMA_SPEC_PATH, "w") as fh: - json.dump(openapi3_schema_spec, fh) diff --git a/.ci/scripts/validate_commit_message.py b/.ci/scripts/validate_commit_message.py deleted file mode 100755 index edf70bd3f5..0000000000 --- a/.ci/scripts/validate_commit_message.py +++ /dev/null @@ -1,62 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import re -import sys -from pathlib import Path -import subprocess - - -NO_ISSUE = "[noissue]" -CHANGELOG_EXTS = [".feature", ".bugfix", ".doc", ".removal", ".misc", ".deprecation"] -sha = sys.argv[1] -message = subprocess.check_output(["git", "log", "--format=%B", "-n 1", sha]).decode("utf-8") - - -KEYWORDS = [] - - -def __check_status(issue): - pass - - -def __check_changelog(issue): - matches = list(Path("CHANGES").rglob(f"{issue}.*")) - - if len(matches) < 1: - sys.exit(f"Could not find changelog entry in CHANGES/ for {issue}.") - for match in matches: - if match.suffix not in CHANGELOG_EXTS: - sys.exit(f"Invalid extension for changelog entry '{match}'.") - if match.suffix == ".feature" and "cherry picked from commit" in message: - sys.exit(f"Can not backport '{match}' as it is a feature.") - - -print("Checking commit message for {sha}.".format(sha=sha[0:7])) - -# validate the issue attached to the commit -regex = r"(?:{keywords})[\s:]+#(\d+)".format(keywords=("|").join(KEYWORDS)) -pattern = re.compile(regex, re.IGNORECASE) - -issues = pattern.findall(message) - -if issues: - for issue in pattern.findall(message): - __check_status(issue) - __check_changelog(issue) -else: - if NO_ISSUE in message: - print("Commit {sha} has no issues but is tagged {tag}.".format(sha=sha[0:7], tag=NO_ISSUE)) - elif "Merge" in message and "cherry picked from commit" in message: - pass - else: - sys.exit( - "Error: no attached issues found for {sha}. 
If this was intentional, add " - " '{tag}' to the commit message.".format(sha=sha[0:7], tag=NO_ISSUE) - ) - -print("Commit message for {sha} passed.".format(sha=sha[0:7])) diff --git a/.ci/scripts/validate_commit_message_custom.py b/.ci/scripts/validate_commit_message_custom.py deleted file mode 100644 index 9c2469e7a8..0000000000 --- a/.ci/scripts/validate_commit_message_custom.py +++ /dev/null @@ -1,169 +0,0 @@ -import glob -import logging -import os -import re -import subprocess -import sys - -import requests - - -LOG = logging.getLogger() - - -NO_ISSUE = "No-Issue" -ISSUE_LABELS = [ - "Closes-Bug", - "Closes-Issue", - "Partial-Bug", - "Partial-Issue", - "Implements", - "Partial-Implements", - "Issue", - "Partial-Issue", - # ChangeLog record not required - "Related", - "Related-Bug", - "Related-Issue", -] -CHANGELOG_REQUIRED_LABELS = set(ISSUE_LABELS) - { - "Related", - "Related-Bug", - "Related-Issue", -} - -NO_ISSUE_REGEX = re.compile(r"^\s*{}\s*$".format(NO_ISSUE), re.MULTILINE) -ISSUE_LABEL_REGEX = re.compile( - r"^\s*({}):\s+AAH-(\d+)\s*$".format("|".join(ISSUE_LABELS)), re.MULTILINE, -) - -JIRA_URL = "https://issues.redhat.com/rest/api/latest/issue/AAH-{issue}" - - -def get_github_api_headers(): - headers = {} - if os.environ.get('GITHUB_TOKEN'): - headers['Authorization'] = f'token {os.environ["GITHUB_TOKEN"]}' - return headers - - -def git_list_commits(commit_range): - git_range = "..".join(commit_range) - cmd = ["git", "rev-list", "--no-merges", git_range] - result = subprocess.run(cmd, stdout=subprocess.PIPE, encoding="utf-8", check=True) - return result.stdout.strip().split("\n") - - -def git_commit_message(commit_sha): - cmd = ["git", "show", "-s", "--format=%B", commit_sha] - result = subprocess.run(cmd, stdout=subprocess.PIPE, encoding="utf-8", check=True) - return result.stdout - - -def check_changelog_record(issue): - changelog_filenames = glob.glob(f"CHANGES/{issue}.*") - if len(changelog_filenames) == 0: - LOG.error(f"Missing change log entry for issue AAH-{issue}.") - return False - if len(changelog_filenames) > 1: - LOG.error(f"Multiple change log records found for issue AAH-{issue}.") - return False - with open(changelog_filenames[0]) as f: - if len(f.readlines()) != 1: - LOG.error(f"Expected log entry for issue AAH-{issue} to be a single line.") - return False - return True - - -def check_issue_exists(issue): - response = requests.head(JIRA_URL.format(issue=issue)) - if response.status_code == 404: - # 200 is returned for logged in sessions - # 401 is for not authorized access on existing issue - # 404 is returned if issue is not found even for unlogged users. - LOG.error(f"Referenced issue AAH-{issue} not found in Jira.") - return False - return True - - -def check_commit(commit_sha): - commit_message = git_commit_message(commit_sha) - issue_labels = ISSUE_LABEL_REGEX.findall(commit_message) - if not issue_labels: - no_issue_match = NO_ISSUE_REGEX.search(commit_message) - if not no_issue_match: - LOG.error( - f"Commit {commit_sha[:8]} has no issue attached. It must contain either 'No-Issue' or something like 'Issue: AAH-1111' which points to https://issues.redhat.com/projects/AAH project." 
- ) - return False - - repo = os.environ.get('GITHUB_REPOSITORY') - commit_url = f'https://api.github.com/repos/{repo}/commits/{commit_sha}' - rr = requests.get(commit_url, headers=get_github_api_headers()) - signed = rr.json().get('commit', {}).get('verification', {}).get('verified') - if not signed: - LOG.error(f"Commit {commit_sha[:8]} is not signed") - return False - - ok = True - for label, issue in issue_labels: - if not check_issue_exists(issue): - ok = False - if label in CHANGELOG_REQUIRED_LABELS and not check_changelog_record(issue): - ok = False - return ok - - -def validate_push_commits(start_commit, end_commit): - commit_list = git_list_commits([start_commit, end_commit]) - all_commits_ok = True - for commit_sha in commit_list: - LOG.info(f"Checking commit {commit_sha[:8]} ...") - if not check_commit(commit_sha): - all_commits_ok = False - break - return all_commits_ok - - -def validate_pr_commits(github_pr_commits_url): - request = requests.get(github_pr_commits_url, headers=get_github_api_headers()) - if not isinstance(request.json(), list): - raise Exception(f'malformed api response for commit list: {request.json()}') - commit_list = [c['sha'] for c in request.json()] - - at_least_one_commit_ok = False - for commit_sha in commit_list: - LOG.info(f"Checking commit {commit_sha[:8]} ...") - if check_commit(commit_sha): - at_least_one_commit_ok = True - break - return at_least_one_commit_ok - - -def main(): - logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") - - github_user = os.environ.get("GITHUB_USER") - github_actor = os.environ.get("GITHUB_ACTOR") - github_pr_commits_url = os.environ["GITHUB_PR_COMMITS_URL"] - start_commit = os.environ["START_COMMIT"] - end_commit = os.environ["END_COMMIT"] - skip_users = ['dependabot[bot]', 'patchback[bot]'] - - if github_user in skip_users: ## NOTE: patchback[bot] not included in GITHUB_USER - is_valid = True - elif github_actor in skip_users: - is_valid = True - elif github_pr_commits_url: - is_valid = validate_pr_commits(github_pr_commits_url) - else: - is_valid = validate_push_commits(start_commit, end_commit) - - if is_valid: - sys.exit(0) - else: - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/.flake8 b/.flake8 index 1cf9cd5170..744a5156d4 100644 --- a/.flake8 +++ b/.flake8 @@ -1,9 +1,3 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template [flake8] exclude = ./docs/*,*/migrations/* per-file-ignores = */__init__.py: F401 diff --git a/.github/pre-job-template.yml.j2 b/.github/pre-job-template.yml.j2 deleted file mode 100644 index b6ee54f3c4..0000000000 --- a/.github/pre-job-template.yml.j2 +++ /dev/null @@ -1,30 +0,0 @@ -check_commit: - - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - with: - ref: {{ "${{ github.event.after }}" }} # for PR avoids checking out merge commit - fetch-depth: 0 # include all history - - - name: Run script to validate commits for both pull request and a push - env: - {{ set_env_vars() | indent(10) }} - GITHUB_PR_COMMITS_URL: {{ "${{ github.event.pull_request.commits_url }}" }} - START_COMMIT: {{ "${{ github.event.before }}" }} - END_COMMIT: {{ "${{ github.event.after }}" }} - run: | - cd .. 
- python .ci/scripts/validate_commit_message_custom.py - - lint_po: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - run: | - cd .. - pip install lint-po - lint-po ./galaxy_ng/locale/*/LC_MESSAGES/*.po diff --git a/.github/stale.yml b/.github/stale.yml deleted file mode 100644 index 95d38ac59b..0000000000 --- a/.github/stale.yml +++ /dev/null @@ -1,59 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template -# Configuration for probot-stale - https://github.com/probot/stale - -# Number of days of inactivity before an Issue or Pull Request becomes stale -daysUntilStale: 90 - -# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. -# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. -daysUntilClose: 30 - -# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) -onlyLabels: [] - -# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable -exemptLabels: - - security - - planned - -# Set to true to ignore issues in a project (defaults to false) -exemptProjects: false - -# Set to true to ignore issues in a milestone (defaults to false) -exemptMilestones: false - -# Set to true to ignore issues with an assignee (defaults to false) -exemptAssignees: false - -# Label to use when marking as stale -staleLabel: stale - -# Limit the number of actions per hour, from 1-30. Default is 30 -limitPerRun: 30 -# Limit to only `issues` or `pulls` -only: pulls - -pulls: - markComment: |- - This pull request has been marked 'stale' due to lack of recent activity. If there is no further activity, the PR will be closed in another 30 days. Thank you for your contribution! - - unmarkComment: >- - This pull request is no longer marked for closure. - - closeComment: >- - This pull request has been closed due to inactivity. If you feel this is in error, please reopen the pull request or file a new PR with the relevant details. - -issues: - markComment: |- - This issue has been marked 'stale' due to lack of recent activity. If there is no further activity, the issue will be closed in another 30 days. Thank you for your contribution! - - unmarkComment: >- - This issue is no longer marked for closure. - - closeComment: >- - This issue has been closed due to inactivity. If you feel this is in error, please reopen the issue or file a new issue with the relevant details. diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index 0c14efb384..0000000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,45 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Build" -on: - workflow_call: - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - build: - runs-on: "ubuntu-latest" - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - path: "galaxy_ng" - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install packaging twine wheel - echo ::endgroup:: - - name: "Build package" - run: | - python3 setup.py sdist bdist_wheel --python-tag py3 - twine check dist/* - - name: "Upload Package whl" - uses: "actions/upload-artifact@v4" - with: - name: "plugin_package" - path: "galaxy_ng/dist/" - if-no-files-found: "error" - retention-days: 5 - overwrite: true diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml deleted file mode 100644 index 82e641c1fe..0000000000 --- a/.github/workflows/changelog.yml +++ /dev/null @@ -1,58 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Galaxy changelog update" -on: - push: - branches: - - "master" - paths: - - "CHANGES.rst" - - "CHANGES.md" - workflow_dispatch: - -jobs: - - update-changelog: - runs-on: "ubuntu-latest" - strategy: - fail-fast: false - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install -r doc_requirements.txt - echo ::endgroup:: - - - name: "Fake api schema" - run: | - mkdir -p docs/_build/html - echo "{}" > docs/_build/html/api.json - mkdir -p docs/_static - echo "{}" > docs/_static/api.json - - name: "Build Docs" - run: | - make diagrams html - working-directory: "./docs" - env: - PULP_CONTENT_ORIGIN: "http://localhost/" - - - name: "Publish changlog to pulpproject.org" - run: | - .github/workflows/scripts/publish_docs.sh changelog ${GITHUB_REF##*/} - env: - PULP_DOCS_KEY: "${{ secrets.PULP_DOCS_KEY }}" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 5fee7d06a8..0000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,81 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Galaxy CI" -on: workflow_dispatch - -concurrency: - group: ${{ github.ref_name }}-${{ github.workflow }} - cancel-in-progress: true - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - check_commit: - - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - with: - ref: ${{ github.event.after }} # for PR avoids checking out merge commit - fetch-depth: 0 # include all history - - - name: Run script to validate commits for both pull request and a push - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - GITHUB_PR_COMMITS_URL: ${{ github.event.pull_request.commits_url }} - START_COMMIT: ${{ github.event.before }} - END_COMMIT: ${{ github.event.after }} - run: | - cd .. 
- python .ci/scripts/validate_commit_message_custom.py - - lint_po: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - run: | - cd .. - pip install lint-po - lint-po ./galaxy_ng/locale/*/LC_MESSAGES/*.po - - lint: - needs: check_commit - uses: "./.github/workflows/lint.yml" - - build: - needs: "lint" - uses: "./.github/workflows/build.yml" - - test: - needs: "build" - uses: "./.github/workflows/test.yml" - - ready-to-ship: - # This is a dummy dependent task to have a single entry for the branch protection rules. - runs-on: "ubuntu-latest" - needs: - - "lint" - - "test" - if: "always()" - steps: - - name: "Collect needed jobs results" - working-directory: "." - run: | - echo '${{toJson(needs)}}' | jq -r 'to_entries[]|select(.value.result!="success")|.key + ": " + .value.result' - echo '${{toJson(needs)}}' | jq -e 'to_entries|map(select(.value.result!="success"))|length == 0' - echo "CI says: Looks good!" diff --git a/.github/workflows/ci_automation_hub_collection.yml b/.github/workflows/ci_automation_hub_collection.yml index eb6a1dcf1a..26f8bec7a0 100644 --- a/.github/workflows/ci_automation_hub_collection.yml +++ b/.github/workflows/ci_automation_hub_collection.yml @@ -50,15 +50,6 @@ jobs: ansible-galaxy collection build -vvv ansible-galaxy collection install galaxy-galaxy-1.0.0.tar.gz -vvv - #- name: Spin up a standalone galaxy_ng installation - # run: | - # echo "COMPOSE_PROFILE=standalone" > .compose.env - # echo "DEV_SOURCE_PATH=galaxy_ng" >> .compose.env - # make docker/all - # ./compose up -d - # pip3 install --upgrade requests pyyaml - # python3 dev/common/poll.py - - name: set env vars run: | echo "OCI_ENV_PATH=${HOME}/work/galaxy_ng/oci_env" >> $GITHUB_ENV diff --git a/.github/workflows/ci_full.yml b/.github/workflows/ci_full.yml index 05e6704a21..b882fc423b 100644 --- a/.github/workflows/ci_full.yml +++ b/.github/workflows/ci_full.yml @@ -4,33 +4,6 @@ on: {pull_request: {branches: ['**']}, push: {branches: ['**']}} jobs: - check_commit: - runs-on: ubuntu-latest - steps: - - - name: Checkout code - uses: actions/checkout@v2 - with: - ref: ${{ github.event.after }} # for PR avoids checking out merge commit - fetch-depth: 0 # include all history - - - name: Run script to validate commits for both pull request and a push - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - GITHUB_USER: ${{ github.event.pull_request.user.login }} - GITHUB_PR_COMMITS_URL: ${{ github.event.pull_request.commits_url }} - START_COMMIT: ${{ github.event.before }} - END_COMMIT: ${{ github.event.after }} - run: | - python .ci/scripts/validate_commit_message_custom.py - lint_po: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index 03b752012e..0000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,42 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template -name: "Galaxy CodeQL" - -on: - workflow_dispatch: - schedule: - - cron: '37 1 * * 6' - -concurrency: - group: ${{ github.ref_name }}-${{ github.workflow }} - cancel-in-progress: true - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/create-branch.yml b/.github/workflows/create-branch.yml deleted file mode 100644 index d36c10c662..0000000000 --- a/.github/workflows/create-branch.yml +++ /dev/null @@ -1,106 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: Create New Release Branch -on: - workflow_dispatch: - -env: - RELEASE_WORKFLOW: true - -jobs: - create-branch: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - path: "galaxy_ng" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install bump2version jinja2 pyyaml packaging - echo ::endgroup:: - - - name: "Setting secrets" - working-directory: "galaxy_ng" - run: | - python3 .github/workflows/scripts/secrets.py "$SECRETS_CONTEXT" - env: - SECRETS_CONTEXT: "${{ toJson(secrets) }}" - - - name: Determine new branch name - working-directory: galaxy_ng - run: | - # Just to be sure... - git checkout master - NEW_BRANCH="$(bump2version --dry-run --list release | sed -Ene 's/^new_version=([[:digit:]]+\.[[:digit:]]+)\..*$/\1/p')" - if [ -z "$NEW_BRANCH" ] - then - echo Could not determine the new branch name. 
- exit 1 - fi - echo "NEW_BRANCH=${NEW_BRANCH}" >> "$GITHUB_ENV" - - - name: Create release branch - working-directory: galaxy_ng - run: | - git branch "${NEW_BRANCH}" - - - name: Bump version on master branch - working-directory: galaxy_ng - run: | - bump2version --no-commit minor - - - name: Remove entries from CHANGES directory - working-directory: galaxy_ng - run: | - find CHANGES -type f -regex ".*\.\(bugfix\|doc\|feature\|misc\|deprecation\|removal\)" -exec git rm {} + - - - name: Checkout plugin template - uses: actions/checkout@v4 - with: - repository: pulp/plugin_template - path: plugin_template - fetch-depth: 0 - - - name: Update CI branches in template_config - working-directory: plugin_template - run: | - python3 ./plugin-template galaxy_ng --github --latest-release-branch "${NEW_BRANCH}" - git add -A - - - name: Make a PR with version bump and without CHANGES/* - uses: peter-evans/create-pull-request@v6 - with: - path: galaxy_ng - token: ${{ secrets.RELEASE_TOKEN }} - committer: ansible - author: ansible - branch: minor-version-bump - base: master - title: Bump minor version - body: '[noissue]' - commit-message: | - Bump minor version - [noissue] - delete-branch: true - - - name: Push release branch - working-directory: galaxy_ng - run: | - git push origin "${NEW_BRANCH}" diff --git a/.github/workflows/deactivated/ci_insights.yml b/.github/workflows/deactivated/ci_insights.yml deleted file mode 100644 index 75e38152d3..0000000000 --- a/.github/workflows/deactivated/ci_insights.yml +++ /dev/null @@ -1,65 +0,0 @@ ---- -name: Insights -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: set the compose profile - run: sed -i.bak 's/COMPOSE_PROFILE=standalone/COMPOSE_PROFILE=insights/' .compose.env - - - name: workaround github worker permissions issues - disable editable installs - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - disable dev installs - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: require approval - run: sed -i.bak 's/# PULP_GALAXY_REQUIRE_CONTENT_APPROVAL=false/PULP_GALAXY_REQUIRE_CONTENT_APPROVAL=true/' .compose.env - - - name: enable signing but do not setup a signing service - run: sed -i.bak 's/ENABLE_SIGNING=1/ENABLE_SIGNING=2/' .compose.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - # run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py http://localhost:8080/api/automation-hub/pulp/api/v3/status/ - run: sleep 120 - - - name: run the integration tests - run: ./dev/insights/RUN_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-certified-sync.yml b/.github/workflows/deactivated/ci_standalone-certified-sync.yml deleted file mode 
100644 index f52ffbb344..0000000000 --- a/.github/workflows/deactivated/ci_standalone-certified-sync.yml +++ /dev/null @@ -1,73 +0,0 @@ ---- -name: Local Standalone Sync Against Local Insights -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - # Note: COMPOSE_INTERACTIVE_NO_CLI=1 is required for oci-env to work correctly when there's no interactive terminal - - name: Set environment variables - run: | - echo "OCI_ENV_PATH=${HOME}/work/galaxy_ng/oci_env" >> $GITHUB_ENV - echo "COMPOSE_INTERACTIVE_NO_CLI=1" >> $GITHUB_ENV - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: setup oci-env - run: | - git clone https://github.com/pulp/oci_env.git $OCI_ENV_PATH - pip install -e $OCI_ENV_PATH/client/ - mkdir $OCI_ENV_PATH/db_backup/ - cp dev/data/insights-fixture.tar.gz $OCI_ENV_PATH/db_backup/insights-fixture.tar.gz - - - name: launch test instances - run: | - oci-env -e dev/oci_env_configs/sync-test.compose.env compose build - oci-env -e dev/oci_env_configs/sync-test.compose.env compose up -d - oci-env -e dev/oci_env_configs/standalone.compose.env compose up -d - - - name: wait for test instances to come online - run: | - oci-env -e dev/oci_env_configs/standalone.compose.env poll --wait 10 --attempts 30 - oci-env -e dev/oci_env_configs/sync-test.compose.env poll - - - name: give the stack extra time to start - run: sleep 30 - - - name: finish insights instance setup - run: | - oci-env -e dev/oci_env_configs/sync-test.compose.env db restore -f insights-fixture --migrate - oci-env -e dev/oci_env_configs/sync-test.compose.env pulpcore-manager migrate - oci-env -e dev/oci_env_configs/sync-test.compose.env poll - - - name: give the stack extra time to re-start - run: sleep 30 - - - name: run the integration tests - run: ./dev/standalone-certified-sync/RUN_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-community.yml b/.github/workflows/deactivated/ci_standalone-community.yml deleted file mode 100644 index 705c7de807..0000000000 --- a/.github/workflows/deactivated/ci_standalone-community.yml +++ /dev/null @@ -1,70 +0,0 @@ ---- -name: Standalone Community -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: set the profile to community - run: sed -i.bak 's/COMPOSE_PROFILE=standalone/COMPOSE_PROFILE=standalone-community/' .compose.env - - - name: workaround github worker permissions 
issues - disable editable installs - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - disable dev installs - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: set the github key - run: echo "SOCIAL_AUTH_GITHUB_KEY='abcd1234'" >> .compose.env - - - name: set the github secret - run: echo "SOCIAL_AUTH_GITHUB_SECRET='abcd1234'" >> .compose.env - - - name: set the github base url - run: echo "SOCIAL_AUTH_GITHUB_BASE_URL='http://github:8082'" >> .compose.env - - - name: set the github api url - run: echo "SOCIAL_AUTH_GITHUB_API_URL='http://github:8082'" >> .compose.env - - - name: build everything and load data - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: run the integration tests - run: DUMP_LOGS=1 ./dev/standalone-community/RUN_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-iqe-rbac-tests.yml b/.github/workflows/deactivated/ci_standalone-iqe-rbac-tests.yml deleted file mode 100644 index 33449ab8a7..0000000000 --- a/.github/workflows/deactivated/ci_standalone-iqe-rbac-tests.yml +++ /dev/null @@ -1,61 +0,0 @@ ---- -name: Standalone IQE RBAC tests -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y - - #- name: install python 3.10 - # run: sudo apt install software-properties-common -y; sudo add-apt-repository --yes ppa:deadsnakes/ppa; sudo apt install python3.10 - - - name: run the integration tests - run: ./dev/standalone-iqe-tests/RUN_RBAC_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-ldap.yml b/.github/workflows/deactivated/ci_standalone-ldap.yml deleted file mode 100644 index 2226add0c5..0000000000 --- a/.github/workflows/deactivated/ci_standalone-ldap.yml +++ /dev/null @@ -1,64 +0,0 @@ ---- -name: Standalone LDAP -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - 
python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: Enable the LDAP compose profile - run: sed -i.bak 's/COMPOSE_PROFILE=standalone/COMPOSE_PROFILE=standalone-ldap/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: disable approval setting override - run: sed -i.bak 's/PULP_GALAXY_REQUIRE_CONTENT_APPROVAL/#PULP_GALAXY_REQUIRE_CONTENT_APPROVAL/' dev/standalone/galaxy_ng.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y - - - name: run the integration tests - run: HUB_LOCAL=1 ./dev/standalone-ldap/RUN_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-rbac-on-repos.yml b/.github/workflows/deactivated/ci_standalone-rbac-on-repos.yml deleted file mode 100644 index eae646efa8..0000000000 --- a/.github/workflows/deactivated/ci_standalone-rbac-on-repos.yml +++ /dev/null @@ -1,58 +0,0 @@ ---- -name: Standalone RBAC on Repositories tests -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y - - - name: run the integration tests - run: ./dev/standalone-iqe-tests/RUN_RBAC_REPOS_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-rbac-roles.yml b/.github/workflows/deactivated/ci_standalone-rbac-roles.yml 
deleted file mode 100644 index e9a8b65973..0000000000 --- a/.github/workflows/deactivated/ci_standalone-rbac-roles.yml +++ /dev/null @@ -1,61 +0,0 @@ ---- -name: Standalone RBAC Roles -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: disable approval setting override - run: sed -i.bak 's/PULP_GALAXY_REQUIRE_CONTENT_APPROVAL/#PULP_GALAXY_REQUIRE_CONTENT_APPROVAL/' dev/standalone/galaxy_ng.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y - - - name: run the integration tests - run: HUB_LOCAL=1 ./dev/standalone-rbac-roles/RUN_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone-x-repo-search.yml b/.github/workflows/deactivated/ci_standalone-x-repo-search.yml deleted file mode 100644 index 0fc751c92e..0000000000 --- a/.github/workflows/deactivated/ci_standalone-x-repo-search.yml +++ /dev/null @@ -1,58 +0,0 @@ ---- -name: Standalone Cross Repository Search tests -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository 
staging --keyring /etc/pulp/certs/galaxy.kbx -y - - - name: run the integration tests - run: ./dev/standalone-iqe-tests/RUN_X_REPO_SEARCH_INTEGRATION.sh diff --git a/.github/workflows/deactivated/ci_standalone.yml b/.github/workflows/deactivated/ci_standalone.yml deleted file mode 100644 index 7629ffbef4..0000000000 --- a/.github/workflows/deactivated/ci_standalone.yml +++ /dev/null @@ -1,58 +0,0 @@ ---- -name: Standalone -on: - pull_request: - branches: - - '**' - push: - branches: - - '**' - workflow_dispatch: - -jobs: - - standalone_integration: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Update apt - run: sudo apt -y update - - - name: Install LDAP requirements - run: sudo apt-get install -y libsasl2-dev python3 libldap2-dev libssl-dev build-essential - - - name: Install python requirements required to run integration tests - run: pip install requests pyyaml - - - uses: KengoTODA/actions-setup-docker-compose@v1 - with: - version: '2.17.3' - - - name: create the .compose.env file - run: rm -f .compose.env; cp .compose.env.example .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/PIP_EDITABLE_INSTALL=1/PIP_EDITABLE_INSTALL=0/' .compose.env - - - name: workaround github worker permissions issues - run: sed -i.bak 's/WITH_DEV_INSTALL=1/WITH_DEV_INSTALL=0/' .compose.env - - - name: build stack - run: make docker/all - - - name: start the compose stack - run: ./compose up -d - - - name: give stack some time to spin up - run: COMPOSE_INTERACTIVE_NO_CLI=1 python dev/common/poll.py - - - name: set keyring on staging repo for signature upload - run: ./compose exec -T api ./entrypoint.sh manage set-repo-keyring --repository staging --keyring /etc/pulp/certs/galaxy.kbx -y - - - name: run the integration tests - run: DUMP_LOGS=1 ./dev/standalone/RUN_INTEGRATION.sh diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index e4ce5f42ce..0000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,52 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Lint" -on: - workflow_call: - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - lint: - runs-on: ubuntu-latest - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - path: "galaxy_ng" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install -r lint_requirements.txt - echo ::endgroup:: - - - name: Lint workflow files - run: | - yamllint -s -d '{extends: relaxed, rules: {line-length: disable}}' .github/workflows - - - name: Run extra lint checks - run: "[ ! -x .ci/scripts/extra_linting.sh ] || .ci/scripts/extra_linting.sh" - - # check for any files unintentionally left out of MANIFEST.in - - name: Check manifest - run: check-manifest - - - name: Check for pulpcore imports outside of pulpcore.plugin - run: sh .ci/scripts/check_pulpcore_imports.sh - - - name: Check for gettext problems - run: sh .ci/scripts/check_gettext.sh diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 100644 index e2402d4633..0000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,96 +0,0 @@ -# WARNING: DO NOT EDIT! 
-# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Galaxy Nightly CI" -on: - schedule: - # * is a special character in YAML so you have to quote this string - # runs at 3:00 UTC daily - - cron: '00 3 * * *' - workflow_dispatch: - -defaults: - run: - working-directory: "galaxy_ng" - -concurrency: - group: "${{ github.ref_name }}-${{ github.workflow }}" - cancel-in-progress: true - -jobs: - build: - uses: "./.github/workflows/build.yml" - - test: - needs: "build" - uses: "./.github/workflows/test.yml" - - changelog: - runs-on: ubuntu-latest - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - path: "galaxy_ng" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install gitpython packaging toml - echo ::endgroup:: - - - name: "Configure Git with ansible name and email" - run: | - git config --global user.name 'ansible' - git config --global user.email 'ansible-infra@redhat.com' - - - name: Collect changes from all branches - run: python .ci/scripts/collect_changes.py - - - name: Create Pull Request - uses: peter-evans/create-pull-request@v6 - with: - token: ${{ secrets.RELEASE_TOKEN }} - title: "Update Changelog" - body: "" - branch: "changelog/update" - delete-branch: true - path: "galaxy_ng" - - publish: - runs-on: ubuntu-latest - needs: test - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - path: "galaxy_ng" - - - uses: actions/download-artifact@v4 - with: - name: "plugin_package" - path: "galaxy_ng/dist/" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install requests 'packaging~=21.3' mkdocs pymdown-extensions 'Jinja2<3.1' - echo ::endgroup:: - - - name: "Set environment variables" - run: | - echo "TEST=${{ matrix.env.TEST }}" >> $GITHUB_ENV diff --git a/.github/workflows/nightly_latest.yml b/.github/workflows/nightly_latest.yml deleted file mode 100644 index bd7b2cb890..0000000000 --- a/.github/workflows/nightly_latest.yml +++ /dev/null @@ -1,155 +0,0 @@ -name: Nightly Latest -on: - schedule: - # * is a special character in YAML so you have to quote this string - # runs at 4:00 UTC daily - - cron: '00 4 * * *' - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - - -jobs: - test: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - env: - - TEST: pulp - - TEST: azure - - TEST: s3 - outputs: - deprecations-pulp: ${{ steps.deprecations.outputs.deprecations-pulp }} - deprecations-azure: ${{ steps.deprecations.outputs.deprecations-azure }} - deprecations-s3: ${{ steps.deprecations.outputs.deprecations-s3 }} - - steps: - - uses: actions/checkout@v4 - with: - # by default, it uses a depth of 1 - # this fetches all history so that we can read each commit - fetch-depth: 0 - - - uses: actions/setup-python@v4 - with: - python-version: "3.8" - - - name: Install httpie - run: | - echo ::group::HTTPIE - sudo apt-get update -yq - sudo -E apt-get -yq --no-install-suggests --no-install-recommends install httpie - echo ::endgroup:: - echo "TEST=${{ matrix.env.TEST }}" >> $GITHUB_ENV - echo "HTTPIE_CONFIG_DIR=$GITHUB_WORKSPACE/.ci/assets/httpie/" >> $GITHUB_ENV - - - name: Before Install - - run: .github/workflows/scripts/before_install.sh - 
shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - uses: actions/setup-ruby@v1 - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} - with: - ruby-version: "2.6" - - - name: Install - - run: .github/workflows/scripts/install.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Before Script - - run: .github/workflows/scripts/before_script.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Setting secrets - - run: python3 .github/workflows/scripts/secrets.py "$SECRETS_CONTEXT" - env: - SECRETS_CONTEXT: ${{ toJson(secrets) }} - - - name: Install Python client - - run: .github/workflows/scripts/install_python_client.sh - shell: bash - - - name: Install Ruby client - if: ${{ env.TEST == 'bindings' || env.TEST == 'generate-bindings' }} - run: .github/workflows/scripts/install_ruby_client.sh - shell: bash - - - name: Script - run: .github/workflows/scripts/script.sh - shell: bash - env: - PY_COLORS: '1' - ANSIBLE_FORCE_COLOR: '1' - GITHUB_PULL_REQUEST: ${{ github.event.number }} - GITHUB_PULL_REQUEST_BODY: ${{ github.event.pull_request.body }} - GITHUB_BRANCH: ${{ github.head_ref }} - GITHUB_REPO_SLUG: ${{ github.repository }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_CONTEXT: ${{ github.event.pull_request.commits_url }} - - - name: Extract Deprecations from Logs - id: deprecations - run: echo "::set-output name=deprecations-${{ matrix.env.TEST }}::$(docker logs pulp 2>&1 | grep -i pulpcore.deprecation | base64 -w 0)" - - - name: Logs - if: always() - run: | - echo "Need to debug? 
Please check: https://github.com/marketplace/actions/debugging-with-tmate" - http --timeout 30 --check-status --pretty format --print hb https://pulp/pulp/api/v3/status/ || true - docker images || true - docker ps -a || true - docker logs pulp || true - docker exec pulp ls -latr /etc/yum.repos.d/ || true - docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp pip3 list - - deprecations: - runs-on: ubuntu-latest - if: always() - needs: test - steps: - - name: Fail on deprecations - run: | - test -z "${{ needs.test.outputs.deprecations-pulp }}" - test -z "${{ needs.test.outputs.deprecations-azure }}" - test -z "${{ needs.test.outputs.deprecations-s3 }}" - - name: Print deprecations - if: failure() - run: | - echo "${{ needs.test.outputs.deprecations-pulp }}" | base64 -d - echo "${{ needs.test.outputs.deprecations-azure }}" | base64 -d - echo "${{ needs.test.outputs.deprecations-s3 }}" | base64 -d diff --git a/.github/workflows/pr_checks.yml b/.github/workflows/pr_checks.yml deleted file mode 100644 index 9b368affe6..0000000000 --- a/.github/workflows/pr_checks.yml +++ /dev/null @@ -1,62 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: Galaxy PR static checks -on: - pull_request_target: - types: [opened, synchronize, reopened] - -# This workflow runs with elevated permissions. -# Do not even think about running a single bit of code from the PR. -# Static analysis should be fine however. - -concurrency: - group: ${{ github.event.pull_request.number }}-${{ github.workflow }} - cancel-in-progress: true - -jobs: - single_commit: - runs-on: ubuntu-latest - name: Label multiple commit PR - permissions: - pull-requests: write - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - - name: Commit Count Check - run: | - git fetch origin ${{ github.event.pull_request.head.sha }} - echo "COMMIT_COUNT=$(git log --oneline --no-merges origin/${{ github.base_ref }}..${{ github.event.pull_request.head.sha }} | wc -l)" >> "$GITHUB_ENV" - - uses: actions/github-script@v7 - with: - script: | - const labelName = "multi-commit"; - const { COMMIT_COUNT } = process.env; - - if (COMMIT_COUNT == 1) - { - try { - await github.rest.issues.removeLabel({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - name: labelName, - }); - } catch(err) { - } - } - else - { - await github.rest.issues.addLabels({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - labels: [labelName], - }); - } diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index 1f819e5ca3..0000000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,136 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Galaxy Publish Release" -on: - push: - tags: - - "[0-9]+.[0-9]+.[0-9]+" - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - build: - uses: "./.github/workflows/build.yml" - - build-bindings-docs: - needs: - - "build" - runs-on: "ubuntu-latest" - # Install scripts expect TEST to be set, 'docs' is most appropriate even though we don't run tests - env: - TEST: "docs" - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - path: "galaxy_ng" - - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - repository: "pulp/pulp-openapi-generator" - path: "pulp-openapi-generator" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - uses: "actions/download-artifact@v4" - with: - name: "plugin_package" - path: "galaxy_ng/dist/" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install towncrier twine wheel httpie docker netaddr boto3 ansible mkdocs - echo "HTTPIE_CONFIG_DIR=$GITHUB_WORKSPACE/galaxy_ng/.ci/assets/httpie/" >> $GITHUB_ENV - echo ::endgroup:: - - # Building the bindings and docs requires accessing the OpenAPI specs endpoint, so we need to - # setup the Pulp instance. - - name: "Before Install" - run: | - .github/workflows/scripts/before_install.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - - - name: "Install" - run: | - .github/workflows/scripts/install.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - - - name: "Install Python client" - run: | - .github/workflows/scripts/install_python_client.sh - shell: "bash" - - - name: "Upload python client packages" - uses: "actions/upload-artifact@v4" - with: - name: "python-client.tar" - path: | - galaxy_ng/galaxy-python-client.tar - if-no-files-found: "error" - overwrite: true - - - name: "Upload python client docs" - uses: "actions/upload-artifact@v4" - with: - name: "python-client-docs.tar" - path: | - galaxy_ng/galaxy-python-client-docs.tar - if-no-files-found: "error" - overwrite: true - - - name: "Logs" - if: always() - run: | - echo "Need to debug? 
Please check: https://github.com/marketplace/actions/debugging-with-tmate" - http --timeout 30 --check-status --pretty format --print hb "https://pulp${PULP_API_ROOT}api/v3/status/" || true - docker images || true - docker ps -a || true - docker logs pulp || true - docker exec pulp ls -latr /etc/yum.repos.d/ || true - docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" - - create-gh-release: - runs-on: "ubuntu-latest" - needs: - - "build-bindings-docs" - - steps: - - name: "Create release on GitHub" - uses: "actions/github-script@v7" - env: - TAG_NAME: "${{ github.ref_name }}" - with: - script: | - const { TAG_NAME } = process.env; - - await github.rest.repos.createRelease({ - owner: context.repo.owner, - repo: context.repo.repo, - tag_name: TAG_NAME, - make_latest: "legacy", - }); diff --git a/.github/workflows/pulp_constraints.yml b/.github/workflows/pulp_constraints.yml deleted file mode 100644 index 3d68ef1e56..0000000000 --- a/.github/workflows/pulp_constraints.yml +++ /dev/null @@ -1,14 +0,0 @@ ---- -name: Verify pulp imposed constraints -on: {pull_request: {branches: ['*']}, push: {branches: ['*']}} -jobs: - - check_pulp_template_plugin: - - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Ensure sanity with plugin-template managed files - run: make pulp/plugin-template-check diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index a7ce5d0563..0000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,61 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: Galaxy Release Pipeline -on: - workflow_dispatch: - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - build-artifacts: - runs-on: "ubuntu-latest" - - strategy: - fail-fast: false - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - path: "galaxy_ng" - token: ${{ secrets.RELEASE_TOKEN }} - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install bump2version towncrier - echo ::endgroup:: - - - name: "Configure Git with ansible name and email" - run: | - git config --global user.name 'ansible' - git config --global user.email 'ansible-infra@redhat.com' - - - name: "Setting secrets" - run: | - python3 .github/workflows/scripts/secrets.py "$SECRETS_CONTEXT" - env: - SECRETS_CONTEXT: "${{ toJson(secrets) }}" - - - name: "Tag the release" - run: | - .github/workflows/scripts/release.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" diff --git a/.github/workflows/scripts/before_install.sh b/.github/workflows/scripts/before_install.sh deleted file mode 100755 index 20f4d8ab59..0000000000 --- a/.github/workflows/scripts/before_install.sh +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. - -set -mveuo pipefail - -if [ "${GITHUB_REF##refs/heads/}" = "${GITHUB_REF}" ] -then - BRANCH_BUILD=0 -else - BRANCH_BUILD=1 - BRANCH="${GITHUB_REF##refs/heads/}" -fi -if [ "${GITHUB_REF##refs/tags/}" = "${GITHUB_REF}" ] -then - TAG_BUILD=0 -else - TAG_BUILD=1 - BRANCH="${GITHUB_REF##refs/tags/}" -fi - -COMMIT_MSG=$(git log --format=%B --no-merges -1) -export COMMIT_MSG - -COMPONENT_VERSION=$(sed -ne "s/\s*version.*=.*['\"]\(.*\)['\"][\s,]*/\1/p" setup.py) - -mkdir .ci/ansible/vars || true -echo "---" > .ci/ansible/vars/main.yaml -echo "legacy_component_name: galaxy_ng" >> .ci/ansible/vars/main.yaml -echo "component_name: galaxy" >> .ci/ansible/vars/main.yaml -echo "component_version: '${COMPONENT_VERSION}'" >> .ci/ansible/vars/main.yaml - -export PRE_BEFORE_INSTALL=$PWD/.github/workflows/scripts/pre_before_install.sh -export POST_BEFORE_INSTALL=$PWD/.github/workflows/scripts/post_before_install.sh - -if [ -f $PRE_BEFORE_INSTALL ]; then - source $PRE_BEFORE_INSTALL -fi - -if [[ -n $(echo -e $COMMIT_MSG | grep -P "Required PR:.*") ]]; then - echo "The Required PR mechanism has been removed. Consider adding a scm requirement to requirements.txt." - exit 1 -fi - -if [ "$GITHUB_EVENT_NAME" = "pull_request" ] || [ "${BRANCH_BUILD}" = "1" -a "${BRANCH}" != "master" ] -then - echo $COMMIT_MSG | sed -n -e 's/.*CI Base Image:\s*\([-_/[:alnum:]]*:[-_[:alnum:]]*\).*/ci_base: "\1"/p' >> .ci/ansible/vars/main.yaml -fi - -for i in {1..3} -do - ansible-galaxy collection install "amazon.aws:1.5.0" && s=0 && break || s=$? && sleep 3 -done -if [[ $s -gt 0 ]] -then - echo "Failed to install amazon.aws" - exit $s -fi - -if [[ "$TEST" = "lowerbounds" ]]; then - python3 .ci/scripts/calc_deps_lowerbounds.py > lowerbounds_constraints.txt - sed -i 's/\[.*\]//g' lowerbounds_constraints.txt -fi - -if [ -f $POST_BEFORE_INSTALL ]; then - source $POST_BEFORE_INSTALL -fi diff --git a/.github/workflows/scripts/before_script.sh b/.github/workflows/scripts/before_script.sh deleted file mode 100755 index 5ddafb7193..0000000000 --- a/.github/workflows/scripts/before_script.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. 
- -set -euv - -source .github/workflows/scripts/utils.sh - -export PRE_BEFORE_SCRIPT=$PWD/.github/workflows/scripts/pre_before_script.sh -export POST_BEFORE_SCRIPT=$PWD/.github/workflows/scripts/post_before_script.sh - -if [[ -f $PRE_BEFORE_SCRIPT ]]; then - source $PRE_BEFORE_SCRIPT -fi - -# Developers should be able to reproduce the containers with this config -echo "CI vars:" -tail -v -n +1 .ci/ansible/vars/main.yaml - -# Developers often want to know the final pulp config -echo "PULP CONFIG:" -tail -v -n +1 .ci/ansible/settings/settings.* ~/.config/pulp_smash/settings.json - -echo "Containerfile:" -tail -v -n +1 .ci/ansible/Containerfile - -# Needed for some functional tests -cmd_prefix bash -c "echo '%wheel ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/nopasswd" -cmd_prefix bash -c "usermod -a -G wheel pulp" - -if [[ "${REDIS_DISABLED:-false}" == true ]]; then - cmd_prefix bash -c "s6-rc -d change redis" - echo "The Redis service was disabled for $TEST" -fi - -if [[ -f $POST_BEFORE_SCRIPT ]]; then - source $POST_BEFORE_SCRIPT -fi - -# Lots of plugins try to use this path, and throw warnings if they cannot access it. -cmd_prefix mkdir /.pytest_cache -cmd_prefix chown pulp:pulp /.pytest_cache diff --git a/.github/workflows/scripts/check_commit.sh b/.github/workflows/scripts/check_commit.sh deleted file mode 100755 index 920970913b..0000000000 --- a/.github/workflows/scripts/check_commit.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")/../../.." - -set -euv - -for SHA in $(curl -H "Authorization: token $GITHUB_TOKEN" "$GITHUB_CONTEXT" | jq -r '.[].sha') -do - python3 .ci/scripts/validate_commit_message.py "$SHA" - VALUE=$? - if [ "$VALUE" -gt 0 ]; then - exit $VALUE - fi -done diff --git a/.github/workflows/scripts/docs-publisher.py b/.github/workflows/scripts/docs-publisher.py deleted file mode 100755 index f5932469d4..0000000000 --- a/.github/workflows/scripts/docs-publisher.py +++ /dev/null @@ -1,262 +0,0 @@ -#!/usr/bin/env python - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import argparse -import subprocess -import os -import re -from shutil import rmtree -import tempfile -import requests -import json -from packaging import version - -WORKING_DIR = os.environ["WORKSPACE"] - -VERSION_REGEX = r"(\s*)(version)(\s*)(=)(\s*)(['\"])(.*)(['\"])(.*)" -RELEASE_REGEX = r"(\s*)(release)(\s*)(=)(\s*)(['\"])(.*)(['\"])(.*)" - -USERNAME = "doc_builder_galaxy_ng" -HOSTNAME = "8.43.85.236" - -SITE_ROOT = "/var/www/docs.pulpproject.org/galaxy_ng/" - - -def make_directory_with_rsync(remote_paths_list): - """ - Ensure the remote directory path exists. - - :param remote_paths_list: The list of parameters. e.g. ['en', 'latest'] to be en/latest on the - remote. - :type remote_paths_list: a list of strings, with each string representing a directory. 
- """ - try: - tempdir_path = tempfile.mkdtemp() - cwd = os.getcwd() - os.chdir(tempdir_path) - os.makedirs(os.sep.join(remote_paths_list)) - remote_path_arg = "%s@%s:%s%s" % ( - USERNAME, - HOSTNAME, - SITE_ROOT, - remote_paths_list[0], - ) - local_path_arg = tempdir_path + os.sep + remote_paths_list[0] + os.sep - rsync_command = ["rsync", "-avzh", local_path_arg, remote_path_arg] - exit_code = subprocess.call(rsync_command) - if exit_code != 0: - raise RuntimeError("An error occurred while creating remote directories.") - finally: - rmtree(tempdir_path) - os.chdir(cwd) - - -def ensure_dir(target_dir, clean=True): - """ - Ensure that the directory specified exists and is empty. - - By default this will delete the directory if it already exists - - :param target_dir: The directory to process - :type target_dir: str - :param clean: Whether or not the directory should be removed and recreated - :type clean: bool - """ - if clean: - rmtree(target_dir, ignore_errors=True) - try: - os.makedirs(target_dir) - except OSError: - pass - - -def main(): - """ - Builds documentation using the 'make html' command and rsyncs to docs.pulpproject.org. - """ - parser = argparse.ArgumentParser() - parser.add_argument( - "--build-type", required=True, help="Build type: nightly, tag or changelog." - ) - parser.add_argument("--branch", required=True, help="Branch or tag name.") - opts = parser.parse_args() - - build_type = opts.build_type - - branch = opts.branch - - publish_at_root = False - - # rsync the docs - print("rsync the docs") - docs_directory = os.sep.join([WORKING_DIR, "docs"]) - local_path_arg = os.sep.join([docs_directory, "_build", "html"]) + os.sep - if build_type == "nightly": - # This is a nightly build - remote_path_arg = "%s@%s:%sen/%s/%s/" % ( - USERNAME, - HOSTNAME, - SITE_ROOT, - branch, - build_type, - ) - make_directory_with_rsync(["en", branch, build_type]) - rsync_command = ["rsync", "-avzh", "--delete", local_path_arg, remote_path_arg] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - elif build_type == "tag": - if (not re.search("[a-zA-Z]", branch) or "post" in branch) and len(branch.split(".")) > 2: - # Only publish docs at the root if this is the latest version - r = requests.get("https://pypi.org/pypi/galaxy-ng/json") - latest_version = version.parse(json.loads(r.text)["info"]["version"]) - docs_version = version.parse(branch) - # This is to mitigate delays on PyPI which doesn't update metadata in timely manner. - # It doesn't prevent incorrect docs being published at root if 2 releases are done close - # to each other, within PyPI delay. E.g. Release 3.11.0 an then 3.10.1 immediately - # after. - if docs_version >= latest_version: - publish_at_root = True - # Post releases should use the x.y.z part of the version string to form a path - if "post" in branch: - branch = ".".join(branch.split(".")[:-1]) - - # This is a GA build. 
- # publish to the root of docs.pulpproject.org - if publish_at_root: - version_components = branch.split(".") - x_y_version = "{}.{}".format(version_components[0], version_components[1]) - remote_path_arg = "%s@%s:%s" % (USERNAME, HOSTNAME, SITE_ROOT) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--exclude", - "en", - "--omit-dir-times", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - # publish to docs.pulpproject.org/en/3.y/ - make_directory_with_rsync(["en", x_y_version]) - remote_path_arg = "%s@%s:%sen/%s/" % ( - USERNAME, - HOSTNAME, - SITE_ROOT, - x_y_version, - ) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--omit-dir-times", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - # publish to docs.pulpproject.org/en/3.y.z/ - make_directory_with_rsync(["en", branch]) - remote_path_arg = "%s@%s:%sen/%s/" % (USERNAME, HOSTNAME, SITE_ROOT, branch) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--omit-dir-times", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - else: - # This is a pre-release - make_directory_with_rsync(["en", branch]) - remote_path_arg = "%s@%s:%sen/%s/%s/" % ( - USERNAME, - HOSTNAME, - SITE_ROOT, - branch, - build_type, - ) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--exclude", - "nightly", - "--exclude", - "testing", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - # publish to docs.pulpproject.org/en/3.y/ - version_components = branch.split(".") - x_y_version = "{}.{}".format(version_components[0], version_components[1]) - make_directory_with_rsync(["en", x_y_version]) - remote_path_arg = "%s@%s:%sen/%s/" % ( - USERNAME, - HOSTNAME, - SITE_ROOT, - x_y_version, - ) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--omit-dir-times", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - # publish to docs.pulpproject.org/en/3.y.z/ - make_directory_with_rsync(["en", branch]) - remote_path_arg = "%s@%s:%sen/%s/" % (USERNAME, HOSTNAME, SITE_ROOT, branch) - rsync_command = [ - "rsync", - "-avzh", - "--delete", - "--omit-dir-times", - local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - elif build_type == "changelog": - if branch != "master": - raise RuntimeError("Can only publish CHANGELOG from master") - # Publish the CHANGELOG from master branch at the root directory - remote_path_arg = "%s@%s:%s" % (USERNAME, HOSTNAME, SITE_ROOT) - changelog_local_path_arg = os.path.join(local_path_arg, "changes.html") - rsync_command = [ - "rsync", - "-vzh", - "--omit-dir-times", - changelog_local_path_arg, - remote_path_arg, - ] - exit_code = subprocess.call(rsync_command, cwd=docs_directory) - if exit_code != 0: - raise RuntimeError("An error occurred while pushing docs.") - else: - raise 
RuntimeError("Build type must be either 'nightly', 'tag' or 'changelog'.") - - -if __name__ == "__main__": - main() diff --git a/.github/workflows/scripts/install.sh b/.github/workflows/scripts/install.sh deleted file mode 100755 index 2b368b1138..0000000000 --- a/.github/workflows/scripts/install.sh +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. -REPO_ROOT="$PWD" - -set -euv - -source .github/workflows/scripts/utils.sh - -PLUGIN_VERSION="$(sed -n -e 's/^\s*current_version\s*=\s*//p' .bumpversion.cfg | python -c 'from packaging.version import Version; print(Version(input()))')" -PLUGIN_SOURCE="./galaxy_ng/dist/galaxy_ng-${PLUGIN_VERSION}-py3-none-any.whl" - -export PULP_API_ROOT="/api/galaxy/pulp/" - -PIP_REQUIREMENTS=("pulp-cli") -if [[ "$TEST" = "docs" || "$TEST" = "publish" ]] -then - PIP_REQUIREMENTS+=("-r" "doc_requirements.txt") -fi - -pip install ${PIP_REQUIREMENTS[*]} - - - -cd .ci/ansible/ -if [ "$TEST" = "s3" ]; then - PLUGIN_SOURCE="${PLUGIN_SOURCE} pulpcore[s3]" -fi -if [ "$TEST" = "azure" ]; then - PLUGIN_SOURCE="${PLUGIN_SOURCE} pulpcore[azure]" -fi - -cat >> vars/main.yaml << VARSYAML -image: - name: pulp - tag: "ci_build" -plugins: - - name: galaxy_ng - source: "${PLUGIN_SOURCE}" -VARSYAML -if [[ -f ../../ci_requirements.txt ]]; then - cat >> vars/main.yaml << VARSYAML - ci_requirements: true -VARSYAML -fi - -cat >> vars/main.yaml << VARSYAML -services: - - name: pulp - image: "pulp:ci_build" - volumes: - - ./settings:/etc/pulp - - ./ssh:/keys/ - - ~/.config:/var/lib/pulp/.config - - ../../../pulp-openapi-generator:/root/pulp-openapi-generator - env: - PULP_WORKERS: "4" - PULP_HTTPS: "true" -VARSYAML - -cat >> vars/main.yaml << VARSYAML -pulp_env: {} -pulp_settings: {"allowed_export_paths": "/tmp", "allowed_import_paths": "/tmp", "galaxy_api_default_distribution_base_path": "published", "galaxy_enable_api_access_log": true, "galaxy_require_content_approval": false, "rh_entitlement_required": "insights"} -pulp_scheme: https -pulp_default_container: ghcr.io/pulp/pulp-ci-centos9:latest -VARSYAML - -if [ "$TEST" = "s3" ]; then - export MINIO_ACCESS_KEY=AKIAIT2Z5TDYPX3ARJBA - export MINIO_SECRET_KEY=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 - sed -i -e '/^services:/a \ - - name: minio\ - image: minio/minio\ - env:\ - MINIO_ACCESS_KEY: "'$MINIO_ACCESS_KEY'"\ - MINIO_SECRET_KEY: "'$MINIO_SECRET_KEY'"\ - command: "server /data"' vars/main.yaml - sed -i -e '$a s3_test: true\ -minio_access_key: "'$MINIO_ACCESS_KEY'"\ -minio_secret_key: "'$MINIO_SECRET_KEY'"\ -pulp_scenario_settings: null\ -pulp_scenario_env: {}\ -' vars/main.yaml -fi - -if [ "$TEST" = "azure" ]; then - mkdir -p azurite - cd azurite - openssl req -newkey rsa:2048 -x509 -nodes -keyout azkey.pem -new -out azcert.pem -sha256 -days 365 -addext "subjectAltName=DNS:ci-azurite" -subj "/C=CO/ST=ST/L=LO/O=OR/OU=OU/CN=CN" - sudo cp azcert.pem /usr/local/share/ca-certificates/azcert.crt - sudo dpkg-reconfigure ca-certificates - cd .. 
- sed -i -e '/^services:/a \ - - name: ci-azurite\ - image: mcr.microsoft.com/azure-storage/azurite\ - volumes:\ - - ./azurite:/etc/pulp\ - command: "azurite-blob --blobHost 0.0.0.0 --cert /etc/pulp/azcert.pem --key /etc/pulp/azkey.pem"' vars/main.yaml - sed -i -e '$a azure_test: true\ -pulp_scenario_settings: null\ -pulp_scenario_env: {}\ -' vars/main.yaml -fi - -echo "PULP_API_ROOT=${PULP_API_ROOT}" >> "$GITHUB_ENV" - -if [ "${PULP_API_ROOT:-}" ]; then - sed -i -e '$a api_root: "'"$PULP_API_ROOT"'"' vars/main.yaml -fi - -pulp config create --base-url https://pulp --api-root "$PULP_API_ROOT" --username "admin" --password "password" - - -ansible-playbook build_container.yaml -ansible-playbook start_container.yaml - -# .config needs to be accessible by the pulp user in the container, but some -# files will likely be modified on the host by post/pre scripts. -chmod 777 ~/.config/pulp_smash/ -chmod 666 ~/.config/pulp_smash/settings.json - -sudo chown -R 700:700 ~/.config -echo ::group::SSL -# Copy pulp CA -sudo docker cp pulp:/etc/pulp/certs/pulp_webserver.crt /usr/local/share/ca-certificates/pulp_webserver.crt - -# Hack: adding pulp CA to certifi.where() -CERTIFI=$(python -c 'import certifi; print(certifi.where())') -cat /usr/local/share/ca-certificates/pulp_webserver.crt | sudo tee -a "$CERTIFI" > /dev/null -if [[ "$TEST" = "azure" ]]; then - cat /usr/local/share/ca-certificates/azcert.crt | sudo tee -a "$CERTIFI" > /dev/null -fi - -# Hack: adding pulp CA to default CA file -CERT=$(python -c 'import ssl; print(ssl.get_default_verify_paths().openssl_cafile)') -cat "$CERTIFI" | sudo tee -a "$CERT" > /dev/null - -# Updating certs -sudo update-ca-certificates -echo ::endgroup:: - -# Add our azcert.crt certificate to the container image along with the certificates from certifi -# so that we can use HTTPS with our fake Azure CI. certifi is self-contained and doesn't allow -# extension or modification of the trust store, so we do a weird and hacky thing (above) where we just -# overwrite or append to certifi's trust store behind it's back. -# -# We do this for both the CI host and the CI image. -if [[ "$TEST" = "azure" ]]; then - AZCERTIFI=$(/opt/az/bin/python3 -c 'import certifi; print(certifi.where())') - PULPCERTIFI=$(cmd_prefix python3 -c 'import certifi; print(certifi.where())') - cat /usr/local/share/ca-certificates/azcert.crt >> $AZCERTIFI - cat /usr/local/share/ca-certificates/azcert.crt | cmd_stdin_prefix tee -a "$PULPCERTIFI" > /dev/null - cat /usr/local/share/ca-certificates/azcert.crt | cmd_stdin_prefix tee -a /etc/pki/tls/cert.pem > /dev/null - AZURE_STORAGE_CONNECTION_STRING='DefaultEndpointsProtocol=https;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=https://ci-azurite:10000/devstoreaccount1;' - az storage container create --name pulp-test --connection-string $AZURE_STORAGE_CONNECTION_STRING -fi - -echo ::group::PIP_LIST -cmd_prefix bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" -echo ::endgroup:: diff --git a/.github/workflows/scripts/install_python_client.sh b/.github/workflows/scripts/install_python_client.sh deleted file mode 100755 index f204b29964..0000000000 --- a/.github/workflows/scripts/install_python_client.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - -set -mveuo pipefail - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. - -source .github/workflows/scripts/utils.sh - -PULP_URL="${PULP_URL:-https://pulp}" -export PULP_URL -PULP_API_ROOT="${PULP_API_ROOT:-/pulp/}" -export PULP_API_ROOT - -REPORTED_STATUS="$(pulp status)" -REPORTED_VERSION="$(echo "$REPORTED_STATUS" | jq --arg plugin "galaxy" -r '.versions[] | select(.component == $plugin) | .version')" -VERSION="$(echo "$REPORTED_VERSION" | python -c 'from packaging.version import Version; print(Version(input()))')" - -pushd ../pulp-openapi-generator -rm -rf galaxy_ng-client - -if pulp debug has-plugin --name "core" --specifier ">=3.44.0.dev" -then - curl --fail-with-body -k -o api.json "${PULP_URL}${PULP_API_ROOT}api/v3/docs/api.json?bindings&component=galaxy" - USE_LOCAL_API_JSON=1 ./generate.sh galaxy_ng python "$VERSION" -else - ./generate.sh galaxy_ng python "$VERSION" -fi - -pushd galaxy_ng-client -python setup.py sdist bdist_wheel --python-tag py3 - -twine check "dist/galaxy_ng_client-$VERSION-py3-none-any.whl" -twine check "dist/galaxy_ng-client-$VERSION.tar.gz" - -cmd_prefix pip3 install "/root/pulp-openapi-generator/galaxy_ng-client/dist/galaxy_ng_client-${VERSION}-py3-none-any.whl" -tar cvf ../../galaxy_ng/galaxy-python-client.tar ./dist - -find ./docs/* -exec sed -i 's/Back to README/Back to HOME/g' {} \; -find ./docs/* -exec sed -i 's/README//g' {} \; -cp README.md docs/index.md -sed -i 's/docs\///g' docs/index.md -find ./docs/* -exec sed -i 's/\.md//g' {} \; - -cat >> mkdocs.yml << DOCSYAML ---- -site_name: GalaxyNg Client -site_description: Galaxy bindings -site_author: Pulp Team -site_url: https://docs.pulpproject.org/galaxy_ng_client/ -repo_name: pulp/galaxy_ng -repo_url: https://github.com/pulp/galaxy_ng -theme: readthedocs -DOCSYAML - -# Building the bindings docs -mkdocs build - -# Pack the built site. -tar cvf ../../galaxy_ng/galaxy-python-client-docs.tar ./site -popd -popd diff --git a/.github/workflows/scripts/install_ruby_client.sh b/.github/workflows/scripts/install_ruby_client.sh deleted file mode 100755 index 561da4dfcd..0000000000 --- a/.github/workflows/scripts/install_ruby_client.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -mveuo pipefail - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. 
- -source .github/workflows/scripts/utils.sh - -PULP_URL="${PULP_URL:-https://pulp}" -export PULP_URL -PULP_API_ROOT="${PULP_API_ROOT:-/pulp/}" -export PULP_API_ROOT - -REPORTED_STATUS="$(pulp status)" -REPORTED_VERSION="$(echo "$REPORTED_STATUS" | jq --arg plugin "galaxy" -r '.versions[] | select(.component == $plugin) | .version')" -VERSION="$(echo "$REPORTED_VERSION" | python -c 'from packaging.version import Version; print(Version(input()))')" - -pushd ../pulp-openapi-generator -rm -rf galaxy_ng-client - -if pulp debug has-plugin --name "core" --specifier ">=3.44.0.dev" -then - curl --fail-with-body -k -o api.json "${PULP_URL}${PULP_API_ROOT}api/v3/docs/api.json?bindings&component=galaxy" - USE_LOCAL_API_JSON=1 ./generate.sh galaxy_ng ruby "$VERSION" -else - ./generate.sh galaxy_ng ruby "$VERSION" -fi - -pushd galaxy_ng-client -gem build galaxy_ng_client -gem install --both "./galaxy_ng_client-$VERSION.gem" -tar cvf ../../galaxy_ng/galaxy-ruby-client.tar "./galaxy_ng_client-$VERSION.gem" -popd -popd diff --git a/.github/workflows/scripts/post_before_script.sh b/.github/workflows/scripts/post_before_script.sh index 2b9724b737..8fb154dc3c 100644 --- a/.github/workflows/scripts/post_before_script.sh +++ b/.github/workflows/scripts/post_before_script.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash set -mveuo pipefail -source .github/workflows/scripts/utils.sh cmd_prefix bash -c "django-admin compilemessages" echo "machine pulp diff --git a/.github/workflows/scripts/publish_client_gem.sh b/.github/workflows/scripts/publish_client_gem.sh deleted file mode 100755 index ae76f8fa88..0000000000 --- a/.github/workflows/scripts/publish_client_gem.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -euv - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. - -VERSION="$1" - -if [[ -z "$VERSION" ]]; then - echo "No version specified." - exit 1 -fi - -RESPONSE="$(curl --write-out '%{http_code}' --silent --output /dev/null "https://rubygems.org/gems/galaxy_ng_client/versions/$VERSION")" - -if [ "$RESPONSE" == "200" ]; -then - echo "galaxy_ng client $VERSION has already been released. Skipping." - exit -fi - -mkdir -p ~/.gem -touch ~/.gem/credentials -echo "--- -:rubygems_api_key: $RUBYGEMS_API_KEY" > ~/.gem/credentials -sudo chmod 600 ~/.gem/credentials -gem push "galaxy_ng_client-${VERSION}.gem" diff --git a/.github/workflows/scripts/publish_client_pypi.sh b/.github/workflows/scripts/publish_client_pypi.sh deleted file mode 100755 index bfd446e3f0..0000000000 --- a/.github/workflows/scripts/publish_client_pypi.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -euv - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")/../../.." - -VERSION="$1" - -if [[ -z "$VERSION" ]]; then - echo "No version specified." - exit 1 -fi - -RESPONSE="$(curl --write-out '%{http_code}' --silent --output /dev/null "https://pypi.org/project/galaxy-ng-client/$VERSION/")" - -if [ "$RESPONSE" == "200" ]; -then - echo "galaxy_ng client $VERSION has already been released. Skipping." 
-else - twine upload -u __token__ -p "$PYPI_API_TOKEN" \ - "dist/galaxy_ng_client-$VERSION-py3-none-any.whl" \ - "dist/galaxy_ng-client-$VERSION.tar.gz" -fi diff --git a/.github/workflows/scripts/publish_docs.sh b/.github/workflows/scripts/publish_docs.sh deleted file mode 100755 index 573782b9a6..0000000000 --- a/.github/workflows/scripts/publish_docs.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -euv - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")/../../.." - -mkdir ~/.ssh -touch ~/.ssh/pulp-infra -chmod 600 ~/.ssh/pulp-infra -echo "$PULP_DOCS_KEY" > ~/.ssh/pulp-infra - -echo "docs.pulpproject.org,8.43.85.236 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBGXG+8vjSQvnAkq33i0XWgpSrbco3rRqNZr0SfVeiqFI7RN/VznwXMioDDhc+hQtgVhd6TYBOrV07IMcKj+FAzg=" >> ~/.ssh/known_hosts -chmod 644 ~/.ssh/known_hosts - -export PYTHONUNBUFFERED=1 -export DJANGO_SETTINGS_MODULE=pulpcore.app.settings -export PULP_SETTINGS=$PWD/.ci/ansible/settings/settings.py -export WORKSPACE=$PWD - -# start the ssh agent -eval "$(ssh-agent -s)" -ssh-add ~/.ssh/pulp-infra - -python3 .github/workflows/scripts/docs-publisher.py --build-type "$1" --branch "$2" - -if [[ "$GITHUB_WORKFLOW" == "Galaxy changelog update" ]]; then - # Do not build bindings docs on changelog update - exit -fi - -mkdir -p ../galaxy-bindings -tar -xvf galaxy-python-client-docs.tar --directory ../galaxy-bindings -pushd ../galaxy-bindings - -# publish to docs.pulpproject.org/galaxy_ng_client -rsync -avzh site/ doc_builder_galaxy_ng@docs.pulpproject.org:/var/www/docs.pulpproject.org/galaxy_ng_client/ - -# publish to docs.pulpproject.org/galaxy_ng_client/en/{release} -rsync -avzh site/ doc_builder_galaxy_ng@docs.pulpproject.org:/var/www/docs.pulpproject.org/galaxy_ng_client/en/"$2" -popd diff --git a/.github/workflows/scripts/publish_plugin_pypi.sh b/.github/workflows/scripts/publish_plugin_pypi.sh deleted file mode 100755 index 46ecabd8a1..0000000000 --- a/.github/workflows/scripts/publish_plugin_pypi.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -euv - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. - -VERSION="$1" - -if [[ -z "$VERSION" ]]; then - echo "No version specified." - exit 1 -fi - -RESPONSE="$(curl --write-out '%{http_code}' --silent --output /dev/null "https://pypi.org/project/galaxy-ng/$VERSION/")" - -if [ "$RESPONSE" == "200" ]; -then - echo "galaxy_ng $VERSION has already been released. Skipping." - exit -fi - -twine upload -u __token__ -p "$PYPI_API_TOKEN" \ -"dist/galaxy_ng-$VERSION-py3-none-any.whl" \ -"dist/galaxy-ng-$VERSION.tar.gz" \ -; diff --git a/.github/workflows/scripts/push_branch_and_tag_to_github.sh b/.github/workflows/scripts/push_branch_and_tag_to_github.sh deleted file mode 100755 index 24494c5b43..0000000000 --- a/.github/workflows/scripts/push_branch_and_tag_to_github.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. 
Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -eu - -BRANCH_NAME="$(echo "$GITHUB_REF" | sed -rn 's/refs\/heads\/(.*)/\1/p')" - -remote_repo="https://ansible:${RELEASE_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" - -git push "${remote_repo}" "$BRANCH_NAME" "$1" diff --git a/.github/workflows/scripts/script.sh b/.github/workflows/scripts/script.sh deleted file mode 100755 index 55dce7c4ee..0000000000 --- a/.github/workflows/scripts/script.sh +++ /dev/null @@ -1,159 +0,0 @@ -#!/usr/bin/env bash -# coding=utf-8 - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -set -mveuo pipefail - -# make sure this script runs at the repo root -cd "$(dirname "$(realpath -e "$0")")"/../../.. - -source .github/workflows/scripts/utils.sh - -export POST_SCRIPT=$PWD/.github/workflows/scripts/post_script.sh -export POST_DOCS_TEST=$PWD/.github/workflows/scripts/post_docs_test.sh -export FUNC_TEST_SCRIPT=$PWD/.github/workflows/scripts/func_test_script.sh - -# Needed for both starting the service and building the docs. -# Gets set in .github/settings.yml, but doesn't seem to inherited by -# this script. -export DJANGO_SETTINGS_MODULE=pulpcore.app.settings -export PULP_SETTINGS=$PWD/.ci/ansible/settings/settings.py - -export PULP_URL="https://pulp" - -if [[ "$TEST" = "docs" ]]; then - if [[ "$GITHUB_WORKFLOW" == "Galaxy CI" ]]; then - towncrier build --yes --version 4.0.0.ci - fi - # Legacy Docs Build - cd docs - make PULP_URL="$PULP_URL" diagrams html - tar -cvf docs.tar ./_build - cd .. - - if [ -f "$POST_DOCS_TEST" ]; then - source "$POST_DOCS_TEST" - fi - exit -fi - -REPORTED_STATUS="$(pulp status)" - -echo "machine pulp -login admin -password password -" | cmd_user_stdin_prefix bash -c "cat >> ~pulp/.netrc" -# Some commands like ansible-galaxy specifically require 600 -cmd_prefix bash -c "chmod 600 ~pulp/.netrc" - -# Generate and install binding -pushd ../pulp-openapi-generator -if pulp debug has-plugin --name "core" --specifier ">=3.44.0.dev" -then - # Use app_label to generate api.json and package to produce the proper package name. - - if [ "$(jq -r '.domain_enabled' <<<"$REPORTED_STATUS")" = "true" ] - then - # Workaround: Domains are not supported by the published bindings. - # Generate new bindings for all packages. - for item in $(jq -r '.versions[] | tojson' <<<"$REPORTED_STATUS") - do - echo $item - COMPONENT="$(jq -r '.component' <<<"$item")" - VERSION="$(jq -r '.version' <<<"$item")" - MODULE="$(jq -r '.module' <<<"$item")" - PACKAGE="${MODULE%%.*}" - curl --fail-with-body -k -o api.json "${PULP_URL}${PULP_API_ROOT}api/v3/docs/api.json?bindings&component=$COMPONENT" - USE_LOCAL_API_JSON=1 ./generate.sh "${PACKAGE}" python "${VERSION}" - cmd_prefix pip3 install "/root/pulp-openapi-generator/${PACKAGE}-client" - sudo rm -rf "./${PACKAGE}-client" - done - else - # Sadly: Different pulpcore-versions aren't either... 
- for item in $(jq -r '.versions[]| select(.component!="galaxy")| tojson' <<<"$REPORTED_STATUS") - do - echo $item - COMPONENT="$(jq -r '.component' <<<"$item")" - VERSION="$(jq -r '.version' <<<"$item")" - MODULE="$(jq -r '.module' <<<"$item")" - PACKAGE="${MODULE%%.*}" - curl --fail-with-body -k -o api.json "${PULP_URL}${PULP_API_ROOT}api/v3/docs/api.json?bindings&component=$COMPONENT" - USE_LOCAL_API_JSON=1 ./generate.sh "${PACKAGE}" python "${VERSION}" - cmd_prefix pip3 install "/root/pulp-openapi-generator/${PACKAGE}-client" - sudo rm -rf "./${PACKAGE}-client" - done - fi -else - # Infer the client name from the package name by replacing "-" with "_". - # Use the component to infer the package name on older versions of pulpcore. - - if [ "$(echo "$REPORTED_STATUS" | jq -r '.domain_enabled')" = "true" ] - then - # Workaround: Domains are not supported by the published bindings. - # Generate new bindings for all packages. - for item in $(echo "$REPORTED_STATUS" | jq -r '.versions[]|(.package // ("pulp_" + .component)|sub("pulp_core"; "pulpcore"))|sub("-"; "_")') - do - ./generate.sh "${item}" python - cmd_prefix pip3 install "/root/pulp-openapi-generator/${item}-client" - sudo rm -rf "./${item}-client" - done - else - # Sadly: Different pulpcore-versions aren't either... - for item in $(echo "$REPORTED_STATUS" | jq -r '.versions[]|select(.component!="galaxy")|(.package // ("pulp_" + .component)|sub("pulp_core"; "pulpcore"))|sub("-"; "_")') - do - ./generate.sh "${item}" python - cmd_prefix pip3 install "/root/pulp-openapi-generator/${item}-client" - sudo rm -rf "./${item}-client" - done - fi -fi -popd - -# At this point, this is a safeguard only, so let's not make too much fuzz about the old status format. -echo "$REPORTED_STATUS" | jq -r '.versions[]|select(.package)|(.package|sub("_"; "-")) + "-client==" + .version' > bindings_requirements.txt -cmd_stdin_prefix bash -c "cat > /tmp/unittest_requirements.txt" < unittest_requirements.txt -cmd_stdin_prefix bash -c "cat > /tmp/functest_requirements.txt" < functest_requirements.txt -cmd_stdin_prefix bash -c "cat > /tmp/bindings_requirements.txt" < bindings_requirements.txt -cmd_prefix pip3 install -r /tmp/unittest_requirements.txt -r /tmp/functest_requirements.txt -r /tmp/bindings_requirements.txt - -CERTIFI=$(cmd_prefix python3 -c 'import certifi; print(certifi.where())') -cmd_prefix bash -c "cat /etc/pulp/certs/pulp_webserver.crt >> '$CERTIFI'" - -# check for any uncommitted migrations -echo "Checking for uncommitted migrations..." -cmd_user_prefix bash -c "django-admin makemigrations galaxy --check --dry-run" - -# Run unit tests. 
-cmd_user_prefix bash -c "PULP_DATABASES__default__USER=postgres pytest -v -r sx --color=yes --suppress-no-test-exit-code -p no:pulpcore --pyargs galaxy_ng.tests.unit" -# Run functional tests -if [[ "$TEST" == "performance" ]]; then - if [[ -z ${PERFORMANCE_TEST+x} ]]; then - cmd_user_prefix bash -c "pytest -vv -r sx --color=yes --suppress-no-test-exit-code --capture=no --durations=0 --pyargs galaxy_ng.tests.performance" - else - cmd_user_prefix bash -c "pytest -vv -r sx --color=yes --suppress-no-test-exit-code --capture=no --durations=0 --pyargs galaxy_ng.tests.performance.test_${PERFORMANCE_TEST}" - fi - exit -fi - -if [ -f "$FUNC_TEST_SCRIPT" ]; then - source "$FUNC_TEST_SCRIPT" -else - if [[ "$GITHUB_WORKFLOW" =~ "Nightly" ]] - then - cmd_user_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs galaxy_ng.tests.functional -m parallel -n 8 --nightly" - cmd_user_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs galaxy_ng.tests.functional -m 'not parallel' --nightly" - else - cmd_user_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs galaxy_ng.tests.functional -m parallel -n 8" - cmd_user_prefix bash -c "pytest -v -r sx --color=yes --suppress-no-test-exit-code --pyargs galaxy_ng.tests.functional -m 'not parallel'" - fi -fi - -if [ -f "$POST_SCRIPT" ]; then - source "$POST_SCRIPT" -fi diff --git a/.github/workflows/scripts/stage-changelog-for-default-branch.py b/.github/workflows/scripts/stage-changelog-for-default-branch.py deleted file mode 100755 index d7e2e81d6d..0000000000 --- a/.github/workflows/scripts/stage-changelog-for-default-branch.py +++ /dev/null @@ -1,70 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -import argparse -import os -import textwrap - -from git import Repo -from git.exc import GitCommandError - - -helper = textwrap.dedent( - """\ - Stage the changelog for a release on master branch. 
- - Example: - $ python .github/workflows/scripts/stage-changelog-for-default-branch.py 3.4.0 - - """ -) - -parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=helper) - -parser.add_argument( - "release_version", - type=str, - help="The version string for the release.", -) - -args = parser.parse_args() - -release_version_arg = args.release_version - -release_path = os.path.dirname(os.path.abspath(__file__)) -plugin_path = release_path.split("/.github")[0] - -if not release_version_arg.endswith(".0"): - os._exit(os.system("python .ci/scripts/changelog.py")) - -print(f"\n\nRepo path: {plugin_path}") -repo = Repo(plugin_path) - -changelog_commit = None -# Look for a commit with the requested release version -for commit in repo.iter_commits(): - if f"{release_version_arg} changelog" == commit.message.split("\n")[0]: - changelog_commit = commit - break - if f"Add changelog for {release_version_arg}" == commit.message.split("\n")[0]: - changelog_commit = commit - break - -if not changelog_commit: - raise RuntimeError("Changelog commit for {release_version_arg} was not found.") - -git = repo.git -git.stash() -git.checkout("origin/master") -try: - git.cherry_pick(changelog_commit.hexsha) -except GitCommandError: - git.add("CHANGES/") - # Don't try opening an editor for the commit message - with git.custom_environment(GIT_EDITOR="true"): - git.cherry_pick("--continue") -git.reset("origin/master") diff --git a/.github/workflows/scripts/update_backport_labels.py b/.github/workflows/scripts/update_backport_labels.py deleted file mode 100755 index e8a2b860c2..0000000000 --- a/.github/workflows/scripts/update_backport_labels.py +++ /dev/null @@ -1,59 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - -import requests -import yaml -import random -import os - - -def random_color(): - """Generates a random 24-bit number in hex""" - color = random.randrange(0, 2**24) - return format(color, "06x") - - -session = requests.Session() -token = os.getenv("GITHUB_TOKEN") - -headers = { - "Authorization": f"token {token}", - "Accept": "application/vnd.github+json", - "X-GitHub-Api-Version": "2022-11-28", -} -session.headers.update(headers) - -# get all labels from the repository's current state -response = session.get("https://api.github.com/repos/pulp/galaxy_ng/labels", headers=headers) -assert response.status_code == 200 -old_labels = set([x["name"] for x in response.json() if x["name"].startswith("backport-")]) - -# get list of branches from template_config.yml -with open("./template_config.yml", "r") as f: - plugin_template = yaml.safe_load(f) -branches = set(plugin_template["supported_release_branches"]) -latest_release_branch = plugin_template["latest_release_branch"] -if latest_release_branch is not None: - branches.add(latest_release_branch) -new_labels = {"backport-" + x for x in branches} - -# delete old labels that are not in new labels -for label in old_labels.difference(new_labels): - response = session.delete( - f"https://api.github.com/repos/pulp/galaxy_ng/labels/{label}", headers=headers - ) - assert response.status_code == 204 - -# create new labels that are not in old labels -for label in new_labels.difference(old_labels): - color = random_color() - response = session.post( - "https://api.github.com/repos/pulp/galaxy_ng/labels", - headers=headers, - json={"name": label, "color": color}, - ) - assert response.status_code == 201 diff --git a/.github/workflows/scripts/utils.sh b/.github/workflows/scripts/utils.sh deleted file mode 100755 index 1eaf72e9be..0000000000 --- a/.github/workflows/scripts/utils.sh +++ /dev/null @@ -1,30 +0,0 @@ -# This file is meant to be sourced by ci-scripts - -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - -PULP_CI_CONTAINER=pulp - -# Run a command -cmd_prefix() { - docker exec "$PULP_CI_CONTAINER" "$@" -} - -# Run a command as the limited pulp user -cmd_user_prefix() { - docker exec -u pulp "$PULP_CI_CONTAINER" "$@" -} - -# Run a command, and pass STDIN -cmd_stdin_prefix() { - docker exec -i "$PULP_CI_CONTAINER" "$@" -} - -# Run a command as the lmited pulp user, and pass STDIN -cmd_user_stdin_prefix() { - docker exec -i -u pulp "$PULP_CI_CONTAINER" "$@" -} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index d556e83a84..0000000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,139 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - ---- -name: "Test" -on: - workflow_call: - -defaults: - run: - working-directory: "galaxy_ng" - -jobs: - test: - runs-on: "ubuntu-latest" - strategy: - fail-fast: false - matrix: - env: - - TEST: pulp - - TEST: azure - - TEST: s3 - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - path: "galaxy_ng" - - - uses: "actions/checkout@v4" - with: - fetch-depth: 1 - repository: "pulp/pulp-openapi-generator" - path: "pulp-openapi-generator" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - uses: "actions/download-artifact@v4" - with: - name: "plugin_package" - path: "galaxy_ng/dist/" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install towncrier twine wheel httpie docker netaddr boto3 ansible mkdocs - echo "HTTPIE_CONFIG_DIR=$GITHUB_WORKSPACE/galaxy_ng/.ci/assets/httpie/" >> $GITHUB_ENV - echo ::endgroup:: - - - name: "Set environment variables" - run: | - echo "TEST=${{ matrix.env.TEST }}" >> $GITHUB_ENV - - - name: "Before Install" - run: | - .github/workflows/scripts/before_install.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - - - name: "Install" - run: | - .github/workflows/scripts/install.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - - - name: "Before Script" - run: | - .github/workflows/scripts/before_script.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - REDIS_DISABLED: "${{ contains('', matrix.env.TEST) }}" - - - name: "Install Python client" - run: | - .github/workflows/scripts/install_python_client.sh - shell: "bash" - - - name: "Script" - run: | - .github/workflows/scripts/script.sh - shell: "bash" - env: - PY_COLORS: "1" - ANSIBLE_FORCE_COLOR: "1" - GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" - GITHUB_CONTEXT: "${{ github.event.pull_request.commits_url }}" - GITHUB_USER: "${{ github.event.pull_request.user.login }}" - - name: Upload python client packages - if: ${{ env.TEST == 'pulp' }} - uses: actions/upload-artifact@v4 - with: - name: "python-client.tar" - path: "galaxy_ng/galaxy-python-client.tar" - if-no-files-found: "error" - retention-days: 5 - overwrite: true - - - name: Upload python client docs - if: ${{ env.TEST == 'pulp' }} - uses: actions/upload-artifact@v4 - with: - name: "python-client-docs.tar" - path: "galaxy_ng/galaxy-python-client-docs.tar" - if-no-files-found: "error" - retention-days: 5 - overwrite: true - - - name: "Logs" - if: always() - run: | - echo "Need to debug? 
Please check: https://github.com/marketplace/actions/debugging-with-tmate" - http --timeout 30 --check-status --pretty format --print hb "https://pulp${PULP_API_ROOT}api/v3/status/" || true - docker images || true - docker ps -a || true - docker logs pulp || true - docker exec pulp ls -latr /etc/yum.repos.d/ || true - docker exec pulp cat /etc/yum.repos.d/* || true - docker exec pulp bash -c "pip3 list && pip3 install pipdeptree && pipdeptree" diff --git a/.github/workflows/update-labels.yml b/.github/workflows/update-labels.yml deleted file mode 100644 index 5c37327279..0000000000 --- a/.github/workflows/update-labels.yml +++ /dev/null @@ -1,39 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - - ---- -name: "Galaxy Update Labels" -on: - push: - branches: - - "main" - paths: - - "template_config.yml" - -jobs: - update_backport_labels: - runs-on: "ubuntu-latest" - steps: - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - name: "Configure Git with ansible name and email" - run: | - git config --global user.name 'ansible' - git config --global user.email 'ansible-infra@redhat.com' - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install requests pyyaml - echo ::endgroup:: - - uses: "actions/checkout@v4" - - name: "Update labels" - run: | - python3 .github/workflows/scripts/update_backport_labels.py - env: - GITHUB_TOKEN: "${{ secrets.RELEASE_TOKEN }}" diff --git a/.github/workflows/update_ci.yml b/.github/workflows/update_ci.yml deleted file mode 100644 index e132acf7db..0000000000 --- a/.github/workflows/update_ci.yml +++ /dev/null @@ -1,68 +0,0 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it. Please use -# './plugin-template --github galaxy_ng' to update this file. 
-# -# For more info visit https://github.com/pulp/plugin_template - - ---- -name: "Galaxy CI Update" -on: - workflow_dispatch: - -jobs: - update: - runs-on: "ubuntu-latest" - - strategy: - fail-fast: false - - steps: - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - repository: "pulp/plugin_template" - path: "plugin_template" - - - uses: "actions/setup-python@v5" - with: - python-version: "3.11" - - - name: "Install python dependencies" - run: | - echo ::group::PYDEPS - pip install gitpython requests packaging jinja2 pyyaml - echo ::endgroup:: - - - name: "Configure Git with ansible name and email" - run: | - git config --global user.name 'ansible' - git config --global user.email 'ansible-infra@redhat.com' - - uses: "actions/checkout@v4" - with: - fetch-depth: 0 - path: "galaxy_ng" - ref: "master" - - - name: "Run update" - working-directory: "galaxy_ng" - run: | - ../plugin_template/scripts/update_ci.sh - - - name: "Create Pull Request for CI files" - uses: "peter-evans/create-pull-request@v6" - with: - token: "${{ secrets.RELEASE_TOKEN }}" - path: "galaxy_ng" - committer: "ansible " - author: "ansible " - title: "Update CI files for branch master" - body: "" - branch: "update-ci/master" - base: "master" - commit-message: | - Update CI files - - No-Issue - delete-branch: true diff --git a/.gitleaks.toml b/.gitleaks.toml index afaa25da74..5235b3edf7 100644 --- a/.gitleaks.toml +++ b/.gitleaks.toml @@ -1,7 +1,6 @@ [allowlist] description = "Our test install exports a test only MINIO ACCESS KEY" paths = [ - ".github/workflows/scripts/install.sh", "openshift/clowder/clowd-app.yaml", "dev/Dockerfile.base", "dev/standalone-ldap/galaxy_ng.env", diff --git a/CHANGES/.TEMPLATE.md b/CHANGES/.TEMPLATE.md deleted file mode 100644 index 2308193b90..0000000000 --- a/CHANGES/.TEMPLATE.md +++ /dev/null @@ -1,39 +0,0 @@ -{# TOWNCRIER TEMPLATE #} -{% for section, _ in sections.items() %} -{%- set section_slug = "-" + section|replace(" ", "-")|replace("_", "-")|lower %} -{%- if section %} - -### {{section}} {: #{{versiondata.version}}{{section_slug}} } -{% else %} -{%- set section_slug = "" %} -{% endif %} -{% if sections[section] %} -{% for category, val in definitions.items() if category in sections[section]%} - -#### {{ definitions[category]['name'] }} {: #{{versiondata.version}}{{section_slug}}-{{category}} } - -{% if definitions[category]['showcontent'] %} -{% for text, values in sections[section][category].items() %} -- {{ text }} -{% if values %} - {{ values|join(',\n ') }} -{% endif %} -{% endfor %} -{% else %} -- {{ sections[section][category]['']|join(', ') }} -{% endif %} -{% if sections[section][category]|length == 0 %} - -No significant changes. -{% else %} -{% endif %} -{% endfor %} -{% else %} - -No significant changes. 
-{% endif %} -{% endfor %} - ---- - - diff --git a/CHANGES/.gitignore b/CHANGES/.gitignore deleted file mode 100644 index f935021a8f..0000000000 --- a/CHANGES/.gitignore +++ /dev/null @@ -1 +0,0 @@ -!.gitignore diff --git a/Makefile b/Makefile index 17895a704b..037aac642b 100644 --- a/Makefile +++ b/Makefile @@ -1,25 +1,8 @@ .SILENT: -DOCKER_IMAGE_NAME = localhost/galaxy_ng/galaxy_ng -RUNNING = $(shell docker ps -q -f name=api) - -# if running is empty, then DJ_MANAGER = manage, else DJ_MANAGER = django-admin -DJ_MANAGER = $(shell if [ "$(RUNNING)" = "" ]; then echo manage; else echo django-admin; fi) # set the OCI_ENV_PATH to be ../oci_env/ if this isn't set in the user's environment export OCI_ENV_PATH = $(shell if [ -n "$$OCI_ENV_PATH" ]; then echo "$$OCI_ENV_PATH"; else echo ${PWD}/../oci_env/; fi) -define exec_or_run - # Tries to run on existing container if it exists, otherwise starts a new one. - @echo $(1)$(2)$(3)$(4)$(5)$(6) - @if [ "$(RUNNING)" != "" ]; then \ - echo "Running on existing container $(RUNNING)" 1>&2; \ - ./compose exec $(1) $(2) $(3) $(4) $(5) $(6); \ - else \ - echo "Starting new container" 1>&2; \ - ./compose run --use-aliases --service-ports --rm $(1) $(2) $(3) $(4) $(5) $(6); \ - fi -endef - .DEFAULT: .PHONY: help @@ -50,21 +33,8 @@ requirements/pip-upgrade-all: ## Update based on setup.py and *.in files, an pip-compile -o requirements/requirements.insights.txt setup.py requirements/requirements.insights.in --upgrade pip-compile -o requirements/requirements.standalone.txt setup.py requirements/requirements.standalone.in --upgrade -.PHONY: pulp/plugin-template-check -pulp/plugin-template-check: - ./dev/common/check_pulp_template.sh - -.PHONY: pulp/run-plugin-template-script -pulp/run-plugin-template-script: - echo "Running plugin_template script in sibling directory" - cd ../plugin_template/ && ./plugin-template --github galaxy_ng - # Repository management -.PHONY: changelog -changelog: ## Build the changelog - towncrier build - .PHONY: lint lint: ## Lint the code check-manifest @@ -77,30 +47,6 @@ fmt: ## Format the code using Darker # Container environment management -.PHONY: docker/prune -docker/prune: ## Clean all development images and volumes - @docker system prune --all --volumes - -.PHONY: docker/build -docker/build: ## Build all development images. - ./compose build - -.PHONY: docker/test/unit -docker/test/unit: ## Run unit tests with option TEST param otherwise run all, ex: TEST=.api.test_api_ui_sync_config - $(call exec_or_run, api, $(DJ_MANAGER), test, galaxy_ng.tests.unit$(TEST)) - -.PHONY: docker/test/integration -docker/test/integration: ## Run integration tests with optional MARK param otherwise run all, ex: MARK=galaxyapi_smoke - if [ "$(shell docker exec -it galaxy_ng_api_1 env | grep PULP_GALAXY_REQUIRE_CONTENT_APPROVAL)" != "PULP_GALAXY_REQUIRE_CONTENT_APPROVAL=true" ]; then\ - echo "The integration tests will not run correctly unless you set PULP_GALAXY_REQUIRE_CONTENT_APPROVAL=true";\ - exit 1;\ - fi - if [ "$(MARK)" ]; then\ - HUB_LOCAL=1 ./dev/standalone/RUN_INTEGRATION.sh "-m $(MARK)";\ - else\ - HUB_LOCAL=1 ./dev/standalone/RUN_INTEGRATION.sh;\ - fi - .PHONY: docker/test/integration/container docker/test/integration/container: ## Run integration tests. docker build . 
-f dev/standalone/integration-test-dockerfile -t galaxy-integration-runner @@ -154,29 +100,6 @@ gh-action/dab_jwt: gh-action/certified-sync: python3 dev/oci_env_integration/actions/certified-sync.py -.PHONY: docker/loaddata -docker/loaddata: ## Load initial data from python script - $(call exec_or_run, api, "/bin/bash", "-c", "/entrypoint.sh manage shell < app/dev/common/setup_test_data.py") - -.PHONY: docker/makemigrations -docker/makemigrations: ## Run django migrations - $(call exec_or_run, api, $(DJ_MANAGER), makemigrations) - -.PHONY: docker/migrate -docker/migrate: ## Run django migrations - $(call exec_or_run, api, $(DJ_MANAGER), migrate) - -.PHONY: docker/add-signing-service -docker/add-signing-service: ## Add a Signing service using default GPG key - $(call exec_or_run, worker, $(DJ_MANAGER), add-signing-service, ansible-default, /var/lib/pulp/scripts/collection_sign.sh, galaxy3@ansible.com) - -.PHONY: docker/resetdb -docker/resetdb: ## Cleans database - # Databases must be stopped to be able to reset them. - ./compose down - ./compose stop - ./compose run --rm api /bin/bash -c "yes yes | ./entrypoint.sh manage reset_db && django-admin migrate" - .PHONY: docker/db_snapshot NAME ?= galaxy docker/db_snapshot: ## Snapshot database with optional NAME param. Example: make docker/db_snapshot NAME=my_special_backup @@ -190,69 +113,13 @@ docker/db_restore: ## Restore database from a snapshot with optional NAME para docker cp db_snapshots/$(NAME).backup galaxy_ng_postgres_1:/galaxy.backup docker exec galaxy_ng_postgres_1 pg_restore --clean -U galaxy_ng -d galaxy_ng "/galaxy.backup" -.PHONY: docker/translations -docker/translations: ## Generate the translation messages - $(call exec_or_run, api, "/bin/bash", "-c", "cd /app/galaxy_ng && /entrypoint.sh manage makemessages --all") - -.PHONY: docker/all -docker/all: ## Build, migrate, loaddata, translate and add test collections. - make docker/build - make docker/migrate - make docker/loaddata - make docker/translations - # Application management and debugging -# e.g: make api/get URL=/content/community/v3/collections/ -.PHONY: api/get -api/get: ## Make an api get request using 'httpie' - # Makes 2 requests: One to get the token and another to request given URL - http --version && (http :8002/api/automation-hub/$(URL) "Authorization: Token $$(http --session DEV_SESSION --auth admin:admin -v POST 'http://localhost:5001/api/automation-hub/v3/auth/token/' username=admin password=admin -b | jq -r '.token')" || echo "http error, check if api is running.") || echo "!!! this command requires httpie - please run 'pip install httpie'" - -.PHONY: api/shell -api/shell: ## Opens django management shell in api container - $(call exec_or_run, api, $(DJ_MANAGER), shell_plus) - -.PHONY: api/bash -api/bash: ## Opens bash session in the api container - $(call exec_or_run, api, /bin/bash) - -.PHONY: api/runserver -api/runserver: ## Runs api using django webserver for debugging - # Stop all running containers if any - ./compose stop - # Start only services if containers exists, else create the containers and start. 
- ./compose start worker content-app ui || ./compose up -d worker content-app ui - # ensure API is not running - ./compose stop api - # Run api using django runserver for debugging - ./compose run --service-ports --use-aliases --name api --rm api manage runserver 0.0.0.0:8000 - -.PHONY: api/routes -api/routes: ## Prints all available routes - $(call exec_or_run, api, $(DJ_MANAGER), show_urls) - -.EXPORT_ALL_VARIABLES: -.ONESHELL: -.PHONY: api/create-test-collections -api/create-test-collections: ## Creates a set of test collections - @read -p "How many namespaces to create? : " NS; \ - read -p "Number of collections on each namespace? : " COLS; \ - read -p "Add a prefix? : " PREFIX; \ - ARGS="--prefix=$${PREFIX:-dev} --strategy=$${STRATEGY:-faux} --ns=$${NS:-6} --cols=$${COLS:-6}"; \ - echo "Creating test collections with args: $${ARGS}"; \ - export ARGS; \ - ./compose exec api django-admin create-test-collections $${ARGS} - .PHONY: api/push-test-images api/push-test-images: ## Pushes a set of test container images docker login -u admin -p admin localhost:5001 || echo "!!! docker login failed, check if docker is running" for foo in postgres treafik mongo mariadb redis node mysql busybox alpine docker python hhtpd nginx memcached golang; do docker pull $$foo; docker image tag $$foo localhost:5001/$$foo:latest; docker push localhost:5001/$$foo:latest; done -.PHONY: api/list-permissions -api/list-permissions: ## List all permissions - CONTAINS=str - $(call exec_or_run, api, $(DJ_MANAGER), shell -c 'from django.contrib.auth.models import Permission;from pprint import pprint;pprint([f"{perm.content_type.app_label}:{perm.codename}" for perm in Permission.objects.filter(name__icontains="$(CONTAINS)")])') - # Version / bumpversion management # 'bumpversion path' to go from 4.1.0 -> 4.1.1 diff --git a/README.md b/README.md index ffbdf4657b..56ccc55454 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,6 @@ This is a brand new take on Ansible Galaxy, so it will look and feel a bit diffe Our mission is to help organizations share Ansible automation and promote a culture of collaboration around Ansible automation development. We'll be providing features that make it easy to create, discover, use and distribute Ansible automation content. -To see what we're currently working on, [view the Roadmap](/ROADMAP.rst). - To learn more about Pulp, [view the Pulp project page](https://pulpproject.org/). ## Documentation diff --git a/ROADMAP.rst b/ROADMAP.rst deleted file mode 100644 index 10d013178d..0000000000 --- a/ROADMAP.rst +++ /dev/null @@ -1,6 +0,0 @@ -Roadmap -======= - -**NOTE**: Future dates are estimated and subject to change. - -Our roadmap is in our issue tracker as features tied to minor releases, for example see our `roadmap for 4.4.0 `_ diff --git a/compose b/compose deleted file mode 100755 index 665bedfc04..0000000000 --- a/compose +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/bash - -set -o nounset -set -o errexit - -# the internal mechanisms for installing and testing do not -# work if the checkout path is not named "galaxy_ng", so -# this script should -always- abort if that is not the case. 
-CWD=$(basename $(pwd)) -if [[ "${CWD}" != "galaxy_ng" ]]; then - cat >&2 <&2 <&2 -echo "INFO: ${DEV_SOURCE_PATH:-No} packages installed from source" >&2 -echo "INFO: Image suffix ${DEV_IMAGE_SUFFIX:-is unset}" >&2 -echo "INFO: Volume suffix ${DEV_VOLUME_SUFFIX:-${DEV_IMAGE_SUFFIX:-is unset}}" >&2 - -compose_args=( - -f 'dev/docker-compose.yml' - -f "dev/${COMPOSE_PROFILE}/docker-compose.yml" -) - -if [[ -z "${ANSIBLE_HUB_UI_PATH:-}" ]]; then - cat >&2 << EOF -INFO: \$ANSIBLE_HUB_UI_PATH is unset. - If you want to run the UI inside compose please set \$ANSIBLE_HUB_UI_PATH - to the location of your local copy of https://github.com/ansible/ansible-hub-ui. -EOF -else - compose_args+=( - -f 'dev/common/docker-compose-ui.yaml' - -f "dev/${COMPOSE_PROFILE}/docker-compose-ui.yaml" - ) -fi - -declare -xr DEV_SOURCE_PATH=${DEV_SOURCE_PATH:-galaxy_ng} -declare -xr COMPOSE_CONTEXT=".." -declare -xr LOCK_REQUIREMENTS="${LOCK_REQUIREMENTS:-1}" -declare -xr COMPOSE_PROFILE="${COMPOSE_PROFILE}" -declare -xr ENABLE_SIGNING="${ENABLE_SIGNING:-1}" -declare -xr DEV_IMAGE_SUFFIX="${DEV_IMAGE_SUFFIX:-}" -declare -xr DEV_VOLUME_SUFFIX="${DEV_VOLUME_SUFFIX:-${DEV_IMAGE_SUFFIX}}" -declare -xr COMPOSE_PROJECT_NAME="${COMPOSE_PROJECT_NAME:-galaxy_ng${DEV_IMAGE_SUFFIX:-}}" -declare -xr WITH_DEV_INSTALL="${WITH_DEV_INSTALL:-1}" - -#if [ -v SOCIAL_AUTH_GITHUB_KEY ]; then -# declare -xr SOCIAL_AUTH_GITHUB_KEY="${SOCIAL_AUTH_GITHUB_KEY:null}" -#fi -#if [ -v SOCIAL_AUTH_GITHUB_SECRET ]; then -# declare -xr SOCIAL_AUTH_GITHUB_SECRET="${SOCIAL_AUTH_GITHUB_SECRET:null}" -#fi - -exec docker-compose "${compose_args[@]}" "$@" diff --git a/dev/common/check_pulp_template.sh b/dev/common/check_pulp_template.sh deleted file mode 100755 index 1ddfc2271b..0000000000 --- a/dev/common/check_pulp_template.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# awcrosby 2022-02-25 11:15AM -# everytime we update pulp_ansible or pulpcore we run that plugin template -# https://github.com/ansible/galaxy_ng/wiki/Updating-Dependencies, -# its worth checking if [any modified files] gets overwritten - -set -e - -CHECKOUT=$(pwd) -BASEDIR=$(python -c 'import tempfile; print(tempfile.mkdtemp(prefix="pulp_template_check-"))') -PLUGIN_GIT_REF=$(cat .github/template_gitref | awk -F\- '{print $NF}' | cut -dg -f2) - -rm -rf $BASEDIR; mkdir -p $BASEDIR -cp -Rp $CHECKOUT $BASEDIR/galaxy_ng -git clone https://github.com/pulp/plugin_template $BASEDIR/plugin_template - -cd $BASEDIR/plugin_template -git checkout $PLUGIN_GIT_REF -pip install -r requirements.txt -./plugin-template --github galaxy_ng - -echo "Results ..." -cd $BASEDIR/galaxy_ng -MODIFIED_FILES=$(git status 2>/dev/null | grep 'modified:' | awk '{print $2}') -EXIT_CODE=0 -for MF in $MODIFIED_FILES; do - echo "FAILURE template-plugin would modifiy $BASEDIR/galaxy_ng/$MF" - git diff $MF - EXIT_CODE=1 -done -if [[ $EXIT_CODE == 0 ]]; then - echo 'SUCCESS - all files are clean' -fi -exit $EXIT_CODE diff --git a/dev/common/poll.py b/dev/common/poll.py deleted file mode 100644 index 46e306176c..0000000000 --- a/dev/common/poll.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python - -# Poll the api until it is ready. 
- -import requests -import subprocess -import sys -import time -import yaml - - -def get_dynaconf_variable(varname): - '''Run the dynaconf get subcommand for a specific key''' - cmd = f'dynaconf get {varname}' - cmd = f'./compose exec -T api bash -c "{cmd}"' - pid = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - if pid.returncode != 0: - print(f'ERROR: {pid.stderr.decode("utf-8")}') - return None - stdout = pid.stdout.decode('utf-8') - return stdout.strip() - - -def get_compose_config(): - '''Get a dump of the running|aggregated compose config''' - cmd = './compose config' - pid = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout = pid.stdout.decode('utf-8') - config = yaml.safe_load(stdout) - return config - - -def poll(url=None, attempts=100, wait_time=1): - '''Wait for the API to report status or abnormal exit''' - - if url is None: - - # get the compose config - config = get_compose_config() - - # extract the api service - api = config['services']['api'] - - # get the api's env - env = api['environment'] - - # hostname includes the prefix - hostname = env['PULP_ANSIBLE_API_HOSTNAME'] - - for i in range(attempts): - print(f"Waiting for API to start (attempt {i + 1} of {attempts})") - # re request the api root each time because it's not alwasy available until the - # app boots - - if url is not None: - this_url = url - else: - print(f'\tHOSTNAME: {hostname}') - api_root = get_dynaconf_variable("API_ROOT") - print(f'\tAPI_ROOT: {api_root}') - if api_root is None: - print('\tAPI_ROOT is null') - time.sleep(wait_time) - continue - - this_url = f"{hostname}{api_root}api/v3/status/" - - print(f'\tURL: {this_url}') - try: - rr = requests.get(this_url) - print(f'\tresponse: {rr.status_code}') - if rr.status_code == 200: - print(f"{this_url} online after {(i * wait_time)} seconds") - return - except Exception as e: - print(e) - time.sleep(wait_time) - - raise Exception("polling the api service failed to complete in the allowed timeframe") - - -def main(): - url = None - if len(sys.argv) > 1: - url = sys.argv[1] - poll(url=url) - - -if __name__ == "__main__": - main() diff --git a/docs/dev/docker_environment.md b/docs/dev/docker_environment.md deleted file mode 100644 index bc0564c82e..0000000000 --- a/docs/dev/docker_environment.md +++ /dev/null @@ -1,461 +0,0 @@ -# Development Setup - -## The Project Repository - -1. Create your own fork of the repository -2. Clone it to your projects folder - -!!! Note - It's recommended to put all of your git checkouts in the same directory. This makes it easier to include other projects from source. - - ``` - galaxy/ - ├── ansible-hub-ui/ - ├── galaxy-importer/ - ├── galaxy_ng/ - └── pulp_ansible/ - ``` - -```bash -cd your/preferred/projects/folder -git clone git@github.com:/galaxy_ng.git -``` - - -## Configuring your local code editor - -Set your working directory to Galaxy folder - -```bash -cd galaxy_ng -``` - -You can use your editor of choice and if you want to have the editor (ex: VsCode) to inspect -the code for you might need to create a virtual environment and install the packages. - -```bash -python -m venv .venv -source .venv/bin/activate -python -m pip install -r dev_requirements.txt -python -m pip install -r doc_requirements.txt -python -m pip install -r integration_requirements.txt -python -m pip install -e . -``` - -Now you can for example open `code .` and have VsCode to find the libraries you need. 
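A quick optional sanity check, assuming the `.venv` created above is still active and the editable install completed (plain pip commands, not taken from this patch):

```bash
# Editable (-e) installs are listed together with their source location,
# so galaxy-ng should appear here pointing back at this working copy.
python -m pip list --editable
```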
- -## Running the container based dev environment - - -Our containerized development environment configuration is loaded from -the `.compose.env` script. You'll first need to configure it, and the -easiest way to do that is by copying an example script -`.compose.env.example`: - - -```bash -cp .compose.env.example .compose.env -``` - -All of your local project settings can be set in your `.compose.env` file. - -### Enable the UI (optional) - -If you would like to develop using the UI, simply do the following: - -1. Clone https://github.com/ansible/ansible-hub-ui to the same path where `galaxy_ng` is located. - ```bash - cd your/preferred/projects/folder - git clone https://github.com/ansible/ansible-hub-ui - cd galaxy_ng - ``` - -2. Set `ANSIBLE_HUB_UI_PATH` in your `.compose.env` file to point to - the location of the cloned UI repo. Absolute paths aren't required, - but they're easier to set up. If you want to use a relative path, it - has to be relative to `dev/docker-compose.yml` - - ```bash - ANSIBLE_HUB_UI_PATH='/your/preferred/projects/folder/ansible-hub-ui' - ``` - -3. Complete the rest of the steps in the next section. Once everything - is running the UI can be accessed at http://localhost:8002 - -### Run the Build Steps - -Next, run the following steps to build your local development -environment: - -1. Build the docker image - - ```bash - make docker/build - ``` - -2. Initialize and migrate the database - - ```bash - make docker/migrate - ``` - -3. Load dev data - - ```bash - make docker/loaddata - make docker/loadtoken - ``` - - -!!! tip - You can run everything at once with - ```bash - make docker/build docker/migrate docker/loaddata docker/loadtoken - ``` - - -### Start the services - -In foreground keeping terminal opened for watching outputs -```bash -./compose up -``` - -In Background (you can close the terminal later) -```bash -./compose up -d -``` - -By default, the development environment is configured to run in -*insights* mode, which requires a 3rd party authentication provider. If -you're working outside of the Red Hat cloud platform, you'll want to -switch it to *standalone* mode by modifying your `.compose.env` file, -and setting the `COMPOSE_PROFILE` variable to `standalone`, as shown in -the following example: - -```env -COMPOSE_PROFILE=standalone -``` - -If you want to run in standalone mode while using Keycloak to provide -single sign-on with a -`pre-populated LDAP server `\_ -you'll want to switch it to *standalone-keycloak* mode by modifying your -`.compose.env` file, and setting the `COMPOSE_PROFILE` variable to -`standalone-keycloak`, as shown in the following example: - -```env -COMPOSE_PROFILE=standalone-keycloak -``` - -## Other Development Modes - -### Insights - -"Insights" mode refers to running Galaxy NG as it would be run on console.redhat.com. - -!!! Note - This option is only relevant to Red Hat employees. Community contributors should skip this. - -1. In your `.compose.env` file set `COMPOSE_PROFILE=insights` and comment out `ANSIBLE_HUB_UI_PATH` (if its set). -2. Install node. Node v18+ is known to work. Older versions may work as well. -3. Switch to your `ansible-hub-ui` checkout and run the following - - ```bash - npm install - npm run start - ``` - -The app will run on http://localhost:8002/beta/ansible/automation-hub and proxy requests for `/api/automation-hub` to the api on `http://localhost:5001`. - -### Keycloak - -Keycloak mode launches an LDAP and keycloak server via docker and configures the app to authenticate using keycloak. 
- -To run in keycloak mode set `COMPOSE_PROFILE=standalone-keycloak` in your `.compose.env`. You will need to initialize your Keycloak -instance before running migrations and starting the remaining services. - -1. Start the Keycloak instance and dependencies - - ```bash - ./compose up -d keycloak kc-postgres ldap - ``` - -2. Bootstrap the Keycloak instance with a Realm and Client then capture - the needed public key - - ```bash - ansible-playbook ./dev/standalone-keycloak/keycloak-playbook.yaml - ``` - > **NOTE** Try again if it fails at the first run, services might not be - available yet. - -3. Update your `.compose.env` file with the public key found at the end - of the playbook run - - ```bash - PULP_SOCIAL_AUTH_KEYCLOAK_PUBLIC_KEY="keycloak-public-key" - ``` - -After the standard development set up steps, when you access -http://localhost:8002 it will redirect to a Keycloak Open ID Connect -flow login page, where you can login with one of the development SSO -user's credentials (the password is the username). If you want to login -with non-Keycloak users, you need to use the -`Django admin console `\_. - -If you want to login as a superuser, you can do one of two things: - -1. Login to the - `Django admin console `\_ - with the admin user - -2. Login to the `Keycloak instance `\_ with - admin/admin to edit the LDAP user's roles: Choose a development SSO - user, select Role Mappings \> Client Roles \> automation-hub and add - the `hubadmin` role. A user is associated with the appropriate - group(s) using the user\_group pipeline. - -## Running API tests - -Unit and integration tests can be easily run using docker compose. At the moment, there is no easy way to run pulp functional tests with docker. - -For more information on tests, refer to [writing tests](integration_tests.md). - -### Unit tests - -Run unit all unit tests: - -```bash -make docker/test/unit -``` - -Run a specific test case: - -```bash -make docker/test/unit TEST=.api.test_api_ui_sync_config -``` - -### Integration tests - -Integration tests can be run from the host machine or via a docker container. Before running either, the following steps have to be taken: - -- set `PULP_GALAXY_REQUIRE_CONTENT_APPROVAL=true` in your `.compose.env`. -- run - ``` - make docker/loadtoken - make docker/load_test_data - ``` - -#### Via docker - -!!! note - Tests that require docker or podman won't run inside docker and will be skipped. If you need to write container tests, run the integration tests via the host machine. - -Run all the integration tests: - -```bash -make docker/test/integration/container -``` - -Any set of pytest flags can be passed to this command as well: - -```bash -# run any test who's name matches my_test -make docker/test/integration/container FLAGS="-k my_test" - -# run tests marked as galaxyapi_smoke -make docker/test/integration/container FLAGS="-m galaxyapi_smoke" -``` - -#### Via host machine - -!!! warning - This requires that the `python` executable in your shell be python 3, and may not work on systems synch as Mac OS where `python` refers to `python2`. This also requires that `virtualenv` in installed on your machine. - -Run all the integration tests: - -```bash -make docker/test/integration -``` - -Run integration tests with a specific mark: - -```bash -make docker/test/integration MARK=galaxyapi_smoke -``` - -## Testing data - -??? tip "Push Container Images to local registry" - - !!! 
info - ``` - make api/push-test-images - ``` - will push a bunch of testing images to your running system - - To push images into the container image registry hosted by galaxy\_ng - (via pulp\_container), you need to tag an image first to tell Docker or - Podman that you want to associate the image with the registry. On a - local development setup, the pulp\_container runs along with the Galaxy - API on port 5001. - - Tag an image like this: - - docker image tag localhost:5001/: - - or, to associate with a namespace: - - docker image tag localhost:5001//: - - And then push the image and the engine will upload it to the - now-associated registry: - - docker push localhost:5001/testflat - -??? tip "Creating a set of collections for testing" - - !!! info - ``` - make api/create-test-collections - ``` - Will generate collections and populate the system - - -## Additional Dependencies - -When running docker environment, the project's parent directory is -mounted into container as `/app`. All projects listed in -`DEV_SOURCE_PATH` environment variable are installed in editable mode -(`pip install -e`) in the container. To load additional projects such as -`galaxy-importer` or `pulp_ansible` into the container from host file -system you should clone them into the parent directory relative to your -`galaxy_ng` working copy location. - -For example you want to work on `galaxy-importer` project and run -development environment with your changes made locally. - -1. Clone `galaxy-importer` to parent directory:: - - cd your/preferred/projects/folder - git clone https://github.com/ansible/galaxy-importer - cd galaxy_ng - -2. Add `galaxy-importer` to `DEV_SOURCE_PATH` variable in your - `.compose.env` file:: - - export DEV_SOURCE_PATH='galaxy_ng:galaxy-importer' - -3. Recreate your development environment:: - - ./compose down - make docker/build docker/migrate - ./compose up - - -!!! tip - The step above can be done for other Pulp plugins such as `pulp_ansible` or `pulp_container` - - -## Steps to run dev environment with specific upstream branch - -1. **Clone** locally `galaxy_ng`, `pulpcore` and `pulp_ansible` all the - repos must be located at the same directory level. - - cd ~/projects/ - git clone https://github.com/pulp/pulpcore - git clone https://github.com/pulp/pulp_ansible - git clone https://github.com/ansible/galaxy_ng - # and optionally - git clone https://github.com/ansible/ansible-hub-ui - git clone https://github.com/ansible/galaxy_importer - -2. **Checkout to desired branches.** `pulp_ansible` main is - compatible with a specific range of `pulpcore` versions. So it is - recommended to checkout to a specific branch or tag following the - constraints defined on pulp\_ansible/requirements.txt or leave it - checked out to main if you know it is compatible with the - pulp\_ansible branch you have. Example: - - cd ~/projects/pulpcore - git checkout 3.9.0 - - This is also possible to checkout to specific pull-requests by its - `refs/pull/id`. - -3. Edit the `galaxy_ng/.compose.env` file. - - cd ~/projects/galaxy_ng - cat .compose.env - - COMPOSE_PROFILE=standalone - DEV_SOURCE_PATH='pulpcore:pulp_ansible:galaxy_ng' - LOCK_REQUIREMENTS=0 - - **DEV\_SOURCE\_PATH** refers to the repositories you cloned locally, - the order is important from the highest to the low dependency, - otherwise pip will raise version conflicts. - - So **pulpcore** is a dependency to **pulp\_ansible** which is a - dependency to **galaxy\_ng**, this order must be respected on - **DEV\_SOURCE\_PATH** variable. 
- - **LOCK\_REQUIREMENTS** when set to 0 it tells docker to bypass the - install of pinned requirements and rely only on packages defined on - `setup.py` for each repo. - -4. Run `./compose build` to make those changes effective. - -5. Run desired compose command: `./compose up`, `./compose run` etc... - -## Bumping The Version - -The canonical source of truth for the 'version' is now in setup.cfg in -the `bumpversion` stanza: - -```ini -[bumpversion] -current_version = 4.3.0.dev -``` - -To update version, it is recommended to "bump" the version instead of -explicitly specifying it. - -Use bump2version to increment the 'version' string wherever it is -needed. - -It can 'bump' the 'patch', 'minor', 'major' version components. - -There are also Makefile targets for bumping versions. To do a 'patch' -version bump, for example: - - $ make dev/bumpversion-patch - -The above command will rev the 'patch' level and update all the files -that use it. - -Note: Currently, the bump2version config does not git commit or git tag -the changes. So after bumping the version, you need to commit the -changes and tag manually. - - $ git commit -v -a - $ git tag $NEWVERSION - -bump2version can also do this automatically if we want to enable it. - -## Debugging - - -https://github.com/ansible/galaxy_ng/wiki/Debugging-with-PDB - -## Add galaxy-importer.cfg to dev environment - - -To set your own galaxy-importer.cfg, add something like this to -`/dev/Dockerfile.base`: - - RUN mkdir /etc/galaxy-importer \ - && printf "[galaxy-importer]\n \ - REQUIRE_V1_OR_GREATER = True\n \ - LOG_LEVEL_MAIN = INFO\n" | tee /etc/galaxy-importer/galaxy-importer.cfg diff --git a/docs/dev/getting_started.md b/docs/dev/getting_started.md index 65233f2b67..8752655f59 100644 --- a/docs/dev/getting_started.md +++ b/docs/dev/getting_started.md @@ -2,18 +2,9 @@ ## Setting up the developer environment -### Docker Compose environment +### Docker Compose -This is recommended for beginners. It's less error prone and easier to setup, however it offers less flexibility than vagrant. Use this if you're: - -- primarily developing the UI -- primarily contributing to galaxy_ng and not pulpcore, pulp_ansible or pulp_container -- need to develop a feature for console.redhat.com -- need to test keycloak or LDAP authentication -- are developing using a Mac -- writing documentation - -[Docker environment developer setup guide](docker_environment.md). +> **TO BE DEFINED** ### OCI Env @@ -21,17 +12,6 @@ This is the new preferred way to develop with pulp. It provides a flexible, cont [OCI Env developer setup guide](oci_env.md) -### Vagrant Environment - -This uses the [Pulp developer environment](https://docs.pulpproject.org/pulpcore/contributing/index.html) which is based off of vagrant. It offers a developer environment that is much closer to a production installation of Galaxy NG. It allows users to run on a wide variety of operating systems with any set of pulp plugins installed. Use this if you're: - -- contributing to one of the pulp projects -- need to debug an issue that's presenting itself in production installations -- need to run pulpcore and pulp_ansible without the galaxy_ng plugin installed -- need to debug an issue on an operating system other than RHEL/Centos - -[Vagrant environment developer setup guide](vagrant.md) - ## Issue Tracker Issues for Galaxy NG are tracked in Jira at https://issues.redhat.com/browse/AAH. 
Issues labeled with [quickfix](https://issues.redhat.com/browse/AAH-1202?jql=project%20%3D%20AAH%20AND%20resolution%20%3D%20Unresolved%20AND%20labels%20%3D%20quickfix%20ORDER%20BY%20priority%20DESC%2C%20updated%20DESC) are a good place for beginners to get started. diff --git a/docs/dev/vagrant.md b/docs/dev/vagrant.md deleted file mode 100644 index 13679d4b91..0000000000 --- a/docs/dev/vagrant.md +++ /dev/null @@ -1,442 +0,0 @@ -# Run Galaxy NG using the Pulp Developer Environment - -[Pulp Installer](https://pulp-installer.readthedocs.io/en/latest/pulplift/) is a vagrant configuration based on [forklift](https://github.com/theforeman/forklift). - -## Setup the environment - -### Requirements - -- Python 3+ -- Ansible 2.9+ -- Vagrant 1.8+ -- Vagrant [provider plugin] (https://www.vagrantup.com/docs/providers/installation.html) -- Libvirt or Virtualbox -- Vagrant SSHfs -- Enabled virtualization in BIOS - -### 1. Install Vagrant and its plugins - -#### On a fedora system - -```bash -sudo dnf install ansible vagrant-libvirt vagrant-sshfs @virtualization -``` - -#### On a debian system - -```bash -# virtualbox (requires sid in sources) -sudo apt install ansible vagrant vagrant-sshfs virtualbox/sid - -# libvirt -sudo apt install ansible vagrant vagrant-sshfs vagrant-libvirt dnsmasq libvirt-clients libvirt-daemon libvirt-dbus qemu-system-x86 qemu-utils -sudo usermod -aG libvirt,libvirt-qemu,libvirtdbus $USER -``` - -#### On a Mac - -```bash -brew install ansible -brew cask install virtualbox -brew cask install vagrant -``` - -#### On other host systems -Refer to the package manager and search for equivalent packages. For example, `pacman -S vagrant` - -#### Install vagrant plugins - -Required vagrant plugins: - -```bash -vagrant plugin install vagrant-sshfs -``` - -**Optional** plugins: - -```bash -vagrant plugin install vagrant-libvirt # to connect to libvirt -vagrant plugin install vagrant-hostmanager # to manage local dns -``` - -### 2. Clone the repositories from source - -!!! tip - replace `:pulp/` and `:ansible/` with your own github username if you plan to work on your own forks. - -```bash -# required -git clone git@github.com:pulp/pulp_installer -git clone git@github.com:pulp/pulp_ansible.git -git clone git@github.com:pulp/pulp_container.git -git clone git@github.com:ansible/galaxy_ng.git -git clone git@github.com:pulp/pulpcore.git - -# optional -git clone git@github.com:ansible/galaxy-importer.git -git clone git@github.com:ansible/ansible-hub-ui.git -``` - -Ensure repositories are located on the same folder level - -```bash -$ tree -a -L 1 -. -├── ansible-hub-ui/ -├── galaxy-importer/ -├── galaxy_ng/ -├── pulp_ansible/ -├── pulp_container/ -├── pulpcore/ -└── pulp_installer/ -``` - -In order to avoid version conflicts, each component has to be checked out with a version of the plugin that is compatible with galaxy_ng. These versions can be found in [setup.py](https://github.com/ansible/galaxy_ng/blob/master/setup.py) under the `requirements` list. In setup.py find the following versions: - -- pulpcore -- pulp_ansible -- pulp_container - -```bash -cd pulpcore -git checkout -cd ../pulp_ansible -git checkout -cd ../pulp_container -git checkout -cd .. -``` - -### 3. Set your working directory to the `pulp_installer` directory - -```bash -cd pulp_installer -``` - -### 4. make sure you're running the latest compatible version of pulp_installer. - -```bash -git checkout -``` - -### 5. Initialize submodules - -```bash -git submodule update --init -``` - -### 6. 
Create the installer config file - -In the root of the `pulp_installer` directory create a new file named `local.dev-config.yml` with the following contents. - -!!! Tip - If you don't want to run pulpcore or one of the plugins from source, you can comment out `source_dir` under `pulp_install_plugins` and add `version` or comment out `pulp_source_dir` and add `pulpcore_version`. - -!!! Tip - Documentation for the variables in this config can be found [here](https://docs.pulpproject.org/pulp_installer/roles/pulp_common/). - -```yaml ---- -# Pulp plugins and Python libs -pulp_install_plugins: - pulp-ansible: - source_dir: "/home/vagrant/devel/pulp_ansible" - # version: "" - pulp-container: - source_dir: "/home/vagrant/devel/pulp_container" - # version: "" - galaxy-ng: - source_dir: "/home/vagrant/devel/galaxy_ng" - # Uncomment this to run galaxy-importer from source. Other python libs can be installed like this - # as well. - # galaxy-importer: - # source_dir: "/home/vagrant/devel/galaxy-importer" - -# Pulp configuration - -pulp_source_dir: "/home/vagrant/devel/pulpcore" -pulp_pip_editable: true -# pulpcore_version: "" -pulp_devel_supplement_bashrc: true -pulp_default_admin_password: password -pulp_webserver_disable_https: true -pulp_user: "vagrant" -developer_user: "vagrant" -developer_user_home: "/home/vagrant" -pulp_workers: 4 -pulp_api_workers: 4 -pulp_settings: - secret_key: "unsafe_default" - content_origin: "http://{{ ansible_fqdn }}" - x_pulp_api_host: 127.0.0.1 - x_pulp_api_port: 24817 - x_pulp_api_user: "admin" - x_pulp_api_password: "{{ pulp_default_admin_password }}" - x_pulp_api_prefix: "pulp_ansible/galaxy/automation-hub/api" - galaxy_require_content_approval: False - pulp_token_auth_disabled: True - galaxy_api_default_distribution_base_path: "published" - allowed_export_paths: ["/tmp"] - allowed_import_paths: ["/tmp"] - ansible_api_hostname: "http://{{ ansible_fqdn }}" - -# Galaxy Configuration -# Set this __galaxy variables according to your needs. -# __galaxy_profile: 'insights'or 'standalone' -__galaxy_profile: 'standalone' -# __galaxy_dev_source_path: `:` separated relative paths to the repos you cloned. -__galaxy_dev_source_path: 'pulpcore:pulp_ansible:pulp_container:galaxy_ng:galaxy-importer' -# __galaxy_lock_requirements: Set to 0 to avoid pinning of galaxy_ng/setup.py versions -__galaxy_lock_requirements: '1' - -# options: precompiled, source, none -# __galaxy_ui_source: precompiled -``` - -!!! warning - When provisioning the VM you can see errors such as `Version Conflict Error` and those errors are all related to set the correct version/branch/tag on each repo. - -### 7. Start the vagrant VM - -Use of the the [available boxes](https://github.com/pulp/pulp_installer/blob/main/vagrant/boxes.d/30-source.yaml) or run `vagrant status` to see the list of available boxes. - -Example: - -!!! note - The following commands must run inside `pulp_installer` directory. - -```bash -vagrant up --provider=libvirt VAGRANT_BOX_NAME # recommended -vagrant up --provider=libvirt pulp3-source-centos8 # if you need RHEL specific features -``` - -> The above command will use `--provider` to provision a Vm and you use `libvirt` or `virtualbox`, ensure the respective service is running and accessible. Then it will use `local.dev-config.yml` to configure the VM. - -You can use the virtualbox application or virt-manager to check the state of the VM or run `vagrant status VAGRANT_BOX_NAME` - -!!! 
-### 7. Start the vagrant VM - -Use one of the [available boxes](https://github.com/pulp/pulp_installer/blob/main/vagrant/boxes.d/30-source.yaml) or run `vagrant status` to see the list of available boxes. - -Example: - -!!! note - The following commands must run inside the `pulp_installer` directory. - -```bash -vagrant up --provider=libvirt VAGRANT_BOX_NAME # recommended -vagrant up --provider=libvirt pulp3-source-centos8 # if you need RHEL specific features -``` - -> The above command uses `--provider` to provision a VM; whether you use `libvirt` or `virtualbox`, ensure the respective service is running and accessible. It then uses `local.dev-config.yml` to configure the VM. - -You can use the virtualbox application or virt-manager to check the state of the VM, or run `vagrant status VAGRANT_BOX_NAME`. - -!!! note - The `libvirt` plugin is not available on all platforms; skip `--provider=libvirt` if things break. - -!!! warning - This command may take several minutes to run and may ask for your root password. In case of a `Version Conflict Error`, refer to the https://github.com/ansible/galaxy_ng/wiki/Installing-from-source---development-environment/_edit#2-clone-the-repositories-from-source step. - -!!! warning - Vagrant silently ignores `--provider=...` if it is used before `up`. The right syntax is `vagrant up --provider=...`, not ~~`vagrant --provider=... up`~~. - -### 8. Access Galaxy NG and Pulp - -**Pulp-Installer** will expose the services under the DNS suffix `.localhost.example.com`. For example, if you installed on a Fedora system it will be http://VAGRANT_BOX_NAME.localhost.example.com/ui/ - -If you installed `vagrant-hostmanager` you can then run `vagrant hostmanager` to update your hosts file. - -Otherwise you will need to add the entry to the `/etc/hosts` file manually. Run `vagrant ssh VAGRANT_BOX_NAME` to connect to the VM, then `ifconfig` to see its IP address, and add an entry like the following: - -``` -# /etc/hosts -... -192.168.121.51 VAGRANT_BOX_NAME.localhost.example.com -``` - -To SSH into the box, just run `vagrant ssh VAGRANT_BOX_NAME` - -The HTTP server will listen either on `http://VAGRANT_BOX_NAME.localhost.example.com` (port 80) or on `http://localhost:8080`. - - -### 9. Optional - Switch to the source version of `galaxy-importer` by doing the following: - - ``` - ## SSH into the vagrant box: - $ vagrant ssh VAGRANT_BOX_NAME - - ## Within the vagrant box, install the local copy of `galaxy-importer` and restart Pulp: - $ source /usr/local/lib/pulp/bin/activate - $ cd /home/vagrant/devel/galaxy-importer - $ pip install --upgrade -e . - $ prestart - ``` - -### 10. Optional - Enable running `ansible-test` during Collection import: - - ``` - # SSH into the vagrant guest: - $ vagrant ssh pulp-source-fedora32 - - # Install podman-docker - $ sudo yum install podman-docker - - # Configure galaxy-importer - $ sudo mkdir /etc/galaxy-importer - ``` - - Copy the following to `/etc/galaxy-importer/galaxy-importer.cfg` - - ``` - [galaxy-importer] - LOG_LEVEL_MAIN = INFO - RUN_FLAKE8 = True - RUN_ANSIBLE_TEST = True - INFRA_LOCAL_IMAGE = True - INFRA_LOCAL_DOCKER = False - INFRA_OSD = False - ``` - -### 11. SSH into the Box - -Now that everything is running, you can SSH into the box with `vagrant ssh VAGRANT_BOX_NAME` and begin development work. Once you're in you can run: - -- `pjournal`: shows the server logs -- `prestart`: restarts pulp - -Keep in mind that the server has to be restarted any time changes are made to the code. - -#### Tips and tricks - -The installation comes with some useful [dev aliases](https://docs.pulpproject.org/pulp_installer/roles/pulp_devel/#aliases); once in a `vagrant ssh` session you can use the following (a typical edit-and-test loop is sketched after this list): - -Activate the pulp virtualenv - -```bash -workon pulp -``` - -* `phelp`: List all available aliases. -* `pstart`: Start all pulp-related services -* `pstop`: Stop all pulp-related services -* `prestart`: Restart all pulp-related services -* `pstatus`: Report the status of all pulp-related services -* `pdbreset`: Reset the Pulp database - **THIS DESTROYS YOUR PULP DATA** -* `pclean`: Restore pulp to a clean-installed state - **THIS DESTROYS YOUR PULP DATA** -* `pjournal`: Interact with the journal for pulp-related units -* `reset_pulp2`: Resets Pulp 2 - drop the DB, remove content and publications from FS, restart services. -* `populate_pulp2_iso`: Syncs 4 ISO repos. -* `populate_pulp2_rpm`: Syncs 1 RPM repo. -* `populate_pulp2_docker`: Syncs 1 Docker repo. -* `populate_pulp2`: Reset Pulp 2 and sync ISO, RPM, Docker repos. -* `pyclean`: Clean up extra python files -* `pfixtures`: Run pulp-fixtures container in foreground -* `pbindings`: Create and install bindings. Example usage: `pbindings pulpcore python` -* `pminio`: Switch to minio for S3 testing. For stopping it: `pminio stop`
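For instance, after editing code under `~/devel/galaxy_ng`, a typical loop using only the aliases listed above (a sketch, not an official workflow) looks like:

```bash
workon pulp    # activate the pulp virtualenv
prestart       # restart all pulp-related services so the change is picked up
pstatus        # confirm the services came back up
pjournal       # inspect the server logs if something looks wrong
```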
-## Running tests - -Functional and unit tests can be run from vagrant. Integration tests can only be run in [the docker environment](docker_environment.md). - -### Functional Tests - -1. SSH into the vagrant box `vagrant ssh YOUR_BOX_NAME` -2. Activate the python virtual env `workon pulp` -3. cd to the plugin you want to test `cd ~/devel/pulp_ansible/` -4. Install the testing requirements `pip install -r functest_requirements.txt` -5. Build the pulp bindings `pbindings pulp_ansible python` -6. Run the tests `pytest -v -r sx --color=yes --pyargs pulp_ansible.tests.functional` - -!!! note - Any time the APIs change, the server needs to be restarted with `prestart` and the bindings have to be rebuilt with `pbindings PLUGIN_NAME python`. - -!!! warning - Some pulp_ansible tests require extra setup and others will always fail if galaxy_ng is installed. - -!!! tip - You can run a single test by passing `-k my_test_name` to the pytest command. - -### Unit Tests - -1. SSH into the vagrant box `vagrant ssh YOUR_BOX_NAME` -2. Activate the python virtual env `workon pulp` -3. cd to the plugin you want to test `cd ~/devel/pulp_ansible/` -4. Install the testing requirements `pip install -r unittest_requirements.txt` -5. Run the tests `pytest -v -r sx --color=yes --pyargs pulp_ansible.tests.unit` - -## Troubleshooting - -### CentOS 8 - -When using CentOS 8, [there's currently a bug](https://github.com/dustymabe/vagrant-sshfs/pull/111) in `vagrant-sshfs` that causes the `fuse-sshfs` package to not install in the guest. Until that gets fixed, it is best to use Fedora 31+ rather than an Enterprise Linux distro. - -To use CentOS 8 with Virtualbox (assuming the `vagrant-sshfs` issue is fixed), check `vagrant/boxes.d/30-source.yaml` to see if the box being referenced points to a URL. If so, take a look at `https://cloud.centos.org/centos/8-stream/x86_64/images/`, and update the URL to reference an image that's compatible with Virtualbox. The delivered URL was pointing to a Libvirt-compatible box. - -### CentOS 7 - -If using CentOS 7 with a clone of the `ansible-hub-ui` project, the UI will not build without first upgrading the version of Node. This might be accomplished by adding an inline script to the config section of the `Vagrantfile`. Otherwise, expect the build to fail :-( - -`Call to virConnectOpen failed: Failed to connect socket to '/var/run/libvirt/libvirt-sock': No such file or directory` - `libvirtd` or `libvirt-daemon-system` needs to be installed and running - -`Call to virConnectOpen failed: authentication unavailable: no polkit agent available to authenticate action 'org.libvirt.unix.manage'` - the current user needs to be a member of the `libvirt` system group
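A quick way to address that last error (a sketch; the group name can vary by distribution, and logging out and back in works instead of `newgrp`):

```bash
sudo usermod -aG libvirt $USER   # add the current user to the libvirt group
newgrp libvirt                   # pick up the new group membership in the current shell
```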
-### Running vagrant on macOS - -In some cases, the default `pulp3-source-fedoraXX` boxes don't work on macOS. Custom pulp boxes can also be created by adding a `pulp_installer/vagrant/boxes.d/99-local.yaml` file. The `generic/fedoraXX` boxes seem to work reliably on macOS and can be created like so: - -```yaml -# 99-local.yaml - -mycustombox: - box_name: 'generic/fedora34' - image_name: !ruby/regexp '/Fedora 34.*/' - pty: true - ansible: - variables: - ansible_python_interpreter: /usr/bin/python3 - -hub: - box: 'mycustombox' - sshfs: - host_path: '..' - guest_path: '/home/vagrant/devel' - reverse: False - memory: 6144 - cpus: 4 - ansible: - playbook: "vagrant/playbooks/source-install.yml" - galaxy_role_file: "requirements.yml" - - # The default network configuration may not work for vagrant host manager. If that's the case, assigning an IP address - # to the box may fix the issue. - networks: - - type: 'private_network' - options: - ip: 192.168.150.5 - -``` - -## Working on master branches - - **I need to work on pulp_ansible or pulp_container master; how do I do that?** - -If you need to work on the pulp_ansible, pulpcore, or pulp_container master branches, do the following: - -1. First do the normal provisioning using the compatible versions/tags/branches *described above* -2. SSH into the VM: `vagrant ssh VAGRANT_BOX_NAME` -3. Stop the pulp services: `pstop` -4. Go to the repo and check out the desired branch or tag; you can do that inside the VM in `/home/vagrant/devel` or in your local host directory, as they are mounted inside the VM. -5. Run `workon pulp` inside the VM SSH session, then run `django-admin migrate` and resolve any conflicts -6. Restart the pulp services: `pstart` - - -## Some Handy Bash aliases - -If you're like me and you can't be bothered to remember the commands for starting and stopping vagrant boxes, here are some handy aliases that you can add to your bash profile. - -```bash -# Set the path to the directory that contains pulp_installer, galaxy_ng, pulpcore etc. -HUB_BASE_PATH="/path/to/your/pulp/source" - -# Set the pulp box you wish to use -PULP_BOX="VAGRANT_BOX_NAME" - -# Start the vagrant box if it is already provisioned -alias pulp_up="cd ${HUB_BASE_PATH}/pulp_installer && SSH_AUTH_SOCK= vagrant up ${PULP_BOX}" - -# Re-provision the vagrant box -alias pulp_provision="cd ${HUB_BASE_PATH}/pulp_installer && SSH_AUTH_SOCK= vagrant up --provision ${PULP_BOX}" - -# Destroy the current vagrant box -alias pulp_destroy="cd ${HUB_BASE_PATH}/pulp_installer && SSH_AUTH_SOCK= vagrant destroy -f ${PULP_BOX}" - -# SSH into the vagrant box -alias pulp_ssh="cd ${HUB_BASE_PATH}/pulp_installer && SSH_AUTH_SOCK= vagrant ssh ${PULP_BOX}" - -# Stop the vagrant box from running -alias pulp_stop="cd ${HUB_BASE_PATH}/pulp_installer && SSH_AUTH_SOCK= vagrant halt ${PULP_BOX}" -``` diff --git a/galaxy_ng/tests/integration/package/test_package_install.py b/galaxy_ng/tests/integration/package/test_package_install.py index 2950ef3fc3..21f5c2a16b 100644 --- a/galaxy_ng/tests/integration/package/test_package_install.py +++ b/galaxy_ng/tests/integration/package/test_package_install.py @@ -3,7 +3,7 @@ See: https://issues.redhat.com/browse/AAH-1545 """ - +import os import pytest import subprocess import tempfile @@ -21,6 +21,10 @@ {'LOCK_REQUIREMENTS': '0'} ] ) +@pytest.mark.skipif( + os.environ.get('JWT_PROXY') is not None, + reason="django-ansible-base fails to install under dab profile" +) def test_package_install(env_vars): """smoktest setup.py""" diff --git a/lint_requirements.txt b/lint_requirements.txt index 987eeb1fc2..b76e72e15b 100644 --- a/lint_requirements.txt +++ b/lint_requirements.txt @@ -1,10 +1,3 @@ -# WARNING: DO NOT EDIT! -# -# This file was generated by plugin_template, and is managed by it.
Please use -# './plugin-template --github galaxy_ng' to update this file. -# -# For more info visit https://github.com/pulp/plugin_template - # python packages handy for developers, but not required by pulp check-manifest flake8 diff --git a/setup.py b/setup.py index 46bbba903f..2af715e836 100644 --- a/setup.py +++ b/setup.py @@ -87,29 +87,6 @@ def run(self): return super().run() -# FIXME: this currently works for CI and dev env, but pip-tools misses dependencies when -# generating requirements.*.txt files. This needs to be fixed before use in the master branch. -def _format_pulp_requirement(plugin, specifier=None, ref=None, gh_namespace="pulp"): - """ - Formats the pulp plugin requirement. - - The plugin template is VERY picky about the format we use for git refs. This will - help format git refs in a way that won't break CI when we need to pin to development - branches of pulp. - - example: - _format_pulp_requirement("pulpcore", specifier=">=3.18.1,<3.19.0") - _format_pulp_requirement("pulpcore", ref="6e44fb2fe609f92dc1f502b19c67abd08879148f") - """ - if specifier: - return plugin + specifier - else: - repo = plugin.replace("-", "_") - return ( - f"{plugin}@git+https://git@github.com/" f"{gh_namespace}/{repo}.git@{ref}#egg={plugin}" - ) - - django_ansible_base_branch = os.getenv('DJANGO_ANSIBLE_BASE_BRANCH', 'devel') django_ansible_base_dependency = ( 'django-ansible-base[jwt_consumer] @ ' @@ -148,7 +125,6 @@ def strip_package_name(spec): return spec -# next line can be replaced via sed in ci scripts/post_before_install.sh unpin_requirements = os.getenv("LOCK_REQUIREMENTS") == "0" if unpin_requirements: """ diff --git a/template_config.yml b/template_config.yml deleted file mode 100644 index be312f643d..0000000000 --- a/template_config.yml +++ /dev/null @@ -1,88 +0,0 @@ -# This config represents the latest values used when running the plugin-template. Any settings that -# were not present before running plugin-template have been added with their default values. 
- -# generated with plugin_template@2021.08.26-338-g2237db8 - -api_root: /api/galaxy/pulp/ -black: false -check_commit_message: false -check_gettext: true -check_manifest: true -check_stray_pulpcore_imports: true -ci_base_image: ghcr.io/pulp/pulp-ci-centos9 -ci_env: - GITHUB_USER: ${{ github.event.pull_request.user.login }} -ci_trigger: workflow_dispatch -ci_update_docs: false -cli_package: pulp-cli -cli_repo: https://github.com/pulp/pulp-cli.git -core_import_allowed: -- pulpcore.app.*viewsets -- pulpcore\.app.*admin -- ProgressReportSerializer -- pulpcore.app.*tasks -- pulpcore.openapi.* -deploy_client_to_pypi: false -deploy_client_to_rubygems: false -deploy_to_pypi: false -disabled_redis_runners: [] -doc_requirements_from_pulpcore: false -docker_fixtures: false -docs_test: false -extra_docs_requirements: [] -flake8: false -flake8_ignore: [] -github_org: ansible -issue_tracker: null -kanban: false -latest_release_branch: null -lint_requirements: false -noissue_marker: No-Issue -parallel_test_workers: 8 -plugin_app_label: galaxy -plugin_default_branch: master -plugin_name: galaxy_ng -plugins: -- app_label: galaxy - name: galaxy_ng -post_job_template: {} -pre_job_template: - name: check_commit - path: galaxy_ng/.github/pre-job-template.yml.j2 -publish_docs_to_pulpprojectdotorg: false -pulp_env: {} -pulp_env_azure: {} -pulp_env_gcp: {} -pulp_env_s3: {} -pulp_scheme: https -pulp_settings: - allowed_export_paths: /tmp - allowed_import_paths: /tmp - galaxy_api_default_distribution_base_path: published - galaxy_enable_api_access_log: true - galaxy_require_content_approval: false - rh_entitlement_required: insights -pulp_settings_azure: null -pulp_settings_gcp: null -pulp_settings_s3: null -pydocstyle: true -release_email: ansible-infra@redhat.com -release_user: ansible -stalebot: true -stalebot_days_until_close: 30 -stalebot_days_until_stale: 90 -stalebot_limit_to_pulls: true -supported_release_branches: [] -sync_ci: false -test_azure: true -test_cli: false -test_deprecations: false -test_gcp: false -test_lowerbounds: false -test_performance: false -test_reroute: false -test_s3: true -use_issue_template: false -use_legacy_docs: true -use_unified_docs: false -