From 9772dec75f1aa20f1c2fe785747cb7722e0329d6 Mon Sep 17 00:00:00 2001 From: Russell Martin Date: Wed, 6 Mar 2024 12:21:22 -0500 Subject: [PATCH] Add workflow to bump unmanaged dependency versions --- .github/workflows/dep-version-bump.yml | 136 ++++++++++++++++ .pre-commit-config.yaml | 25 +++ scripts/bump_versions.py | 208 +++++++++++++++++++++++++ scripts/install_requirement.py | 179 +++++++++++++++++++++ 4 files changed, 548 insertions(+) create mode 100644 .github/workflows/dep-version-bump.yml create mode 100644 scripts/bump_versions.py create mode 100644 scripts/install_requirement.py diff --git a/.github/workflows/dep-version-bump.yml b/.github/workflows/dep-version-bump.yml new file mode 100644 index 00000000..5763909d --- /dev/null +++ b/.github/workflows/dep-version-bump.yml @@ -0,0 +1,136 @@ +name: Update Dependency Versions + +####### +# Updates versions for dependencies that are otherwise unmanaged by other processes. +####### + +on: + schedule: + - cron: "0 20 * * SUN" # Sunday @ 2000 UTC + workflow_dispatch: + workflow_call: + inputs: + subdirectory: + description: "Whitespace-delimited list of directories containing pyproject.toml and tox.ini files; defaults to repo's base directory." + default: "" + type: string + create-changenote: + description: "Defaults 'true' to create a misc changenote in the './changes' directory." + default: true + type: boolean + workflow-repo: + # Only needed for PRs in other repos wanting to test new workflow changes before they are merged. + # These inputs should not be specified by another repo on their main branch. + description: "The repo to use to run additional workflows and actions." + default: "beeware/.github" + type: string + workflow-repo-ref: + description: "The repo ref to use to run additional workflows and actions." 
+ default: "" + type: string + secrets: + BRUTUS_PAT_TOKEN: + required: true + +permissions: + pull-requests: write + +env: + BRANCH_PREFIX: "autoupdates" + CHANGENOTE_DIR: "./changes" + FORCE_COLOR: "1" + +defaults: + run: + shell: bash + +jobs: + dep-version-bump: + name: Bump Config File Dependencies + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - name: Checkout ${{ github.repository }} + uses: actions/checkout@v4.1.1 + with: + token: ${{ secrets.BRUTUS_PAT_TOKEN }} + path: "repo" + + - name: Checkout ${{ inputs.workflow-repo }}${{ inputs.workflow-repo-ref && format('@{0}', inputs.workflow-repo-ref) || '' }} + uses: actions/checkout@v4.1.1 + with: + repository: ${{ inputs.workflow-repo }} + ref: ${{ inputs.workflow-repo-ref }} + path: "beeware-.github" + + - name: Configure git + working-directory: "repo" + run: | + git config user.email "brutus@beeware.org" + git config user.name "Brutus (robot)" + + - name: Set up Python + uses: actions/setup-python@v5.0.0 + with: + python-version: 3.X + cache: pip + cache-dependency-path: | + **/setup.cfg + **/pyproject.toml + + - name: Install Dependencies + run: | + python -m pip install pip --upgrade + python -m pip install configupdater packaging requests tomlkit --upgrade --upgrade-strategy eager + + - name: Update Versions + working-directory: "repo" + run: | + if [ "${{ inputs.subdirectory }}" == "" ]; then + python ../beeware-.github/scripts/bump_versions.py + else + for SUBDIR in ${{ inputs.subdirectory }}; do + python ../beeware-.github/scripts/bump_versions.py ${SUBDIR} + done + fi + + - name: PR Needed? 
+ id: pr + working-directory: "repo" + run: | + if [[ $(git status --porcelain) ]]; then + echo "needed=true" >> ${GITHUB_OUTPUT} + else + echo "needed=false" >> ${GITHUB_OUTPUT} + fi + + - name: Create Pull Request + id: created-pr + if: steps.pr.outputs.needed == 'true' + uses: peter-evans/create-pull-request@v6.0.1 + with: + token: ${{ secrets.BRUTUS_PAT_TOKEN }} + path: "repo" + title: "Bump dependencies in pyproject.toml and tox.ini" + branch: "${{ env.BRANCH_PREFIX }}/config-files" + commit-message: "Bump dependencies in pyproject.toml and tox.ini" + committer: "Brutus (robot) <brutus@beeware.org>" + author: "Brutus (robot) <brutus@beeware.org>" + body: "Bumps versions for dependencies in pyproject.toml and tox.ini." + labels: "dependencies" + + - name: Add changenote + if: (inputs.create-changenote == true) && (steps.created-pr.outputs.pull-request-number != '') + working-directory: "repo" + run: | + BRANCH_NAME="${{ env.BRANCH_PREFIX }}/config-files" + + git fetch origin + git checkout "${BRANCH_NAME}" + + FILENAME="${{ env.CHANGENOTE_DIR }}/${{ steps.created-pr.outputs.pull-request-number }}.misc.rst" + printf 'The pinned dependencies in pyproject.toml and tox.ini were updated to their latest versions.\n' > "${FILENAME}" + + git add "${FILENAME}" + git commit -m "Add changenote." 
+ git push --set-upstream origin "${BRANCH_NAME}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3d9dbff2..25ad4ea4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,3 +8,28 @@ repos: - id: trailing-whitespace - id: check-json - id: check-xml + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.1 + hooks: + - id: pyupgrade + args: [--py38-plus] + - repo: https://github.com/PyCQA/isort + rev: 5.13.2 + hooks: + - id: isort + args: [--profile=black, --split-on-trailing-comma, --combine-as] + - repo: https://github.com/PyCQA/docformatter + rev: v1.7.5 + hooks: + - id: docformatter + args: [--in-place, --black] + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 24.2.0 + hooks: + - id: black + language_version: python3 + - repo: https://github.com/PyCQA/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + args: [--max-line-length=119] diff --git a/scripts/bump_versions.py b/scripts/bump_versions.py new file mode 100644 index 00000000..875247fe --- /dev/null +++ b/scripts/bump_versions.py @@ -0,0 +1,208 @@ +# bump_versions.py - Bumps versions for Python packages not managed by Dependabot +# +# Usage +# ----- +# $ python bump_versions.py [subdirectory] +# +# Finds pinned dependencies in pyproject.toml and tox.ini and updates them to the +# latest version available on PyPI. 
+# +# positional arguments: +# subdirectory Directory that contains pyproject.toml/tox.ini; defaults to +# current directory +# Dependencies +# ------------ +# configupdater packaging requests tomlkit + +from __future__ import annotations + +import sys +from argparse import ArgumentParser, RawDescriptionHelpFormatter +from functools import lru_cache +from pathlib import Path +from shutil import get_terminal_size + +import configupdater +import requests +import tomlkit +from packaging.requirements import InvalidRequirement, Requirement, SpecifierSet +from requests.adapters import HTTPAdapter +from urllib3.util.retry import Retry + + +class BumpVersionError(Exception): + def __init__(self, msg: str, error_no: int): + self.msg = msg + self.error_no = error_no + + +def validate_directory(subdirectory: str) -> Path: + subdirectory = Path.cwd() / subdirectory + + if subdirectory == Path.cwd() or Path.cwd() in subdirectory.parents: + return subdirectory + + raise BumpVersionError( + f"{subdirectory} is not a subdirectory of {Path.cwd()}", error_no=10 + ) + + +def parse_args(): + width = max(min(get_terminal_size().columns, 80) - 2, 20) + parser = ArgumentParser( + description="Bumps versions for Python packages not managed by Dependabot", + formatter_class=lambda prog: RawDescriptionHelpFormatter(prog, width=width), + ) + parser.add_argument( + "subdirectory", + default=".", + type=validate_directory, + nargs="?", + help=( + "Directory that contains pyproject.toml/tox.ini; " + "defaults to current directory" + ), + ) + + args = parser.parse_args() + print(f"\nEvaluating {args.subdirectory}") + + return args + + +def is_filepath_exist(filepath: Path) -> bool: + if not filepath.exists(): + print(f"\nSkipping {filepath.relative_to(Path.cwd())}; not found") + return False + + print(f"\n{filepath.relative_to(Path.cwd())}") + return True + + +def read_toml_file(file_path: Path) -> tomlkit.TOMLDocument: + with open(file_path, encoding="utf-8") as f: + return tomlkit.load(f) + + 
+def read_ini_file(file_path: Path) -> configupdater.ConfigUpdater: + config = configupdater.ConfigUpdater() + with open(file_path, encoding="utf-8") as f: + config.read_file(f) + return config + + +@lru_cache +def http_session() -> requests.Session: + sess = requests.Session() + adapter = HTTPAdapter(max_retries=Retry(status_forcelist={500, 502, 504})) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + return sess + + +@lru_cache +def latest_pypi_version(name: str) -> str | None: + """Fetch the latest version for a package from PyPI.""" + resp = http_session().get(f"https://pypi.org/pypi/{name}/json", timeout=(3.1, 30)) + try: + return resp.json()["info"]["version"] + except KeyError: + return None + + +def bump_version(req: str) -> str: + """Bump the version for a requirement to its latest version. + + Requires the requirement only uses == operator for version. + + :param req: requirement to bump, e.g. build==1.0.5 + :returns: requirement with bumped version or input requirement if cannot bump + """ + if req.startswith("#"): + return req + + try: + req_parsed = Requirement(req) + except InvalidRequirement: + print(f" 𐄂 {req}; invalid requirement") + return req + + if not (latest_version := latest_pypi_version(req_parsed.name)): + print(f" 𐄂 {req}; cannot determine latest version") + return req + + if len(req_parsed.specifier) != 1: + print(f" 𐄂 {req}; requires exactly one specifier (latest: {latest_version})") + return req + + spec = next(iter(req_parsed.specifier)) + + if spec.operator != "==": + print(f" 𐄂 {req}; must use == operator (latest: {latest_version})") + return req + + if spec.version != latest_version: + print(f" ↑ {req_parsed.name} from {spec.version} to {latest_version}") + req_parsed.specifier = SpecifierSet(f"=={latest_version}") + return str(req_parsed) + else: + print(f" ✓ {req} is already the latest version") + + return req + + +def update_pyproject_toml(base_dir: Path): + """Update pinned build-system requirements in 
pyproject.toml.""" + pyproject_path = base_dir / "pyproject.toml" + + if not is_filepath_exist(pyproject_path): + return + + pyproject_toml = read_toml_file(pyproject_path) + + if build_requires := pyproject_toml.get("build-system", {}).get("requires", []): + print(" build-system.requires") + for idx, req in enumerate(build_requires.copy()): + # update list directly to avoid losing existing formatting/comments + build_requires[idx] = bump_version(req) + + pyproject_toml["build-system"]["requires"] = build_requires + + with open(pyproject_path, "w") as f: + tomlkit.dump(pyproject_toml, f) + + +def update_tox_ini(base_dir: Path): + """Update pinned requirements in tox.ini.""" + tox_ini_path = base_dir / "tox.ini" + + if not is_filepath_exist(tox_ini_path): + return + + tox_ini = read_ini_file(tox_ini_path) + + for section in tox_ini: + if reqs := tox_ini[section].get("deps"): + print(f" {section.split('{')[0]}") + tox_ini[section]["deps"].set_values( + bump_version(req) for req in reqs.value.splitlines() if req + ) + + with open(tox_ini_path, "w", encoding="utf-8") as f: + tox_ini.write(f) + + +def main(): + ret_code = 0 + try: + args = parse_args() + update_pyproject_toml(base_dir=args.subdirectory) + update_tox_ini(base_dir=args.subdirectory) + except BumpVersionError as e: + print(e.msg) + ret_code = e.error_no + return ret_code + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/scripts/install_requirement.py b/scripts/install_requirement.py new file mode 100644 index 00000000..0c604a2f --- /dev/null +++ b/scripts/install_requirement.py @@ -0,0 +1,179 @@ +# install_requirement.py - Install a requirement from a PEP 517 project +# +# Usage +# ----- +# $ python install_requirement.py [-h] [--extra EXTRA] [--project-root PROJECT_ROOT] [requirements ...] +# +# Install one or more PEP 517 project defined requirements +# +# positional arguments: +# requirements List of project requirements to install. 
Any project +# requirements that start with any of these values will +# be installed. For instance, including 'pytest' in this +# list would install both pytest and pytest-xdist. +# +# options: +# -h, --help show this help message and exit +# --extra EXTRA Name of the extra where the requirements are defined +# --project-root PROJECT_ROOT +# File path to the root of the project. The current +# directory is used by default. +# +# Purpose +# ------- +# Installs one or more requested requirements as defined for the project. +# +# In certain workflows, such as automated coverage reporting, the coverage +# dependencies must be installed first. Since a project's requirements are often +# pinned to specific versions to ensure consistency for the project regardless of the +# environment, the coverage dependencies that are installed should match those pinned +# for the project. +# +# A simple method to accomplish this is ``pip install .[dev]`` in which ``pip`` will +# build the source and install the project with all its defined requirements. However, +# this is very inefficient when only one or a few specific requirements are needed. +# +# Therefore, this script will evaluate the requirements defined in the project's +# metadata and install the ones matching those being requested to be installed. 
+# +# Dependencies +# ------------ +# build setuptools wheel + +from __future__ import annotations + +import subprocess +import sys +from argparse import ArgumentParser, RawDescriptionHelpFormatter +from pathlib import Path +from shutil import get_terminal_size + +from build.util import project_wheel_metadata +from packaging.requirements import Requirement + + +class RequirementsInstallerError(Exception): + def __init__(self, msg: str, error_no: int): + self.msg = msg + self.error_no = error_no + + +class HelpText(RequirementsInstallerError): + """Shows script's help text.""" + + +class NoRequirementsFound(RequirementsInstallerError): + """No project requirements were found to install.""" + + +def parse_args(): + width = max(min(get_terminal_size().columns, 80) - 2, 20) + parser = ArgumentParser( + description="Installs one or more PEP 517 project defined requirements", + formatter_class=lambda prog: RawDescriptionHelpFormatter(prog, width=width), + ) + parser.add_argument( + "requirements", + type=str, + nargs="*", + help=( + "List of project requirements to install. If the project defines extras for " + "a requirement, do not include them in this list; they will be included " + "automatically when the requirement is installed. For instance, if " + "coverage[toml] is a project requirement, just include coverage in this list." + ), + ) + parser.add_argument( + "--extra", + type=str, + default="", + help="Name of the extra where the requirements are defined", + ) + parser.add_argument( + "--project-root", + type=Path, + default=".", + help=( + "File path to the root of the project. The current directory is used by " + "default." 
+ ), + ) + + args = parser.parse_args() + + if not args.requirements: + raise HelpText(parser.format_help(), error_no=-1) + + return args + + +def gather_requirements( + project_root: str | Path, + requested_requirements: list[str], + extra_name: str = "", +) -> list[Requirement]: + """Identifies one or more matching requirements from a project.""" + project_root = Path(project_root).resolve() + project_metadata = project_wheel_metadata(project_root, isolated=False) + project_requirements = [ + requirement + for requirement in map(Requirement, project_metadata.get_all("Requires-Dist")) + if not requirement.marker or requirement.marker.evaluate({"extra": extra_name}) + ] + + matching_requirements = [ + requirement + for requirement in project_requirements + if requirement.name in requested_requirements + ] + + if not matching_requirements: + raise NoRequirementsFound( + f"No requirements matched requested requirements: " + f"{', '.join(requested_requirements)}\n\n" + f"The requirements below were evaluated for matching:\n " + f"{f'{chr(10)} '.join(req.name for req in project_requirements)}", + error_no=1, + ) + + return matching_requirements + + +def install_requirements(requirements: list[Requirement]): + """Install requirements from PyPI.""" + for requirement in requirements: + extras = f"[{','.join(requirement.extras)}]" if requirement.extras else "" + requirement_str = f"{requirement.name}{extras}{requirement.specifier}" + print(f"Installing {requirement_str}...") + subprocess.run( + [ + sys.executable, + "-m", + "pip", + "install", + "--upgrade", + requirement_str, + ], + check=True, + ) + + +def main(): + ret_code = 0 + try: + args = parse_args() + requirements_to_install = gather_requirements( + project_root=args.project_root, + requested_requirements=args.requirements, + extra_name=args.extra, + ) + install_requirements(requirements=requirements_to_install) + except RequirementsInstallerError as e: + print(e.msg) + ret_code = e.error_no + + return ret_code + 
+ +if __name__ == "__main__": + sys.exit(main())