diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000..987b9961 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,54 @@ +# build jsonnet-bundler using a go environment +FROM docker.io/library/golang:1.18 AS builder-go +RUN go install -a github.com/jsonnet-bundler/jsonnet-bundler/cmd/jb@v0.5.1 + +# build otterdog using a python environment +FROM docker.io/library/python:3.10.10-slim as builder-python3 + +RUN apt-get update \ + && apt-get install -y \ + golang + +WORKDIR /app + +ENV PIP_DEFAULT_TIMEOUT=100 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + PIP_NO_CACHE_DIR=1 \ + POETRY_VERSION=1.7.1 + +COPY ../otterdog ./otterdog +COPY ../pyproject.toml ../poetry.lock ../README.md ./hypercorn-cfg.toml ./ +COPY ./entrypoint.sh ./docker/entrypoint.sh +COPY ./start-webapp ./docker/start-webapp + +RUN pip install "poetry==$POETRY_VERSION" + +RUN poetry config virtualenvs.in-project true && \ + poetry install --only=main,app --no-root && \ + poetry build && \ + poetry install --only-root + +# create the final image having python3.10 as base +FROM python:3.10.10-slim + +RUN apt-get update \ + && apt-get install -y \ + git + +COPY --from=builder-go /go/bin/jb /usr/bin/jb +COPY --from=builder-python3 /app/.venv /app/.venv +COPY --from=builder-python3 /app/otterdog /app/otterdog +COPY --from=builder-python3 /app/docker/entrypoint.sh /app/entrypoint.sh +COPY --from=builder-python3 /app/docker/start-webapp /app/start-webapp +COPY --from=builder-python3 /app/hypercorn-cfg.toml /app/hypercorn-cfg.toml + +RUN chmod +x /app/entrypoint.sh +RUN chmod +x /app/start-webapp + +WORKDIR /app + +# set environment variables +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONUNBUFFERED 1 + +ENTRYPOINT ["/app/entrypoint.sh"] \ No newline at end of file diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh new file mode 100644 index 00000000..5e42aef1 --- /dev/null +++ b/docker/entrypoint.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +# if any of the commands in your code fails for any 
reason, the entire script fails +set -o errexit +# fail exit if one of your pipe command fails +set -o pipefail +# exits if any of your variables is not set +set -o nounset + +exec "$@" diff --git a/docker/hypercorn-cfg.toml b/docker/hypercorn-cfg.toml new file mode 100644 index 00000000..00631bc3 --- /dev/null +++ b/docker/hypercorn-cfg.toml @@ -0,0 +1,5 @@ +bind = "0.0.0.0:5000" +workers = 1 +accesslog = '-' +loglevel = 'info' +h11_max_incomplete_size = 4 \ No newline at end of file diff --git a/docker/start-webapp b/docker/start-webapp new file mode 100644 index 00000000..83656db0 --- /dev/null +++ b/docker/start-webapp @@ -0,0 +1,17 @@ +#!/bin/bash + +# +# ******************************************************************************* +# Copyright (c) 2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the MIT License +# which is available at https://spdx.org/licenses/MIT.html +# SPDX-License-Identifier: MIT +# ******************************************************************************* +# + +set -o errexit +set -o pipefail +set -o nounset + +.venv/bin/hypercorn --config hypercorn-cfg.toml otterdog.app diff --git a/otterdog/app.py b/otterdog/app.py index 6064bf0e..ec5757d9 100644 --- a/otterdog/app.py +++ b/otterdog/app.py @@ -6,9 +6,43 @@ # SPDX-License-Identifier: EPL-2.0 # ******************************************************************************* +import os +from sys import exit + +from decouple import config # type: ignore + +from otterdog.webapp import create_app +from otterdog.webapp.config import config_dict + +# WARNING: Don't run with debug turned on in production! +DEBUG: bool = config("DEBUG", default=True, cast=bool) + +# Determine which configuration to use +config_mode = "Debug" if DEBUG else "Production" + +try: + app_config = config_dict[config_mode] +except KeyError: + exit("Error: Invalid <config_mode>. 
Expected values [Debug, Production] ") + +app = create_app(app_config) # type: ignore + +if os.path.exists(app_config.APP_ROOT): + os.chdir(app_config.APP_ROOT) +else: + app.logger.error(f"APP_ROOT '{app_config.APP_ROOT}' does not exist, exiting.") + exit(1) + +if DEBUG: + app.logger.info("DEBUG = " + str(DEBUG)) + app.logger.info("Environment = " + config_mode) + app.logger.info("QUART_APP = " + app_config.QUART_APP) + app.logger.info("APP_ROOT = " + app_config.APP_ROOT) + app.logger.info("OTTERDOG_CONFIG = " + app_config.OTTERDOG_CONFIG) + def run(): - print("Hello World!") + app.run(debug=True) if __name__ == "__main__": diff --git a/otterdog/cli.py b/otterdog/cli.py index 717f6e43..6c7021a0 100644 --- a/otterdog/cli.py +++ b/otterdog/cli.py @@ -6,6 +6,7 @@ # SPDX-License-Identifier: EPL-2.0 # ******************************************************************************* +import asyncio import importlib.metadata import sys import traceback @@ -521,7 +522,7 @@ def _execute_operation(organizations: list[str], operation: Operation): for organization in organizations: org_config = config.get_organization_config(organization) - exit_code = max(exit_code, operation.execute(org_config)) + exit_code = max(exit_code, asyncio.run(operation.execute(org_config))) operation.post_execute() sys.exit(exit_code) diff --git a/otterdog/config.py b/otterdog/config.py index ad70a4dc..5c6be5e3 100644 --- a/otterdog/config.py +++ b/otterdog/config.py @@ -16,7 +16,10 @@ import jq # type: ignore from .
import credentials -from .credentials import CredentialProvider, bitwarden_provider, pass_provider +from .credentials import CredentialProvider +from .credentials.bitwarden_provider import BitwardenVault +from .credentials.inmemory_provider import InmemoryVault +from .credentials.pass_provider import PassVault from .jsonnet import JsonnetConfig @@ -25,14 +28,12 @@ def __init__( self, name: str, github_id: str, - eclipse_project: Optional[str], config_repo: str, jsonnet_config: JsonnetConfig, credential_data: dict[str, Any], ): self._name = name self._github_id = github_id - self._eclipse_project = eclipse_project self._config_repo = config_repo self._jsonnet_config = jsonnet_config self._credential_data = credential_data @@ -45,10 +46,6 @@ def name(self): def github_id(self) -> str: return self._github_id - @property - def eclipse_project(self) -> Optional[str]: - return self._eclipse_project - @property def config_repo(self) -> str: return self._config_repo @@ -61,9 +58,13 @@ def jsonnet_config(self) -> JsonnetConfig: def credential_data(self) -> dict[str, Any]: return self._credential_data + @credential_data.setter + def credential_data(self, data: dict[str, Any]) -> None: + self._credential_data = data + def __repr__(self) -> str: return ( - f"OrganizationConfig('{self.name}', '{self.github_id}', '{self.eclipse_project}', " + f"OrganizationConfig('{self.name}', '{self.github_id}', " f"'{self.config_repo}', {json.dumps(self.credential_data)})" ) @@ -77,8 +78,6 @@ def from_dict(cls, data: dict[str, Any], otterdog_config: OtterdogConfig) -> Org if github_id is None: raise RuntimeError(f"missing required github_id for organization config with name '{name}'") - eclipse_project = data.get("eclipse_project") - config_repo = data.get("config_repo", otterdog_config.default_config_repo) base_template = data.get("base_template", otterdog_config.default_base_template) @@ -93,11 +92,27 @@ def from_dict(cls, data: dict[str, Any], otterdog_config: OtterdogConfig) -> Org if 
data is None: raise RuntimeError(f"missing required credentials for organization config with name '{name}'") - return cls(name, github_id, eclipse_project, config_repo, jsonnet_config, data) + return cls(name, github_id, config_repo, jsonnet_config, data) + + @classmethod + def of( + cls, github_id: str, credential_data: dict[str, Any], work_dir: str, otterdog_config: OtterdogConfig + ) -> OrganizationConfig: + config_repo = otterdog_config.default_config_repo + base_dir = os.path.join(otterdog_config.jsonnet_base_dir, work_dir) + + jsonnet_config = JsonnetConfig( + github_id, + base_dir, + otterdog_config.default_base_template, + otterdog_config.local_mode, + ) + + return cls(github_id, github_id, config_repo, jsonnet_config, credential_data) class OtterdogConfig: - def __init__(self, config_file: str, local_mode: bool): + def __init__(self, config_file: str, local_mode: bool, working_dir: Optional[str] = None): if not os.path.exists(config_file): raise RuntimeError(f"configuration file '{config_file}' not found") @@ -112,8 +127,16 @@ def __init__(self, config_file: str, local_mode: bool): self._jsonnet_config = jq.compile(".defaults.jsonnet // {}").input(self._configuration).first() self._github_config = jq.compile(".defaults.github // {}").input(self._configuration).first() + self._default_credential_provider = ( + jq.compile('.defaults.credentials.provider // ""').input(self._configuration).first() + ) - self._jsonnet_base_dir = os.path.join(self._config_dir, self._jsonnet_config.get("config_dir", "orgs")) + if working_dir is None: + self._jsonnet_base_dir = os.path.join(self._config_dir, self._jsonnet_config.get("config_dir", "orgs")) + else: + self._jsonnet_base_dir = os.path.join(working_dir, self._jsonnet_config.get("config_dir", "orgs")) + if not os.path.exists(self._jsonnet_base_dir): + os.makedirs(self._jsonnet_base_dir) organizations = self._configuration.get("organizations", []) @@ -121,6 +144,7 @@ def __init__(self, config_file: str, local_mode: 
bool): for org in organizations: org_config = OrganizationConfig.from_dict(org, self) self._organizations[org_config.name] = org_config + self._organizations[org_config.github_id] = org_config @property def config_file(self) -> str: @@ -149,7 +173,7 @@ def organization_configs(self) -> dict[str, OrganizationConfig]: def get_organization_config(self, organization_name: str) -> OrganizationConfig: org_config = self._organizations.get(organization_name) if org_config is None: - raise RuntimeError(f"unknown organization with name '{organization_name}'") + raise RuntimeError(f"unknown organization with name / github_id '{organization_name}'") return org_config def _get_credential_provider(self, provider_type: str) -> credentials.CredentialProvider: @@ -163,7 +187,7 @@ def _get_credential_provider(self, provider_type: str) -> credentials.Credential .first() ) - provider = bitwarden_provider.BitwardenVault(api_token_key) + provider = BitwardenVault(api_token_key) self._credential_providers[provider_type] = provider case "pass": @@ -171,7 +195,29 @@ def _get_credential_provider(self, provider_type: str) -> credentials.Credential jq.compile('.defaults.pass.password_store_dir // ""').input(self._configuration).first() ) - provider = pass_provider.PassVault(password_store_dir) + username_pattern = ( + jq.compile('.defaults.pass.username_pattern // ""').input(self._configuration).first() + ) + + password_pattern = ( + jq.compile('.defaults.pass.password_pattern // ""').input(self._configuration).first() + ) + + twofa_seed_pattern = ( + jq.compile('.defaults.pass.twofa_seed_pattern // ""').input(self._configuration).first() + ) + + api_token_pattern = ( + jq.compile('.defaults.pass.api_token_pattern // ""').input(self._configuration).first() + ) + + provider = PassVault( + password_store_dir, username_pattern, password_pattern, twofa_seed_pattern, api_token_pattern + ) + self._credential_providers[provider_type] = provider + + case "inmemory": + provider = InmemoryVault()
self._credential_providers[provider_type] = provider case _: @@ -181,11 +227,15 @@ def _get_credential_provider(self, provider_type: str) -> credentials.Credential def get_credentials(self, org_config: OrganizationConfig, only_token: bool = False) -> credentials.Credentials: provider_type = org_config.credential_data.get("provider") + if provider_type is None: + provider_type = self._default_credential_provider + + if not provider_type: raise RuntimeError(f"no credential provider configured for organization '{org_config.name}'") provider = self._get_credential_provider(provider_type) - return provider.get_credentials(org_config.eclipse_project, org_config.credential_data, only_token) + return provider.get_credentials(org_config.name, org_config.credential_data, only_token) def get_secret(self, secret_data: str) -> str: if secret_data and ":" in secret_data: diff --git a/otterdog/credentials/__init__.py b/otterdog/credentials/__init__.py index 0c8567ae..dddb9470 100644 --- a/otterdog/credentials/__init__.py +++ b/otterdog/credentials/__init__.py @@ -9,7 +9,7 @@ import dataclasses import time from abc import abstractmethod -from typing import Optional, Protocol +from typing import Any, Optional, Protocol import mintotp # type: ignore @@ -72,9 +72,7 @@ def __str__(self) -> str: class CredentialProvider(Protocol): @abstractmethod - def get_credentials( - self, eclipse_project: Optional[str], data: dict[str, str], only_token: bool = False - ) -> Credentials: + def get_credentials(self, org_name: str, data: dict[str, Any], only_token: bool = False) -> Credentials: ... 
@abstractmethod diff --git a/otterdog/credentials/bitwarden_provider.py b/otterdog/credentials/bitwarden_provider.py index 6c6cd869..8d0e3297 100644 --- a/otterdog/credentials/bitwarden_provider.py +++ b/otterdog/credentials/bitwarden_provider.py @@ -9,7 +9,7 @@ import json import re import subprocess -from typing import Optional +from typing import Any from otterdog import utils from otterdog.credentials import CredentialProvider, Credentials @@ -34,9 +34,7 @@ def __init__(self, api_token_key: str): def is_unlocked(self) -> bool: return self._status == 0 - def get_credentials( - self, eclipse_project: Optional[str], data: dict[str, str], only_token: bool = False - ) -> Credentials: + def get_credentials(self, org_name: str, data: dict[str, Any], only_token: bool = False) -> Credentials: assert self.is_unlocked() item_id = data.get("item_id") diff --git a/otterdog/credentials/inmemory_provider.py b/otterdog/credentials/inmemory_provider.py new file mode 100644 index 00000000..8978f60c --- /dev/null +++ b/otterdog/credentials/inmemory_provider.py @@ -0,0 +1,32 @@ +# ******************************************************************************* +# Copyright (c) 2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from typing import Any + +from otterdog.credentials import CredentialProvider, Credentials + + +class InmemoryVault(CredentialProvider): + """ + A simple credential provider for storing tokens in memory. 
+ """ + + KEY_API_TOKEN = "api_token" + + def get_credentials(self, org_name: str, data: dict[str, Any], only_token: bool = False) -> Credentials: + if only_token is not True: + raise RuntimeError("in-memory vault only contains github tokens") + + github_token = data[self.KEY_API_TOKEN] + return Credentials(None, None, None, github_token) + + def get_secret(self, key_data: str) -> str: + raise RuntimeError("in-memory vault does not support secrets") + + def __repr__(self): + return "InmemoryProvider()" diff --git a/otterdog/credentials/pass_provider.py b/otterdog/credentials/pass_provider.py index e6a1f5f6..dbb68ad0 100644 --- a/otterdog/credentials/pass_provider.py +++ b/otterdog/credentials/pass_provider.py @@ -8,7 +8,7 @@ import os import subprocess -from typing import Optional +from typing import Any from otterdog import utils from otterdog.credentials import CredentialProvider, Credentials @@ -24,28 +24,38 @@ class PassVault(CredentialProvider): KEY_PASSWORD = "password" KEY_2FA_SEED = "2fa_seed" - def __init__(self, password_store_dir: str): + def __init__( + self, + password_store_dir: str, + username_pattern: str, + password_pattern: str, + twofa_seed_pattern: str, + api_token_pattern: str, + ): utils.print_debug("accessing pass vault") status, output = subprocess.getstatusoutput("pass ls") if status != 0: raise RuntimeError(f"could not access pass vault:\n{output}") if password_store_dir: - utils.print_debug(f"setting password store dir to {password_store_dir}") + utils.print_debug(f"setting password store dir to '{password_store_dir}'") os.environ["PASSWORD_STORE_DIR"] = password_store_dir + self._username_pattern = username_pattern + self._password_pattern = password_pattern + self._twofa_seed_pattern = twofa_seed_pattern + self._api_token_pattern = api_token_pattern + if status > 0: raise RuntimeError("pass vault is not accessible") - def get_credentials( - self, eclipse_project: Optional[str], data: dict[str, str], only_token: bool = False - ) -> 
Credentials: - github_token = self._retrieve_key(self.KEY_API_TOKEN, eclipse_project, data) + def get_credentials(self, org_name: str, data: dict[str, Any], only_token: bool = False) -> Credentials: + github_token = self._retrieve_key(self.KEY_API_TOKEN, org_name, data) if only_token is False: - username = self._retrieve_key(self.KEY_USERNAME, eclipse_project, data) - password = self._retrieve_key(self.KEY_PASSWORD, eclipse_project, data) - totp_secret = self._retrieve_key(self.KEY_2FA_SEED, eclipse_project, data) + username = self._retrieve_key(self.KEY_USERNAME, org_name, data) + password = self._retrieve_key(self.KEY_PASSWORD, org_name, data) + totp_secret = self._retrieve_key(self.KEY_2FA_SEED, org_name, data) else: username = None password = None @@ -56,29 +66,31 @@ def get_credentials( def get_secret(self, key_data: str) -> str: return self._retrieve_resolved_key(key_data) - @staticmethod - def _retrieve_key(key: str, eclipse_project: Optional[str], data: dict[str, str]) -> str: + def _retrieve_key(self, key: str, org_name: str, data: dict[str, str]) -> str: resolved_key = data.get(key) strict = True - # custom handling for eclipse projects, the keys are organized in the format - # bots//github.com/ - if resolved_key is None and eclipse_project is not None: + if resolved_key is None: match key: + case PassVault.KEY_USERNAME: + pattern = self._username_pattern + case PassVault.KEY_PASSWORD: + pattern = self._password_pattern + case PassVault.KEY_2FA_SEED: + pattern = self._twofa_seed_pattern case PassVault.KEY_API_TOKEN: - query_key = "otterdog-token" + pattern = self._api_token_pattern strict = False - case PassVault.KEY_2FA_SEED: - query_key = "2FA-seed" case _: - query_key = key + raise RuntimeError(f"unexpected key '{key}'") - return PassVault._retrieve_resolved_key(f"bots/{eclipse_project}/github.com/{query_key}", strict) + if pattern: + resolved_key = pattern.format(org_name) if resolved_key is None: - raise RuntimeError(f"required key '{key}' not found 
in authorization data") + raise RuntimeError(f"required key '{key}' not found in credential data") - return PassVault._retrieve_resolved_key(resolved_key) + return PassVault._retrieve_resolved_key(resolved_key, strict) @staticmethod def _retrieve_resolved_key(key: str, strict: bool = True) -> str: diff --git a/otterdog/models/__init__.py b/otterdog/models/__init__.py index 622c5deb..b473f900 100644 --- a/otterdog/models/__init__.py +++ b/otterdog/models/__init__.py @@ -65,7 +65,7 @@ class LivePatchType(Enum): class LivePatchApplyFn(Protocol): - def __call__(self, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def __call__(self, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: ... @@ -105,8 +105,8 @@ def of_changes( LivePatchType.CHANGE, expected_object, current_object, changes, parent_object, forced_update, fn ) - def apply(self, org_id: str, provider: GitHubProvider) -> None: - self.fn(self, org_id, provider) + async def apply(self, org_id: str, provider: GitHubProvider) -> None: + await self.fn(self, org_id, provider) @dataclasses.dataclass @@ -332,21 +332,25 @@ def from_provider_data(cls, org_id: str, data: dict[str, Any]): def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[str, Any]: ... 
- def to_provider_data(self, org_id: str, provider: GitHubProvider) -> dict[str, Any]: - return self.dict_to_provider_data(org_id, self.to_model_dict(), provider) + async def to_provider_data(self, org_id: str, provider: GitHubProvider) -> dict[str, Any]: + return await self.dict_to_provider_data(org_id, self.to_model_dict(), provider) @classmethod - def changes_to_provider(cls, org_id: str, data: dict[str, Change[Any]], provider: GitHubProvider) -> dict[str, Any]: - return cls.dict_to_provider_data(org_id, {key: change.to_value for key, change in data.items()}, provider) + async def changes_to_provider( + cls, org_id: str, data: dict[str, Change[Any]], provider: GitHubProvider + ) -> dict[str, Any]: + return await cls.dict_to_provider_data(org_id, {key: change.to_value for key, change in data.items()}, provider) @classmethod - def dict_to_provider_data(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: - mapping = cls.get_mapping_to_provider(org_id, data, provider) + async def dict_to_provider_data(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + mapping = await cls.get_mapping_to_provider(org_id, data, provider) return bend(mapping, data) @classmethod @abstractmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: ... def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: @@ -522,5 +526,5 @@ def generate_live_patch_of_list( cls.generate_live_patch(expected_object, None, parent_object, context, handler) @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: ... 
diff --git a/otterdog/models/branch_protection_rule.py b/otterdog/models/branch_protection_rule.py index 08f86246..82e1bce7 100644 --- a/otterdog/models/branch_protection_rule.py +++ b/otterdog/models/branch_protection_rule.py @@ -316,7 +316,9 @@ def transform_app(x): return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { snake_to_camel_case(field.name): S(field.name) for field in cls.provider_fields() @@ -331,28 +333,28 @@ def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: Gi mapping.pop("pushRestrictions") push_restrictions = data["push_restrictions"] if is_set_and_valid(push_restrictions): - actor_ids = provider.get_actor_node_ids(push_restrictions) + actor_ids = await provider.get_actor_node_ids(push_restrictions) mapping["pushActorIds"] = K(actor_ids) if "review_dismissal_allowances" in data: mapping.pop("reviewDismissalAllowances") review_dismissal_allowances = data["review_dismissal_allowances"] if is_set_and_valid(review_dismissal_allowances): - actor_ids = provider.get_actor_node_ids(review_dismissal_allowances) + actor_ids = await provider.get_actor_node_ids(review_dismissal_allowances) mapping["reviewDismissalActorIds"] = K(actor_ids) if "bypass_pull_request_allowances" in data: mapping.pop("bypassPullRequestAllowances") bypass_pull_request_allowances = data["bypass_pull_request_allowances"] if is_set_and_valid(bypass_pull_request_allowances): - actor_ids = provider.get_actor_node_ids(bypass_pull_request_allowances) + actor_ids = await provider.get_actor_node_ids(bypass_pull_request_allowances) mapping["bypassPullRequestActorIds"] = K(actor_ids) if "bypass_force_push_allowances" in data: mapping.pop("bypassForcePushAllowances") bypass_force_push_allowances = 
data["bypass_force_push_allowances"] if is_set_and_valid(bypass_force_push_allowances): - actor_ids = provider.get_actor_node_ids(bypass_force_push_allowances) + actor_ids = await provider.get_actor_node_ids(bypass_force_push_allowances) mapping["bypassForcePushActorIds"] = K(actor_ids) if "required_status_checks" in data: @@ -370,7 +372,7 @@ def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: Gi else: app_slugs.add("github-actions") - app_ids = provider.get_app_node_ids(app_slugs) + app_ids = await provider.get_app_node_ids(app_slugs) transformed_checks = [] for check in required_status_checks: @@ -393,24 +395,24 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_branch_protection_rule}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, BranchProtectionRule) assert isinstance(patch.parent_object, Repository) - provider.add_branch_protection_rule( + await provider.add_branch_protection_rule( org_id, patch.parent_object.name, patch.parent_object.node_id, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, BranchProtectionRule) assert isinstance(patch.parent_object, Repository) - provider.delete_branch_protection_rule( + await provider.delete_branch_protection_rule( org_id, patch.parent_object.name, patch.current_object.pattern, patch.current_object.id ) @@ -418,10 +420,10 @@ def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvide assert patch.changes is not None assert isinstance(patch.current_object, BranchProtectionRule) assert 
isinstance(patch.parent_object, Repository) - provider.update_branch_protection_rule( + await provider.update_branch_protection_rule( org_id, patch.parent_object.name, patch.current_object.pattern, patch.current_object.id, - cls.changes_to_provider(org_id, patch.changes, provider), + await cls.changes_to_provider(org_id, patch.changes, provider), ) diff --git a/otterdog/models/environment.py b/otterdog/models/environment.py index 0ec38662..3353489f 100644 --- a/otterdog/models/environment.py +++ b/otterdog/models/environment.py @@ -150,7 +150,9 @@ def transform_policy(x): return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -161,7 +163,7 @@ def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: Gi for actor_type, ( actor_id, actor_node_id, - ) in provider.get_actor_ids_with_type(reviewers): + ) in await provider.get_actor_ids_with_type(reviewers): reviewer_mapping.append({"type": actor_type, "id": actor_id}) mapping["reviewers"] = reviewer_mapping @@ -195,32 +197,32 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_environment}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, Environment) assert isinstance(patch.parent_object, Repository) - provider.add_repo_environment( + await provider.add_repo_environment( org_id, patch.parent_object.name, 
patch.expected_object.name, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, Environment) assert isinstance(patch.parent_object, Repository) - provider.delete_repo_environment(org_id, patch.parent_object.name, patch.current_object.name) + await provider.delete_repo_environment(org_id, patch.parent_object.name, patch.current_object.name) case LivePatchType.CHANGE: assert patch.changes is not None assert isinstance(patch.current_object, Environment) assert isinstance(patch.parent_object, Repository) - provider.update_repo_environment( + await provider.update_repo_environment( org_id, patch.parent_object.name, patch.current_object.name, - cls.changes_to_provider(org_id, patch.changes, provider), + await cls.changes_to_provider(org_id, patch.changes, provider), ) diff --git a/otterdog/models/github_organization.py b/otterdog/models/github_organization.py index d3523a4f..8be6a3e6 100644 --- a/otterdog/models/github_organization.py +++ b/otterdog/models/github_organization.py @@ -332,7 +332,7 @@ def load_from_file( return org @classmethod - def load_from_provider( + async def load_from_provider( cls, github_id: str, jsonnet_config: JsonnetConfig, @@ -348,7 +348,7 @@ def load_from_provider( # for now this is the same for organization settings, but there might be cases where it is different. 
default_settings = jsonnet_config.default_org_config["settings"] included_keys = set(default_settings.keys()) - github_settings = provider.get_org_settings(github_id, included_keys, no_web_ui) + github_settings = await provider.get_org_settings(github_id, included_keys, no_web_ui) if printer is not None and is_info_enabled(): end = datetime.now() @@ -357,7 +357,7 @@ def load_from_provider( settings = OrganizationSettings.from_provider_data(github_id, github_settings) if "workflows" in included_keys: - workflow_settings = provider.get_org_workflow_settings(github_id) + workflow_settings = await provider.get_org_workflow_settings(github_id) settings.workflows = OrganizationWorkflowSettings.from_provider_data(github_id, workflow_settings) org = cls(github_id, settings) @@ -367,7 +367,7 @@ def load_from_provider( printer.println("\nwebhooks: Reading...") if jsonnet_config.default_org_webhook_config is not None: - github_webhooks = provider.get_org_webhooks(github_id) + github_webhooks = await provider.get_org_webhooks(github_id) if printer is not None and is_info_enabled(): end = datetime.now() @@ -383,7 +383,7 @@ def load_from_provider( if printer is not None and is_info_enabled(): printer.println("\nsecrets: Reading...") - github_secrets = provider.get_org_secrets(github_id) + github_secrets = await provider.get_org_secrets(github_id) if printer is not None and is_info_enabled(): end = datetime.now() @@ -399,7 +399,7 @@ def load_from_provider( if printer is not None and is_info_enabled(): printer.println("\nvariables: Reading...") - github_variables = provider.get_org_variables(github_id) + github_variables = await provider.get_org_variables(github_id) if printer is not None and is_info_enabled(): end = datetime.now() @@ -411,7 +411,7 @@ def load_from_provider( print_debug("not reading org secrets, no default config available") if jsonnet_config.default_repo_config is not None: - for repo in _load_repos_from_provider(github_id, provider, jsonnet_config, printer): + 
for repo in await _load_repos_from_provider(github_id, provider, jsonnet_config, printer): org.add_repository(repo) else: print_debug("not reading repos, no default config available") @@ -430,10 +430,10 @@ async def _process_single_repo( rest_api = gh_client.rest_api # get repo data - github_repo_data = await rest_api.repo.async_get_repo_data(github_id, repo_name) + github_repo_data = await rest_api.repo.get_repo_data(github_id, repo_name) repo = Repository.from_provider_data(github_id, github_repo_data) - github_repo_workflow_data = await rest_api.repo.async_get_workflow_settings(github_id, repo_name) + github_repo_workflow_data = await rest_api.repo.get_workflow_settings(github_id, repo_name) repo.workflows = RepositoryWorkflowSettings.from_provider_data(github_id, github_repo_workflow_data) if jsonnet_config.default_branch_protection_rule_config is not None: @@ -448,7 +448,7 @@ async def _process_single_repo( # TODO: support rulesets in private repos with enterprise plan if jsonnet_config.default_repo_ruleset_config is not None and repo.private is False: # get rulesets of the repo - rulesets = await rest_api.repo.async_get_rulesets(github_id, repo_name) + rulesets = await rest_api.repo.get_rulesets(github_id, repo_name) for github_ruleset in rulesets: # FIXME: need to associate an app id to its slug # GitHub does not support that atm, so we lookup the currently installed @@ -477,7 +477,7 @@ async def _process_single_repo( if jsonnet_config.default_org_webhook_config is not None: # get webhooks of the repo - webhooks = await rest_api.repo.async_get_webhooks(github_id, repo_name) + webhooks = await rest_api.repo.get_webhooks(github_id, repo_name) for github_webhook in webhooks: repo.add_webhook(RepositoryWebhook.from_provider_data(github_id, github_webhook)) else: @@ -485,7 +485,7 @@ async def _process_single_repo( if jsonnet_config.default_repo_secret_config is not None: # get secrets of the repo - secrets = await rest_api.repo.async_get_secrets(github_id, 
repo_name) + secrets = await rest_api.repo.get_secrets(github_id, repo_name) for github_secret in secrets: repo.add_secret(RepositorySecret.from_provider_data(github_id, github_secret)) else: @@ -493,7 +493,7 @@ async def _process_single_repo( if jsonnet_config.default_repo_variable_config is not None: # get variables of the repo - variables = await rest_api.repo.async_get_variables(github_id, repo_name) + variables = await rest_api.repo.get_variables(github_id, repo_name) for github_variable in variables: repo.add_variable(RepositoryVariable.from_provider_data(github_id, github_variable)) else: @@ -501,7 +501,7 @@ async def _process_single_repo( if jsonnet_config.default_environment_config is not None: # get environments of the repo - environments = await rest_api.repo.async_get_environments(github_id, repo_name) + environments = await rest_api.repo.get_environments(github_id, repo_name) for github_environment in environments: repo.add_environment(Environment.from_provider_data(github_id, github_environment)) else: @@ -513,31 +513,30 @@ async def _process_single_repo( return repo_name, repo -def _load_repos_from_provider( - github_id: str, client: GitHubProvider, jsonnet_config: JsonnetConfig, printer: Optional[IndentingPrinter] = None +async def _load_repos_from_provider( + github_id: str, provider: GitHubProvider, jsonnet_config: JsonnetConfig, printer: Optional[IndentingPrinter] = None ) -> list[Repository]: start = datetime.now() if printer is not None and is_info_enabled(): printer.println("\nrepositories: Reading...") - repo_names = client.get_repos(github_id) + repo_names = await provider.get_repos(github_id) - teams = {str(team["id"]): f"{github_id}/{team['slug']}" for team in client.rest_api.org.get_teams(github_id)} + teams = { + str(team["id"]): f"{github_id}/{team['slug']}" for team in await provider.rest_api.org.get_teams(github_id) + } app_installations = { str(installation["app_id"]): installation["app_slug"] - for installation in 
client.rest_api.org.get_app_installations(github_id) + for installation in await provider.rest_api.org.get_app_installations(github_id) } - async def gather(): - return await asyncio.gather( - *[ - _process_single_repo(client, github_id, repo_name, jsonnet_config, teams, app_installations) - for repo_name in repo_names - ] - ) - - result = asyncio.run(gather()) + result = await asyncio.gather( + *[ + _process_single_repo(provider, github_id, repo_name, jsonnet_config, teams, app_installations) + for repo_name in repo_names + ] + ) github_repos = [] for data in result: diff --git a/otterdog/models/organization_secret.py b/otterdog/models/organization_secret.py index 0e5cd36f..245738c6 100644 --- a/otterdog/models/organization_secret.py +++ b/otterdog/models/organization_secret.py @@ -86,7 +86,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -96,7 +98,7 @@ def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: Gi if "selected_repositories" in mapping: mapping.pop("selected_repositories") - mapping["selected_repository_ids"] = K(provider.get_repo_ids(org_id, data["selected_repositories"])) + mapping["selected_repository_ids"] = K(await provider.get_repo_ids(org_id, data["selected_repositories"])) return mapping @@ -104,19 +106,24 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_org_secret}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: 
LivePatch, org_id: str, provider: GitHubProvider) -> None: match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, OrganizationSecret) - provider.add_org_secret(org_id, patch.expected_object.to_provider_data(org_id, provider)) + await provider.add_org_secret( + org_id, + await patch.expected_object.to_provider_data(org_id, provider), + ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, OrganizationSecret) - provider.delete_org_secret(org_id, patch.current_object.name) + await provider.delete_org_secret(org_id, patch.current_object.name) case LivePatchType.CHANGE: assert isinstance(patch.expected_object, OrganizationSecret) assert isinstance(patch.current_object, OrganizationSecret) - provider.update_org_secret( - org_id, patch.current_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.update_org_secret( + org_id, + patch.current_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/organization_settings.py b/otterdog/models/organization_settings.py index aca41bc4..5f02975a 100644 --- a/otterdog/models/organization_settings.py +++ b/otterdog/models/organization_settings.py @@ -183,7 +183,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -242,8 +244,8 @@ def generate_live_patch( ) @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: assert 
patch.patch_type == LivePatchType.CHANGE assert patch.changes is not None - github_settings = cls.changes_to_provider(org_id, patch.changes, provider) - provider.update_org_settings(org_id, github_settings) + github_settings = await cls.changes_to_provider(org_id, patch.changes, provider) + await provider.update_org_settings(org_id, github_settings) diff --git a/otterdog/models/organization_variable.py b/otterdog/models/organization_variable.py index acd3b078..8ea0fb21 100644 --- a/otterdog/models/organization_variable.py +++ b/otterdog/models/organization_variable.py @@ -84,7 +84,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -94,7 +96,7 @@ def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: Gi if "selected_repositories" in mapping: mapping.pop("selected_repositories") - mapping["selected_repository_ids"] = K(provider.get_repo_ids(org_id, data["selected_repositories"])) + mapping["selected_repository_ids"] = K(await provider.get_repo_ids(org_id, data["selected_repositories"])) return mapping @@ -102,19 +104,24 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_org_variable}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, OrganizationVariable) - provider.add_org_variable(org_id, 
patch.expected_object.to_provider_data(org_id, provider)) + await provider.add_org_variable( + org_id, + await patch.expected_object.to_provider_data(org_id, provider), + ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, OrganizationVariable) - provider.delete_org_variable(org_id, patch.current_object.name) + await provider.delete_org_variable(org_id, patch.current_object.name) case LivePatchType.CHANGE: assert isinstance(patch.expected_object, OrganizationVariable) assert isinstance(patch.current_object, OrganizationVariable) - provider.update_org_variable( - org_id, patch.current_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.update_org_variable( + org_id, + patch.current_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/organization_webhook.py b/otterdog/models/organization_webhook.py index b5911c2b..d318c174 100644 --- a/otterdog/models/organization_webhook.py +++ b/otterdog/models/organization_webhook.py @@ -31,19 +31,24 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_org_webhook}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, OrganizationWebhook) - provider.add_org_webhook(org_id, patch.expected_object.to_provider_data(org_id, provider)) + await provider.add_org_webhook( + org_id, + await patch.expected_object.to_provider_data(org_id, provider), + ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, OrganizationWebhook) - provider.delete_org_webhook(org_id, patch.current_object.id, patch.current_object.url) + await provider.delete_org_webhook(org_id, patch.current_object.id, patch.current_object.url) case 
LivePatchType.CHANGE: assert isinstance(patch.expected_object, OrganizationWebhook) assert isinstance(patch.current_object, OrganizationWebhook) - provider.update_org_webhook( - org_id, patch.current_object.id, patch.expected_object.to_provider_data(org_id, provider) + await provider.update_org_webhook( + org_id, + patch.current_object.id, + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/organization_workflow_settings.py b/otterdog/models/organization_workflow_settings.py index 7aa10b5e..3593b97e 100644 --- a/otterdog/models/organization_workflow_settings.py +++ b/otterdog/models/organization_workflow_settings.py @@ -91,12 +91,14 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: - mapping = super().get_mapping_to_provider(org_id, data, provider) + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: + mapping = await super().get_mapping_to_provider(org_id, data, provider) if "selected_repositories" in data: mapping.pop("selected_repositories") - mapping["selected_repository_ids"] = K(provider.get_repo_ids(org_id, data["selected_repositories"])) + mapping["selected_repository_ids"] = K(await provider.get_repo_ids(org_id, data["selected_repositories"])) return mapping @@ -133,8 +135,8 @@ def generate_live_patch( ) @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: assert patch.patch_type == LivePatchType.CHANGE assert patch.changes is not None - github_settings = cls.changes_to_provider(org_id, patch.changes, provider) - provider.update_org_workflow_settings(org_id, github_settings) + github_settings = await 
cls.changes_to_provider(org_id, patch.changes, provider) + await provider.update_org_workflow_settings(org_id, github_settings) diff --git a/otterdog/models/repo_ruleset.py b/otterdog/models/repo_ruleset.py index 1078c05e..93e1526e 100644 --- a/otterdog/models/repo_ruleset.py +++ b/otterdog/models/repo_ruleset.py @@ -31,21 +31,23 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_repo_ruleset}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, RepositoryRuleset) assert isinstance(patch.parent_object, Repository) - provider.add_repo_ruleset( - org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.add_repo_ruleset( + org_id, + patch.parent_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, RepositoryRuleset) assert isinstance(patch.parent_object, Repository) - provider.delete_repo_ruleset( + await provider.delete_repo_ruleset( org_id, patch.parent_object.name, patch.current_object.id, patch.current_object.name ) @@ -53,9 +55,9 @@ def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvide assert isinstance(patch.expected_object, RepositoryRuleset) assert isinstance(patch.current_object, RepositoryRuleset) assert isinstance(patch.parent_object, Repository) - provider.update_repo_ruleset( + await provider.update_repo_ruleset( org_id, patch.parent_object.name, patch.current_object.id, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/repo_secret.py 
b/otterdog/models/repo_secret.py index 2a3b1664..df1e8d79 100644 --- a/otterdog/models/repo_secret.py +++ b/otterdog/models/repo_secret.py @@ -31,29 +31,31 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_repo_secret}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, RepositorySecret) assert isinstance(patch.parent_object, Repository) - provider.add_repo_secret( - org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.add_repo_secret( + org_id, + patch.parent_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, RepositorySecret) assert isinstance(patch.parent_object, Repository) - provider.delete_repo_secret(org_id, patch.parent_object.name, patch.current_object.name) + await provider.delete_repo_secret(org_id, patch.parent_object.name, patch.current_object.name) case LivePatchType.CHANGE: assert isinstance(patch.expected_object, RepositorySecret) assert isinstance(patch.current_object, RepositorySecret) assert isinstance(patch.parent_object, Repository) - provider.update_repo_secret( + await provider.update_repo_secret( org_id, patch.parent_object.name, patch.current_object.name, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/repo_variable.py b/otterdog/models/repo_variable.py index ff40b2f7..13cfa2d0 100644 --- a/otterdog/models/repo_variable.py +++ b/otterdog/models/repo_variable.py @@ -31,29 +31,31 @@ def get_jsonnet_template_function(self, jsonnet_config: 
JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_repo_variable}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, RepositoryVariable) assert isinstance(patch.parent_object, Repository) - provider.add_repo_variable( - org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.add_repo_variable( + org_id, + patch.parent_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, RepositoryVariable) assert isinstance(patch.parent_object, Repository) - provider.delete_repo_variable(org_id, patch.parent_object.name, patch.current_object.name) + await provider.delete_repo_variable(org_id, patch.parent_object.name, patch.current_object.name) case LivePatchType.CHANGE: assert isinstance(patch.expected_object, RepositoryVariable) assert isinstance(patch.current_object, RepositoryVariable) assert isinstance(patch.parent_object, Repository) - provider.update_repo_variable( + await provider.update_repo_variable( org_id, patch.parent_object.name, patch.current_object.name, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/repo_webhook.py b/otterdog/models/repo_webhook.py index b49a7242..96dec460 100644 --- a/otterdog/models/repo_webhook.py +++ b/otterdog/models/repo_webhook.py @@ -31,21 +31,23 @@ def get_jsonnet_template_function(self, jsonnet_config: JsonnetConfig, extend: b return f"orgs.{jsonnet_config.create_repo_webhook}" @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def 
apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, RepositoryWebhook) assert isinstance(patch.parent_object, Repository) - provider.add_repo_webhook( - org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.add_repo_webhook( + org_id, + patch.parent_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.REMOVE: assert isinstance(patch.current_object, RepositoryWebhook) assert isinstance(patch.parent_object, Repository) - provider.delete_repo_webhook( + await provider.delete_repo_webhook( org_id, patch.parent_object.name, patch.current_object.id, patch.current_object.url ) @@ -53,9 +55,9 @@ def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvide assert isinstance(patch.expected_object, RepositoryWebhook) assert isinstance(patch.current_object, RepositoryWebhook) assert isinstance(patch.parent_object, Repository) - provider.update_repo_webhook( + await provider.update_repo_webhook( org_id, patch.parent_object.name, patch.current_object.id, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), ) diff --git a/otterdog/models/repo_workflow_settings.py b/otterdog/models/repo_workflow_settings.py index 4cd77e21..d664df74 100644 --- a/otterdog/models/repo_workflow_settings.py +++ b/otterdog/models/repo_workflow_settings.py @@ -138,11 +138,13 @@ def include_field_for_diff_computation(self, field: dataclasses.Field) -> bool: return super().include_field_for_diff_computation(field) @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, 
Any]: if "enabled" in data and data["enabled"] is False: return {"enabled": S("enabled")} else: - return super().get_mapping_to_provider(org_id, data, provider) + return await super().get_mapping_to_provider(org_id, data, provider) @classmethod def generate_live_patch( @@ -188,7 +190,7 @@ def generate_live_patch( ) @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: from .repository import Repository assert isinstance(patch.parent_object, Repository) @@ -196,14 +198,16 @@ def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvide match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, RepositoryWorkflowSettings) - provider.update_repo_workflow_settings( - org_id, patch.parent_object.name, patch.expected_object.to_provider_data(org_id, provider) + await provider.update_repo_workflow_settings( + org_id, + patch.parent_object.name, + await patch.expected_object.to_provider_data(org_id, provider), ) case LivePatchType.CHANGE: assert patch.changes is not None - github_settings = cls.changes_to_provider(org_id, patch.changes, provider) - provider.update_repo_workflow_settings(org_id, patch.parent_object.name, github_settings) + github_settings = await cls.changes_to_provider(org_id, patch.changes, provider) + await provider.update_repo_workflow_settings(org_id, patch.parent_object.name, github_settings) case _: raise RuntimeError(f"unexpected patch type '{patch.patch_type}'") diff --git a/otterdog/models/repository.py b/otterdog/models/repository.py index 17491b26..c4a2d197 100644 --- a/otterdog/models/repository.py +++ b/otterdog/models/repository.py @@ -521,7 +521,9 @@ def status_to_bool(status): return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def 
get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -833,13 +835,13 @@ def generate_live_patch( ) @classmethod - def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: + async def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvider) -> None: match patch.patch_type: case LivePatchType.ADD: assert isinstance(patch.expected_object, Repository) - provider.add_repo( + await provider.add_repo( org_id, - patch.expected_object.to_provider_data(org_id, provider), + await patch.expected_object.to_provider_data(org_id, provider), patch.expected_object.template_repository, patch.expected_object.post_process_template_content, patch.expected_object.forked_repository, @@ -849,12 +851,14 @@ def apply_live_patch(cls, patch: LivePatch, org_id: str, provider: GitHubProvide case LivePatchType.REMOVE: assert isinstance(patch.current_object, Repository) - provider.delete_repo(org_id, patch.current_object.name) + await provider.delete_repo(org_id, patch.current_object.name) case LivePatchType.CHANGE: assert patch.changes is not None assert isinstance(patch.expected_object, Repository) assert isinstance(patch.current_object, Repository) - provider.update_repo( - org_id, patch.current_object.name, cls.changes_to_provider(org_id, patch.changes, provider) + await provider.update_repo( + org_id, + patch.current_object.name, + await cls.changes_to_provider(org_id, patch.changes, provider), ) diff --git a/otterdog/models/ruleset.py b/otterdog/models/ruleset.py index c39e047e..be38d4bb 100644 --- a/otterdog/models/ruleset.py +++ b/otterdog/models/ruleset.py @@ -341,7 +341,9 @@ def check_simple_rule(prop_key: str, rule_type: str, value_if_rule_is_present: b return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, 
data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } @@ -388,11 +390,11 @@ def extract_actor_and_bypass_mode(encoded_data: str) -> tuple[str, str]: elif actor.startswith("@"): team, bypass_mode = extract_actor_and_bypass_mode(actor[1:]) actor_type = "Team" - actor_id = provider.rest_api.org.get_team_ids(team)[0] + actor_id = (await provider.rest_api.org.get_team_ids(team))[0] else: app, bypass_mode = extract_actor_and_bypass_mode(actor) actor_type = "Integration" - actor_id = provider.rest_api.app.get_app_ids(app)[0] + actor_id = (await provider.rest_api.app.get_app_ids(app))[0] transformed_actors.append( {"actor_id": K(int(actor_id)), "actor_type": K(actor_type), "bypass_mode": K(bypass_mode)} @@ -473,7 +475,7 @@ def add_parameter(prop_key: str, param_key: str, params: dict[str, Any]): if app_slug != "any": app_slugs.add(app_slug) - app_ids = provider.get_app_ids(app_slugs) + app_ids = await provider.get_app_ids(app_slugs) transformed_checks = [] for check in required_status_checks: diff --git a/otterdog/models/secret.py b/otterdog/models/secret.py index c18cd520..457f8607 100644 --- a/otterdog/models/secret.py +++ b/otterdog/models/secret.py @@ -97,7 +97,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: return { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } diff --git a/otterdog/models/variable.py b/otterdog/models/variable.py 
index f5edc6be..11a62bec 100644 --- a/otterdog/models/variable.py +++ b/otterdog/models/variable.py @@ -53,7 +53,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: return { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } diff --git a/otterdog/models/webhook.py b/otterdog/models/webhook.py index 6c876b19..affbc431 100644 --- a/otterdog/models/webhook.py +++ b/otterdog/models/webhook.py @@ -125,7 +125,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping: dict[str, Any] = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } diff --git a/otterdog/models/workflow_settings.py b/otterdog/models/workflow_settings.py index 40beede1..0c03d8df 100644 --- a/otterdog/models/workflow_settings.py +++ b/otterdog/models/workflow_settings.py @@ -132,7 +132,9 @@ def get_mapping_from_provider(cls, org_id: str, data: dict[str, Any]) -> dict[st return mapping @classmethod - def get_mapping_to_provider(cls, org_id: str, data: dict[str, Any], provider: GitHubProvider) -> dict[str, Any]: + async def get_mapping_to_provider( + cls, org_id: str, data: dict[str, Any], provider: GitHubProvider + ) -> dict[str, Any]: mapping = { field.name: S(field.name) for field in cls.provider_fields() if not is_unset(data.get(field.name, UNSET)) } diff --git 
a/otterdog/operations/__init__.py b/otterdog/operations/__init__.py index 961abeef..a2222d92 100644 --- a/otterdog/operations/__init__.py +++ b/otterdog/operations/__init__.py @@ -43,7 +43,7 @@ def pre_execute(self) -> None: ... @abstractmethod - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: ... def post_execute(self) -> None: diff --git a/otterdog/operations/apply.py b/otterdog/operations/apply.py index e40ddbc8..e8b0a470 100644 --- a/otterdog/operations/apply.py +++ b/otterdog/operations/apply.py @@ -77,7 +77,7 @@ def handle_modified_object( ) return modified - def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: + async def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: self.printer.println() if diff_status.total_changes(self._delete_resources) == 0: @@ -105,13 +105,12 @@ def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[Live import click self.printer.println("\nApplying changes:\n") - - with click.progressbar(patches) as bar: + with click.progressbar(patches, file=self.printer.writer) as bar: for patch in bar: if patch.patch_type == LivePatchType.REMOVE and not self._delete_resources: continue else: - patch.apply(org_id, self.gh_client) + await patch.apply(org_id, self.gh_client) delete_snippet = "deleted" if self._delete_resources else "live resources ignored" diff --git a/otterdog/operations/canonical_diff.py b/otterdog/operations/canonical_diff.py index 54f4efb1..c9c7dd0a 100644 --- a/otterdog/operations/canonical_diff.py +++ b/otterdog/operations/canonical_diff.py @@ -28,7 +28,7 @@ def __init__(self): def pre_execute(self) -> None: self.printer.println("Showing canonical diff:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = 
org_config.jsonnet_config jsonnet_config.init_template() diff --git a/otterdog/operations/delete_file.py b/otterdog/operations/delete_file.py index 5b4b5fce..1a11e655 100644 --- a/otterdog/operations/delete_file.py +++ b/otterdog/operations/delete_file.py @@ -42,7 +42,7 @@ def message(self) -> str: def pre_execute(self) -> None: self.printer.println(f"Deleting file '{self._path}' in organization repository '{self.repo}':") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -85,7 +85,9 @@ def execute(self, org_config: OrganizationConfig) -> int: ) try: - deleted_file = rest_api.content.delete_content(github_id, repo.name, self.path, self.message) + deleted_file = await rest_api.content.delete_content( + github_id, repo.name, self.path, self.message + ) except RuntimeError as e: collected_error = e diff --git a/otterdog/operations/diff_operation.py b/otterdog/operations/diff_operation.py index 8b13ac8a..d4715d4d 100644 --- a/otterdog/operations/diff_operation.py +++ b/otterdog/operations/diff_operation.py @@ -61,7 +61,7 @@ def init(self, config: OtterdogConfig, printer: IndentingPrinter) -> None: super().init(config, printer) self._validator.init(config, printer) - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: self._org_config = org_config self.printer.println(f"\nOrganization {style(org_config.name, bright=True)}[id={org_config.github_id}]") @@ -75,13 +75,13 @@ def execute(self, org_config: OrganizationConfig) -> int: self.printer.level_up() try: - return self._generate_diff(org_config) + return await self._generate_diff(org_config) finally: self.printer.level_down() self._gh_client.close() def setup_github_client(self, org_config: OrganizationConfig) -> GitHubProvider: - return 
GitHubProvider(self.config.get_credentials(org_config)) + return GitHubProvider(self.config.get_credentials(org_config, only_token=self.no_web_ui)) @property def gh_client(self) -> GitHubProvider: @@ -94,7 +94,7 @@ def verbose_output(self): def resolve_secrets(self) -> bool: return True - def _generate_diff(self, org_config: OrganizationConfig) -> int: + async def _generate_diff(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -132,7 +132,7 @@ def _generate_diff(self, org_config: OrganizationConfig) -> int: ) try: - current_org = self.load_current_org(github_id, jsonnet_config) + current_org = await self.load_current_org(github_id, jsonnet_config) except RuntimeError as e: self.printer.print_error(f"failed to load current configuration\n{str(e)}") return 1 @@ -193,14 +193,14 @@ def handle(patch: LivePatch) -> None: if live_patch.expected_object is not None: live_patch.expected_object.resolve_secrets(self.config.get_secret) - self.handle_finish(github_id, diff_status, live_patches) + await self.handle_finish(github_id, diff_status, live_patches) return 0 def load_expected_org(self, github_id: str, org_file_name: str) -> GitHubOrganization: return GitHubOrganization.load_from_file(github_id, org_file_name, self.config) - def load_current_org(self, github_id: str, jsonnet_config: JsonnetConfig) -> GitHubOrganization: - return GitHubOrganization.load_from_provider( + async def load_current_org(self, github_id: str, jsonnet_config: JsonnetConfig) -> GitHubOrganization: + return await GitHubOrganization.load_from_provider( github_id, jsonnet_config, self.gh_client, self.no_web_ui, self.printer ) @@ -235,5 +235,5 @@ def handle_modified_object( ... @abstractmethod - def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: + async def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: ... 
diff --git a/otterdog/operations/dispatch_workflow.py b/otterdog/operations/dispatch_workflow.py index 5d2f2b95..aea49e65 100644 --- a/otterdog/operations/dispatch_workflow.py +++ b/otterdog/operations/dispatch_workflow.py @@ -34,7 +34,7 @@ def workflow_name(self) -> str: def pre_execute(self) -> None: self.printer.println(f"Dispatching workflow '{self.workflow_name}' in organization repo '{self.repo_name}':") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id self.printer.println(f"\nOrganization {style(org_config.name, bright=True)}[id={github_id}]") @@ -48,7 +48,7 @@ def execute(self, org_config: OrganizationConfig) -> int: return 1 with GitHubProvider(credentials) as provider: - success = provider.dispatch_workflow(github_id, self.repo_name, self.workflow_name) + success = await provider.dispatch_workflow(github_id, self.repo_name, self.workflow_name) if success is True: self.printer.println(f"workflow '{self.workflow_name}' dispatched for repo '{self.repo_name}'") else: diff --git a/otterdog/operations/fetch_config.py b/otterdog/operations/fetch_config.py index 0f1d439c..1e3a671c 100644 --- a/otterdog/operations/fetch_config.py +++ b/otterdog/operations/fetch_config.py @@ -28,7 +28,7 @@ def __init__(self, force_processing: bool, pull_request: str): def pre_execute(self) -> None: self.printer.println("Fetching organization configurations:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config @@ -59,13 +59,13 @@ def execute(self, org_config: OrganizationConfig) -> int: with GitHubProvider(credentials) as provider: try: if self.pull_request is not None: - ref = provider.get_ref_for_pull_request( + ref = await provider.get_ref_for_pull_request( org_config.github_id, org_config.config_repo, self.pull_request ) 
else: ref = None - definition = provider.get_content( + definition = await provider.get_content( org_config.github_id, org_config.config_repo, f"otterdog/{github_id}.jsonnet", diff --git a/otterdog/operations/import_configuration.py b/otterdog/operations/import_configuration.py index e2266e77..eaf9b262 100644 --- a/otterdog/operations/import_configuration.py +++ b/otterdog/operations/import_configuration.py @@ -39,7 +39,7 @@ def no_web_ui(self) -> bool: def pre_execute(self) -> None: self.printer.println("Importing resources:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -83,7 +83,7 @@ def execute(self, org_config: OrganizationConfig) -> int: ) with GitHubProvider(credentials) as provider: - organization = GitHubOrganization.load_from_provider( + organization = await GitHubOrganization.load_from_provider( github_id, jsonnet_config, provider, self.no_web_ui, self.printer ) diff --git a/otterdog/operations/list_apps.py b/otterdog/operations/list_apps.py index c6b7d284..f235fcdc 100644 --- a/otterdog/operations/list_apps.py +++ b/otterdog/operations/list_apps.py @@ -37,7 +37,7 @@ def post_execute(self) -> None: apps = [v for k, v in sorted(self.all_apps.items())] self.printer.println(json.dumps(apps, indent=2)) - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id if not self.json_output or is_info_enabled(): @@ -52,7 +52,7 @@ def execute(self, org_config: OrganizationConfig) -> int: return 1 with GitHubProvider(credentials) as provider: - apps = provider.rest_api.org.get_app_installations(github_id) + apps = await provider.rest_api.org.get_app_installations(github_id) if not self.json_output: for app in apps: diff --git a/otterdog/operations/list_members.py 
b/otterdog/operations/list_members.py index ec32798e..ace45f28 100644 --- a/otterdog/operations/list_members.py +++ b/otterdog/operations/list_members.py @@ -40,7 +40,7 @@ def pre_execute(self) -> None: def post_execute(self) -> None: pass - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -70,10 +70,10 @@ def execute(self, org_config: OrganizationConfig) -> int: return 1 with GitHubProvider(credentials) as provider: - members = provider.rest_api.org.list_members(github_id, self.two_factor_disabled) + members = await provider.rest_api.org.list_members(github_id, self.two_factor_disabled) if self.two_factor_disabled is True: - all_members = provider.rest_api.org.list_members(github_id, False) + all_members = await provider.rest_api.org.list_members(github_id, False) two_factor_status = ( style("enabled", fg="green") if organization.settings.two_factor_requirement is True diff --git a/otterdog/operations/local_plan.py b/otterdog/operations/local_plan.py index 5fba6c27..77b19a86 100644 --- a/otterdog/operations/local_plan.py +++ b/otterdog/operations/local_plan.py @@ -44,7 +44,7 @@ def resolve_secrets(self) -> bool: def setup_github_client(self, org_config: OrganizationConfig) -> GitHubProvider: return GitHubProvider(None) - def load_current_org(self, github_id: str, jsonnet_config: JsonnetConfig) -> GitHubOrganization: + async def load_current_org(self, github_id: str, jsonnet_config: JsonnetConfig) -> GitHubOrganization: other_org_file_name = jsonnet_config.org_config_file + self.suffix if not os.path.exists(other_org_file_name): diff --git a/otterdog/operations/plan.py b/otterdog/operations/plan.py index 8d7fc65d..4043e3fe 100644 --- a/otterdog/operations/plan.py +++ b/otterdog/operations/plan.py @@ -105,7 +105,7 @@ def handle_modified_object( return settings_to_change - def 
handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: + async def handle_finish(self, org_id: str, diff_status: DiffStatus, patches: list[LivePatch]) -> None: self.printer.println( f"\n{style('Plan', bright=True)}: {diff_status.additions} to add, " f"{diff_status.differences} to change, " diff --git a/otterdog/operations/push_config.py b/otterdog/operations/push_config.py index d98602be..28510241 100644 --- a/otterdog/operations/push_config.py +++ b/otterdog/operations/push_config.py @@ -32,7 +32,7 @@ def push_message(self) -> Optional[str]: def pre_execute(self) -> None: self.printer.println("Pushing organization configurations:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -67,7 +67,7 @@ def execute(self, org_config: OrganizationConfig) -> int: updated_files = [] updated = False - if provider.update_content( + if await provider.update_content( org_config.github_id, org_config.config_repo, f"otterdog/{github_id}.jsonnet", @@ -77,7 +77,7 @@ def execute(self, org_config: OrganizationConfig) -> int: updated_files.append(f"otterdog/{github_id}.jsonnet") updated = True - if provider.update_content( + if await provider.update_content( org_config.github_id, org_config.config_repo, "otterdog/jsonnetfile.json", @@ -87,7 +87,7 @@ def execute(self, org_config: OrganizationConfig) -> int: updated_files.append("otterdog/jsonnetfile.json") updated |= True - if provider.update_content( + if await provider.update_content( org_config.github_id, org_config.config_repo, "otterdog/jsonnetfile.lock.json", diff --git a/otterdog/operations/show.py b/otterdog/operations/show.py index fed86f42..3ed9f858 100644 --- a/otterdog/operations/show.py +++ b/otterdog/operations/show.py @@ -41,7 +41,7 @@ def pre_execute(self) -> None: if not self.markdown: 
self.printer.println("Showing organization resources:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() diff --git a/otterdog/operations/show_default.py b/otterdog/operations/show_default.py index 21a80e24..c20ec0c4 100644 --- a/otterdog/operations/show_default.py +++ b/otterdog/operations/show_default.py @@ -31,7 +31,7 @@ def pre_execute(self) -> None: if not self.markdown: self.printer.println("Showing defaults configurations:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() diff --git a/otterdog/operations/show_live.py b/otterdog/operations/show_live.py index e8dbd1c2..8c990df5 100644 --- a/otterdog/operations/show_live.py +++ b/otterdog/operations/show_live.py @@ -30,7 +30,7 @@ def no_web_ui(self) -> bool: def pre_execute(self) -> None: self.printer.println("Showing live resources:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -52,7 +52,7 @@ def execute(self, org_config: OrganizationConfig) -> int: "the resulting config will be incomplete." 
) - organization = GitHubOrganization.load_from_provider( + organization = await GitHubOrganization.load_from_provider( github_id, jsonnet_config, provider, self.no_web_ui, self.printer ) diff --git a/otterdog/operations/sync_template.py b/otterdog/operations/sync_template.py index 85cb9df8..18b6ea8f 100644 --- a/otterdog/operations/sync_template.py +++ b/otterdog/operations/sync_template.py @@ -32,7 +32,7 @@ def repo(self) -> str: def pre_execute(self) -> None: self.printer.println(f"Syncing organization repos '{self.repo}' from template master:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() @@ -69,7 +69,7 @@ def execute(self, org_config: OrganizationConfig) -> int: if repo is not None and repo.archived is False: if is_set_and_present(repo.template_repository): self.printer.println(f'Syncing repository["{style(repo.name, bright=True)}"]') - updated_files = rest_api.repo.sync_from_template_repository( + updated_files = await rest_api.repo.sync_from_template_repository( github_id, repo.name, repo.template_repository, diff --git a/otterdog/operations/validate.py b/otterdog/operations/validate.py index 93b37541..65ba2124 100644 --- a/otterdog/operations/validate.py +++ b/otterdog/operations/validate.py @@ -27,7 +27,7 @@ def __init__(self): def pre_execute(self) -> None: self.printer.println("Validating organization configurations:") - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id jsonnet_config = org_config.jsonnet_config jsonnet_config.init_template() diff --git a/otterdog/operations/web_login.py b/otterdog/operations/web_login.py index a4dd3936..afdda828 100644 --- a/otterdog/operations/web_login.py +++ b/otterdog/operations/web_login.py @@ -24,7 +24,7 @@ def 
__init__(self): def pre_execute(self) -> None: pass - def execute(self, org_config: OrganizationConfig) -> int: + async def execute(self, org_config: OrganizationConfig) -> int: github_id = org_config.github_id self.printer.println(f"\nOrganization {style(org_config.name, bright=True)}[id={github_id}]") @@ -38,7 +38,7 @@ def execute(self, org_config: OrganizationConfig) -> int: return 1 with GitHubProvider(credentials) as provider: - provider.open_browser_with_logged_in_user(github_id) + await provider.web_client.open_browser_with_logged_in_user(github_id) return 0 finally: diff --git a/otterdog/providers/github/__init__.py b/otterdog/providers/github/__init__.py index 46d9378c..2c32e901 100644 --- a/otterdog/providers/github/__init__.py +++ b/otterdog/providers/github/__init__.py @@ -13,6 +13,7 @@ from otterdog import resources, utils from otterdog.credentials import Credentials +from otterdog.providers.github.auth import token_auth from .graphql import GraphQLClient from .rest import RestApi @@ -52,11 +53,9 @@ def close(self) -> None: self.rest_api.close() def _init_clients(self): - from .rest.auth.token import TokenAuthStrategy - - self.rest_api = RestApi(TokenAuthStrategy(self._credentials.github_token)) + self.rest_api = RestApi(token_auth(self._credentials.github_token)) self.web_client = WebClient(self._credentials) - self.graphql_client = GraphQLClient(self._credentials.github_token) + self.graphql_client = GraphQLClient(token_auth(self._credentials.github_token)) def __getstate__(self): return ( @@ -75,10 +74,10 @@ def __setstate__(self, state): ) = state self._init_clients() - def get_content(self, org_id: str, repo_name: str, path: str, ref: Optional[str] = None) -> str: - return self.rest_api.content.get_content(org_id, repo_name, path, ref) + async def get_content(self, org_id: str, repo_name: str, path: str, ref: Optional[str] = None) -> str: + return await self.rest_api.content.get_content(org_id, repo_name, path, ref) - def update_content( + async 
def update_content( self, org_id: str, repo_name: str, @@ -86,26 +85,26 @@ def update_content( content: str, message: Optional[str] = None, ) -> bool: - return self.rest_api.content.update_content(org_id, repo_name, path, content, message) + return await self.rest_api.content.update_content(org_id, repo_name, path, content, message) - def get_org_settings(self, org_id: str, included_keys: set[str], no_web_ui: bool) -> dict[str, Any]: + async def get_org_settings(self, org_id: str, included_keys: set[str], no_web_ui: bool) -> dict[str, Any]: # first, get supported settings via the rest api. required_rest_keys = {x for x in included_keys if x in self._settings_restapi_keys} - merged_settings = self.rest_api.org.get_settings(org_id, required_rest_keys) + merged_settings = await self.rest_api.org.get_settings(org_id, required_rest_keys) # second, get settings only accessible via the web interface and merge # them with the other settings, unless --no-web-ui is specified. if not no_web_ui: required_web_keys = {x for x in included_keys if x in self._settings_web_keys} if len(required_web_keys) > 0: - web_settings = self.web_client.get_org_settings(org_id, required_web_keys) + web_settings = await self.web_client.get_org_settings(org_id, required_web_keys) merged_settings.update(web_settings) utils.print_trace(f"merged org settings = {merged_settings}") return merged_settings - def update_org_settings(self, org_id: str, settings: dict[str, Any]) -> None: + async def update_org_settings(self, org_id: str, settings: dict[str, Any]) -> None: rest_fields = {} web_fields = {} @@ -121,47 +120,47 @@ def update_org_settings(self, org_id: str, settings: dict[str, Any]) -> None: # update any settings via the rest api if len(rest_fields) > 0: - self.rest_api.org.update_settings(org_id, rest_fields) + await self.rest_api.org.update_settings(org_id, rest_fields) # update any settings via the web interface if len(web_fields) > 0: - self.web_client.update_org_settings(org_id, web_fields) 
+ await self.web_client.update_org_settings(org_id, web_fields) - def get_org_workflow_settings(self, org_id: str) -> dict[str, Any]: - return self.rest_api.org.get_workflow_settings(org_id) + async def get_org_workflow_settings(self, org_id: str) -> dict[str, Any]: + return await self.rest_api.org.get_workflow_settings(org_id) - def update_org_workflow_settings(self, org_id: str, workflow_settings: dict[str, Any]) -> None: - self.rest_api.org.update_workflow_settings(org_id, workflow_settings) + async def update_org_workflow_settings(self, org_id: str, workflow_settings: dict[str, Any]) -> None: + await self.rest_api.org.update_workflow_settings(org_id, workflow_settings) - def get_org_webhooks(self, org_id: str) -> list[dict[str, Any]]: - return self.rest_api.org.get_webhooks(org_id) + async def get_org_webhooks(self, org_id: str) -> list[dict[str, Any]]: + return await self.rest_api.org.get_webhooks(org_id) - def update_org_webhook(self, org_id: str, webhook_id: int, webhook: dict[str, Any]) -> None: + async def update_org_webhook(self, org_id: str, webhook_id: int, webhook: dict[str, Any]) -> None: if len(webhook) > 0: - self.rest_api.org.update_webhook(org_id, webhook_id, webhook) + await self.rest_api.org.update_webhook(org_id, webhook_id, webhook) - def add_org_webhook(self, org_id: str, data: dict[str, str]) -> None: - self.rest_api.org.add_webhook(org_id, data) + async def add_org_webhook(self, org_id: str, data: dict[str, str]) -> None: + await self.rest_api.org.add_webhook(org_id, data) - def delete_org_webhook(self, org_id: str, webhook_id: int, url: str) -> None: - self.rest_api.org.delete_webhook(org_id, webhook_id, url) + async def delete_org_webhook(self, org_id: str, webhook_id: int, url: str) -> None: + await self.rest_api.org.delete_webhook(org_id, webhook_id, url) - def get_repos(self, org_id: str) -> list[str]: + async def get_repos(self, org_id: str) -> list[str]: # filter out repos which are created to work on GitHub Security Advisories # 
they should not be part of the visible configuration - return list(filter(lambda name: not utils.is_ghsa_repo(name), self.rest_api.org.get_repos(org_id))) + return list(filter(lambda name: not utils.is_ghsa_repo(name), await self.rest_api.org.get_repos(org_id))) - def get_repo_data(self, org_id: str, repo_name: str) -> dict[str, Any]: - return self.rest_api.repo.get_repo_data(org_id, repo_name) + async def get_repo_data(self, org_id: str, repo_name: str) -> dict[str, Any]: + return await self.rest_api.repo.get_repo_data(org_id, repo_name) - def get_repo_by_id(self, repo_id: int) -> dict[str, Any]: - return self.rest_api.repo.get_repo_by_id(repo_id) + async def get_repo_by_id(self, repo_id: int) -> dict[str, Any]: + return await self.rest_api.repo.get_repo_by_id(repo_id) - def update_repo(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + async def update_repo(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: if len(data) > 0: - self.rest_api.repo.update_repo(org_id, repo_name, data) + await self.rest_api.repo.update_repo(org_id, repo_name, data) - def add_repo( + async def add_repo( self, org_id: str, data: dict[str, str], @@ -171,7 +170,7 @@ def add_repo( fork_default_branch_only: bool, auto_init_repo: bool, ) -> None: - self.rest_api.repo.add_repo( + await self.rest_api.repo.add_repo( org_id, data, template_repository, @@ -181,13 +180,13 @@ def add_repo( auto_init_repo, ) - def delete_repo(self, org_id: str, repo_name: str) -> None: - self.rest_api.repo.delete_repo(org_id, repo_name) + async def delete_repo(self, org_id: str, repo_name: str) -> None: + await self.rest_api.repo.delete_repo(org_id, repo_name) async def get_branch_protection_rules(self, org_id: str, repo: str) -> list[dict[str, Any]]: - return await self.graphql_client.async_get_branch_protection_rules(org_id, repo) + return await self.graphql_client.get_branch_protection_rules(org_id, repo) - def update_branch_protection_rule( + async def 
update_branch_protection_rule( self, org_id: str, repo_name: str, @@ -195,9 +194,9 @@ def update_branch_protection_rule( rule_id: str, data: dict[str, Any], ) -> None: - self.graphql_client.update_branch_protection_rule(org_id, repo_name, rule_pattern, rule_id, data) + await self.graphql_client.update_branch_protection_rule(org_id, repo_name, rule_pattern, rule_id, data) - def add_branch_protection_rule( + async def add_branch_protection_rule( self, org_id: str, repo_name: str, @@ -206,122 +205,126 @@ def add_branch_protection_rule( ) -> None: # in case the repo_id is not available yet, we need to fetch it from GitHub. if not repo_node_id: - repo_data = self.rest_api.repo.get_repo_data(org_id, repo_name) + repo_data = await self.rest_api.repo.get_repo_data(org_id, repo_name) repo_node_id = repo_data["node_id"] - self.graphql_client.add_branch_protection_rule(org_id, repo_name, repo_node_id, data) + await self.graphql_client.add_branch_protection_rule(org_id, repo_name, repo_node_id, data) - def delete_branch_protection_rule(self, org_id: str, repo_name: str, rule_pattern: str, rule_id: str) -> None: - self.graphql_client.delete_branch_protection_rule(org_id, repo_name, rule_pattern, rule_id) + async def delete_branch_protection_rule(self, org_id: str, repo_name: str, rule_pattern: str, rule_id: str) -> None: + await self.graphql_client.delete_branch_protection_rule(org_id, repo_name, rule_pattern, rule_id) - def update_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None: + async def update_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None: if len(ruleset) > 0: - self.rest_api.repo.update_ruleset(org_id, repo_name, ruleset_id, ruleset) + await self.rest_api.repo.update_ruleset(org_id, repo_name, ruleset_id, ruleset) - def add_repo_ruleset(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: - self.rest_api.repo.add_ruleset(org_id, repo_name, data) + async def 
add_repo_ruleset(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + await self.rest_api.repo.add_ruleset(org_id, repo_name, data) - def delete_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None: - self.rest_api.repo.delete_ruleset(org_id, repo_name, ruleset_id, name) + async def delete_repo_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None: + await self.rest_api.repo.delete_ruleset(org_id, repo_name, ruleset_id, name) - def get_repo_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - return self.rest_api.repo.get_webhooks(org_id, repo_name) + async def get_repo_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + return await self.rest_api.repo.get_webhooks(org_id, repo_name) - def update_repo_webhook(self, org_id: str, repo_name: str, webhook_id: int, webhook: dict[str, Any]) -> None: + async def update_repo_webhook(self, org_id: str, repo_name: str, webhook_id: int, webhook: dict[str, Any]) -> None: if len(webhook) > 0: - self.rest_api.repo.update_webhook(org_id, repo_name, webhook_id, webhook) + await self.rest_api.repo.update_webhook(org_id, repo_name, webhook_id, webhook) - def add_repo_webhook(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: - self.rest_api.repo.add_webhook(org_id, repo_name, data) + async def add_repo_webhook(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + await self.rest_api.repo.add_webhook(org_id, repo_name, data) - def delete_repo_webhook(self, org_id: str, repo_name: str, webhook_id: int, url: str) -> None: - self.rest_api.repo.delete_webhook(org_id, repo_name, webhook_id, url) + async def delete_repo_webhook(self, org_id: str, repo_name: str, webhook_id: int, url: str) -> None: + await self.rest_api.repo.delete_webhook(org_id, repo_name, webhook_id, url) - def get_repo_environments(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - return 
self.rest_api.repo.get_environments(org_id, repo_name) + async def get_repo_environments(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + return await self.rest_api.repo.get_environments(org_id, repo_name) - def update_repo_environment(self, org_id: str, repo_name: str, env_name: str, env: dict[str, Any]) -> None: + async def update_repo_environment(self, org_id: str, repo_name: str, env_name: str, env: dict[str, Any]) -> None: if len(env) > 0: - self.rest_api.repo.update_environment(org_id, repo_name, env_name, env) + await self.rest_api.repo.update_environment(org_id, repo_name, env_name, env) - def add_repo_environment(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: - self.rest_api.repo.add_environment(org_id, repo_name, env_name, data) + async def add_repo_environment(self, org_id: str, repo_name: str, env_name: str, data: dict[str, str]) -> None: + await self.rest_api.repo.add_environment(org_id, repo_name, env_name, data) - def delete_repo_environment(self, org_id: str, repo_name: str, env_name: str) -> None: - self.rest_api.repo.delete_environment(org_id, repo_name, env_name) + async def delete_repo_environment(self, org_id: str, repo_name: str, env_name: str) -> None: + await self.rest_api.repo.delete_environment(org_id, repo_name, env_name) - def get_repo_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: - return self.rest_api.repo.get_workflow_settings(org_id, repo_name) + async def get_repo_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: + return await self.rest_api.repo.get_workflow_settings(org_id, repo_name) - def update_repo_workflow_settings(self, org_id: str, repo_name: str, workflow_settings: dict[str, Any]) -> None: - self.rest_api.repo.update_workflow_settings(org_id, repo_name, workflow_settings) + async def update_repo_workflow_settings( + self, org_id: str, repo_name: str, workflow_settings: dict[str, Any] + ) -> None: + await 
self.rest_api.repo.update_workflow_settings(org_id, repo_name, workflow_settings) - def get_org_secrets(self, org_id: str) -> list[dict[str, Any]]: - return self.rest_api.org.get_secrets(org_id) + async def get_org_secrets(self, org_id: str) -> list[dict[str, Any]]: + return await self.rest_api.org.get_secrets(org_id) - def update_org_secret(self, org_id: str, secret_name: str, secret: dict[str, Any]) -> None: + async def update_org_secret(self, org_id: str, secret_name: str, secret: dict[str, Any]) -> None: if len(secret) > 0: - self.rest_api.org.update_secret(org_id, secret_name, secret) + await self.rest_api.org.update_secret(org_id, secret_name, secret) - def add_org_secret(self, org_id: str, data: dict[str, str]) -> None: - self.rest_api.org.add_secret(org_id, data) + async def add_org_secret(self, org_id: str, data: dict[str, str]) -> None: + await self.rest_api.org.add_secret(org_id, data) - def delete_org_secret(self, org_id: str, secret_name: str) -> None: - self.rest_api.org.delete_secret(org_id, secret_name) + async def delete_org_secret(self, org_id: str, secret_name: str) -> None: + await self.rest_api.org.delete_secret(org_id, secret_name) - def get_org_variables(self, org_id: str) -> list[dict[str, Any]]: - return self.rest_api.org.get_variables(org_id) + async def get_org_variables(self, org_id: str) -> list[dict[str, Any]]: + return await self.rest_api.org.get_variables(org_id) - def update_org_variable(self, org_id: str, variable_name: str, variable: dict[str, Any]) -> None: + async def update_org_variable(self, org_id: str, variable_name: str, variable: dict[str, Any]) -> None: if len(variable) > 0: - self.rest_api.org.update_variable(org_id, variable_name, variable) + await self.rest_api.org.update_variable(org_id, variable_name, variable) - def add_org_variable(self, org_id: str, data: dict[str, str]) -> None: - self.rest_api.org.add_variable(org_id, data) + async def add_org_variable(self, org_id: str, data: dict[str, str]) -> None: + await 
self.rest_api.org.add_variable(org_id, data) - def delete_org_variable(self, org_id: str, variable_name: str) -> None: - self.rest_api.org.delete_variable(org_id, variable_name) + async def delete_org_variable(self, org_id: str, variable_name: str) -> None: + await self.rest_api.org.delete_variable(org_id, variable_name) - def get_repo_secrets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - return self.rest_api.repo.get_secrets(org_id, repo_name) + async def get_repo_secrets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + return await self.rest_api.repo.get_secrets(org_id, repo_name) - def update_repo_secret(self, org_id: str, repo_name: str, secret_name: str, secret: dict[str, Any]) -> None: + async def update_repo_secret(self, org_id: str, repo_name: str, secret_name: str, secret: dict[str, Any]) -> None: if len(secret) > 0: - self.rest_api.repo.update_secret(org_id, repo_name, secret_name, secret) + await self.rest_api.repo.update_secret(org_id, repo_name, secret_name, secret) - def add_repo_secret(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: - self.rest_api.repo.add_secret(org_id, repo_name, data) + async def add_repo_secret(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + await self.rest_api.repo.add_secret(org_id, repo_name, data) - def delete_repo_secret(self, org_id: str, repo_name: str, secret_name: str) -> None: - self.rest_api.repo.delete_secret(org_id, repo_name, secret_name) + async def delete_repo_secret(self, org_id: str, repo_name: str, secret_name: str) -> None: + await self.rest_api.repo.delete_secret(org_id, repo_name, secret_name) - def get_repo_variables(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - return self.rest_api.repo.get_variables(org_id, repo_name) + async def get_repo_variables(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + return await self.rest_api.repo.get_variables(org_id, repo_name) - def update_repo_variable(self, org_id: str, 
repo_name: str, variable_name: str, variable: dict[str, Any]) -> None: + async def update_repo_variable( + self, org_id: str, repo_name: str, variable_name: str, variable: dict[str, Any] + ) -> None: if len(variable) > 0: - self.rest_api.repo.update_variable(org_id, repo_name, variable_name, variable) + await self.rest_api.repo.update_variable(org_id, repo_name, variable_name, variable) - def add_repo_variable(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: - self.rest_api.repo.add_variable(org_id, repo_name, data) + async def add_repo_variable(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + await self.rest_api.repo.add_variable(org_id, repo_name, data) - def delete_repo_variable(self, org_id: str, repo_name: str, variable_name: str) -> None: - self.rest_api.repo.delete_variable(org_id, repo_name, variable_name) + async def delete_repo_variable(self, org_id: str, repo_name: str, variable_name: str) -> None: + await self.rest_api.repo.delete_variable(org_id, repo_name, variable_name) - def dispatch_workflow(self, org_id: str, repo_name: str, workflow_name: str) -> bool: - return self.rest_api.repo.dispatch_workflow(org_id, repo_name, workflow_name) + async def dispatch_workflow(self, org_id: str, repo_name: str, workflow_name: str) -> bool: + return await self.rest_api.repo.dispatch_workflow(org_id, repo_name, workflow_name) - def get_repo_ids(self, org_id: str, repo_names: list[str]) -> list[str]: + async def get_repo_ids(self, org_id: str, repo_names: list[str]) -> list[str]: repo_ids = [] for repo_name in repo_names: - repo_data = self.get_repo_data(org_id, repo_name) + repo_data = await self.get_repo_data(org_id, repo_name) repo_ids.append(repo_data["id"]) return repo_ids - def get_actor_node_ids(self, actor_names: list[str]) -> list[str]: - return list(map(lambda x: x[1][1], self.get_actor_ids_with_type(actor_names))) + async def get_actor_node_ids(self, actor_names: list[str]) -> list[str]: + return list(map(lambda x: 
x[1][1], await self.get_actor_ids_with_type(actor_names))) - def get_actor_ids_with_type(self, actor_names: list[str]) -> list[tuple[str, tuple[int, str]]]: + async def get_actor_ids_with_type(self, actor_names: list[str]) -> list[tuple[str, tuple[int, str]]]: result = [] for actor in actor_names: if actor.startswith("@"): @@ -330,31 +333,28 @@ def get_actor_ids_with_type(self, actor_names: list[str]) -> list[tuple[str, tup # - user-names are not allowed to contain a / if "/" in actor: try: - result.append(("Team", self.rest_api.org.get_team_ids(actor[1:]))) + result.append(("Team", await self.rest_api.org.get_team_ids(actor[1:]))) except RuntimeError: utils.print_warn(f"team '{actor[1:]}' does not exist, skipping") else: try: - result.append(("User", self.rest_api.user.get_user_ids(actor[1:]))) + result.append(("User", await self.rest_api.user.get_user_ids(actor[1:]))) except RuntimeError: utils.print_warn(f"user '{actor[1:]}' does not exist, skipping") else: # it's an app try: - result.append(("App", self.rest_api.app.get_app_ids(actor))) + result.append(("App", await self.rest_api.app.get_app_ids(actor))) except RuntimeError: utils.print_warn(f"app '{actor}' does not exist, skipping") return result - def get_app_node_ids(self, app_names: set[str]) -> dict[str, str]: - return {app_name: self.rest_api.app.get_app_ids(app_name)[1] for app_name in app_names} - - def get_app_ids(self, app_names: set[str]) -> dict[str, str]: - return {app_name: self.rest_api.app.get_app_ids(app_name)[0] for app_name in app_names} + async def get_app_node_ids(self, app_names: set[str]) -> dict[str, str]: + return {app_name: (await self.rest_api.app.get_app_ids(app_name))[1] for app_name in app_names} - def get_ref_for_pull_request(self, org_id: str, repo_name: str, pull_number: str) -> str: - return self.rest_api.repo.get_ref_for_pull_request(org_id, repo_name, pull_number) + async def get_app_ids(self, app_names: set[str]) -> dict[str, str]: + return {app_name: (await 
self.rest_api.app.get_app_ids(app_name))[0] for app_name in app_names} - def open_browser_with_logged_in_user(self, org_id: str) -> None: - self.web_client.open_browser_with_logged_in_user(org_id) + async def get_ref_for_pull_request(self, org_id: str, repo_name: str, pull_number: str) -> str: + return await self.rest_api.repo.get_ref_for_pull_request(org_id, repo_name, pull_number) diff --git a/otterdog/providers/github/rest/auth/__init__.py b/otterdog/providers/github/auth/__init__.py similarity index 73% rename from otterdog/providers/github/rest/auth/__init__.py rename to otterdog/providers/github/auth/__init__.py index 3fcdf350..7567fe97 100644 --- a/otterdog/providers/github/rest/auth/__init__.py +++ b/otterdog/providers/github/auth/__init__.py @@ -22,3 +22,15 @@ class AuthStrategy(ABC): @abstractmethod def get_auth(self) -> AuthImpl: ... + + +def app_auth(app_id: str, private_key: str) -> AuthStrategy: + from .app import AppAuthStrategy + + return AppAuthStrategy(app_id, private_key) + + +def token_auth(github_token: str) -> AuthStrategy: + from .token import TokenAuthStrategy + + return TokenAuthStrategy(github_token) diff --git a/otterdog/providers/github/rest/auth/app.py b/otterdog/providers/github/auth/app.py similarity index 100% rename from otterdog/providers/github/rest/auth/app.py rename to otterdog/providers/github/auth/app.py diff --git a/otterdog/providers/github/rest/auth/token.py b/otterdog/providers/github/auth/token.py similarity index 100% rename from otterdog/providers/github/rest/auth/token.py rename to otterdog/providers/github/auth/token.py diff --git a/otterdog/providers/github/graphql.py b/otterdog/providers/github/graphql.py index 3064ef80..768010bc 100644 --- a/otterdog/providers/github/graphql.py +++ b/otterdog/providers/github/graphql.py @@ -10,53 +10,53 @@ from typing import Any import jq # type: ignore -import requests from aiohttp.client import ClientSession from importlib_resources import files -from otterdog import resources, 
utils +from otterdog import resources +from otterdog.providers.github.auth import AuthStrategy +from otterdog.utils import is_debug_enabled, is_trace_enabled, print_debug, print_trace class GraphQLClient: _GH_GRAPHQL_URL_ROOT = "https://api.github.com/graphql" - def __init__(self, token: str): - self._token = token + def __init__(self, auth_strategy: AuthStrategy): + self._auth = auth_strategy.get_auth() self._headers = { - "Authorization": f"Bearer {token}", "X-Github-Next-Global-ID": "1", } - async def async_get_branch_protection_rules(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - utils.print_debug(f"async retrieving branch protection rules for repo '{org_id}/{repo_name}'") + async def get_branch_protection_rules(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + print_debug(f"async retrieving branch protection rules for repo '{org_id}/{repo_name}'") variables = {"organization": org_id, "repository": repo_name} branch_protection_rules = await self._async_run_paged_query(variables, "get-branch-protection-rules.gql") for branch_protection_rule in branch_protection_rules: - await self._async_fill_paged_results_if_not_empty( + await self._fill_paged_results_if_not_empty( branch_protection_rule, "pushAllowances", "pushRestrictions", "get-push-allowances.gql", ) - await self._async_fill_paged_results_if_not_empty( + await self._fill_paged_results_if_not_empty( branch_protection_rule, "reviewDismissalAllowances", "reviewDismissalAllowances", "get-review-dismissal-allowances.gql", ) - await self._async_fill_paged_results_if_not_empty( + await self._fill_paged_results_if_not_empty( branch_protection_rule, "bypassPullRequestAllowances", "bypassPullRequestAllowances", "get-bypass-pull-request-allowances.gql", ) - await self._async_fill_paged_results_if_not_empty( + await self._fill_paged_results_if_not_empty( branch_protection_rule, "bypassForcePushAllowances", "bypassForcePushAllowances", @@ -65,7 +65,7 @@ async def 
async_get_branch_protection_rules(self, org_id: str, repo_name: str) - return branch_protection_rules - async def _async_fill_paged_results_if_not_empty( + async def _fill_paged_results_if_not_empty( self, branch_protection_rule: dict[str, Any], input_key: str, @@ -83,7 +83,7 @@ async def _async_fill_paged_results_if_not_empty( else: branch_protection_rule[output_key] = [] - def update_branch_protection_rule( + async def update_branch_protection_rule( self, org_id: str, repo_name: str, @@ -91,7 +91,7 @@ def update_branch_protection_rule( rule_id: str, data: dict[str, Any], ) -> None: - utils.print_debug(f"updating branch protection rule '{rule_pattern}' for repo '{org_id}/{repo_name}'") + print_debug(f"updating branch protection rule '{rule_pattern}' for repo '{org_id}/{repo_name}'") data["branchProtectionRuleId"] = rule_id variables = {"ruleInput": data} @@ -104,26 +104,22 @@ def update_branch_protection_rule( } }""" - response = requests.post( - url=f"{self._GH_GRAPHQL_URL_ROOT}", - headers=self._headers, - json={"query": query, "variables": variables}, - ) - utils.print_trace(f"graphql result = ({response.status_code}, {response.text})") + status, body = await self._async_request_raw("POST", query=query, variables=variables) - if not response.ok: - msg = f"failed updating branch protection rule '{rule_pattern}' for repo '{repo_name}'" - raise RuntimeError(msg) + if status >= 400: + raise RuntimeError(f"failed updating branch protection rule '{rule_pattern}' for repo '{repo_name}'") - json_data = response.json() - if "data" in json_data: - utils.print_debug(f"successfully updated branch protection rule '{rule_pattern}'") - else: + json_data = json.loads(body) + if "data" not in json_data: raise RuntimeError(f"failed to update branch protection rule '{rule_pattern}'") - def add_branch_protection_rule(self, org_id: str, repo_name: str, repo_node_id: str, data: dict[str, Any]) -> None: + print_debug(f"successfully updated branch protection rule '{rule_pattern}'") + 
+ async def add_branch_protection_rule( + self, org_id: str, repo_name: str, repo_node_id: str, data: dict[str, Any] + ) -> None: rule_pattern = data["pattern"] - utils.print_debug( + print_debug( f"creating branch_protection_rule with pattern '{rule_pattern}' " f"for repo '{org_id}/{repo_name}'" ) @@ -139,28 +135,19 @@ def add_branch_protection_rule(self, org_id: str, repo_name: str, repo_node_id: } }""" - utils.print_trace(query) - utils.print_trace(json.dumps(variables)) + status, body = await self._async_request_raw("POST", query, variables) - response = requests.post( - url=f"{self._GH_GRAPHQL_URL_ROOT}", - headers=self._headers, - json={"query": query, "variables": variables}, - ) - utils.print_trace(f"graphql result = ({response.status_code}, {response.text})") - - if not response.ok: - msg = f"failed creating branch protection rule '{rule_pattern}' for repo '{repo_name}'" - raise RuntimeError(msg) + if status >= 400: + raise RuntimeError(f"failed creating branch protection rule '{rule_pattern}' for repo '{repo_name}'") - json_data = response.json() - if "data" in json_data: - utils.print_debug(f"successfully created branch protection rule '{rule_pattern}'") - else: + json_data = json.loads(body) + if "data" not in json_data: raise RuntimeError(f"failed to create branch protection rule '{rule_pattern}'") - def delete_branch_protection_rule(self, org_id: str, repo_name: str, rule_pattern: str, rule_id: str) -> None: - utils.print_debug(f"deleting branch protection rule '{rule_pattern}' for repo '{org_id}/{repo_name}'") + print_debug(f"successfully created branch protection rule '{rule_pattern}'") + + async def delete_branch_protection_rule(self, org_id: str, repo_name: str, rule_pattern: str, rule_id: str) -> None: + print_debug(f"deleting branch protection rule '{rule_pattern}' for repo '{org_id}/{repo_name}'") variables = {"ruleInput": {"branchProtectionRuleId": rule_id}} @@ -170,18 +157,12 @@ def delete_branch_protection_rule(self, org_id: str, repo_name: 
str, rule_patter } }""" - response = requests.post( - url=f"{self._GH_GRAPHQL_URL_ROOT}", - headers=self._headers, - json={"query": query, "variables": variables}, - ) - utils.print_trace(f"graphql result = ({response.status_code}, {response.text})") + status, body = await self._async_request_raw("POST", query, variables) - if not response.ok: - msg = f"failed removing branch protection rule '{rule_pattern}' for repo '{repo_name}'" - raise RuntimeError(msg) + if status >= 400: + raise RuntimeError(f"failed removing branch protection rule '{rule_pattern}' for repo '{repo_name}'") - utils.print_debug(f"successfully removed branch protection rule '{rule_pattern}'") + print_debug(f"successfully removed branch protection rule '{rule_pattern}'") async def _async_run_paged_query( self, @@ -189,7 +170,7 @@ async def _async_run_paged_query( query_file: str, prefix_selector: str = ".data.repository.branchProtectionRules", ) -> list[dict[str, Any]]: - utils.print_debug(f"running async graphql query '{query_file}' with input '{json.dumps(input_variables)}'") + print_debug(f"running async graphql query '{query_file}' with input '{json.dumps(input_variables)}'") query = files(resources).joinpath(f"graphql/{query_file}").read_text() @@ -202,19 +183,22 @@ async def _async_run_paged_query( variables.update(input_variables) async with ClientSession() as session: + headers = self._headers.copy() + self._auth.update_headers_with_authorization(headers) + async with session.post( url=f"{self._GH_GRAPHQL_URL_ROOT}", - headers=self._headers, + headers=headers, json={"query": query, "variables": variables}, ) as response: - if utils.is_debug_enabled(): - utils.print_debug( + if is_debug_enabled(): + print_debug( f"graphql query '{query_file}' with input '{json.dumps(input_variables)}': " f"rate-limit-used = {response.headers.get('x-ratelimit-used', None)}" ) - if utils.is_trace_enabled(): - utils.print_trace(f"graphql result = ({response.status}, {await response.text()})") + if 
is_trace_enabled(): + print_trace(f"graphql result = ({response.status}, {await response.text()})") if not response.ok: raise RuntimeError(f"failed running query '{query_file}'") @@ -238,6 +222,27 @@ async def _async_run_paged_query( return result + async def _async_request_raw(self, method: str, query: str, variables: dict[str, Any]) -> tuple[int, str]: + print_trace(f"async '{method}', query = {query}, variables = {variables}") + + headers = self._headers.copy() + self._auth.update_headers_with_authorization(headers) + + async with ClientSession() as session: + async with session.request( + method, + url=self._GH_GRAPHQL_URL_ROOT, + headers=headers, + json={"query": query, "variables": variables}, + ) as response: + text = await response.text() + status = response.status + + if is_trace_enabled(): + print_trace(f"async '{method}' result = ({status}, {text})") + + return status, text + @staticmethod def _transform_actors(actors: list[dict[str, Any]]) -> list[str]: result = [] diff --git a/otterdog/providers/github/rest/__init__.py b/otterdog/providers/github/rest/__init__.py index dcbf1e42..e19f2e1c 100644 --- a/otterdog/providers/github/rest/__init__.py +++ b/otterdog/providers/github/rest/__init__.py @@ -9,9 +9,12 @@ from __future__ import annotations from abc import ABC +from datetime import datetime from functools import cached_property +from typing import Optional + +from otterdog.providers.github.auth import AuthStrategy -from .auth import AuthStrategy from .requester import Requester @@ -21,8 +24,18 @@ class RestApi: _GH_API_URL_ROOT = "https://api.github.com" def __init__(self, auth_strategy: AuthStrategy): + self._auth_strategy = auth_strategy self._requester = Requester(auth_strategy, self._GH_API_URL_ROOT, self._GH_API_VERSION) + @property + def token(self) -> Optional[str]: + from otterdog.providers.github.auth.token import TokenAuthStrategy + + if isinstance(self._auth_strategy, TokenAuthStrategy): + return self._auth_strategy.token + else: + return 
None + def close(self) -> None: self._requester.close() @@ -42,6 +55,18 @@ def content(self): return ContentClient(self) + @cached_property + def issue(self): + from .issue_client import IssueClient + + return IssueClient(self) + + @cached_property + def pull_request(self): + from .pull_request_client import PullRequestClient + + return PullRequestClient(self) + @cached_property def repo(self): from .repo_client import RepoClient @@ -86,3 +111,10 @@ def encrypt_value(public_key: str, secret_value: str) -> str: sealed_box = public.SealedBox(public_key_obj) encrypted = sealed_box.encrypt(secret_value.encode("utf-8")) return b64encode(encrypted).decode("utf-8") + + +_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + + +def parse_date_string(date: str) -> datetime: + return datetime.strptime(date, _FORMAT) diff --git a/otterdog/providers/github/rest/app_client.py b/otterdog/providers/github/rest/app_client.py index f795e9b0..46bae8ac 100644 --- a/otterdog/providers/github/rest/app_client.py +++ b/otterdog/providers/github/rest/app_client.py @@ -6,32 +6,54 @@ # SPDX-License-Identifier: EPL-2.0 # ******************************************************************************* +from datetime import datetime from typing import Any +from otterdog.providers.github.exception import GitHubException from otterdog.utils import print_debug -from ..exception import GitHubException -from . import RestApi, RestClient +from . 
import RestApi, RestClient, parse_date_string class AppClient(RestClient): def __init__(self, rest_api: RestApi): super().__init__(rest_api) - def get_authenticated_app(self) -> dict[str, Any]: + async def get_authenticated_app(self) -> dict[str, Any]: print_debug("retrieving authenticated app") try: - return self.requester.request_json("GET", "/app") + return await self.requester.async_request_json("GET", "/app") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving authenticated app:\n{ex}").with_traceback(tb) - def get_app_ids(self, app_slug: str) -> tuple[int, str]: + async def get_app_installations(self) -> list[dict[str, Any]]: + print_debug("retrieving app installations") + + try: + return await self.requester.async_request_paged_json("GET", "/app/installations") + except GitHubException as ex: + tb = ex.__traceback__ + raise RuntimeError(f"failed retrieving authenticated app:\n{ex}").with_traceback(tb) + + async def create_installation_access_token(self, installation_id: str) -> tuple[str, datetime]: + print_debug(f"creating an installation access token for installation '{installation_id}'") + + try: + response = await self.requester.async_request_json( + "POST", f"/app/installations/{installation_id}/access_tokens" + ) + return response["token"], parse_date_string(response["expires_at"]) + except GitHubException as ex: + tb = ex.__traceback__ + raise RuntimeError(f"failed creating installation access token:\n{ex}").with_traceback(tb) + + async def get_app_ids(self, app_slug: str) -> tuple[int, str]: print_debug("retrieving app node id") try: - response = self.requester.request_json("GET", f"/apps/{app_slug}") + response = await self.requester.async_request_json("GET", f"/apps/{app_slug}") return response["id"], response["node_id"] except GitHubException as ex: tb = ex.__traceback__ diff --git a/otterdog/providers/github/rest/content_client.py b/otterdog/providers/github/rest/content_client.py index 83a0a834..3913c3e1 
100644 --- a/otterdog/providers/github/rest/content_client.py +++ b/otterdog/providers/github/rest/content_client.py @@ -9,9 +9,9 @@ import base64 from typing import Any, Optional +from otterdog.providers.github.exception import GitHubException from otterdog.utils import print_debug -from ..exception import GitHubException from . import RestApi, RestClient @@ -19,7 +19,9 @@ class ContentClient(RestClient): def __init__(self, rest_api: RestApi): super().__init__(rest_api) - def get_content_object(self, org_id: str, repo_name: str, path: str, ref: Optional[str] = None) -> dict[str, Any]: + async def get_content_object( + self, org_id: str, repo_name: str, path: str, ref: Optional[str] = None + ) -> dict[str, Any]: print_debug(f"retrieving content '{path}' from repo '{org_id}/{repo_name}'") try: @@ -28,16 +30,18 @@ def get_content_object(self, org_id: str, repo_name: str, path: str, ref: Option else: params = None - return self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/contents/{path}", params=params) + return await self.requester.async_request_json( + "GET", f"/repos/{org_id}/{repo_name}/contents/{path}", params=params + ) except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving content '{path}' from repo '{repo_name}':\n{ex}").with_traceback(tb) - def get_content(self, org_id: str, repo_name: str, path: str, ref: Optional[str]) -> str: - json_response = self.get_content_object(org_id, repo_name, path, ref) + async def get_content(self, org_id: str, repo_name: str, path: str, ref: Optional[str]) -> str: + json_response = await self.get_content_object(org_id, repo_name, path, ref) return base64.b64decode(json_response["content"]).decode("utf-8") - def update_content( + async def update_content( self, org_id: str, repo_name: str, @@ -48,7 +52,7 @@ def update_content( print_debug(f"putting content '{path}' to repo '{org_id}/{repo_name}'") try: - json_response = self.get_content_object(org_id, repo_name, path) + 
json_response = await self.get_content_object(org_id, repo_name, path) old_sha = json_response["sha"] old_content = base64.b64decode(json_response["content"]).decode("utf-8") except RuntimeError: @@ -77,13 +81,13 @@ def update_content( data["sha"] = old_sha try: - self.requester.request_json("PUT", f"/repos/{org_id}/{repo_name}/contents/{path}", data) + await self.requester.async_request_json("PUT", f"/repos/{org_id}/{repo_name}/contents/{path}", data) return True except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed putting content '{path}' to repo '{repo_name}':\n{ex}").with_traceback(tb) - def delete_content( + async def delete_content( self, org_id: str, repo_name: str, @@ -93,7 +97,7 @@ def delete_content( print_debug(f"deleting content '{path}' in repo '{org_id}/{repo_name}'") try: - json_response = self.get_content_object(org_id, repo_name, path) + json_response = await self.get_content_object(org_id, repo_name, path) old_sha = json_response["sha"] except RuntimeError: old_sha = None @@ -109,7 +113,7 @@ def delete_content( data = {"message": push_message, "sha": old_sha} try: - self.requester.request_json("DELETE", f"/repos/{org_id}/{repo_name}/contents/{path}", data) + await self.requester.async_request_json("DELETE", f"/repos/{org_id}/{repo_name}/contents/{path}", data) return True except GitHubException as ex: tb = ex.__traceback__ diff --git a/otterdog/providers/github/rest/issue_client.py b/otterdog/providers/github/rest/issue_client.py new file mode 100644 index 00000000..d23f2468 --- /dev/null +++ b/otterdog/providers/github/rest/issue_client.py @@ -0,0 +1,29 @@ +# ******************************************************************************* +# Copyright (c) 2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from otterdog.providers.github.exception import GitHubException +from otterdog.utils import print_debug + +from . import RestApi, RestClient + + +class IssueClient(RestClient): + def __init__(self, rest_api: RestApi): + super().__init__(rest_api) + + async def create_comment(self, org_id: str, repo_name: str, issue_number: str, body: str) -> None: + print_debug(f"creating issue comment for issue '{issue_number}' at '{org_id}/{repo_name}'") + + try: + data = {"body": body} + return await self.requester.async_request_json( + "POST", f"/repos/{org_id}/{repo_name}/issues/{issue_number}/comments", data=data + ) + except GitHubException as ex: + tb = ex.__traceback__ + raise RuntimeError(f"failed creating issue comment:\n{ex}").with_traceback(tb) diff --git a/otterdog/providers/github/rest/org_client.py b/otterdog/providers/github/rest/org_client.py index 298b5356..dfb0feec 100644 --- a/otterdog/providers/github/rest/org_client.py +++ b/otterdog/providers/github/rest/org_client.py @@ -10,9 +10,9 @@ import re from typing import Any +from otterdog.providers.github.exception import GitHubException from otterdog.utils import print_debug, print_trace, print_warn -from ..exception import GitHubException from . 
import RestApi, RestClient, encrypt_value @@ -20,17 +20,17 @@ class OrgClient(RestClient): def __init__(self, rest_api: RestApi): super().__init__(rest_api) - def get_settings(self, org_id: str, included_keys: set[str]) -> dict[str, Any]: + async def get_settings(self, org_id: str, included_keys: set[str]) -> dict[str, Any]: print_debug(f"retrieving settings for org '{org_id}'") try: - settings = self.requester.request_json("GET", f"/orgs/{org_id}") + settings = await self.requester.async_request_json("GET", f"/orgs/{org_id}") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving settings for organization '{org_id}':\n{ex}").with_traceback(tb) if "security_managers" in included_keys: - security_managers = self.list_security_managers(org_id) + security_managers = await self.list_security_managers(org_id) settings["security_managers"] = security_managers result = {} @@ -41,25 +41,25 @@ def get_settings(self, org_id: str, included_keys: set[str]) -> dict[str, Any]: return result - def update_settings(self, org_id: str, data: dict[str, Any]) -> None: + async def update_settings(self, org_id: str, data: dict[str, Any]) -> None: print_debug(f"updating settings for organization '{org_id}'") try: - self.requester.request_json("PATCH", f"/orgs/{org_id}", data) + await self.requester.async_request_json("PATCH", f"/orgs/{org_id}", data) except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update settings for organization '{org_id}':\n{ex}").with_traceback(tb) if "security_managers" in data: - self.update_security_managers(org_id, data["security_managers"]) + await self.update_security_managers(org_id, data["security_managers"]) print_debug(f"updated {len(data)} setting(s)") - def list_security_managers(self, org_id: str) -> list[str]: + async def list_security_managers(self, org_id: str) -> list[str]: print_debug(f"retrieving security managers for organization {org_id}") try: - result = 
self.requester.request_json("GET", f"/orgs/{org_id}/security-managers") + result = await self.requester.async_request_json("GET", f"/orgs/{org_id}/security-managers") return list(map(lambda x: x["slug"], result)) except GitHubException as ex: tb = ex.__traceback__ @@ -67,71 +67,75 @@ def list_security_managers(self, org_id: str) -> list[str]: f"failed retrieving security managers for organization " f"'{org_id}':\n{ex}" ).with_traceback(tb) - def update_security_managers(self, org_id: str, security_managers: list[str]) -> None: + async def update_security_managers(self, org_id: str, security_managers: list[str]) -> None: print_debug(f"updating security managers for organization {org_id}") - current_managers = set(self.list_security_managers(org_id)) + current_managers = set(await self.list_security_managers(org_id)) # first, add all security managers that are not yet configured. for team_slug in security_managers: if team_slug in current_managers: current_managers.remove(team_slug) else: - self.add_security_manager_team(org_id, team_slug) + await self.add_security_manager_team(org_id, team_slug) # second, remove the current managers that are left. 
for team_slug in current_managers: - self.remove_security_manager_team(org_id, team_slug) + await self.remove_security_manager_team(org_id, team_slug) - def add_security_manager_team(self, org_id: str, team_slug: str) -> None: + async def add_security_manager_team(self, org_id: str, team_slug: str) -> None: print_debug(f"adding team {team_slug} to security managers for organization {org_id}") - response = self.requester.request_raw("PUT", f"/orgs/{org_id}/security-managers/teams/{team_slug}") + status, body = await self.requester.async_request_raw( + "PUT", f"/orgs/{org_id}/security-managers/teams/{team_slug}" + ) - if response.status_code == 204: + if status == 204: print_debug(f"added team {team_slug} to security managers for organization {org_id}") - elif response.status_code == 404: + elif status == 404: print_warn( f"failed to add team '{team_slug}' to security managers for organization {org_id}: " f"team not found" ) else: raise RuntimeError( f"failed adding team '{team_slug}' to security managers of organization '{org_id}'" - f"\n{response.status_code}: {response.text}" + f"\n{status}: {body}" ) - def remove_security_manager_team(self, org_id: str, team_slug: str) -> None: + async def remove_security_manager_team(self, org_id: str, team_slug: str) -> None: print_debug(f"removing team {team_slug} from security managers for organization {org_id}") - response = self.requester.request_raw("DELETE", f"/orgs/{org_id}/security-managers/teams/{team_slug}") - if response.status_code != 204: + status, body = await self.requester.async_request_raw( + "DELETE", f"/orgs/{org_id}/security-managers/teams/{team_slug}" + ) + if status != 204: raise RuntimeError( f"failed removing team '{team_slug}' from security managers of organization '{org_id}'" - f"\n{response.status_code}: {response.text}" + f"\n{status}: {body}" ) - else: - print_debug(f"removed team {team_slug} from security managers for organization {org_id}") - def get_webhooks(self, org_id: str) -> list[dict[str, 
Any]]: + print_debug(f"removed team {team_slug} from security managers for organization {org_id}") + + async def get_webhooks(self, org_id: str) -> list[dict[str, Any]]: print_debug(f"retrieving org webhooks for org '{org_id}'") try: - return self.requester.request_json("GET", f"/orgs/{org_id}/hooks") + return await self.requester.async_request_json("GET", f"/orgs/{org_id}/hooks") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving webhooks for org '{org_id}':\n{ex}").with_traceback(tb) - def update_webhook(self, org_id: str, webhook_id: int, webhook: dict[str, Any]) -> None: + async def update_webhook(self, org_id: str, webhook_id: int, webhook: dict[str, Any]) -> None: print_debug(f"updating org webhook '{webhook_id}' for organization {org_id}") try: - self.requester.request_json("PATCH", f"/orgs/{org_id}/hooks/{webhook_id}", webhook) + await self.requester.async_request_json("PATCH", f"/orgs/{org_id}/hooks/{webhook_id}", webhook) print_debug(f"updated webhook {webhook_id}") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update org webhook {webhook_id}:\n{ex}").with_traceback(tb) - def add_webhook(self, org_id: str, data: dict[str, Any]) -> None: + async def add_webhook(self, org_id: str, data: dict[str, Any]) -> None: url = data["config"]["url"] print_debug(f"adding org webhook with url '{url}'") @@ -139,60 +143,62 @@ def add_webhook(self, org_id: str, data: dict[str, Any]) -> None: data["name"] = "web" try: - self.requester.request_json("POST", f"/orgs/{org_id}/hooks", data) + await self.requester.async_request_json("POST", f"/orgs/{org_id}/hooks", data) print_debug(f"added org webhook with url '{url}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to add org webhook with url '{url}':\n{ex}").with_traceback(tb) - def delete_webhook(self, org_id: str, webhook_id: int, url: str) -> None: + async def delete_webhook(self, org_id: str, webhook_id: int, url: 
str) -> None: print_debug(f"deleting org webhook with url '{url}'") - response = self.requester.request_raw("DELETE", f"/orgs/{org_id}/hooks/{webhook_id}") + status, _ = await self.requester.async_request_raw("DELETE", f"/orgs/{org_id}/hooks/{webhook_id}") - if response.status_code != 204: + if status != 204: raise RuntimeError(f"failed to delete org webhook with url '{url}'") print_debug(f"removed org webhook with url '{url}'") - def get_repos(self, org_id: str) -> list[str]: + async def get_repos(self, org_id: str) -> list[str]: print_debug(f"retrieving repos for organization {org_id}") params = {"type": "all"} try: - repos = self.requester.request_paged_json("GET", f"/orgs/{org_id}/repos", params) + repos = await self.requester.async_request_paged_json("GET", f"/orgs/{org_id}/repos", params) return [repo["name"] for repo in repos] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to retrieve repos for organization '{org_id}':\n{ex}").with_traceback(tb) - def get_secrets(self, org_id: str) -> list[dict[str, Any]]: + async def get_secrets(self, org_id: str) -> list[dict[str, Any]]: print_debug(f"retrieving secrets for org '{org_id}'") try: - response = self.requester.request_json("GET", f"/orgs/{org_id}/actions/secrets") + response = await self.requester.async_request_json("GET", f"/orgs/{org_id}/actions/secrets") secrets = response["secrets"] for secret in secrets: if secret["visibility"] == "selected": - secret["selected_repositories"] = self._get_selected_repositories_for_secret(org_id, secret["name"]) + secret["selected_repositories"] = await self._get_selected_repositories_for_secret( + org_id, secret["name"] + ) return secrets except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed getting secrets for org '{org_id}':\n{ex}").with_traceback(tb) - def _get_selected_repositories_for_secret(self, org_id: str, secret_name: str) -> list[dict[str, Any]]: + async def _get_selected_repositories_for_secret(self, 
org_id: str, secret_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving selected repositories for secret '{secret_name}'") try: url = f"/orgs/{org_id}/actions/secrets/{secret_name}/repositories" - response = self.requester.request_json("GET", url) + response = await self.requester.async_request_json("GET", url) return response["repositories"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving selected repositories:\n{ex}").with_traceback(tb) - def _set_selected_repositories_for_secret( + async def _set_selected_repositories_for_secret( self, org_id: str, secret_name: str, selected_repository_ids: list[str] ) -> None: print_debug(f"setting selected repositories for secret '{secret_name}'") @@ -201,8 +207,8 @@ def _set_selected_repositories_for_secret( params = {"selected_repository_ids": selected_repository_ids} url = f"/orgs/{org_id}/actions/secrets/{secret_name}/repositories" - response = self.requester.request_json("PUT", url, params=params) - if response.status_code != 204: + status, _ = await self.requester.async_request_raw("PUT", url, params=params) + if status != 204: raise RuntimeError(f"failed to update selected repositories for secret '{secret_name}'") else: print_debug(f"updated selected repositories for secret '{secret_name}'") @@ -211,7 +217,7 @@ def _set_selected_repositories_for_secret( tb = ex.__traceback__ raise RuntimeError(f"failed retrieving selected repositories:\n{ex}").with_traceback(tb) - def update_secret(self, org_id: str, secret_name: str, secret: dict[str, Any]) -> None: + async def update_secret(self, org_id: str, secret_name: str, secret: dict[str, Any]) -> None: print_debug(f"updating org secret '{secret_name}'") if "name" in secret: @@ -227,58 +233,61 @@ def update_secret(self, org_id: str, secret_name: str, secret: dict[str, Any]) - else: selected_repository_ids = None - self.encrypt_secret_inplace(org_id, secret) + await self._encrypt_secret_inplace(org_id, secret) - response = 
self.requester.request_raw( + status, _ = await self.requester.async_request_raw( "PUT", f"/orgs/{org_id}/actions/secrets/{secret_name}", json.dumps(secret) ) - if response.status_code != 204: + if status != 204: raise RuntimeError(f"failed to update org secret '{secret_name}'") - else: - if selected_repository_ids is not None and (visibility is None or visibility == "selected"): - self._set_selected_repositories_for_secret(org_id, secret_name, selected_repository_ids) - print_debug(f"updated org secret '{secret_name}'") + if selected_repository_ids is not None and (visibility is None or visibility == "selected"): + await self._set_selected_repositories_for_secret(org_id, secret_name, selected_repository_ids) + + print_debug(f"updated org secret '{secret_name}'") - def add_secret(self, org_id: str, data: dict[str, str]) -> None: + async def add_secret(self, org_id: str, data: dict[str, str]) -> None: secret_name = data.pop("name") print_debug(f"adding org secret '{secret_name}'") - self.encrypt_secret_inplace(org_id, data) + await self._encrypt_secret_inplace(org_id, data) + + status, _ = await self.requester.async_request_raw( + "PUT", f"/orgs/{org_id}/actions/secrets/{secret_name}", json.dumps(data) + ) - response = self.requester.request_raw("PUT", f"/orgs/{org_id}/actions/secrets/{secret_name}", json.dumps(data)) - if response.status_code != 201: + if status != 201: raise RuntimeError(f"failed to add org secret '{secret_name}'") - else: - print_debug(f"added org secret '{secret_name}'") - def encrypt_secret_inplace(self, org_id: str, data: dict[str, Any]) -> None: + print_debug(f"added org secret '{secret_name}'") + + async def _encrypt_secret_inplace(self, org_id: str, data: dict[str, Any]) -> None: if "value" in data: value = data.pop("value") - key_id, public_key = self.get_public_key(org_id) + key_id, public_key = await self.get_public_key(org_id) data["encrypted_value"] = encrypt_value(public_key, value) data["key_id"] = key_id - def delete_secret(self, 
org_id: str, secret_name: str) -> None: + async def delete_secret(self, org_id: str, secret_name: str) -> None: print_debug(f"deleting org secret '{secret_name}'") - response = self.requester.request_raw("DELETE", f"/orgs/{org_id}/actions/secrets/{secret_name}") - if response.status_code != 204: + status, _ = await self.requester.async_request_raw("DELETE", f"/orgs/{org_id}/actions/secrets/{secret_name}") + if status != 204: raise RuntimeError(f"failed to delete org secret '{secret_name}'") print_debug(f"removed org secret '{secret_name}'") - def get_variables(self, org_id: str) -> list[dict[str, Any]]: + async def get_variables(self, org_id: str) -> list[dict[str, Any]]: print_debug(f"retrieving variables for org '{org_id}'") try: - response = self.requester.request_json("GET", f"/orgs/{org_id}/actions/variables") + response = await self.requester.async_request_json("GET", f"/orgs/{org_id}/actions/variables") secrets = response["variables"] for secret in secrets: if secret["visibility"] == "selected": - secret["selected_repositories"] = self._get_selected_repositories_for_variable( + secret["selected_repositories"] = await self._get_selected_repositories_for_variable( org_id, secret["name"] ) return secrets @@ -286,18 +295,18 @@ def get_variables(self, org_id: str) -> list[dict[str, Any]]: tb = ex.__traceback__ raise RuntimeError(f"failed getting variables for org '{org_id}':\n{ex}").with_traceback(tb) - def _get_selected_repositories_for_variable(self, org_id: str, variable_name: str) -> list[dict[str, Any]]: + async def _get_selected_repositories_for_variable(self, org_id: str, variable_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving selected repositories for variable '{variable_name}'") try: url = f"/orgs/{org_id}/actions/variables/{variable_name}/repositories" - response = self.requester.request_json("GET", url) + response = await self.requester.async_request_json("GET", url) return response["repositories"] except GitHubException as ex: tb = 
ex.__traceback__ raise RuntimeError(f"failed retrieving selected repositories:\n{ex}").with_traceback(tb) - def _set_selected_repositories_for_variable( + async def _set_selected_repositories_for_variable( self, org_id: str, variable_name: str, selected_repository_ids: list[str] ) -> None: print_debug(f"setting selected repositories for variable '{variable_name}'") @@ -306,17 +315,17 @@ def _set_selected_repositories_for_variable( params = {"selected_repository_ids": selected_repository_ids} url = f"/orgs/{org_id}/actions/variables/{variable_name}/repositories" - response = self.requester.request_json("PUT", url, params=params) - if response.status_code != 204: + status, _ = await self.requester.async_request_raw("PUT", url, params=params) + if status != 204: raise RuntimeError(f"failed to update selected repositories for variable '{variable_name}'") - else: - print_debug(f"updated selected repositories for variable '{variable_name}'") + + print_debug(f"updated selected repositories for variable '{variable_name}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving selected repositories:\n{ex}").with_traceback(tb) - def update_variable(self, org_id: str, variable_name: str, variable: dict[str, Any]) -> None: + async def update_variable(self, org_id: str, variable_name: str, variable: dict[str, Any]) -> None: print_debug(f"updating org variable '{variable_name}'") if "name" in variable: @@ -332,215 +341,220 @@ def update_variable(self, org_id: str, variable_name: str, variable: dict[str, A else: selected_repository_ids = None - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PATCH", f"/orgs/{org_id}/actions/variables/{variable_name}", json.dumps(variable) ) - if response.status_code != 204: - raise RuntimeError(f"failed to update org variable '{variable_name}': {response.text}") - else: - if selected_repository_ids is not None and (visibility is None or visibility == 
"selected"): - self._set_selected_repositories_for_variable(org_id, variable_name, selected_repository_ids) + if status != 204: + raise RuntimeError(f"failed to update org variable '{variable_name}': {body}") - print_debug(f"updated org variable '{variable_name}'") + if selected_repository_ids is not None and (visibility is None or visibility == "selected"): + await self._set_selected_repositories_for_variable(org_id, variable_name, selected_repository_ids) - def add_variable(self, org_id: str, data: dict[str, str]) -> None: + print_debug(f"updated org variable '{variable_name}'") + + async def add_variable(self, org_id: str, data: dict[str, str]) -> None: variable_name = data.get("name") print_debug(f"adding org variable '{variable_name}'") - response = self.requester.request_raw("POST", f"/orgs/{org_id}/actions/variables", json.dumps(data)) - if response.status_code != 201: - raise RuntimeError(f"failed to add org variable '{variable_name}': {response.text}") - else: - print_debug(f"added org variable '{variable_name}'") + status, body = await self.requester.async_request_raw( + "POST", f"/orgs/{org_id}/actions/variables", json.dumps(data) + ) + + if status != 201: + raise RuntimeError(f"failed to add org variable '{variable_name}': {body}") - def delete_variable(self, org_id: str, variable_name: str) -> None: + print_debug(f"added org variable '{variable_name}'") + + async def delete_variable(self, org_id: str, variable_name: str) -> None: print_debug(f"deleting org variable '{variable_name}'") - response = self.requester.request_raw("DELETE", f"/orgs/{org_id}/actions/variables/{variable_name}") - if response.status_code != 204: - raise RuntimeError(f"failed to delete org variable '{variable_name}': {response.text}") + status, body = await self.requester.async_request_raw( + "DELETE", f"/orgs/{org_id}/actions/variables/{variable_name}" + ) + + if status != 204: + raise RuntimeError(f"failed to delete org variable '{variable_name}': {body}") print_debug(f"removed 
org variable '{variable_name}'") - def get_public_key(self, org_id: str) -> tuple[str, str]: + async def get_public_key(self, org_id: str) -> tuple[str, str]: print_debug(f"retrieving org public key for org '{org_id}'") try: - response = self.requester.request_json("GET", f"/orgs/{org_id}/actions/secrets/public-key") + response = await self.requester.async_request_json("GET", f"/orgs/{org_id}/actions/secrets/public-key") return response["key_id"], response["key"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving org public key:\n{ex}").with_traceback(tb) - def get_team_ids(self, combined_slug: str) -> tuple[int, str]: + async def get_team_ids(self, combined_slug: str) -> tuple[int, str]: print_debug("retrieving team ids") org_id, team_slug = re.split("/", combined_slug) try: - response = self.requester.request_json("GET", f"/orgs/{org_id}/teams/{team_slug}") + response = await self.requester.async_request_json("GET", f"/orgs/{org_id}/teams/{team_slug}") return response["id"], response["node_id"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving team node id:\n{ex}").with_traceback(tb) - def get_teams(self, org_id: str) -> list[dict[str, Any]]: + async def get_teams(self, org_id: str) -> list[dict[str, Any]]: print_debug(f"retrieving teams for org '{org_id}'") try: - return self.requester.request_json("GET", f"/orgs/{org_id}/teams") + return await self.requester.async_request_json("GET", f"/orgs/{org_id}/teams") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving teams for org '{org_id}':\n{ex}").with_traceback(tb) - def get_app_installations(self, org_id: str) -> list[dict[str, Any]]: + async def get_app_installations(self, org_id: str) -> list[dict[str, Any]]: print_debug(f"retrieving app installations for org '{org_id}'") try: - response = self.requester.request_json("GET", f"/orgs/{org_id}/installations") + response = await 
self.requester.async_request_json("GET", f"/orgs/{org_id}/installations") return response["installations"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed getting app installations for org '{org_id}':\n{ex}").with_traceback(tb) - def get_workflow_settings(self, org_id: str) -> dict[str, Any]: + async def get_workflow_settings(self, org_id: str) -> dict[str, Any]: print_debug(f"retrieving workflow settings for org '{org_id}'") workflow_settings: dict[str, Any] = {} try: - permissions = self.requester.request_json("GET", f"/orgs/{org_id}/actions/permissions") + permissions = await self.requester.async_request_json("GET", f"/orgs/{org_id}/actions/permissions") workflow_settings.update(permissions) except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving workflow settings for org '{org_id}':\n{ex}").with_traceback(tb) if permissions["enabled_repositories"] == "selected": - workflow_settings["selected_repositories"] = self._get_selected_repositories_for_workflow_settings(org_id) + workflow_settings["selected_repositories"] = await self._get_selected_repositories_for_workflow_settings( + org_id + ) else: workflow_settings["selected_repositories"] = None allowed_actions = permissions.get("allowed_actions", "none") if allowed_actions == "selected": - workflow_settings.update(self._get_selected_actions_for_workflow_settings(org_id)) + workflow_settings.update(await self._get_selected_actions_for_workflow_settings(org_id)) if allowed_actions != "none": - workflow_settings.update(self._get_default_workflow_permissions(org_id)) + workflow_settings.update(await self._get_default_workflow_permissions(org_id)) return workflow_settings - def update_workflow_settings(self, org_id: str, data: dict[str, Any]) -> None: + async def update_workflow_settings(self, org_id: str, data: dict[str, Any]) -> None: print_debug(f"updating workflow settings for org '{org_id}'") permission_data = {k: data[k] for k in 
["enabled_repositories", "allowed_actions"] if k in data} if len(permission_data) > 0: - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/orgs/{org_id}/actions/permissions", json.dumps(permission_data) ) - if response.status_code == 204: - print_debug(f"updated workflow settings for org '{org_id}'") - else: - raise RuntimeError( - f"failed to update workflow settings for org '{org_id}'" - f"\n{response.status_code}: {response.text}" - ) + if status != 204: + raise RuntimeError(f"failed to update workflow settings for org '{org_id}'" f"\n{status}: {body}") + + print_debug(f"updated workflow settings for org '{org_id}'") if "selected_repository_ids" in data: - self._update_selected_repositories_for_workflow_settings(org_id, data["selected_repository_ids"]) + await self._update_selected_repositories_for_workflow_settings(org_id, data["selected_repository_ids"]) allowed_action_data = { k: data[k] for k in ["github_owned_allowed", "verified_allowed", "patterns_allowed"] if k in data } if len(allowed_action_data) > 0: - self._update_selected_actions_for_workflow_settings(org_id, allowed_action_data) + await self._update_selected_actions_for_workflow_settings(org_id, allowed_action_data) default_permission_data = { k: data[k] for k in ["default_workflow_permissions", "can_approve_pull_request_reviews"] if k in data } if len(default_permission_data) > 0: - self._update_default_workflow_permissions(org_id, default_permission_data) + await self._update_default_workflow_permissions(org_id, default_permission_data) print_debug(f"updated {len(data)} workflow setting(s)") - def _get_selected_repositories_for_workflow_settings(self, org_id: str) -> list[dict[str, Any]]: + async def _get_selected_repositories_for_workflow_settings(self, org_id: str) -> list[dict[str, Any]]: print_debug("retrieving selected repositories for org workflow settings") try: - response = self.requester.request_json("GET", 
f"/orgs/{org_id}/actions/permissions/repositories") + response = await self.requester.async_request_json( + "GET", f"/orgs/{org_id}/actions/permissions/repositories" + ) return response["repositories"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving selected repositories:\n{ex}").with_traceback(tb) - def _update_selected_repositories_for_workflow_settings( + async def _update_selected_repositories_for_workflow_settings( self, org_id: str, selected_repository_ids: list[int] ) -> None: print_debug("updating selected repositories for org workflow settings") data = {"selected_repository_ids": selected_repository_ids} - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/orgs/{org_id}/actions/permissions/repositories", json.dumps(data) ) - if response.status_code == 204: - print_debug(f"updated selected repositories for workflow settings of org '{org_id}'") - else: + if status != 204: raise RuntimeError( - f"failed updating selected repositories for workflow settings of org '{org_id}'" - f"\n{response.status_code}: {response.text}" + f"failed updating selected repositories for workflow settings of org '{org_id}'" f"\n{status}: {body}" ) - def _get_selected_actions_for_workflow_settings(self, org_id: str) -> dict[str, Any]: + print_debug(f"updated selected repositories for workflow settings of org '{org_id}'") + + async def _get_selected_actions_for_workflow_settings(self, org_id: str) -> dict[str, Any]: print_debug(f"retrieving allowed actions for org '{org_id}'") try: - return self.requester.request_json("GET", f"/orgs/{org_id}/actions/permissions/selected-actions") + return await self.requester.async_request_json( + "GET", f"/orgs/{org_id}/actions/permissions/selected-actions" + ) except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving allowed actions for org '{org_id}':\n{ex}").with_traceback(tb) - def 
_update_selected_actions_for_workflow_settings(self, org_id: str, data: dict[str, Any]) -> None: + async def _update_selected_actions_for_workflow_settings(self, org_id: str, data: dict[str, Any]) -> None: print_debug(f"updating allowed actions for org '{org_id}'") - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/orgs/{org_id}/actions/permissions/selected-actions", json.dumps(data) ) - if response.status_code == 204: - print_debug(f"updated allowed actions for org '{org_id}'") - else: - raise RuntimeError( - f"failed updating allowed actions for org '{org_id}'" f"\n{response.status_code}: {response.text}" - ) + if status != 204: + raise RuntimeError(f"failed updating allowed actions for org '{org_id}'" f"\n{status}: {body}") - def _get_default_workflow_permissions(self, org_id: str) -> dict[str, Any]: + print_debug(f"updated allowed actions for org '{org_id}'") + + async def _get_default_workflow_permissions(self, org_id: str) -> dict[str, Any]: print_debug(f"retrieving default workflow permissions for org '{org_id}'") try: - return self.requester.request_json("GET", f"/orgs/{org_id}/actions/permissions/workflow") + return await self.requester.async_request_json("GET", f"/orgs/{org_id}/actions/permissions/workflow") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving org default workflow permissions:\n{ex}").with_traceback(tb) - def _update_default_workflow_permissions(self, org_id: str, data: dict[str, Any]) -> None: + async def _update_default_workflow_permissions(self, org_id: str, data: dict[str, Any]) -> None: print_debug(f"updating default workflow permissions for org '{org_id}'") - response = self.requester.request_raw("PUT", f"/orgs/{org_id}/actions/permissions/workflow", json.dumps(data)) + status, body = await self.requester.async_request_raw( + "PUT", f"/orgs/{org_id}/actions/permissions/workflow", json.dumps(data) + ) - if response.status_code == 204: - 
print_debug(f"updated default workflow permissions for org '{org_id}'") - else: - raise RuntimeError( - f"failed updating default workflow permissions for org '{org_id}'" - f"\n{response.status_code}: {response.text}" - ) + if status != 204: + raise RuntimeError(f"failed updating default workflow permissions for org '{org_id}'" f"\n{status}: {body}") + + print_debug(f"updated default workflow permissions for org '{org_id}'") - def list_members(self, org_id: str, two_factor_disabled: bool) -> list[dict[str, Any]]: + async def list_members(self, org_id: str, two_factor_disabled: bool) -> list[dict[str, Any]]: print_debug(f"retrieving list of organization members for org '{org_id}'") try: params = "?filter=2fa_disabled" if two_factor_disabled is True else "" - return self.requester.request_paged_json("GET", f"/orgs/{org_id}/members{params}") + return await self.requester.async_request_paged_json("GET", f"/orgs/{org_id}/members{params}") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving org default workflow permissions:\n{ex}").with_traceback(tb) diff --git a/otterdog/providers/github/rest/pull_request_client.py b/otterdog/providers/github/rest/pull_request_client.py new file mode 100644 index 00000000..e358fd6e --- /dev/null +++ b/otterdog/providers/github/rest/pull_request_client.py @@ -0,0 +1,30 @@ +# ******************************************************************************* +# Copyright (c) 2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from typing import Any + +from otterdog.providers.github.exception import GitHubException +from otterdog.utils import print_debug + +from . 
import RestApi, RestClient + + +class PullRequestClient(RestClient): + def __init__(self, rest_api: RestApi): + super().__init__(rest_api) + + async def get_pull_request(self, org_id: str, repo_name: str, pull_request_number: str) -> dict[str, Any]: + print_debug(f"getting pull request with number '{pull_request_number}' from repo '{org_id}/{repo_name}'") + + try: + return await self.requester.async_request_json( + "GET", f"/repos/{org_id}/{repo_name}/pulls/{pull_request_number}" + ) + except GitHubException as ex: + tb = ex.__traceback__ + raise RuntimeError(f"failed retrieving pull request:\n{ex}").with_traceback(tb) diff --git a/otterdog/providers/github/rest/repo_client.py b/otterdog/providers/github/rest/repo_client.py index 228b05a4..c1560e3c 100644 --- a/otterdog/providers/github/rest/repo_client.py +++ b/otterdog/providers/github/rest/repo_client.py @@ -6,7 +6,6 @@ # SPDX-License-Identifier: EPL-2.0 # ******************************************************************************* - import json import os import pathlib @@ -32,42 +31,29 @@ class RepoClient(RestClient): def __init__(self, rest_api: RestApi): super().__init__(rest_api) - def get_repo_data(self, org_id: str, repo_name: str) -> dict[str, Any]: + async def get_repo_data(self, org_id: str, repo_name: str) -> dict[str, Any]: print_debug(f"retrieving org repo data for '{org_id}/{repo_name}'") - try: - repo_data = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}") - self._fill_github_pages_config(org_id, repo_name, repo_data) - self._fill_vulnerability_report(org_id, repo_name, repo_data) - self._fill_topics(org_id, repo_name, repo_data) - return repo_data - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError(f"failed retrieving data for repo '{repo_name}':\n{ex}").with_traceback(tb) - - async def async_get_repo_data(self, org_id: str, repo_name: str) -> dict[str, Any]: - print_debug(f"async retrieving org repo data for '{org_id}/{repo_name}'") - try: repo_data = 
await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}") - await self._async_fill_github_pages_config(org_id, repo_name, repo_data) - await self._async_fill_vulnerability_report(org_id, repo_name, repo_data) - await self._async_fill_topics(org_id, repo_name, repo_data) + await self._fill_github_pages_config(org_id, repo_name, repo_data) + await self._fill_vulnerability_report(org_id, repo_name, repo_data) + await self._fill_topics(org_id, repo_name, repo_data) return repo_data except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving data for repo '{repo_name}':\n{ex}").with_traceback(tb) - def get_repo_by_id(self, repo_id: int) -> dict[str, Any]: + async def get_repo_by_id(self, repo_id: int) -> dict[str, Any]: print_debug(f"retrieving repo by id for '{repo_id}'") try: - return self.requester.request_json("GET", f"/repositories/{repo_id}") + return await self.requester.async_request_json("GET", f"/repositories/{repo_id}") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving data for repo '{repo_id}':\n{ex}").with_traceback(tb) - def update_repo(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def update_repo(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: print_debug(f"updating repo settings for repo '{org_id}/{repo_name}'") changes = len(data) @@ -95,23 +81,23 @@ def update_repo(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None if changes > 0: try: if len(data) > 0: - self.requester.request_json("PATCH", f"/repos/{org_id}/{repo_name}", data) + await self.requester.async_request_json("PATCH", f"/repos/{org_id}/{repo_name}", data) if vulnerability_reports is not None: - self._update_vulnerability_report(org_id, repo_name, vulnerability_reports) + await self._update_vulnerability_report(org_id, repo_name, vulnerability_reports) if topics is not None: - self._update_topics(org_id, repo_name, topics) + await 
self._update_topics(org_id, repo_name, topics) if gh_pages is not None: - self._update_github_pages_config(org_id, repo_name, gh_pages) + await self._update_github_pages_config(org_id, repo_name, gh_pages) if default_branch is not None: - self._update_default_branch(org_id, repo_name, default_branch) + await self._update_default_branch(org_id, repo_name, default_branch) print_debug(f"updated {changes} repo setting(s) for repo '{repo_name}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update settings for repo '{repo_name}':\n{ex}").with_traceback(tb) - def add_repo( + async def add_repo( self, org_id: str, data: dict[str, Any], @@ -134,20 +120,19 @@ def add_repo( "default_branch_only": fork_default_branch_only, } - self.requester.request_json( + await self.requester.async_request_json( "POST", f"/repos/{upstream_owner}/{upstream_repo}/forks", fork_data, ) - print_debug(f"created repo with name '{repo_name}' from template '{template_repository}'") - # get all the data for the created repo to avoid setting values that can not be changed due # to defaults from the organization (like web_commit_signoff_required) - current_data = self.get_repo_data(org_id, repo_name) + current_data = await self.get_repo_data(org_id, repo_name) self._remove_already_active_settings(data, current_data) - self.update_repo(org_id, repo_name, data) + await self.update_repo(org_id, repo_name, data) + print_debug(f"created repo with name '{repo_name}' from template '{template_repository}'") return except GitHubException as ex: tb = ex.__traceback__ @@ -167,7 +152,7 @@ def add_repo( "private": data.get("private", False), } - self.requester.request_json( + await self.requester.async_request_json( "POST", f"/repos/{template_owner}/{template_repo}/generate", template_data, @@ -177,16 +162,16 @@ def add_repo( # get all the data for the created repo to avoid setting values that can not be changed due # to defaults from the organization (like 
web_commit_signoff_required) - current_data = self.get_repo_data(org_id, repo_name) + current_data = await self.get_repo_data(org_id, repo_name) self._remove_already_active_settings(data, current_data) - self.update_repo(org_id, repo_name, data) + await self.update_repo(org_id, repo_name, data) # wait till the repo is initialized, this might take a while. if len(post_process_template_content) > 0: initialized = False for i in range(1, 11): try: - self.get_readme(org_id, repo_name) + await self.get_readme(org_id, repo_name) initialized = True break except RuntimeError: @@ -204,10 +189,10 @@ def add_repo( # if there is template content which shall be post-processed, # use chevron to expand some variables that might be used there. for content_path in post_process_template_content: - content = self.rest_api.content.get_content(org_id, repo_name, content_path, None) + content = await self.rest_api.content.get_content(org_id, repo_name, content_path, None) updated_content = self._render_template_content(org_id, repo_name, content) if content != updated_content: - self.rest_api.content.update_content(org_id, repo_name, content_path, updated_content) + await self.rest_api.content.update_content(org_id, repo_name, content_path, updated_content) return except GitHubException as ex: @@ -241,59 +226,34 @@ def add_repo( data["auto_init"] = auto_init_repo try: - result = self.requester.request_json("POST", f"/orgs/{org_id}/repos", data) + result = await self.requester.async_request_json("POST", f"/orgs/{org_id}/repos", data) print_debug(f"created repo with name '{repo_name}'") self._remove_already_active_settings(update_data, result) - self.update_repo(org_id, repo_name, update_data) + await self.update_repo(org_id, repo_name, update_data) except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to add repo with name '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def get_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + async def 
get_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving webhooks for repo '{org_id}/{repo_name}'") try: - # special handling due to temporary private forks for security advisories. - # example repo: https://github.com/eclipse-cbi/jiro-ghsa-wqjm-x66q-r2c6 - # currently it is not possible via the api to determine such repos, but when - # requesting hooks for such a repo, you would get a 404 response. - response = self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/hooks") - if response.status_code == 200: - return response.json() - else: - return [] + return await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}/hooks") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving webhooks for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - async def async_get_webhooks(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"async retrieving webhooks for repo '{org_id}/{repo_name}'") - - try: - # special handling due to temporary private forks for security advisories. - # example repo: https://github.com/eclipse-cbi/jiro-ghsa-wqjm-x66q-r2c6 - # currently it is not possible via the api to determine such repos, but when - # requesting hooks for such a repo, you would get a 404 response. 
- status, body = await self.requester.async_request_raw("GET", f"/repos/{org_id}/{repo_name}/hooks") - if status == 200: - return json.loads(body) - else: - return [] - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError(f"failed retrieving webhooks for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - - def update_webhook(self, org_id: str, repo_name: str, webhook_id: int, webhook: dict[str, Any]) -> None: + async def update_webhook(self, org_id: str, repo_name: str, webhook_id: int, webhook: dict[str, Any]) -> None: print_debug(f"updating repo webhook '{webhook_id}' for repo '{org_id}/{repo_name}'") try: - self.requester.request_json("PATCH", f"/repos/{org_id}/{repo_name}/hooks/{webhook_id}", webhook) + await self.requester.async_request_json("PATCH", f"/repos/{org_id}/{repo_name}/hooks/{webhook_id}", webhook) print_debug(f"updated repo webhook '{webhook_id}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update repo webhook {webhook_id}:\n{ex}").with_traceback(tb) - def add_webhook(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def add_webhook(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: url = data["config"]["url"] print_debug(f"adding repo webhook with url '{url}' for repo '{org_id}/{repo_name}'") @@ -301,24 +261,24 @@ def add_webhook(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None data["name"] = "web" try: - self.requester.request_json("POST", f"/repos/{org_id}/{repo_name}/hooks", data) + await self.requester.async_request_json("POST", f"/repos/{org_id}/{repo_name}/hooks", data) print_debug(f"added repo webhook with url '{url}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to add repo webhook with url '{url}':\n{ex}").with_traceback(tb) - def delete_webhook(self, org_id: str, repo_name: str, webhook_id: int, url: str) -> None: + async def delete_webhook(self, org_id: str, repo_name: str, 
webhook_id: int, url: str) -> None: print_debug(f"deleting repo webhook with url '{url}' for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}/hooks/{webhook_id}") + status, _ = await self.requester.async_request_raw("DELETE", f"/repos/{org_id}/{repo_name}/hooks/{webhook_id}") - if response.status_code != 204: + if status != 204: raise RuntimeError(f"failed to delete repo webhook with url '{url}'") print_debug(f"removed repo webhook with url '{url}'") - async def async_get_rulesets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"async retrieving rulesets for repo '{org_id}/{repo_name}'") + async def get_rulesets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + print_debug(f"retrieving rulesets for repo '{org_id}/{repo_name}'") try: result = [] @@ -327,14 +287,14 @@ async def async_get_rulesets(self, org_id: str, repo_name: str) -> list[dict[str "GET", f"/repos/{org_id}/{repo_name}/rulesets", params=params ) for ruleset in response: - result.append(await self.async_get_ruleset(org_id, repo_name, str(ruleset["id"]))) + result.append(await self.get_ruleset(org_id, repo_name, str(ruleset["id"]))) return result except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving rulesets for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - async def async_get_ruleset(self, org_id: str, repo_name: str, ruleset_id: str) -> dict[str, Any]: - print_debug(f"async retrieving ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}'") + async def get_ruleset(self, org_id: str, repo_name: str, ruleset_id: str) -> dict[str, Any]: + print_debug(f"retrieving ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}'") try: params = {"includes_parents": str(False)} @@ -345,17 +305,19 @@ async def async_get_ruleset(self, org_id: str, repo_name: str, ruleset_id: str) tb = ex.__traceback__ raise RuntimeError(f"failed retrieving ruleset for repo 
'{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def update_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None: + async def update_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, ruleset: dict[str, Any]) -> None: print_debug(f"updating repo ruleset '{ruleset_id}' for repo '{org_id}/{repo_name}'") try: - self.requester.request_json("PUT", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}", ruleset) + await self.requester.async_request_json( + "PUT", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}", ruleset + ) print_debug(f"updated repo ruleset '{ruleset_id}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update repo ruleset {ruleset_id}:\n{ex}").with_traceback(tb) - def add_ruleset(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def add_ruleset(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: name = data["name"] print_debug(f"adding repo ruleset with name '{name}' for repo '{org_id}/{repo_name}'") @@ -363,18 +325,20 @@ def add_ruleset(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None data["target"] = "branch" try: - self.requester.request_json("POST", f"/repos/{org_id}/{repo_name}/rulesets", data) + await self.requester.async_request_json("POST", f"/repos/{org_id}/{repo_name}/rulesets", data) print_debug(f"added repo ruleset with name '{name}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to add repo ruleset with name '{name}':\n{ex}").with_traceback(tb) - def delete_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None: + async def delete_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str) -> None: print_debug(f"deleting repo ruleset with name '{name}' for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}") + status, _ = await 
self.requester.async_request_raw( + "DELETE", f"/repos/{org_id}/{repo_name}/rulesets/{ruleset_id}" + ) - if response.status_code != 204: + if status != 204: raise RuntimeError(f"failed to delete repo ruleset with name '{name}'") print_debug(f"removed repo ruleset with name '{name}'") @@ -382,25 +346,24 @@ def delete_ruleset(self, org_id: str, repo_name: str, ruleset_id: int, name: str @staticmethod def _render_template_content(org_id: str, repo_name: str, content: str) -> str: variables = {"org": org_id, "repo": repo_name} - return chevron.render(content, variables) - def get_readme(self, org_id: str, repo_name: str) -> dict[str, Any]: + async def get_readme(self, org_id: str, repo_name: str) -> dict[str, Any]: print_debug(f"getting readme for repo '{org_id}/{repo_name}'") try: - return self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/readme") + return await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}/readme") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to get readme for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def delete_repo(self, org_id: str, repo_name: str) -> None: + async def delete_repo(self, org_id: str, repo_name: str) -> None: print_debug(f"deleting repo '{org_id}/{repo_name}'") - response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}") + status, body = await self.requester.async_request_raw("DELETE", f"/repos/{org_id}/{repo_name}") - if response.status_code != 204: - raise RuntimeError(f"failed to delete repo '{org_id}/{repo_name}': {response.text}") + if status != 204: + raise RuntimeError(f"failed to delete repo '{org_id}/{repo_name}': {body}") print_debug(f"removed repo '{org_id}/{repo_name}'") @@ -416,28 +379,21 @@ def _remove_already_active_settings(update_data: dict[str, Any], current_data: d print_debug(f"omitting setting '{key}' as it is already set") update_data.pop(key) - def _fill_github_pages_config(self, org_id: str, 
repo_name: str, repo_data: dict[str, Any]) -> None: + async def _fill_github_pages_config(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: print_debug(f"retrieving github pages config for '{org_id}/{repo_name}'") - response = self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/pages") - if response.status_code == 200: - repo_data["gh_pages"] = response.json() - - async def _async_fill_github_pages_config(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: - print_debug(f"async retrieving github pages config for '{org_id}/{repo_name}'") - status, body = await self.requester.async_request_raw("GET", f"/repos/{org_id}/{repo_name}/pages") if status == 200: repo_data["gh_pages"] = json.loads(body) - def _update_github_pages_config(self, org_id: str, repo_name: str, gh_pages: dict[str, Any]) -> None: + async def _update_github_pages_config(self, org_id: str, repo_name: str, gh_pages: dict[str, Any]) -> None: print_debug(f"updating github pages config for '{org_id}/{repo_name}'") # special handling for repos hosting the organization site if repo_name.lower() == f"{org_id}.github.io".lower(): current_repo_data: dict[str, Any] = {} for i in range(1, 4): - self._fill_github_pages_config(org_id, repo_name, current_repo_data) + await self._fill_github_pages_config(org_id, repo_name, current_repo_data) if "gh_pages" in current_repo_data: break @@ -446,7 +402,7 @@ def _update_github_pages_config(self, org_id: str, repo_name: str, gh_pages: dic time.sleep(1) - current_gh_pages = current_repo_data.get("gh_pages", None) + current_gh_pages: Any = current_repo_data.get("gh_pages", None) if current_gh_pages is not None: has_changes = False for k, v in gh_pages.items(): @@ -465,14 +421,14 @@ def _update_github_pages_config(self, org_id: str, repo_name: str, gh_pages: dic else: gh_pages_data: list[tuple[str, str, int]] = [] # first check if the pages config already exists: - response_get = self.requester.request_raw("GET", 
f"/repos/{org_id}/{repo_name}/pages") - if response_get.status_code != 200: + status, _ = await self.requester.async_request_raw("GET", f"/repos/{org_id}/{repo_name}/pages") + if status != 200: # check if the branch already exists - source = gh_pages.get("source", None) + source: Any = gh_pages.get("source", None) if source is not None: branch = source.get("branch", None) if branch is not None: - existing_branches = self.get_branches(org_id, repo_name) + existing_branches = await self.get_branches(org_id, repo_name) if len(existing_branches) == 0: print_debug(f"repo '{repo_name}' not yet initialized, skipping GH pages config") @@ -490,15 +446,18 @@ def _update_github_pages_config(self, org_id: str, repo_name: str, gh_pages: dic gh_pages_data.append((json.dumps(gh_pages), "PUT", 204)) for data, method, status_code in gh_pages_data: - response = self.requester.request_raw(method, f"/repos/{org_id}/{repo_name}/pages", data=data) - if response.status_code != status_code: - raise RuntimeError(f"failed to update github pages config for repo '{repo_name}': {response.text}") - else: - print_debug(f"updated github pages config for repo '{repo_name}'") + status, body = await self.requester.async_request_raw( + method, f"/repos/{org_id}/{repo_name}/pages", data=data + ) + + if status != status_code: + raise RuntimeError(f"failed to update github pages config for repo '{repo_name}': {body}") - def _update_default_branch(self, org_id: str, repo_name: str, new_default_branch: str) -> None: + print_debug(f"updated github pages config for repo '{repo_name}'") + + async def _update_default_branch(self, org_id: str, repo_name: str, new_default_branch: str) -> None: print_debug(f"updating default branch for '{org_id}/{repo_name}'") - existing_branches = self.get_branches(org_id, repo_name) + existing_branches = await self.get_branches(org_id, repo_name) existing_branch_names = list(map(lambda x: x["name"], existing_branches)) if len(existing_branches) == 0: @@ -508,13 +467,13 @@ def 
_update_default_branch(self, org_id: str, repo_name: str, new_default_branch try: if new_default_branch in existing_branch_names: data = {"default_branch": new_default_branch} - self.requester.request_json("PATCH", f"/repos/{org_id}/{repo_name}", data) + await self.requester.async_request_json("PATCH", f"/repos/{org_id}/{repo_name}", data) print_debug(f"updated default branch for '{org_id}/{repo_name}'") else: - repo = self.get_repo_data(org_id, repo_name) + repo = await self.get_repo_data(org_id, repo_name) default_branch = repo["default_branch"] data = {"new_name": new_default_branch} - self.requester.request_json( + await self.requester.async_request_json( "POST", f"/repos/{org_id}/{repo_name}/branches/{default_branch}/rename", data ) print_debug(f"renamed default branch for '{org_id}/{repo_name}'") @@ -524,26 +483,16 @@ def _update_default_branch(self, org_id: str, repo_name: str, new_default_branch f"failed to update default branch for repo '{org_id}/{repo_name}':\n{ex}" ).with_traceback(tb) - def _fill_vulnerability_report(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: + async def _fill_vulnerability_report(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: print_debug(f"retrieving repo vulnerability report status for '{org_id}/{repo_name}'") - response_vulnerability = self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/vulnerability-alerts") - - if response_vulnerability.status_code == 204: - repo_data["dependabot_alerts_enabled"] = True - else: - repo_data["dependabot_alerts_enabled"] = False - - async def _async_fill_vulnerability_report(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: - print_debug(f"async retrieving repo vulnerability report status for '{org_id}/{repo_name}'") - status, _ = await self.requester.async_request_raw("GET", f"/repos/{org_id}/{repo_name}/vulnerability-alerts") if status == 204: repo_data["dependabot_alerts_enabled"] = True else: 
repo_data["dependabot_alerts_enabled"] = False - def _update_vulnerability_report(self, org_id: str, repo_name: str, vulnerability_reports: bool) -> None: + async def _update_vulnerability_report(self, org_id: str, repo_name: str, vulnerability_reports: bool) -> None: print_debug(f"updating repo vulnerability report status for '{org_id}/{repo_name}'") if vulnerability_reports is True: @@ -551,76 +500,43 @@ def _update_vulnerability_report(self, org_id: str, repo_name: str, vulnerabilit else: method = "DELETE" - response = self.requester.request_raw(method, f"/repos/{org_id}/{repo_name}/vulnerability-alerts") - - if response.status_code != 204: - raise RuntimeError(f"failed to update vulnerability_reports for repo '{repo_name}': {response.text}") - else: - print_debug(f"updated vulnerability_reports for repo '{repo_name}'") + status, body = await self.requester.async_request_raw( + method, f"/repos/{org_id}/{repo_name}/vulnerability-alerts" + ) - def _fill_topics(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: - print_debug(f"retrieving repo topics for '{org_id}/{repo_name}'") + if status != 204: + raise RuntimeError(f"failed to update vulnerability_reports for repo '{repo_name}': {body}") - try: - # querying the topics might fail for temporary private forks, - # ignore exceptions, example repo that fails: - # https://github.com/eclipse-cbi/jiro-ghsa-wqjm-x66q-r2c6 - response = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/topics") - repo_data["topics"] = response.get("names", []) - except GitHubException: - repo_data["topics"] = [] + print_debug(f"updated vulnerability_reports for repo '{repo_name}'") - async def _async_fill_topics(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: - print_debug(f"async retrieving repo topics for '{org_id}/{repo_name}'") + async def _fill_topics(self, org_id: str, repo_name: str, repo_data: dict[str, Any]) -> None: + print_debug(f"retrieving repo topics for 
'{org_id}/{repo_name}'") try: - # querying the topics might fail for temporary private forks, - # ignore exceptions, example repo that fails: - # https://github.com/eclipse-cbi/jiro-ghsa-wqjm-x66q-r2c6 response = await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}/topics") repo_data["topics"] = response.get("names", []) - except GitHubException: - repo_data["topics"] = [] + except GitHubException as ex: + tb = ex.__traceback__ + raise RuntimeError(f"failed retrieving topics for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def _update_topics(self, org_id: str, repo_name: str, topics: list[str]) -> None: + async def _update_topics(self, org_id: str, repo_name: str, topics: list[str]) -> None: print_debug(f"updating repo topics for '{org_id}/{repo_name}'") - data = {"names": topics} - self.requester.request_json("PUT", f"/repos/{org_id}/{repo_name}/topics", data=data) + await self.requester.async_request_json("PUT", f"/repos/{org_id}/{repo_name}/topics", data=data) print_debug(f"updated topics for repo '{repo_name}'") - def get_branches(self, org_id: str, repo_name) -> list[dict[str, Any]]: + async def get_branches(self, org_id: str, repo_name) -> list[dict[str, Any]]: print_debug(f"retrieving branches for repo '{org_id}/{repo_name}'") try: - return self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/branches") + return await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}/branches") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed getting branches for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def get_environments(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + async def get_environments(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving environments for repo '{org_id}/{repo_name}'") - try: - response = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/environments") - - environments = 
response["environments"] - for env in environments: - env_name = env["name"] - has_branch_policies = ( - jq.compile(".deployment_branch_policy.custom_branch_policies // false").input(env).first() - ) - - if has_branch_policies: - env["branch_policies"] = self._get_deployment_branch_policies(org_id, repo_name, env_name) - return environments - except GitHubException: - # querying the environments might fail for private repos, ignore exceptions - return [] - - async def async_get_environments(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"async retrieving environments for repo '{org_id}/{repo_name}'") - try: response = await self.requester.async_request_json("GET", f"/repos/{org_id}/{repo_name}/environments") @@ -632,16 +548,14 @@ async def async_get_environments(self, org_id: str, repo_name: str) -> list[dict ) if has_branch_policies: - env["branch_policies"] = await self._async_get_deployment_branch_policies( - org_id, repo_name, env_name - ) + env["branch_policies"] = await self._get_deployment_branch_policies(org_id, repo_name, env_name) return environments except GitHubException: # querying the environments might fail for private repos, ignore exceptions return [] - def update_environment(self, org_id: str, repo_name: str, env_name: str, env: dict[str, Any]) -> None: - print_debug(f"updating repo environment '{env_name}' for repo '{org_id}/{repo_name}'") + async def update_environment(self, org_id: str, repo_name: str, env_name: str, env: dict[str, Any]) -> None: + print_debug(f"updating environment '{env_name}' for repo '{org_id}/{repo_name}'") if "name" in env: env.pop("name") @@ -652,46 +566,36 @@ def update_environment(self, org_id: str, repo_name: str, env_name: str, env: di branch_policies = None try: - self.requester.request_json("PUT", f"/repos/{org_id}/{repo_name}/environments/{env_name}", env) + await self.requester.async_request_json("PUT", f"/repos/{org_id}/{repo_name}/environments/{env_name}", env) if branch_policies is 
not None: - self._update_deployment_branch_policies(org_id, repo_name, env_name, branch_policies) + await self._update_deployment_branch_policies(org_id, repo_name, env_name, branch_policies) print_debug(f"updated repo environment '{env_name}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed to update repo environment '{env_name}':\n{ex}").with_traceback(tb) - def add_environment(self, org_id: str, repo_name: str, env_name: str, data: dict[str, Any]) -> None: - print_debug(f"adding repo environment '{env_name}' for repo '{org_id}/{repo_name}'") - self.update_environment(org_id, repo_name, env_name, data) - print_debug(f"added repo environment '{env_name}'") + async def add_environment(self, org_id: str, repo_name: str, env_name: str, data: dict[str, Any]) -> None: + print_debug(f"adding environment '{env_name}' for repo '{org_id}/{repo_name}'") + await self.update_environment(org_id, repo_name, env_name, data) + print_debug(f"added environment '{env_name}'") - def delete_environment(self, org_id: str, repo_name: str, env_name: str) -> None: + async def delete_environment(self, org_id: str, repo_name: str, env_name: str) -> None: print_debug(f"deleting repo environment '{env_name} for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}/environments/{env_name}") - if response.status_code != 204: + status, _ = await self.requester.async_request_raw( + "DELETE", f"/repos/{org_id}/{repo_name}/environments/{env_name}" + ) + + if status != 204: raise RuntimeError(f"failed to delete repo environment '{env_name}'") print_debug(f"removed repo environment '{env_name}'") - def _get_deployment_branch_policies(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: + async def _get_deployment_branch_policies(self, org_id: str, repo_name: str, env_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving deployment branch policies for env '{env_name}'") - try: - url = 
f"/repos/{org_id}/{repo_name}/environments/{env_name}/deployment-branch-policies" - response = self.requester.request_json("GET", url) - return response["branch_policies"] - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError(f"failed retrieving deployment branch policies:\n{ex}").with_traceback(tb) - - async def _async_get_deployment_branch_policies( - self, org_id: str, repo_name: str, env_name: str - ) -> list[dict[str, Any]]: - print_debug(f"async retrieving deployment branch policies for env '{env_name}'") - try: url = f"/repos/{org_id}/{repo_name}/environments/{env_name}/deployment-branch-policies" response = await self.requester.async_request_json("GET", url) @@ -700,14 +604,14 @@ async def _async_get_deployment_branch_policies( tb = ex.__traceback__ raise RuntimeError(f"failed retrieving deployment branch policies:\n{ex}").with_traceback(tb) - def _update_deployment_branch_policies( + async def _update_deployment_branch_policies( self, org_id: str, repo_name: str, env_name: str, branch_policies: list[str] ) -> None: print_debug(f"updating deployment branch policies for env '{env_name}'") try: current_branch_policies_by_name = associate_by_key( - self._get_deployment_branch_policies(org_id, repo_name, env_name), + await self._get_deployment_branch_policies(org_id, repo_name, env_name), lambda x: x["name"], ) except RuntimeError: @@ -718,10 +622,10 @@ def _update_deployment_branch_policies( if policy in current_branch_policies_by_name: current_branch_policies_by_name.pop(policy) else: - self._create_deployment_branch_policy(org_id, repo_name, env_name, policy) + await self._create_deployment_branch_policy(org_id, repo_name, env_name, policy) for policy_name, policy_dict in current_branch_policies_by_name.items(): - self._delete_deployment_branch_policy(org_id, repo_name, env_name, policy_dict["id"]) + await self._delete_deployment_branch_policy(org_id, repo_name, env_name, policy_dict["id"]) print_debug(f"updated deployment branch 
policies for env '{env_name}'") @@ -729,43 +633,33 @@ def _update_deployment_branch_policies( tb = ex.__traceback__ raise RuntimeError(f"failed creating deployment branch policies:\n{ex}").with_traceback(tb) - def _create_deployment_branch_policy(self, org_id: str, repo_name: str, env_name: str, name: str) -> None: + async def _create_deployment_branch_policy(self, org_id: str, repo_name: str, env_name: str, name: str) -> None: print_debug(f"creating deployment branch policy for env '{env_name}' with name '{name}") try: data = {"name": name} url = f"/repos/{org_id}/{repo_name}/environments/{env_name}/deployment-branch-policies" - self.requester.request_json("POST", url, data) + await self.requester.async_request_json("POST", url, data) print_debug(f"created deployment branch policy for env '{env_name}'") except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed creating deployment branch policy:\n{ex}").with_traceback(tb) - def _delete_deployment_branch_policy(self, org_id: str, repo_name: str, env_name: str, policy_id: int) -> None: + async def _delete_deployment_branch_policy( + self, org_id: str, repo_name: str, env_name: str, policy_id: int + ) -> None: print_debug(f"deleting deployment branch policy for env '{env_name}' with id '{policy_id}") url = f"/repos/{org_id}/{repo_name}/environments/{env_name}/deployment-branch-policies/{policy_id}" - response = self.requester.request_raw("DELETE", url) - if response.status_code != 204: - raise RuntimeError(f"failed deleting deployment branch policy" f"\n{response.status_code}: {response.text}") - else: - print_debug(f"deleted deployment branch policy for env '{env_name}'") + status, body = await self.requester.async_request_raw("DELETE", url) - def get_secrets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"retrieving secrets for repo '{org_id}/{repo_name}'") + if status != 204: + raise RuntimeError(f"failed deleting deployment branch policy" f"\n{status}: {body}") - 
try: - response = self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/actions/secrets") - if response.status_code == 200: - return response.json()["secrets"] - else: - return [] - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError(f"failed retrieving secrets for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) + print_debug(f"deleted deployment branch policy for env '{env_name}'") - async def async_get_secrets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"async retrieving secrets for repo '{org_id}/{repo_name}'") + async def get_secrets(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: + print_debug(f"retrieving secrets for repo '{org_id}/{repo_name}'") try: status, body = await self.requester.async_request_raw("GET", f"/repos/{org_id}/{repo_name}/actions/secrets") @@ -777,71 +671,63 @@ async def async_get_secrets(self, org_id: str, repo_name: str) -> list[dict[str, tb = ex.__traceback__ raise RuntimeError(f"failed retrieving secrets for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def update_secret(self, org_id: str, repo_name: str, secret_name: str, secret: dict[str, Any]) -> None: + async def update_secret(self, org_id: str, repo_name: str, secret_name: str, secret: dict[str, Any]) -> None: print_debug(f"updating repo secret '{secret_name}' for repo '{org_id}/{repo_name}'") if "name" in secret: secret.pop("name") - self.encrypt_secret_inplace(org_id, repo_name, secret) + await self._encrypt_secret_inplace(org_id, repo_name, secret) - response = self.requester.request_raw( + status, _ = await self.requester.async_request_raw( "PUT", f"/repos/{org_id}/{repo_name}/actions/secrets/{secret_name}", json.dumps(secret), ) - if response.status_code != 204: + + if status != 204: raise RuntimeError(f"failed to update repo secret '{secret_name}'") - else: - print_debug(f"updated repo secret '{secret_name}'") - def add_secret(self, org_id: str, repo_name: str, data: dict[str, 
str]) -> None: + print_debug(f"updated repo secret '{secret_name}'") + + async def add_secret(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: secret_name = data.pop("name") print_debug(f"adding repo secret '{secret_name}' for repo '{org_id}/{repo_name}'") - self.encrypt_secret_inplace(org_id, repo_name, data) + await self._encrypt_secret_inplace(org_id, repo_name, data) - response = self.requester.request_raw( + status, _ = await self.requester.async_request_raw( "PUT", f"/repos/{org_id}/{repo_name}/actions/secrets/{secret_name}", json.dumps(data), ) - if response.status_code != 201: + + if status != 201: raise RuntimeError(f"failed to add repo secret '{secret_name}'") - else: - print_debug(f"added repo secret '{secret_name}'") - def encrypt_secret_inplace(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + print_debug(f"added repo secret '{secret_name}'") + + async def _encrypt_secret_inplace(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: value = data.pop("value") - key_id, public_key = self.get_public_key(org_id, repo_name) + key_id, public_key = await self.get_public_key(org_id, repo_name) data["encrypted_value"] = encrypt_value(public_key, value) data["key_id"] = key_id - def delete_secret(self, org_id: str, repo_name: str, secret_name: str) -> None: + async def delete_secret(self, org_id: str, repo_name: str, secret_name: str) -> None: print_debug(f"deleting repo secret '{secret_name}' for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw("DELETE", f"/repos/{org_id}/{repo_name}/actions/secrets/{secret_name}") - if response.status_code != 204: + status, _ = await self.requester.async_request_raw( + "DELETE", f"/repos/{org_id}/{repo_name}/actions/secrets/{secret_name}" + ) + + if status != 204: raise RuntimeError(f"failed to delete repo secret '{secret_name}'") print_debug(f"removed repo secret '{secret_name}'") - def get_variables(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: 
+ async def get_variables(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: print_debug(f"retrieving variables for repo '{org_id}/{repo_name}'") - try: - response = self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/actions/variables") - if response.status_code == 200: - return response.json()["variables"] - else: - return [] - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError(f"failed retrieving variables for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - - async def async_get_variables(self, org_id: str, repo_name: str) -> list[dict[str, Any]]: - print_debug(f"async retrieving variables for repo '{org_id}/{repo_name}'") - try: status, body = await self.requester.async_request_raw( "GET", f"/repos/{org_id}/{repo_name}/actions/variables" @@ -854,75 +740,54 @@ async def async_get_variables(self, org_id: str, repo_name: str) -> list[dict[st tb = ex.__traceback__ raise RuntimeError(f"failed retrieving variables for repo '{org_id}/{repo_name}':\n{ex}").with_traceback(tb) - def update_variable(self, org_id: str, repo_name: str, variable_name: str, variable: dict[str, Any]) -> None: + async def update_variable(self, org_id: str, repo_name: str, variable_name: str, variable: dict[str, Any]) -> None: print_debug(f"updating repo variable '{variable_name}' for repo '{org_id}/{repo_name}'") if "name" in variable: variable.pop("name") - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PATCH", f"/repos/{org_id}/{repo_name}/actions/variables/{variable_name}", json.dumps(variable), ) - if response.status_code != 204: - raise RuntimeError(f"failed to update repo variable '{variable_name}': {response.text}") - else: - print_debug(f"updated repo variable '{variable_name}'") + if status != 204: + raise RuntimeError(f"failed to update repo variable '{variable_name}': {body}") - def add_variable(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: + 
print_debug(f"updated repo variable '{variable_name}'") + + async def add_variable(self, org_id: str, repo_name: str, data: dict[str, str]) -> None: variable_name = data.get("name") print_debug(f"adding repo variable '{variable_name}' for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "POST", f"/repos/{org_id}/{repo_name}/actions/variables", json.dumps(data), ) - if response.status_code != 201: - raise RuntimeError(f"failed to add repo variable '{variable_name}': {response.text}") - else: - print_debug(f"added repo variable '{variable_name}'") - def delete_variable(self, org_id: str, repo_name: str, variable_name: str) -> None: + if status != 201: + raise RuntimeError(f"failed to add repo variable '{variable_name}': {body}") + + print_debug(f"added repo variable '{variable_name}'") + + async def delete_variable(self, org_id: str, repo_name: str, variable_name: str) -> None: print_debug(f"deleting repo variable '{variable_name}' for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw( + status, _ = await self.requester.async_request_raw( "DELETE", f"/repos/{org_id}/{repo_name}/actions/variables/{variable_name}" ) - if response.status_code != 204: + + if status != 204: raise RuntimeError(f"failed to delete repo variable '{variable_name}'") print_debug(f"removed repo variable '{variable_name}'") - def get_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: + async def get_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: print_debug(f"retrieving workflow settings for repo '{org_id}/{repo_name}'") workflow_settings: dict[str, Any] = {} - try: - permissions = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/actions/permissions") - workflow_settings.update(permissions) - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError( - f"failed retrieving workflow settings for repo 
'{org_id}/{repo_name}':\n{ex}" - ).with_traceback(tb) - - allowed_actions = permissions.get("allowed_actions", "none") - if allowed_actions == "selected": - workflow_settings.update(self._get_selected_actions_for_workflow_settings(org_id, repo_name)) - - if permissions.get("enabled", False) is not False: - workflow_settings.update(self._get_default_workflow_permissions(org_id, repo_name)) - - return workflow_settings - - async def async_get_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: - print_debug(f"async retrieving workflow settings for repo '{org_id}/{repo_name}'") - - workflow_settings: dict[str, Any] = {} - try: permissions = await self.requester.async_request_json( "GET", f"/repos/{org_id}/{repo_name}/actions/permissions" @@ -936,62 +801,48 @@ async def async_get_workflow_settings(self, org_id: str, repo_name: str) -> dict allowed_actions = permissions.get("allowed_actions", "none") if allowed_actions == "selected": - workflow_settings.update(await self._async_get_selected_actions_for_workflow_settings(org_id, repo_name)) + workflow_settings.update(await self._get_selected_actions_for_workflow_settings(org_id, repo_name)) if permissions.get("enabled", False) is not False: - workflow_settings.update(await self._async_get_default_workflow_permissions(org_id, repo_name)) + workflow_settings.update(await self._get_default_workflow_permissions(org_id, repo_name)) return workflow_settings - def update_workflow_settings(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def update_workflow_settings(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: print_debug(f"updating workflow settings for repo '{org_id}/{repo_name}'") permission_data = {k: data[k] for k in ["enabled", "allowed_actions"] if k in data} if len(permission_data) > 0: - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/repos/{org_id}/{repo_name}/actions/permissions", 
json.dumps(permission_data) ) - if response.status_code == 204: - print_debug(f"updated workflow settings for repo '{org_id}/{repo_name}'") - else: + if status != 204: raise RuntimeError( - f"failed to update workflow settings for repo '{org_id}/{repo_name}'" - f"\n{response.status_code}: {response.text}" + f"failed to update workflow settings for repo '{org_id}/{repo_name}'" f"\n{status}: {body}" ) + print_debug(f"updated workflow settings for repo '{org_id}/{repo_name}'") + # only update the selected actions if needed if data.get("allowed_actions", "selected") == "selected": allowed_action_data = { k: data[k] for k in ["github_owned_allowed", "verified_allowed", "patterns_allowed"] if k in data } if len(allowed_action_data) > 0: - self._update_selected_actions_for_workflow_settings(org_id, repo_name, allowed_action_data) + await self._update_selected_actions_for_workflow_settings(org_id, repo_name, allowed_action_data) default_permission_data = { k: data[k] for k in ["default_workflow_permissions", "can_approve_pull_request_reviews"] if k in data } if len(default_permission_data) > 0: - self._update_default_workflow_permissions(org_id, repo_name, default_permission_data) + await self._update_default_workflow_permissions(org_id, repo_name, default_permission_data) print_debug(f"updated {len(data)} workflow setting(s)") - def _get_selected_actions_for_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: + async def _get_selected_actions_for_workflow_settings(self, org_id: str, repo_name: str) -> dict[str, Any]: print_debug(f"retrieving allowed actions for org '{org_id}'") - try: - return self.requester.request_json( - "GET", f"/repos/{org_id}/{repo_name}/actions/permissions/selected-actions" - ) - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError( - f"failed retrieving allowed actions for repo '{org_id}/{repo_name}':\n{ex}" - ).with_traceback(tb) - - async def _async_get_selected_actions_for_workflow_settings(self, 
org_id: str, repo_name: str) -> dict[str, Any]: - print_debug(f"async retrieving allowed actions for org '{org_id}'") - try: return await self.requester.async_request_json( "GET", f"/repos/{org_id}/{repo_name}/actions/permissions/selected-actions" @@ -1002,33 +853,21 @@ async def _async_get_selected_actions_for_workflow_settings(self, org_id: str, r f"failed retrieving allowed actions for repo '{org_id}/{repo_name}':\n{ex}" ).with_traceback(tb) - def _update_selected_actions_for_workflow_settings(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def _update_selected_actions_for_workflow_settings( + self, org_id: str, repo_name: str, data: dict[str, Any] + ) -> None: print_debug(f"updating allowed actions for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/repos/{org_id}/{repo_name}/actions/permissions/selected-actions", json.dumps(data) ) - if response.status_code == 204: - print_debug(f"updated allowed actions for repo '{org_id}/{repo_name}'") - else: - raise RuntimeError( - f"failed updating allowed actions for repo '{org_id}/{repo_name}'" - f"\n{response.status_code}: {response.text}" - ) + if status != 204: + raise RuntimeError(f"failed updating allowed actions for repo '{org_id}/{repo_name}'" f"\n{status}: {body}") - def _get_default_workflow_permissions(self, org_id: str, repo_name: str) -> dict[str, Any]: - print_debug(f"retrieving default workflow permissions for repo '{org_id}/{repo_name}'") + print_debug(f"updated allowed actions for repo '{org_id}/{repo_name}'") - try: - return self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/actions/permissions/workflow") - except GitHubException as ex: - tb = ex.__traceback__ - raise RuntimeError( - f"failed retrieving default workflow permissions for repo '{org_id}/{repo_name}':\n{ex}" - ).with_traceback(tb) - - async def _async_get_default_workflow_permissions(self, org_id: str, repo_name: 
str) -> dict[str, Any]: + async def _get_default_workflow_permissions(self, org_id: str, repo_name: str) -> dict[str, Any]: print_debug(f"async retrieving default workflow permissions for repo '{org_id}/{repo_name}'") try: @@ -1041,59 +880,62 @@ async def _async_get_default_workflow_permissions(self, org_id: str, repo_name: f"failed retrieving default workflow permissions for repo '{org_id}/{repo_name}':\n{ex}" ).with_traceback(tb) - def _update_default_workflow_permissions(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: + async def _update_default_workflow_permissions(self, org_id: str, repo_name: str, data: dict[str, Any]) -> None: print_debug(f"updating default workflow permissions for repo '{org_id}/{repo_name}'") - response = self.requester.request_raw( + status, body = await self.requester.async_request_raw( "PUT", f"/repos/{org_id}/{repo_name}/actions/permissions/workflow", json.dumps(data) ) - if response.status_code == 204: - print_debug(f"updated default workflow permissions for repo '{org_id}/{repo_name}'") - else: + if status != 204: raise RuntimeError( - f"failed updating default workflow permissions for repo '{org_id}/{repo_name}'" - f"\n{response.status_code}: {response.text}" + f"failed updating default workflow permissions for repo '{org_id}/{repo_name}'" f"\n{status}: {body}" ) - def get_public_key(self, org_id: str, repo_name: str) -> tuple[str, str]: + print_debug(f"updated default workflow permissions for repo '{org_id}/{repo_name}'") + + async def get_public_key(self, org_id: str, repo_name: str) -> tuple[str, str]: print_debug(f"retrieving repo public key for repo '{org_id}/{repo_name}'") try: - response = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/actions/secrets/public-key") + response = await self.requester.async_request_json( + "GET", f"/repos/{org_id}/{repo_name}/actions/secrets/public-key" + ) return response["key_id"], response["key"] except GitHubException as ex: tb = ex.__traceback__ raise 
RuntimeError(f"failed retrieving repo public key:\n{ex}").with_traceback(tb) - def dispatch_workflow(self, org_id: str, repo_name: str, workflow_name: str) -> bool: + async def dispatch_workflow(self, org_id: str, repo_name: str, workflow_name: str) -> bool: print_debug(f"dispatching workflow for repo '{org_id}/{repo_name}'") - repo_data = self.get_repo_data(org_id, repo_name) + repo_data = await self.get_repo_data(org_id, repo_name) data = {"ref": repo_data["default_branch"]} - response = self.requester.request_raw( + status, _ = await self.requester.async_request_raw( "POST", f"/repos/{org_id}/{repo_name}/actions/workflows/{workflow_name}/dispatches", json.dumps(data) ) - if response.status_code != 204: + if status != 204: print_debug(f"failed dispatching workflow for repo '{org_id}/{repo_name}'") return False else: print_debug(f"dispatched workflow for repo '{org_id}/{repo_name}'") return True - def get_ref_for_pull_request(self, org_id: str, repo_name: str, pull_number: str) -> str: + async def get_ref_for_pull_request(self, org_id: str, repo_name: str, pull_number: str) -> str: print_debug(f"retrieving ref for pull request {pull_number} at {org_id}/{repo_name}") try: - response = self.requester.request_json("GET", f"/repos/{org_id}/{repo_name}/pulls/{pull_number}") + response = await self.requester.async_request_json( + "GET", f"/repos/{org_id}/{repo_name}/pulls/{pull_number}" + ) return response["head"]["sha"] except GitHubException as ex: tb = ex.__traceback__ raise RuntimeError(f"failed retrieving ref for pull request:\n{ex}").with_traceback(tb) - def sync_from_template_repository( + async def sync_from_template_repository( self, org_id: str, repo_name: str, @@ -1106,7 +948,7 @@ def sync_from_template_repository( with tempfile.TemporaryDirectory() as tmp_dir: archive_file_name = os.path.join(tmp_dir, "archive.zip") with open(archive_file_name, "wb") as archive_file: - self._download_repository_archive(archive_file, template_owner, template_repo) + await 
self._download_repository_archive(archive_file, template_owner, template_repo) archive_target_dir = os.path.join(tmp_dir, "contents") with zipfile.ZipFile(archive_file_name, "r") as zip_file: @@ -1133,16 +975,19 @@ def sync_from_template_repository( if str(relative_path) in template_paths_set: content = self._render_template_content(org_id, repo_name, content) - updated = self.rest_api.content.update_content(org_id, repo_name, str(relative_path), content) + updated = await self.rest_api.content.update_content( + org_id, repo_name, str(relative_path), content + ) if updated: updated_files.append(str(relative_path)) return updated_files - def _download_repository_archive(self, file: IO, org_id: str, repo_name: str, ref: str = "") -> None: + async def _download_repository_archive(self, file: IO, org_id: str, repo_name: str, ref: str = "") -> None: print_debug(f"downloading repository archive for '{org_id}/{repo_name}'") try: + # TODO: use async streaming with self.requester.request_raw("GET", f"/repos/{org_id}/{repo_name}/zipball/{ref}") as response: file.write(response.content) diff --git a/otterdog/providers/github/rest/requester.py b/otterdog/providers/github/rest/requester.py index 5e19f3b8..0fc04bab 100644 --- a/otterdog/providers/github/rest/requester.py +++ b/otterdog/providers/github/rest/requester.py @@ -14,11 +14,10 @@ from requests import Response from requests_cache import CachedSession +from otterdog.providers.github.auth import AuthStrategy from otterdog.providers.github.exception import BadCredentialsException, GitHubException from otterdog.utils import is_debug_enabled, is_trace_enabled, print_debug, print_trace -from .auth import AuthStrategy - _AIOHTTP_CACHE_DIR = ".cache/async_http" _REQUESTS_CACHE_DIR = ".cache/http" @@ -142,7 +141,7 @@ def request_raw( method: str, url_path: str, data: Optional[str] = None, - params: Optional[dict[str, str]] = None, + params: Optional[dict[str, Any]] = None, stream: bool = False, force_refresh: bool = False, ) -> 
Response: @@ -172,7 +171,7 @@ async def async_request_raw( method: str, url_path: str, data: Optional[str] = None, - params: Optional[dict[str, str]] = None, + params: Optional[dict[str, Any]] = None, ) -> tuple[int, str]: print_trace(f"async '{method}' url = {url_path}, data = {data}, headers = {self._headers}") diff --git a/otterdog/providers/github/rest/user_client.py b/otterdog/providers/github/rest/user_client.py index 4aa1a435..14e25cd5 100644 --- a/otterdog/providers/github/rest/user_client.py +++ b/otterdog/providers/github/rest/user_client.py @@ -16,11 +16,11 @@ class UserClient(RestClient): def __init__(self, rest_api: RestApi): super().__init__(rest_api) - def get_user_ids(self, login: str) -> tuple[int, str]: + async def get_user_ids(self, login: str) -> tuple[int, str]: print_debug(f"retrieving user ids for user '{login}'") try: - response = self.requester.request_json("GET", f"/users/{login}") + response = await self.requester.async_request_json("GET", f"/users/{login}") return response["id"], response["node_id"] except GitHubException as ex: tb = ex.__traceback__ diff --git a/otterdog/providers/github/web.py b/otterdog/providers/github/web.py index ecfe684a..e1461cc0 100644 --- a/otterdog/providers/github/web.py +++ b/otterdog/providers/github/web.py @@ -6,12 +6,13 @@ # SPDX-License-Identifier: EPL-2.0 # ******************************************************************************* +from asyncio import gather from functools import cached_property -from typing import Any +from typing import Any, Iterator from importlib_resources import files -from playwright.sync_api import Error as PlaywrightError -from playwright.sync_api import Page, sync_playwright +from playwright.async_api import Error as PlaywrightError +from playwright.async_api import Page, async_playwright from otterdog import resources, utils from otterdog.credentials import Credentials @@ -33,12 +34,12 @@ def web_settings_definition(self) -> dict[str, Any]: web_settings_config = 
files(resources).joinpath("github-web-settings.jsonnet") return utils.jsonnet_evaluate_file(str(web_settings_config)) - def get_org_settings(self, org_id: str, included_keys: set[str]) -> dict[str, Any]: + async def get_org_settings(self, org_id: str, included_keys: set[str]) -> dict[str, Any]: utils.print_debug("retrieving settings via web interface") - with sync_playwright() as playwright: + async with async_playwright() as playwright: try: - browser = playwright.firefox.launch() + browser = await playwright.firefox.launch() except Exception as e: tb = e.__traceback__ raise RuntimeError( @@ -46,102 +47,119 @@ def get_org_settings(self, org_id: str, included_keys: set[str]) -> dict[str, An "'otterdog install-deps'" ).with_traceback(tb) from None - page = browser.new_page() - page.set_default_timeout(self._DEFAULT_TIMEOUT) + context = await browser.new_context() - self._login_if_required(page) - settings = self._retrieve_settings(org_id, included_keys, page) - self._logout(page) + login_page = await context.new_page() + login_page.set_default_timeout(self._DEFAULT_TIMEOUT) + await self._login_if_required(login_page) - page.close() - browser.close() + async def process_page(page_url, page_def) -> dict[str, Any]: + page = await context.new_page() + page.set_default_timeout(self._DEFAULT_TIMEOUT) + return await self._retrieve_settings(org_id, page_url, page_def, included_keys, page) - return settings + tasks = [process_page(page_url, page_def) for page_url, page_def in self._get_pages(included_keys)] + settings_list = await gather(*tasks) + settings = {k: v for d in settings_list for k, v in d.items()} - def _retrieve_settings(self, org_id: str, included_keys: set[str], page: Page) -> dict[str, Any]: - settings: dict[str, Any] = {} + await self._logout(login_page) + + await login_page.close() + await context.close() + await browser.close() + + return settings + def _get_pages(self, included_keys: set[str]) -> Iterator[tuple[str, Any]]: for page_url, page_def in 
self.web_settings_definition.items(): # check if the page contains any setting that is requested if not any(x in included_keys for x in list(map(lambda x: x["name"], page_def))): continue + else: + yield page_url, page_def - utils.print_trace(f"loading page '{page_url}'") - response = page.goto("https://github.com/organizations/{}/{}".format(org_id, page_url)) - assert response is not None - if not response.ok: - raise RuntimeError(f"unable to access github page '{page_url}': {response.status}") + @staticmethod + async def _retrieve_settings( + org_id: str, page_url: str, page_def: Any, included_keys: set[str], page: Page + ) -> dict[str, Any]: + settings: dict[str, Any] = {} - for setting_def in page_def: - setting = setting_def["name"] - optional = setting_def["optional"] - utils.print_trace(f"checking setting '{setting}'") + utils.print_trace(f"loading page '{page_url}'") + response = await page.goto("https://github.com/organizations/{}/{}".format(org_id, page_url)) + assert response is not None + if not response.ok: + raise RuntimeError(f"unable to access github page '{page_url}': {response.status}") - if setting not in included_keys: - continue + for setting_def in page_def: + setting = setting_def["name"] + optional = setting_def["optional"] + utils.print_trace(f"checking setting '{setting}'") - parent = setting_def.get("parent", None) - if parent is not None: - parent_value = settings[parent] - if isinstance(parent_value, bool) and parent_value is False: - settings[setting] = None - continue + if setting not in included_keys: + continue - try: - setting_type = setting_def["type"] - match setting_type: - case "checkbox": - selector = setting_def["selector"] + parent = setting_def.get("parent", None) + if parent is not None: + parent_value = settings[parent] + if isinstance(parent_value, bool) and parent_value is False: + settings[setting] = None + continue - case "radio": - selector = f"{setting_def['selector']}:checked" + try: + setting_type = 
setting_def["type"] + match setting_type: + case "checkbox": + selector = setting_def["selector"] - case "select-menu": - selector = f"{setting_def['selector']}" + case "radio": + selector = f"{setting_def['selector']}:checked" - case "text": - selector = setting_def["selector"] + case "select-menu": + selector = f"{setting_def['selector']}" - case _: - raise RuntimeError(f"not supported setting type '{setting_type}'") + case "text": + selector = setting_def["selector"] - pre_selector = setting_def["preSelector"] - if pre_selector is not None: - page.click(pre_selector) - page.wait_for_selector(selector, state="attached") + case _: + raise RuntimeError(f"not supported setting type '{setting_type}'") - value = page.eval_on_selector( - selector, - "(el, property) => el[property]", - setting_def["valueSelector"], - ) + pre_selector = setting_def["preSelector"] + if pre_selector is not None: + await page.click(pre_selector) + await page.wait_for_selector(selector, state="attached") - if isinstance(value, str): - value = value.strip() + value = await page.eval_on_selector( + selector, + "(el, property) => el[property]", + setting_def["valueSelector"], + ) - settings[setting] = value - utils.print_trace(f"retrieved setting for '{setting}' = '{value}'") + if isinstance(value, str): + value = value.strip() - except Exception as e: - if optional: - continue + settings[setting] = value + utils.print_trace(f"retrieved setting for '{setting}' = '{value}'") - if utils.is_debug_enabled(): - page_name = page_url.split("/")[-1] - screenshot_file = f"screenshot_{page_name}.png" - page.screenshot(path=screenshot_file) - utils.print_warn(f"saved page screenshot to file '{screenshot_file}'") + except Exception as e: + if optional: + continue + + if utils.is_debug_enabled(): + page_name = page_url.split("/")[-1] + screenshot_file = f"screenshot_{page_name}.png" + await page.screenshot(path=screenshot_file) + utils.print_warn(f"saved page screenshot to file '{screenshot_file}'") - 
utils.print_warn(f"failed to retrieve setting '{setting}' via web ui:\n{str(e)}") + utils.print_warn(f"failed to retrieve setting '{setting}' via web ui:\n{str(e)}") return settings - def update_org_settings(self, org_id: str, data: dict[str, Any]) -> None: + async def update_org_settings(self, org_id: str, data: dict[str, Any]) -> None: utils.print_debug("updating settings via web interface") - with sync_playwright() as playwright: + async with async_playwright() as playwright: try: - browser = playwright.firefox.launch() + browser = await playwright.firefox.launch() except Exception as e: tb = e.__traceback__ raise RuntimeError( @@ -149,19 +167,19 @@ def update_org_settings(self, org_id: str, data: dict[str, Any]) -> None: "'otterdog install-deps'" ).with_traceback(tb) from None - page = browser.new_page() + page = await browser.new_page() page.set_default_timeout(self._DEFAULT_TIMEOUT) - self._login_if_required(page) - self._update_settings(org_id, data, page) - self._logout(page) + await self._login_if_required(page) + await self._update_settings(org_id, data, page) + await self._logout(page) - page.close() - browser.close() + await page.close() + await browser.close() utils.print_debug(f"updated {len(data)} setting(s) via web interface") - def _update_settings(self, org_id: str, settings: dict[str, Any], page: Page) -> None: + async def _update_settings(self, org_id: str, settings: dict[str, Any], page: Page) -> None: # first, collect the set of pages that are need to be loaded pages_to_load: dict[str, dict[str, Any]] = {} for page_url, page_def in self.web_settings_definition.items(): @@ -176,7 +194,7 @@ def _update_settings(self, org_id: str, settings: dict[str, Any], page: Page) -> # second, load the required pages and modify the settings for page_url, page_dict in pages_to_load.items(): utils.print_trace(f"loading page '{page_url}'") - response = page.goto("https://github.com/organizations/{}/{}".format(org_id, page_url)) + response = await 
page.goto("https://github.com/organizations/{}/{}".format(org_id, page_url)) assert response is not None if not response.ok: raise RuntimeError(f"unable to access github page '{page_url}': {response.status}") @@ -189,25 +207,25 @@ def _update_settings(self, org_id: str, settings: dict[str, Any], page: Page) -> setting_type = setting_def["type"] match setting_type: case "checkbox": - page.set_checked(setting_def["selector"], new_value == "True" or new_value) + await page.set_checked(setting_def["selector"], new_value == "True" or new_value) case "radio": - page.set_checked(f"{setting_def['selector']}[value='{new_value}']", True) + await page.set_checked(f"{setting_def['selector']}[value='{new_value}']", True) case "select-menu": pre_selector = setting_def["preSelector"] - page.click(pre_selector) + await page.click(pre_selector) selector = f"{setting_def['saveSelector']}" - page.wait_for_selector(selector, state="attached") - handles = page.query_selector_all(selector) + await page.wait_for_selector(selector, state="attached") + handles = await page.query_selector_all(selector) for handle in handles: - if new_value == handle.inner_text().strip(): - handle.click() + if new_value == (await handle.inner_text()).strip(): + await handle.click() break case "text": - page.fill(setting_def["selector"], new_value) + await page.fill(setting_def["selector"], new_value) case _: raise RuntimeError(f"not supported setting type '{setting_type}'") @@ -218,26 +236,26 @@ def _update_settings(self, org_id: str, settings: dict[str, Any], page: Page) -> # do a trial run first as this will wait till the button is enabled # this might be needed for some text input forms that perform input validation. 
- page.click(setting_def["save"], trial=True) - page.click(setting_def["save"], trial=False) + await page.click(setting_def["save"], trial=True) + await page.click(setting_def["save"], trial=False) utils.print_trace(f"updated setting for '{setting}' = '{new_value}'") except Exception as e: if utils.is_debug_enabled(): page_name = page_url.split("/")[-1] screenshot_file = f"screenshot_{page_name}.png" - page.screenshot(path=screenshot_file) + await page.screenshot(path=screenshot_file) utils.print_warn(f"saved page screenshot to file '{screenshot_file}'") utils.print_warn(f"failed to update setting '{setting}' via web ui:\n{str(e)}") raise e - def open_browser_with_logged_in_user(self, org_id: str) -> None: + async def open_browser_with_logged_in_user(self, org_id: str) -> None: utils.print_debug("opening browser window") - with sync_playwright() as playwright: + async with async_playwright() as playwright: try: - browser = playwright.firefox.launch(headless=False) + browser = await playwright.firefox.launch(headless=False) except Exception as e: tb = e.__traceback__ raise RuntimeError( @@ -245,111 +263,111 @@ def open_browser_with_logged_in_user(self, org_id: str) -> None: "'otterdog install-deps'" ).with_traceback(tb) from None - context = browser.new_context(no_viewport=True) + context = await browser.new_context(no_viewport=True) - page = context.new_page() + page = await context.new_page() page.set_default_timeout(self._DEFAULT_TIMEOUT) - self._login_if_required(page) + await self._login_if_required(page) - page.goto("https://github.com/{}".format(org_id)) + await page.goto("https://github.com/{}".format(org_id)) input("Enter anything to logout and close browser.\n") - self._logout(page) + await self._logout(page) - page.close() - context.close() - browser.close() + await page.close() + await context.close() + await browser.close() - def _login_if_required(self, page: Page) -> None: - actor = self._logged_in_as(page) + async def _login_if_required(self, page: 
Page) -> None: + actor = await self._logged_in_as(page) if actor is None: - self._login(page) + await self._login(page) elif actor != self.credentials.username: raise RuntimeError(f"logged in with unexpected user {actor}") @staticmethod - def _logged_in_as(page: Page) -> str: - response = page.goto("https://github.com/settings/profile") + async def _logged_in_as(page: Page) -> str: + response = await page.goto("https://github.com/settings/profile") assert response is not None if not response.ok: raise RuntimeError(f"unable to load github profile page: {response.status}") try: - actor = page.eval_on_selector('meta[name="octolytics-actor-login"]', "element => element.content") + actor = await page.eval_on_selector('meta[name="octolytics-actor-login"]', "element => element.content") except PlaywrightError: actor = None return actor - def _login(self, page: Page) -> None: - response = page.goto("https://github.com/login") + async def _login(self, page: Page) -> None: + response = await page.goto("https://github.com/login") assert response is not None if not response.ok: raise RuntimeError(f"unable to load github login page: {response.status}") - page.type("#login_field", self.credentials.username) - page.type("#password", self.credentials.password) - page.click('input[name="commit"]') + await page.type("#login_field", self.credentials.username) + await page.type("#password", self.credentials.password) + await page.click('input[name="commit"]') - page.goto("https://github.com/sessions/two-factor") - page.type("#app_totp", self.credentials.totp) + await page.goto("https://github.com/sessions/two-factor") + await page.type("#app_totp", self.credentials.totp) try: - actor = page.eval_on_selector('meta[name="octolytics-actor-login"]', "element => element.content") + actor = await page.eval_on_selector('meta[name="octolytics-actor-login"]', "element => element.content") utils.print_trace(f"logged in as {actor}") - if page.title() == "Verify two-factor authentication": + if 
await page.title() == "Verify two-factor authentication": verify_button = page.get_by_role("button", name="Verify 2FA now") if verify_button is not None: - verify_button.click() + await verify_button.click() - if page.is_visible('button[text="Confirm"]'): + if await page.is_visible('button[text="Confirm"]'): confirm_button = page.get_by_role("button", name="Confirm") if confirm_button is not None: - confirm_button.click() + await confirm_button.click() - if page.title() == "Confirm your account recovery settings": + if await page.title() == "Confirm your account recovery settings": confirm_button = page.get_by_role("button", name="Confirm") if confirm_button is not None: - confirm_button.click() + await confirm_button.click() - page.type("#app_totp", self.credentials.totp) + await page.type("#app_totp", self.credentials.totp) except PlaywrightError as e: raise RuntimeError(f"could not log in to web UI: {str(e)}") - def _logout(self, page: Page) -> None: - actor = self._logged_in_as(page) + async def _logout(self, page: Page) -> None: + actor = await self._logged_in_as(page) - response = page.goto("https://github.com/logout") + response = await page.goto("https://github.com/logout") assert response is not None if not response.ok: - response = page.goto("https://github.com/settings/profile") + response = await page.goto("https://github.com/settings/profile") assert response is not None if not response.ok: raise RuntimeError("unable to load github logout page") try: selector = 'summary.Header-link > img[alt = "@{}"]'.format(actor) - page.eval_on_selector(selector, "el => el.click()") - page.wait_for_selector('button[type="submit"].dropdown-signout') - page.eval_on_selector('button[type="submit"].dropdown-signout', "el => el.click()") + await page.eval_on_selector(selector, "el => el.click()") + await page.wait_for_selector('button[type="submit"].dropdown-signout') + await page.eval_on_selector('button[type="submit"].dropdown-signout', "el => el.click()") except 
Exception as e: if utils.is_debug_enabled(): screenshot_file = "screenshot_profile.png" - page.screenshot(path=screenshot_file) + await page.screenshot(path=screenshot_file) utils.print_warn(f"saved page screenshot to file '{screenshot_file}'") raise RuntimeError(f"failed to logout via web ui: {str(e)}") else: try: selector = 'input[value = "Sign out"]' - page.eval_on_selector(selector, "el => el.click()") + await page.eval_on_selector(selector, "el => el.click()") except Exception as e: if utils.is_debug_enabled(): screenshot_file = "screenshot_profile.png" - page.screenshot(path=screenshot_file) + await page.screenshot(path=screenshot_file) utils.print_warn(f"saved page screenshot to file '{screenshot_file}'") raise RuntimeError(f"failed to logout via web ui: {str(e)}") diff --git a/otterdog/utils.py b/otterdog/utils.py index 54a3bc9f..4d87add4 100644 --- a/otterdog/utils.py +++ b/otterdog/utils.py @@ -273,6 +273,7 @@ class LogLevel(Enum): GLOBAL = 0 INFO = 1 WARN = 2 + ERROR = 3 class IndentingPrinter: @@ -286,6 +287,10 @@ def __init__( self._indented_line = False self._log_level = log_level + @property + def writer(self) -> TextIO: + return self._writer + @property def _current_indentation(self) -> str: return self._initial_offset + " " * (self._level * self._spaces_per_level) diff --git a/otterdog/webapp/__init__.py b/otterdog/webapp/__init__.py new file mode 100644 index 00000000..044e3871 --- /dev/null +++ b/otterdog/webapp/__init__.py @@ -0,0 +1,43 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from importlib import import_module +from importlib.util import find_spec + +from quart import Quart + +from .config import AppConfig + +_BLUEPRINT_MODULES: list[str] = [] + + +def register_github_webhook(app) -> None: + webhook_fqn = "otterdog.webapp.webhook" + spec = find_spec(webhook_fqn) + if spec is not None: + module = import_module(webhook_fqn) + module.webhook.init_app(app) + + +def register_blueprints(app): + for module_name in _BLUEPRINT_MODULES: + routes_fqn = f"otterdog.webapp.{module_name}.routes" + spec = find_spec(routes_fqn) + if spec is not None: + module = import_module(routes_fqn) + app.register_blueprint(module.blueprint) + + +def create_app(app_config: AppConfig): + app = Quart(app_config.QUART_APP) + app.config.from_object(app_config) + + register_github_webhook(app) + register_blueprints(app) + + return app diff --git a/otterdog/webapp/config.py b/otterdog/webapp/config.py new file mode 100644 index 00000000..3a14f05c --- /dev/null +++ b/otterdog/webapp/config.py @@ -0,0 +1,48 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +import os + +from decouple import config # type: ignore + + +class AppConfig(object): + QUART_APP = "otterdog.webapp" + + APP_ROOT = config("APP_ROOT") + if not os.path.exists(APP_ROOT): + os.makedirs(APP_ROOT) + + OTTERDOG_CONFIG = config("OTTERDOG_CONFIG", default="otterdog-app.json") + + # Set up the App SECRET_KEY + SECRET_KEY = config("SECRET_KEY") + + GITHUB_WEBHOOK_ENDPOINT = config("GITHUB_WEBHOOK_ENDPOINT", default="/github-webhook/receive") + GITHUB_WEBHOOK_SECRET = config("GITHUB_WEBHOOK_SECRET", default=None) + + # GitHub App config + GITHUB_APP_ID = config("GITHUB_APP_ID") + GITHUB_APP_PRIVATE_KEY = config("GITHUB_APP_PRIVATE_KEY") + + +class ProductionConfig(AppConfig): + DEBUG = False + + # Security + SESSION_COOKIE_HTTPONLY = True + REMEMBER_COOKIE_HTTPONLY = True + REMEMBER_COOKIE_DURATION = 3600 + + +class DebugConfig(AppConfig): + DEBUG = True + + +# Load all possible configurations +config_dict = {"Production": ProductionConfig, "Debug": DebugConfig} diff --git a/otterdog/webapp/tasks/__init__.py b/otterdog/webapp/tasks/__init__.py new file mode 100644 index 00000000..ff2cb0d3 --- /dev/null +++ b/otterdog/webapp/tasks/__init__.py @@ -0,0 +1,62 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. 
+# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +from datetime import datetime +from typing import Optional + +from quart import current_app + +from otterdog.config import OtterdogConfig +from otterdog.providers.github.auth import app_auth, token_auth +from otterdog.providers.github.rest import RestApi + +_APP_REST_API: Optional[RestApi] = None +_INSTALLATION_REST_APIS: dict[str, tuple[RestApi, datetime]] = {} + +_OTTERDOG_CONFIG: Optional[OtterdogConfig] = None + + +def _create_rest_api_for_app() -> RestApi: + github_app_id = current_app.config["GITHUB_APP_ID"] + github_app_private_key = current_app.config["GITHUB_APP_PRIVATE_KEY"] + return RestApi(app_auth(github_app_id, github_app_private_key)) + + +def get_rest_api_for_app() -> RestApi: + global _APP_REST_API + + if _APP_REST_API is None: + _APP_REST_API = _create_rest_api_for_app() + + return _APP_REST_API + + +async def get_rest_api_for_installation(installation_id: int) -> RestApi: + global _INSTALLATION_REST_APIS + installation = str(installation_id) + + current_api, expires_at = _INSTALLATION_REST_APIS.get(installation, (None, datetime.now())) + if current_api is not None and expires_at is not None: + if expires_at > datetime.now(): + return current_api + + token, expires_at = await get_rest_api_for_app().app.create_installation_access_token(installation) + rest_api = RestApi(token_auth(token)) + _INSTALLATION_REST_APIS[installation] = (rest_api, expires_at) + return rest_api + + +def get_otterdog_config() -> OtterdogConfig: + global _OTTERDOG_CONFIG + + if _OTTERDOG_CONFIG is None: + app_root = current_app.config["APP_ROOT"] + config_file = current_app.config["OTTERDOG_CONFIG"] + _OTTERDOG_CONFIG = OtterdogConfig(config_file, False, app_root) + + 
return _OTTERDOG_CONFIG diff --git a/otterdog/webapp/tasks/apply_changes.py b/otterdog/webapp/tasks/apply_changes.py new file mode 100644 index 00000000..16b4ee72 --- /dev/null +++ b/otterdog/webapp/tasks/apply_changes.py @@ -0,0 +1,88 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +import os +from io import StringIO +from logging import getLogger +from tempfile import TemporaryDirectory + +from quart import render_template + +from otterdog.config import OrganizationConfig, OtterdogConfig +from otterdog.operations.apply import ApplyOperation +from otterdog.utils import IndentingPrinter, LogLevel +from otterdog.webapp.tasks import get_rest_api_for_installation +from otterdog.webapp.webhook.github_models import PullRequest, Repository + +from .validate_pull_request import escape_for_github, get_config + +logger = getLogger(__name__) + + +async def apply_changes( + org_id: str, + installation_id: int, + pull_request: PullRequest, + repository: Repository, + otterdog_config: OtterdogConfig, +) -> None: + """Applies changes from a merged PR and adds the result as a comment.""" + + if pull_request.base.ref != repository.default_branch: + logger.info( + "pull request merged into '%s' which is not the default branch '%s', ignoring", + pull_request.base.ref, + repository.default_branch, + ) + return + + assert pull_request.merged is True + assert pull_request.merge_commit_sha is not None + + logger.info("applying merged pull request #%d for repo '%s'", pull_request.number, repository.full_name) + + pull_request_number = str(pull_request.number) + + rest_api = await 
import os
from io import StringIO
from logging import getLogger
from tempfile import TemporaryDirectory

from quart import render_template

from otterdog.config import OrganizationConfig, OtterdogConfig
from otterdog.operations.apply import ApplyOperation
from otterdog.utils import IndentingPrinter, LogLevel
from otterdog.webapp.tasks import get_rest_api_for_installation
from otterdog.webapp.webhook.github_models import PullRequest, Repository

from .validate_pull_request import escape_for_github, get_config

logger = getLogger(__name__)


async def apply_changes(
    org_id: str,
    installation_id: int,
    pull_request: PullRequest,
    repository: Repository,
    otterdog_config: OtterdogConfig,
) -> None:
    """Applies changes from a merged PR and adds the result as a comment."""
    # Only merges into the default branch affect the live configuration.
    if pull_request.base.ref != repository.default_branch:
        logger.info(
            "pull request merged into '%s' which is not the default branch '%s', ignoring",
            pull_request.base.ref,
            repository.default_branch,
        )
        return

    # The webhook dispatcher only calls us for merged PRs; guard against misuse.
    assert pull_request.merged is True
    assert pull_request.merge_commit_sha is not None

    logger.info("applying merged pull request #%d for repo '%s'", pull_request.number, repository.full_name)

    pr_number = str(pull_request.number)
    rest_api = await get_rest_api_for_installation(installation_id)

    with TemporaryDirectory(dir=otterdog_config.jsonnet_base_dir) as work_dir:
        org_config = OrganizationConfig.of(
            org_id, {"provider": "inmemory", "api_token": rest_api.token}, work_dir, otterdog_config
        )

        jsonnet_config = org_config.jsonnet_config
        if not os.path.exists(jsonnet_config.org_dir):
            os.makedirs(jsonnet_config.org_dir)
        jsonnet_config.init_template()

        # Fetch the configuration exactly as of the merge commit.
        config_file = jsonnet_config.org_config_file
        await get_config(
            rest_api,
            org_id,
            org_id,
            otterdog_config.default_config_repo,
            config_file,
            pull_request.merge_commit_sha,
        )

        buffer = StringIO()
        printer = IndentingPrinter(buffer, log_level=LogLevel.ERROR)
        operation = ApplyOperation(True, True, False, False, "", True)
        operation.init(otterdog_config, printer)

        await operation.execute(org_config)

        apply_output = buffer.getvalue()
        logger.info(apply_output)

        comment = await render_template("applied_changes.txt", result=escape_for_github(apply_output))
        await rest_api.issue.create_comment(org_id, otterdog_config.default_config_repo, pr_number, comment)
from quart import render_template

from otterdog.webapp.tasks import get_rest_api_for_installation


async def create_help_comment(org_id: str, installation_id: int, repo_name: str, pull_request_number: int) -> None:
    """Post the rendered help text as a comment on the given pull request."""
    rest_api = await get_rest_api_for_installation(installation_id)
    body = await render_template("help_comment.txt")
    await rest_api.issue.create_comment(org_id, repo_name, str(pull_request_number), body)
import filecmp
import os
import re
from io import StringIO
from logging import getLogger
from tempfile import TemporaryDirectory
from typing import cast

from pydantic import ValidationError
from quart import render_template

from otterdog.config import OrganizationConfig, OtterdogConfig
from otterdog.operations.local_plan import LocalPlanOperation
from otterdog.providers.github import RestApi
from otterdog.utils import IndentingPrinter, LogLevel
from otterdog.webapp.tasks import get_rest_api_for_installation
from otterdog.webapp.webhook.github_models import PullRequest, Repository

logger = getLogger(__name__)


async def validate_pull_request(
    org_id: str,
    installation_id: int,
    pull_request_or_number: PullRequest | int,
    repository: Repository,
    otterdog_config: OtterdogConfig,
    log_level: LogLevel = LogLevel.WARN,
) -> None:
    """Validates a PR and adds the result as a comment."""
    rest_api = await get_rest_api_for_installation(installation_id)

    # Accept either a populated event model or a bare PR number; the latter
    # is used when validation is triggered from an issue comment.
    if isinstance(pull_request_or_number, int):
        response = await rest_api.pull_request.get_pull_request(org_id, repository.name, str(pull_request_or_number))
        try:
            pull_request = PullRequest.model_validate(response)
        except ValidationError:
            logger.error("failed to load pull request event data", exc_info=True)
            return
    else:
        pull_request = cast(PullRequest, pull_request_or_number)

    pr_number = str(pull_request.number)

    logger.info(
        "validating pull request #%d for repo '%s' with level %s", pull_request.number, repository.full_name, log_level
    )

    with TemporaryDirectory(dir=otterdog_config.jsonnet_base_dir) as work_dir:
        org_config = OrganizationConfig.of(
            org_id, {"provider": "inmemory", "api_token": rest_api.token}, work_dir, otterdog_config
        )

        jsonnet_config = org_config.jsonnet_config
        if not os.path.exists(jsonnet_config.org_dir):
            os.makedirs(jsonnet_config.org_dir)
        jsonnet_config.init_template()

        # BASE: configuration as found on the PR's target branch.
        base_file = jsonnet_config.org_config_file + "-BASE"
        await get_config(
            rest_api,
            org_id,
            org_id,
            otterdog_config.default_config_repo,
            base_file,
            pull_request.base.ref,
        )

        # HEAD: configuration from the PR's source branch (possibly a fork).
        head_file = jsonnet_config.org_config_file
        await get_config(
            rest_api,
            org_id,
            pull_request.head.repo.owner.login,
            pull_request.head.repo.name,
            head_file,
            pull_request.head.ref,
        )

        if filecmp.cmp(base_file, head_file):
            logger.info("head and base config are identical, no need to validate")
            return

        buffer = StringIO()
        printer = IndentingPrinter(buffer, log_level=log_level)
        operation = LocalPlanOperation("-BASE", False, False, "")
        operation.init(otterdog_config, printer)

        await operation.execute(org_config)

        plan_output = buffer.getvalue()
        logger.info(plan_output)

        comment = await render_template(
            "validation.txt", sha=pull_request.head.sha, result=escape_for_github(plan_output)
        )
        await rest_api.issue.create_comment(org_id, otterdog_config.default_config_repo, pr_number, comment)
# Compiled once at module scope: these three patterns were previously
# re-compiled inside the per-line loop of escape_for_github, which is
# loop-invariant work (the `re` cache made it cheap, not free).
_ANSI_ESCAPE_PATTERN = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -/]*[@-~]")
_DIFF_MARKER_PATTERN = re.compile(r"(\s+)([-+!])(\s+)")
_DIFF_TILDE_PATTERN = re.compile(r"(\s+)(~)")


async def get_config(rest_api: RestApi, org_id: str, owner: str, repo: str, filename: str, ref: str):
    """Download 'otterdog/<org_id>.jsonnet' from owner/repo at the given ref.

    The retrieved content is written to `filename` on the local filesystem.
    Note the file path inside the repository is derived from `org_id`, not
    from `repo`.
    """
    path = f"otterdog/{org_id}.jsonnet"
    content = await rest_api.content.get_content(
        owner,
        repo,
        path,
        ref,
    )
    with open(filename, "w") as file:
        file.write(content)


def escape_for_github(text: str) -> str:
    """Prepare terminal plan output for embedding in a GitHub ```diff``` block.

    Per line: strips ANSI escape sequences, then moves a leading diff marker
    ('-', '+' or '!') in front of the whitespace that preceded it, and turns
    '~' markers into '!' (GitHub diff highlighting has no '~').
    """
    output = []
    for line in text.splitlines():
        line = _ANSI_ESCAPE_PATTERN.sub("", line)
        # NOTE: the whitespace matched after the marker (group 3) is dropped
        # by the replacement — this matches the previous behavior.
        line = _DIFF_MARKER_PATTERN.sub(r"\g<2>\g<1>", line)
        line = _DIFF_TILDE_PATTERN.sub(r"!\g<1>", line)
        output.append(line)

    return "\n".join(output)
+Diff for {{ sha }} + +```diff +{{ result }} +``` + +
+ +Add a comment `/help` to get a list of available commands. diff --git a/otterdog/webapp/webhook/__init__.py b/otterdog/webapp/webhook/__init__.py new file mode 100644 index 00000000..174ba641 --- /dev/null +++ b/otterdog/webapp/webhook/__init__.py @@ -0,0 +1,123 @@ +# ******************************************************************************* +# Copyright (c) 2023-2024 Eclipse Foundation and others. +# This program and the accompanying materials are made available +# under the terms of the Eclipse Public License 2.0 +# which is available at http://www.eclipse.org/legal/epl-v20.html +# SPDX-License-Identifier: EPL-2.0 +# ******************************************************************************* + +import re +from logging import getLogger + +from pydantic import ValidationError +from quart import Response, current_app + +from otterdog.utils import LogLevel +from otterdog.webapp.tasks import get_otterdog_config +from otterdog.webapp.tasks.apply_changes import apply_changes +from otterdog.webapp.tasks.help_comment import create_help_comment +from otterdog.webapp.tasks.validate_pull_request import validate_pull_request + +from .github_models import IssueCommentEvent, PullRequestEvent +from .github_webhook import GitHubWebhook + +webhook = GitHubWebhook() + +logger = getLogger(__name__) + + +@webhook.hook("pull_request") +async def on_pull_request_received(data): + try: + event = PullRequestEvent.model_validate(data) + except ValidationError: + logger.error("failed to load pull request event data", exc_info=True) + return success() + + otterdog_config = get_otterdog_config() + + if event.repository.name != otterdog_config.default_config_repo: + return success() + + if event.action in ["opened", "synchronize", "ready_for_review", "reopened"] and event.pull_request.draft is False: + + async def validate(): + await validate_pull_request( + event.organization.login, + event.installation.id, + event.pull_request, + event.repository, + otterdog_config, + ) + + 
current_app.add_background_task(validate) + + elif event.action in ["closed"] and event.pull_request.merged is True: + + async def apply(): + await apply_changes( + event.organization.login, + event.installation.id, + event.pull_request, + event.repository, + otterdog_config, + ) + + current_app.add_background_task(apply) + + return success() + + +@webhook.hook("issue_comment") +async def on_issue_comment_received(data): + try: + event = IssueCommentEvent.model_validate(data) + except ValidationError: + logger.error("failed to load issue comment event data", exc_info=True) + return success() + + otterdog_config = get_otterdog_config() + + if event.repository.name != otterdog_config.default_config_repo: + return success() + + if event.action in ["created", "edited"] and "/pull/" in event.issue.html_url: + org_id = event.organization.login + installation_id = event.installation.id + + if re.match(r"\s*/help\s*", event.comment.body) is not None: + + async def help_comment(): + await create_help_comment(org_id, installation_id, event.repository.name, event.issue.number) + + current_app.add_background_task(help_comment) + return success() + + m = re.match(r"\s*/validate(\s+info)?\s*", event.comment.body) + if m is None: + return success() + + log_level_str = m.group(1) + log_level = LogLevel.WARN + + match log_level_str: + case "info": + log_level = LogLevel.INFO + + async def validate(): + await validate_pull_request( + org_id, + installation_id, + event.issue.number, + event.repository, + otterdog_config, + log_level=log_level, + ) + + current_app.add_background_task(validate) + + return success() + + +def success() -> Response: + return Response({}, mimetype="application/json", status=200) diff --git a/otterdog/webapp/webhook/github_models.py b/otterdog/webapp/webhook/github_models.py new file mode 100644 index 00000000..eb6a2428 --- /dev/null +++ b/otterdog/webapp/webhook/github_models.py @@ -0,0 +1,144 @@ +# 
"""Data classes for events received via webhook from GitHub"""


from __future__ import annotations

from abc import ABC
from typing import Optional

from pydantic import BaseModel


class Installation(BaseModel):
    """The installation that is associated with the event."""

    id: int
    node_id: str


class Organization(BaseModel):
    """The organization that is associated with the event."""

    login: str
    id: int
    node_id: str


class Repository(BaseModel):
    """A reference to the repository."""

    id: int
    node_id: str
    name: str
    full_name: str
    private: bool
    owner: Actor
    default_branch: str


class Actor(BaseModel):
    """An actor, can be either of type 'User' or 'Organization'."""

    login: str
    id: int
    node_id: str
    type: str


class Ref(BaseModel):
    """A ref in a repository."""

    label: str
    ref: str
    sha: str
    user: Actor
    repo: Repository


class PullRequest(BaseModel):
    """Represents a pull request."""

    id: int
    node_id: str
    number: int
    state: str
    locked: bool
    title: str
    body: Optional[str]
    draft: bool
    merged: bool
    merge_commit_sha: Optional[str]
    user: Actor

    head: Ref
    base: Ref


class Comment(BaseModel):
    """Represents a comment in an issue."""

    id: int
    node_id: str
    user: Actor
    body: str
    created_at: str
    updated_at: str


class Issue(BaseModel):
    """Represents an issue"""

    number: int
    node_id: str
    title: str
    state: str
    # NOTE(review): GitHub issue payloads only carry 'draft' when the issue
    # is backed by a pull request; made optional so that comments on plain
    # issues do not fail model validation — TODO confirm with live payloads.
    draft: Optional[bool] = None
    body: Optional[str]
    html_url: str


class Event(ABC, BaseModel):
    """Base class of events"""

    action: str
    installation: Installation
    organization: Organization
    sender: Actor


class PullRequestEvent(Event):
    """A payload sent for pull request specific events."""

    number: int
    pull_request: PullRequest
    repository: Repository


class PushEvent(Event):
    """A payload sent for push events."""

    # NOTE(review): push payloads have no 'action' field, so the inherited
    # required attribute would make every push event fail validation;
    # overridden as optional — TODO confirm with live payloads.
    action: Optional[str] = None

    ref: str
    before: str
    after: str

    repository: Repository

    created: bool
    deleted: bool
    forced: bool


class IssueCommentEvent(Event):
    """A payload sent for issue comment events."""

    issue: Issue
    comment: Comment
    repository: Repository
+ """ + + self._endpoint = app.config["GITHUB_WEBHOOK_ENDPOINT"] + + secret = app.config["GITHUB_WEBHOOK_SECRET"] + if secret is not None and not isinstance(secret, bytes): + secret = secret.encode("utf-8") + self._secret = secret + + app.add_url_rule( + rule=self._endpoint, + endpoint=self._endpoint, + view_func=self._post_receive, + methods=["POST"], + ) + + def hook(self, event_type): + """ + Registers a function as a hook. Multiple hooks can be registered for a given type, but the + order in which they are invoke is unspecified. + + :param event_type: The event type this hook will be invoked for. + """ + + def decorator(func): + self._hooks[event_type].append(func) + return func + + return decorator + + async def _get_digest(self): + """Return message digest if a secret key was provided""" + + if self._secret: + return hmac.new(self._secret, await request.data, hashlib.sha1).hexdigest() + + async def _post_receive(self): + """Callback from Flask""" + + digest = await self._get_digest() + + if digest is not None: + sig_parts = _get_header("X-Hub-Signature").split("=", 1) + if not isinstance(digest, str): + digest = str(digest) + + if len(sig_parts) < 2 or sig_parts[0] != "sha1" or not hmac.compare_digest(sig_parts[1], digest): + abort(400, "Invalid signature") + + event_type = _get_header("X-Github-Event") + content_type = _get_header("content-type") + if content_type == "application/x-www-form-urlencoded": + formdata = (await request.form).to_dict(flat=True) + data = json.loads(formdata["payload"]) + elif content_type == "application/json": + data = await request.get_json() + else: + abort(415, f"Unknown content type {content_type}") + + if data is None: + abort(400, "Request body must contain data") + + self._logger.info( + "%s (%s)", + _format_event(event_type, data), + _get_header("X-Github-Delivery"), + ) + + for hook in self._hooks.get(event_type, []): + await hook(data) + + return "", 204 + + +def _get_header(key): + """Return message header""" + + try: + 
return request.headers[key] + except KeyError: + abort(400, "Missing header: " + key) + + +EVENT_DESCRIPTIONS = { + "commit_comment": "{comment[user][login]} commented on " "{comment[commit_id]} in {repository[full_name]}", + "create": "{sender[login]} created {ref_type} ({ref}) in " "{repository[full_name]}", + "delete": "{sender[login]} deleted {ref_type} ({ref}) in " "{repository[full_name]}", + "deployment": "{sender[login]} deployed {deployment[ref]} to " + "{deployment[environment]} in {repository[full_name]}", + "deployment_status": "deployment of {deployement[ref]} to " + "{deployment[environment]} " + "{deployment_status[state]} in " + "{repository[full_name]}", + "fork": "{forkee[owner][login]} forked {forkee[name]}", + "gollum": "{sender[login]} edited wiki pages in {repository[full_name]}", + "issue_comment": "{sender[login]} commented on issue #{issue[number]} " "in {repository[full_name]}", + "issues": "{sender[login]} {action} issue #{issue[number]} in " "{repository[full_name]}", + "member": "{sender[login]} {action} member {member[login]} in " "{repository[full_name]}", + "membership": "{sender[login]} {action} member {member[login]} to team " + "{team[name]} in " + "{repository[full_name]}", + "page_build": "{sender[login]} built pages in {repository[full_name]}", + "ping": "ping from {sender[login]}", + "public": "{sender[login]} publicized {repository[full_name]}", + "pull_request": "{sender[login]} {action} pull #{pull_request[number]} in " "{repository[full_name]}", + "pull_request_review": "{sender[login]} {action} {review[state]} " + "review on pull #{pull_request[number]} in " + "{repository[full_name]}", + "pull_request_review_comment": "{comment[user][login]} {action} comment " + "on pull #{pull_request[number]} in " + "{repository[full_name]}", + "push": "{pusher[name]} pushed {ref} in {repository[full_name]}", + "release": "{release[author][login]} {action} {release[tag_name]} in " "{repository[full_name]}", + "repository": 
"{sender[login]} {action} repository " "{repository[full_name]}", + "status": "{sender[login]} set {sha} status to {state} in " "{repository[full_name]}", + "team_add": "{sender[login]} added repository {repository[full_name]} to " "team {team[name]}", + "watch": "{sender[login]} {action} watch in repository " "{repository[full_name]}", +} + + +def _format_event(event_type, data): + try: + return EVENT_DESCRIPTIONS[event_type].format(**data) + except KeyError: + return event_type diff --git a/poetry.lock b/poetry.lock index 0c2acfd7..27f370fe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -162,6 +162,17 @@ files = [ dev = ["aiounittest (==1.4.1)", "attribution (==1.6.2)", "black (==23.3.0)", "coverage[toml] (==7.2.3)", "flake8 (==5.0.4)", "flake8-bugbear (==23.3.12)", "flit (==3.7.1)", "mypy (==1.2.0)", "ufmt (==2.1.0)", "usort (==1.0.6)"] docs = ["sphinx (==6.1.3)", "sphinx-mdinclude (==0.5.3)"] +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + [[package]] name = "async-timeout" version = "4.0.3" @@ -252,6 +263,17 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "blinker" +version = "1.7.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, 
+] + [[package]] name = "cattrs" version = "23.2.3" @@ -609,6 +631,28 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.11.0,<2.12.0" pyflakes = ">=3.1.0,<3.2.0" +[[package]] +name = "flask" +version = "3.0.1" +description = "A simple framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.1-py3-none-any.whl", hash = "sha256:ca631a507f6dfe6c278ae20112cea3ff54ff2216390bf8880f6b035a5354af13"}, + {file = "flask-3.0.1.tar.gz", hash = "sha256:6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + [[package]] name = "frozenlist" version = "1.4.1" @@ -793,6 +837,79 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + 
{file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "hypercorn" +version = "0.16.0" +description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" +optional = false +python-versions = ">=3.8" +files = [ + {file = "hypercorn-0.16.0-py3-none-any.whl", hash = "sha256:929e45c4acde3fbf7c58edf55336d30a009d2b4cb1f1eb96e6a515d61b663f58"}, + {file = "hypercorn-0.16.0.tar.gz", hash = "sha256:3b17d1dcf4992c1f262d9f9dd799c374125d0b9a8e40e1e2d11e2938b0adfe03"}, +] + +[package.dependencies] +h11 = "*" +h2 = ">=3.1.0" +priority = "*" +taskgroup = {version = "*", markers = "python_version < \"3.11\""} +tomli = {version = "*", markers = "python_version < \"3.11\""} +wsproto = ">=0.14.0" + +[package.extras] +docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"] +h3 = ["aioquic (>=0.9.0,<1.0)"] +trio = ["exceptiongroup (>=1.1.0)", "trio (>=0.22.0)"] +uvloop = ["uvloop"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + [[package]] name = "identify" version = "2.5.33" @@ -857,13 +974,13 @@ files = [ [[package]] name = "jinja2" -version = "3.0.3" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1174,13 +1291,13 @@ mkdocs = "*" [[package]] name = "mkdocs-material" -version = "9.5.4" +version = "9.5.5" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.4-py3-none-any.whl", hash = "sha256:efd7cc8ae03296d728da9bd38f4db8b07ab61f9738a0cbd0dfaf2a15a50e7343"}, - {file = "mkdocs_material-9.5.4.tar.gz", hash = "sha256:3d196ee67fad16b2df1a458d650a8ac1890294eaae368d26cee71bc24ad41c40"}, + {file = "mkdocs_material-9.5.5-py3-none-any.whl", hash = "sha256:ac50b2431a79a3b160fdefbba37c9132485f1a69166aba115ad49fafdbbbc5df"}, + {file = "mkdocs_material-9.5.5.tar.gz", hash = "sha256:4480d9580faf42fed0123d0465502bfc1c0c239ecc9c4d66159cf0459ea1b4ae"}, ] [package.dependencies] @@ -1198,7 +1315,7 @@ requests = ">=2.26,<3.0" [package.extras] git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2,<2.0)"] -imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=9.4,<10.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] [[package]] @@ -1467,6 +1584,17 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "priority" +version = "2.0.0" +description = "A pure-Python implementation of the HTTP/2 priority tree" +optional 
= false +python-versions = ">=3.6.1" +files = [ + {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"}, + {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, +] + [[package]] name = "pycodestyle" version = "2.11.1" @@ -1489,6 +1617,142 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "2.5.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, + {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.6" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.6" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, + {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, + {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, + {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, + {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, + {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, + {file = 
"pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, + {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, + {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, + {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, + {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, + {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, + {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, + {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, + 
{file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, + {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, + {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, + {file = 
"pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, + {file = 
"pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, + {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyee" version = "11.0.1" @@ -1612,6 +1876,31 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-decouple" +version = "3.8" +description = "Strict separation of settings from code." +optional = false +python-versions = "*" +files = [ + {file = "python-decouple-3.8.tar.gz", hash = "sha256:ba6e2657d4f376ecc46f77a3a615e058d93ba5e465c01bbe57289bfb7cce680f"}, + {file = "python_decouple-3.8-py3-none-any.whl", hash = "sha256:d0d45340815b25f4de59c974b855bb38d03151d81b037d9e3f463b0c9f8cbd66"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pyyaml" version = "6.0.1" @@ -1685,6 +1974,32 @@ files = [ [package.dependencies] pyyaml = "*" +[[package]] +name = "quart" +version = "0.19.4" +description = "A Python ASGI web microframework with the same API as Flask" +optional = false +python-versions = ">=3.8" +files = [ + {file = "quart-0.19.4-py3-none-any.whl", hash = 
"sha256:959da9371b44b6f48d952661863f8f64e68a893481ef3f2ef45b177629dc0928"}, + {file = "quart-0.19.4.tar.gz", hash = "sha256:22ff186cf164955a7bf7483ff42a739a9fad3b119041846b15dc9597ec74c85c"}, +] + +[package.dependencies] +aiofiles = "*" +blinker = ">=1.6" +click = ">=8.0.0" +flask = ">=3.0.0" +hypercorn = ">=0.11.2" +itsdangerous = "*" +jinja2 = "*" +markupsafe = "*" +werkzeug = ">=3.0.0" + +[package.extras] +docs = ["pydata_sphinx_theme"] +dotenv = ["python-dotenv"] + [[package]] name = "referencing" version = "0.32.1" @@ -1988,6 +2303,20 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "taskgroup" +version = "0.0.0a4" +description = "backport of asyncio.TaskGroup, asyncio.Runner and asyncio.timeout" +optional = false +python-versions = "*" +files = [ + {file = "taskgroup-0.0.0a4-py2.py3-none-any.whl", hash = "sha256:5c1bd0e4c06114e7a4128583ab75c987597d5378a33948a3b74c662b90f61277"}, + {file = "taskgroup-0.0.0a4.tar.gz", hash = "sha256:eb08902d221e27661950f2a0320ddf3f939f579279996f81fe30779bca3a159c"}, +] + +[package.dependencies] +exceptiongroup = "*" + [[package]] name = "tomli" version = "2.0.1" @@ -2149,6 +2478,37 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] +[[package]] +name = "werkzeug" +version = "3.0.1" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, + {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + [[package]] name = "yarl" version = "1.9.4" @@ -2255,4 +2615,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "7d79769a5f26147611058240a670216a6a10e3cee7c9c2e75f5f2d916b10d8cd" +content-hash = "eca7e7da59233839152e3aad73d2489aced3b46ef00ed27ee96460790e044a8f" diff --git a/pyproject.toml b/pyproject.toml index f24134b2..eb18733e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.10" aiofiles = "^23.2" -aiohttp = "^3.8" +aiohttp = "^3.9" aiohttp-client-cache = "^0.10" aiosqlite = "^0.19" chevron = "^0.14" @@ -47,15 +47,21 @@ colorama = "^0.4" gojsonnet = "^0.20" importlib_resources = "^5.12" jq = "^1.6" -jsonschema = "^4.19" +jsonschema = "^4.20" jwt = "^1.3" mintotp = "^0.3" -playwright = "^1.38" -requests = "^2.30" +playwright = "^1.40" +requests = "^2.31" requests-cache = "^1.1" JSONBender = "^0.9" PyNaCl = "^1.5" +[tool.poetry.group.app.dependencies] +quart = "^0.19" +python-decouple = "^3.8" +python-dotenv = "^1.0" +pydantic = "^2.5" + [tool.poetry.group.dev.dependencies] 
black = "^23.3" flake8 = "^6.0" @@ -75,14 +81,12 @@ types-chevron = "^0.14" optional = true [tool.poetry.group.docs.dependencies] -mkdocs = "^1.4" -mkdocs-material = "^9.4" +mkdocs = "^1.5" +mkdocs-material = "^9.5" mkdocs-exclude = "^1.0" -jinja2 = "3.0.3" [tool.poetry.scripts] otterdog = "otterdog.cli:cli" -otterdog-app = "otterdog.app:run" [build-system] requires = ["poetry-core"] diff --git a/tests/models/test_branch_protection_rule.py b/tests/models/test_branch_protection_rule.py index d35de8ac..1df92db2 100644 --- a/tests/models/test_branch_protection_rule.py +++ b/tests/models/test_branch_protection_rule.py @@ -75,10 +75,10 @@ def test_load_from_provider(self): assert bpr.review_dismissal_allowances == ["@netomi"] assert bpr.required_status_checks == ["any:Run CI"] - def test_to_provider(self): + async def test_to_provider(self): bpr = BranchProtectionRule.from_model_data(self.model_data) - provider_data = bpr.to_provider_data(self.org_id, self.provider) + provider_data = await bpr.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 24 assert provider_data["pattern"] == "main" @@ -88,7 +88,7 @@ def test_to_provider(self): {"appId": "any", "context": "Run CI"}, ] - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = BranchProtectionRule.from_model_data(self.model_data) other = BranchProtectionRule.from_model_data(self.model_data) @@ -96,7 +96,7 @@ def test_changes_to_provider(self): other.required_status_checks = ["eclipse-eca-validation:eclipsefdn/eca"] changes = current.get_difference_from(other) - provider_data = BranchProtectionRule.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await BranchProtectionRule.changes_to_provider(self.org_id, changes, self.provider) assert len(provider_data) == 2 assert provider_data["requiresApprovingReviews"] is True diff --git a/tests/models/test_environment.py b/tests/models/test_environment.py index 31c57ed6..f4a4a162 100644 --- 
a/tests/models/test_environment.py +++ b/tests/models/test_environment.py @@ -46,10 +46,10 @@ def test_load_from_provider(self): assert env.deployment_branch_policy == "selected" assert env.branch_policies == ["main", "develop/*"] - def test_to_provider(self): + async def test_to_provider(self): env = Environment.from_model_data(self.model_data) - provider_data = env.to_provider_data(self.org_id, self.provider) + provider_data = await env.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 5 assert provider_data["wait_timer"] == 15 diff --git a/tests/models/test_organization_settings.py b/tests/models/test_organization_settings.py index 8d67c55c..a9bfa42e 100644 --- a/tests/models/test_organization_settings.py +++ b/tests/models/test_organization_settings.py @@ -88,17 +88,17 @@ def test_load_from_provider(self): assert settings.packages_containers_internal is False assert settings.members_can_change_project_visibility is False - def test_to_provider(self): + async def test_to_provider(self): settings = OrganizationSettings.from_model_data(self.model_data) settings.description = UNSET - provider_data = settings.to_provider_data(self.org_id, self.provider) + provider_data = await settings.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 27 assert provider_data["billing_email"] == settings.billing_email - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = OrganizationSettings.from_model_data(self.model_data) other = OrganizationSettings.from_model_data(self.model_data) @@ -106,7 +106,7 @@ def test_changes_to_provider(self): other.default_repository_permission = "none" changes = current.get_difference_from(other) - provider_data = OrganizationSettings.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await OrganizationSettings.changes_to_provider(self.org_id, changes, self.provider) assert len(provider_data) == 2 assert provider_data["billing_email"] == 
current.billing_email diff --git a/tests/models/test_organization_webhook.py b/tests/models/test_organization_webhook.py index d011c77b..dbec29f4 100644 --- a/tests/models/test_organization_webhook.py +++ b/tests/models/test_organization_webhook.py @@ -45,12 +45,12 @@ def test_load_from_provider(self): assert webhook.url == "https://www.example.org" assert webhook.insecure_ssl == "0" - def test_to_provider(self): + async def test_to_provider(self): webhook = OrganizationWebhook.from_model_data(self.model_data) webhook.secret = UNSET - provider_data = webhook.to_provider_data(self.org_id, self.provider) + provider_data = await webhook.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 3 assert provider_data["active"] is True @@ -61,7 +61,7 @@ def test_to_provider(self): assert jq.compile(".config.insecure_ssl").input(provider_data).first() == "0" assert jq.compile(".config.content_type").input(provider_data).first() == "form" - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = OrganizationWebhook.from_model_data(self.model_data) other = OrganizationWebhook.from_model_data(self.model_data) @@ -69,7 +69,7 @@ def test_changes_to_provider(self): other.insecure_ssl = "1" changes = current.get_difference_from(other) - provider_data = OrganizationWebhook.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await OrganizationWebhook.changes_to_provider(self.org_id, changes, self.provider) assert len(provider_data) == 2 assert provider_data["active"] is True diff --git a/tests/models/test_repo_webhook.py b/tests/models/test_repo_webhook.py index 2a504f3e..e6ce36d9 100644 --- a/tests/models/test_repo_webhook.py +++ b/tests/models/test_repo_webhook.py @@ -45,12 +45,12 @@ def test_load_from_provider(self): assert webhook.url == "https://www.example.org" assert webhook.insecure_ssl == "0" - def test_to_provider(self): + async def test_to_provider(self): webhook = 
RepositoryWebhook.from_model_data(self.model_data) webhook.secret = UNSET - provider_data = webhook.to_provider_data(self.org_id, self.provider) + provider_data = await webhook.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 3 assert provider_data["active"] is True @@ -61,7 +61,7 @@ def test_to_provider(self): assert jq.compile(".config.insecure_ssl").input(provider_data).first() == "0" assert jq.compile(".config.content_type").input(provider_data).first() == "form" - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = RepositoryWebhook.from_model_data(self.model_data) other = RepositoryWebhook.from_model_data(self.model_data) @@ -69,7 +69,7 @@ def test_changes_to_provider(self): other.insecure_ssl = "1" changes = current.get_difference_from(other) - provider_data = RepositoryWebhook.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await RepositoryWebhook.changes_to_provider(self.org_id, changes, self.provider) assert len(provider_data) == 2 assert provider_data["active"] is True diff --git a/tests/models/test_repo_workflow_settings.py b/tests/models/test_repo_workflow_settings.py index 101df6e5..6862c365 100644 --- a/tests/models/test_repo_workflow_settings.py +++ b/tests/models/test_repo_workflow_settings.py @@ -43,9 +43,9 @@ def test_load_from_provider(self): assert workflow_settings.default_workflow_permissions == "read" assert workflow_settings.actions_can_approve_pull_request_reviews is True - def test_to_provider(self): + async def test_to_provider(self): workflow_settings = RepositoryWorkflowSettings.from_model_data(self.model_data) - provider_data = workflow_settings.to_provider_data(self.org_id, self.provider) + provider_data = await workflow_settings.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 7 assert provider_data["allowed_actions"] == "all" @@ -55,14 +55,14 @@ def test_to_provider(self): assert provider_data["patterns_allowed"] == [] 
assert provider_data["can_approve_pull_request_reviews"] is True - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = RepositoryWorkflowSettings.from_model_data(self.model_data) other = RepositoryWorkflowSettings.from_model_data(self.model_data) other.enabled = False changes = current.get_difference_from(other) - provider_data = RepositoryWorkflowSettings.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await RepositoryWorkflowSettings.changes_to_provider(self.org_id, changes, self.provider) assert len(provider_data) == 1 assert provider_data["enabled"] is True diff --git a/tests/models/test_repository.py b/tests/models/test_repository.py index 77f19c50..89010910 100644 --- a/tests/models/test_repository.py +++ b/tests/models/test_repository.py @@ -85,12 +85,12 @@ def test_load_from_provider(self): assert repo.secret_scanning_push_protection == "disabled" assert repo.dependabot_alerts_enabled is True - def test_to_provider(self): + async def test_to_provider(self): repo = Repository.from_model_data(self.model_data) repo.description = UNSET - provider_data = repo.to_provider_data(self.org_id, self.provider) + provider_data = await repo.to_provider_data(self.org_id, self.provider) assert len(provider_data) == 22 assert provider_data["name"] == "otterdog-defaults" @@ -100,7 +100,7 @@ def test_to_provider(self): jq.compile('.security_and_analysis.secret_scanning.status // ""').input(provider_data).first() == "enabled" ) - def test_changes_to_provider(self): + async def test_changes_to_provider(self): current = Repository.from_model_data(self.model_data) other = Repository.from_model_data(self.model_data) @@ -109,7 +109,7 @@ def test_changes_to_provider(self): other.secret_scanning = "disabled" changes = current.get_difference_from(other) - provider_data = Repository.changes_to_provider(self.org_id, changes, self.provider) + provider_data = await Repository.changes_to_provider(self.org_id, changes, 
self.provider) assert len(provider_data) == 3 assert provider_data["name"] == "otterdog-defaults"