From 9067e7a315c9f738777066093d51b3b6ce874286 Mon Sep 17 00:00:00 2001 From: Christoph Kuhnke Date: Wed, 8 May 2024 10:03:33 +0200 Subject: [PATCH] #14 fixture operational saas database (#27) * Added fixture waiting until SaaS database is running * Added project short tag and user name to resources in Exasol Saas with max length * replaced os.getlogin() by getpass.getuser() * Added log messages for deleting the database * Make pytest display log output of tests cases in CI build * Added sleep before deleting the database * Added log message for creating a database * Added parameter region for create_database() --- .github/workflows/checks.yml | 5 +- doc/changes/changes_0.3.0.md | 4 + doc/developer_guide/developer_guide.md | 4 +- exasol/saas/client/__init__.py | 27 ++++++ noxfile.py | 19 ++++ poetry.lock | 16 +++- pyproject.toml | 5 ++ test/integration/api_access.py | 116 ++++++++++++++++++++++--- test/integration/conftest.py | 28 +++++- test/integration/databases_test.py | 27 +++++- 10 files changed, 228 insertions(+), 23 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 9041220..03b336c 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -109,7 +109,10 @@ jobs: SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - run: poetry run nox -s coverage -- -- + PYTEST_ADDOPTS: -o log_cli=true -o log_cli_level=INFO + run: | + export PROJECT_SHORT_TAG=$(poetry run nox -s get-project-short-tag) + poetry run nox -s coverage -- -- - name: Upload Artifacts uses: actions/upload-artifact@v3 diff --git a/doc/changes/changes_0.3.0.md b/doc/changes/changes_0.3.0.md index d323609..8a5249d 100644 --- a/doc/changes/changes_0.3.0.md +++ b/doc/changes/changes_0.3.0.md @@ -8,4 +8,8 @@ This release adds integration tests for the most important calls to SaaS API. * #21: Added integration test for operation "create database" * #23: Added integration test for operation "add IP to whitelist" + +## Feature + +* #14: Added fixture waiting until SaaS database is running * #25: Fixed transitive dependencies required by generated API client diff --git a/doc/developer_guide/developer_guide.md b/doc/developer_guide/developer_guide.md index 2e29c4f..cff3122 100644 --- a/doc/developer_guide/developer_guide.md +++ b/doc/developer_guide/developer_guide.md @@ -43,8 +43,8 @@ openapi-python-client reads the JSON specification of the SaaS API and generates The easiest way is to make openapi-python-client create a dedicated file `pyproject.toml` and copy the transitive dependencies from there to SAPIPY's file `pyproject.toml`. In order to create file `pyproject.toml` -* In file `noxfile.py` you need to replace mode `update` by `generate` -* Additionally in file `openapi_config.yml` you need to specify a non-existing top-level directory as `name` and a package that does not contain slashes, e.g. +* In file `noxfile.py`, function `generate_api` you need to replace mode `update` by `generate`. +* Additionally, in file `openapi_config.yml` you need to specify a non-existing top-level directory as `project_name_override` and a package that does not contain slashes, e.g. 
```yaml project_name_override: "generate" diff --git a/exasol/saas/client/__init__.py b/exasol/saas/client/__init__.py index 6c22d66..a792713 100644 --- a/exasol/saas/client/__init__.py +++ b/exasol/saas/client/__init__.py @@ -2,4 +2,31 @@ Package openapi contains the API generated from the JSON definition. """ +from dataclasses import dataclass +from typing import Final +from datetime import datetime, timedelta +from exasol.saas.client.openapi.models.status import Status + + SAAS_HOST = "https://cloud.exasol.com" + +PROMISING_STATES = [ + Status.CREATING, + Status.RUNNING, + Status.STARTING, + Status.TOCREATE, + Status.TOSTART, +] + + +class Limits: + """ + Constants for Exasol SaaS databases. + """ + MAX_DATABASE_NAME_LENGTH: Final[int] = 20 + MAX_CLUSTER_NAME_LENGTH: Final[int] = 40 + AUTOSTOP_MIN_IDLE_TIME: Final[timedelta] = timedelta(minutes=15) + AUTOSTOP_MAX_IDLE_TIME: Final[timedelta] = timedelta(minutes=10000) + AUTOSTOP_DEFAULT_IDLE_TIME: Final[timedelta] = timedelta(minutes=120) + # If deleting a database too early, then logging and accounting could be invalid. + MIN_DATABASE_LIFETIME: Final[timedelta] = timedelta(seconds=30) diff --git a/noxfile.py b/noxfile.py index 1396c4a..bae485b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,5 +1,7 @@ import os import nox + +from pathlib import Path from nox import Session from noxconfig import PROJECT_CONFIG from exasol.saas.client import SAAS_HOST @@ -41,3 +43,20 @@ def check_api_outdated(session: Session): """ generate_api(session) session.run("git", "diff", "--exit-code") + + +@nox.session(name="get-project-short-tag", python=False) +def get_project_short_tag(session: Session): + config_file = Path("error_code_config.yml") + content = config_file.read_text() + header = False + for line in content.splitlines(): + line = line.strip() + if header: + print(line.strip().replace(":", "")) + return + if line.startswith("error-tags:"): + header = True + raise RuntimeError( + f"Could not read project short tag from file {config_file}" + ) diff --git a/poetry.lock b/poetry.lock index 625aa03..6f2bee5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1606,6 +1606,20 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "tokenize-rt" version = "5.2.0" @@ -1740,4 +1754,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8.0,<4.0" -content-hash = "ad4b180534cde9f997bdcf957befe72feec70ee062f48b8099ebfccc5869b165" +content-hash = "af45210362c425328aeef0fcce6ac508c3475f64e655fe874b45f8a48967e57c" diff --git a/pyproject.toml b/pyproject.toml index b17b8af..f9cc3be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ python = ">=3.8.0,<4.0" requests = "^2.31.0" types-requests = "^2.31.0.6" ifaddr = "^0.2.0" +tenacity = "^8.2.3" # generated by openapi-python-client httpx = ">=0.20.0,<0.28.0" attrs = ">=21.3.0" @@ -56,6 +57,10 @@ source = [ "exasol", ] +omit = [ + '*/exasol/saas/client/openapi/*', +] + [tool.coverage.report] fail_under = 15 diff --git 
a/test/integration/api_access.py b/test/integration/api_access.py index 5fd3a83..7449c06 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,22 +1,60 @@ +import getpass +import logging +import time + from typing import Iterable from contextlib import contextmanager -from datetime import datetime +from datetime import datetime, timedelta +from tenacity.wait import wait_fixed +from tenacity.stop import stop_after_delay -from exasol.saas.client import openapi +from exasol.saas.client import ( + openapi, + Limits, +) +from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( create_database, delete_database, list_databases, + get_database, ) from exasol.saas.client.openapi.api.security import ( list_allowed_i_ps, add_allowed_ip, delete_allowed_ip, ) +from tenacity import retry, TryAgain + + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.INFO) + + +def timestamp_name(project_short_tag: str | None = None) -> str: + """ + project_short_tag: Abbreviation of your project + """ + timestamp = f'{datetime.now().timestamp():.0f}' + owner = getpass.getuser() + candidate = f"{timestamp}{project_short_tag or ''}-{owner}" + return candidate[:Limits.MAX_DATABASE_NAME_LENGTH] + + +def wait_for_delete_clearance(start: datetime.time): + lifetime = datetime.now() - start + if lifetime < Limits.MIN_DATABASE_LIFETIME: + wait = Limits.MIN_DATABASE_LIFETIME - lifetime + LOG.info(f"Waiting {int(wait.seconds)} seconds" + " before deleting the database.") + time.sleep(wait.seconds) -def timestamp() -> str: - return f'{datetime.now().timestamp():.0f}' +class DatabaseStartupFailure(Exception): + """ + If a SaaS database instance during startup reports a status other than + successful. 
+ """ def create_saas_client( @@ -42,19 +80,32 @@ def __init__(self, client: openapi.Client, account_id: str): self._client = client self._account_id = account_id - def create_database(self, cluster_size: str = "XS") -> openapi.models.database.Database: + def create_database( + self, + name: str, + cluster_size: str = "XS", + region: str = "eu-central-1", + ) -> openapi.models.database.Database: + def minutes(x: timedelta) -> int: + return x.seconds // 60 + cluster_spec = openapi.models.CreateCluster( name="my-cluster", size=cluster_size, + auto_stop=openapi.models.AutoStop( + enabled=True, + idle_time=minutes(Limits.AUTOSTOP_MIN_IDLE_TIME), + ), ) + LOG.info(f"Creating database {name}") return create_database.sync( self._account_id, client=self._client, body=openapi.models.CreateDatabase( - name=f"pytest-{timestamp()}", + name=name, initial_cluster=cluster_spec, provider="aws", - region='us-east-1', + region=region, ) ) @@ -77,16 +128,57 @@ def list_database_ids(self) -> Iterable[str]: @contextmanager def database( self, + name: str, keep: bool = False, ignore_delete_failure: bool = False, ): db = None + start = datetime.now() try: - db = self.create_database() + db = self.create_database(name) yield db + wait_for_delete_clearance(start) finally: - if not keep and db: - self.delete_database(db.id, ignore_delete_failure) + if db and not keep: + LOG.info(f"Deleting database {db.name}") + response = self.delete_database(db.id, ignore_delete_failure) + if response.status_code == 200: + LOG.info(f"Successfully deleted database {db.name}.") + else: + LOG.warning(f"Ignoring status code {response.status_code}.") + elif not db: + LOG.warning("Cannot delete db None") + else: + LOG.info(f"Keeping database {db.name} as keep = {keep}") + + def get_database(self, database_id: str) -> openapi.models.database.Database: + return get_database.sync( + self._account_id, + database_id, + client=self._client, + ) + + def wait_until_running( + self, + database_id: str, + timeout: timedelta = timedelta(minutes=30), + interval: timedelta = timedelta(minutes=2), + ) -> str: + success = [ + Status.RUNNING, + ] + + @retry(wait=wait_fixed(interval), stop=stop_after_delay(timeout)) + def poll_status(): + db = self.get_database(database_id) + if db.status not in success: + print(f'status = {db.status}') + raise TryAgain + return db.status + + if poll_status() not in success: + raise DatabaseStartupFailure() + def list_allowed_ip_ids(self) -> Iterable[openapi.models.allowed_ip.AllowedIP]: ips = list_allowed_i_ps.sync( @@ -103,7 +195,7 @@ def add_allowed_ip(self, cidr_ip: str = "0.0.0.0/0") -> openapi.models.allowed_i * ::/0 = all ipv6 """ rule = openapi.models.create_allowed_ip.CreateAllowedIP( - name=f"pytest-{timestamp()}", + name=timestamp_name(), cidr_ip=cidr_ip, ) return add_allowed_ip.sync( @@ -129,5 +221,5 @@ def allowed_ip( ip = self.add_allowed_ip(cidr_ip) yield ip finally: - if not keep and ip: + if ip and not keep: self.delete_allowed_ip(ip.id, ignore_delete_failure) diff --git a/test/integration/conftest.py b/test/integration/conftest.py index b407910..bfa611a 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -1,8 +1,13 @@ import pytest import os +from pathlib import Path from exasol.saas.client import openapi -from api_access import create_saas_client, _OpenApiAccess +from api_access import ( + create_saas_client, + _OpenApiAccess, + timestamp_name, +) @pytest.fixture(scope="session") def saas_host() -> str: @@ -26,10 +31,27 @@ def api_access(saas_host, saas_pat, 
saas_account_id) -> _OpenApiAccess: @pytest.fixture(scope="session") -def saas_database(api_access) -> openapi.models.database.Database: +def saas_database(api_access, database_name) -> openapi.models.database.Database: """ Note: The SaaS instance database returned by this fixture initially will not be operational. The startup takes about 20 minutes. """ - with api_access.database() as db: + with api_access.database(database_name) as db: yield db + + +@pytest.fixture(scope="session") +def operational_saas_database_id(api_access, database_name) -> str: + with api_access.database(database_name) as db: + api_access.wait_until_running(db.id) + yield db + + +@pytest.fixture(scope="session") +def project_short_tag(): + return os.environ.get("PROJECT_SHORT_TAG") + + +@pytest.fixture +def database_name(project_short_tag): + return timestamp_name(project_short_tag) diff --git a/test/integration/databases_test.py b/test/integration/databases_test.py index 80c76d5..118c315 100644 --- a/test/integration/databases_test.py +++ b/test/integration/databases_test.py @@ -1,7 +1,13 @@ -from exasol.saas.client import openapi +import pytest +from exasol.saas.client import openapi, PROMISING_STATES +from tenacity import RetryError +from datetime import datetime, timedelta -def test_lifecycle(api_access): +from api_access import wait_for_delete_clearance + + +def test_lifecycle(api_access, database_name): """ This integration test uses the database created and provided by pytest context ``_OpenApiAccess.database()`` to verify @@ -13,14 +19,27 @@ def test_lifecycle(api_access): """ testee = api_access - with testee.database(ignore_delete_failure=True) as db: + with testee.database(database_name, ignore_delete_failure=True) as db: + start = datetime.now() # verify state and clusters of created database - assert db.status == openapi.models.Status.TOCREATE and \ + assert db.status in PROMISING_STATES and \ db.clusters.total == 1 # verify database is listed assert db.id in testee.list_database_ids() # delete database and verify database is not listed anymore + wait_for_delete_clearance(start) testee.delete_database(db.id) assert db.id not in testee.list_database_ids() + + +def test_poll(api_access, database_name): + with api_access.database(database_name) as db: + print(f'{db.status}') + with pytest.raises(RetryError): + api_access.wait_until_running( + db.id, + timeout=timedelta(seconds=3), + interval=timedelta(seconds=1), + )
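
The commit-message bullets about adding the project short tag and user name to resource names, the maximum name length, and the sleep before deleting the database are implemented by `timestamp_name()` and `wait_for_delete_clearance()` in `test/integration/api_access.py`. Below is a minimal, self-contained sketch of that naming and lifetime logic with the two `Limits` constants inlined; the printed example value is illustrative only.

```python
# Sketch of the resource naming and early-deletion guard used by the tests.
# Constants mirror Limits in exasol/saas/client/__init__.py.
import getpass
import time
from datetime import datetime, timedelta

MAX_DATABASE_NAME_LENGTH = 20                   # Limits.MAX_DATABASE_NAME_LENGTH
MIN_DATABASE_LIFETIME = timedelta(seconds=30)   # Limits.MIN_DATABASE_LIFETIME


def timestamp_name(project_short_tag: str | None = None) -> str:
    # "<unix-timestamp><short-tag>-<user>", truncated to the SaaS name limit.
    timestamp = f"{datetime.now().timestamp():.0f}"
    owner = getpass.getuser()
    return f"{timestamp}{project_short_tag or ''}-{owner}"[:MAX_DATABASE_NAME_LENGTH]


def wait_for_delete_clearance(start: datetime) -> None:
    # Deleting a database too early can invalidate logging and accounting,
    # so keep it alive for at least MIN_DATABASE_LIFETIME before deletion.
    lifetime = datetime.now() - start
    if lifetime < MIN_DATABASE_LIFETIME:
        time.sleep((MIN_DATABASE_LIFETIME - lifetime).seconds)


if __name__ == "__main__":
    print(timestamp_name("SAPIPY"))  # e.g. "1715155413SAPIPY-ci", at most 20 chars
```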
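
The CI workflow now exports `PROJECT_SHORT_TAG` from the output of the new `get-project-short-tag` nox session, which scans `error_code_config.yml` for the first key below the `error-tags:` header. The sketch below mirrors that lookup against an inlined sample; the sample layout and the `SAPIPY` tag are assumptions about the config file's shape, not copied from the repository.

```python
# Sketch of the lookup performed by the "get-project-short-tag" nox session:
# the project short tag is assumed to be the first key nested under
# "error-tags:" in error_code_config.yml (sample content is hypothetical).
SAMPLE_CONFIG = """\
error-tags:
  SAPIPY:
    highest-index: 0
"""


def read_project_short_tag(content: str) -> str:
    header_seen = False
    for line in content.splitlines():
        line = line.strip()
        if header_seen:
            return line.replace(":", "")   # e.g. "SAPIPY:" -> "SAPIPY"
        if line.startswith("error-tags:"):
            header_seen = True
    raise RuntimeError("Could not find an error-tags section")


if __name__ == "__main__":
    print(read_project_short_tag(SAMPLE_CONFIG))  # -> SAPIPY
```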
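
`_OpenApiAccess.wait_until_running()` and the `operational_saas_database_id` fixture rely on `tenacity` to poll the database status until it reaches `RUNNING` or the timeout expires, in which case `tenacity.RetryError` propagates (as exercised by `test_poll`). The following is a minimal sketch of that retry pattern with a stubbed status source in place of the SaaS API; the helper name, status strings, and timings are illustrative.

```python
# Minimal sketch of the polling pattern behind wait_until_running(), using a
# stubbed status source instead of calling get_database via the SaaS API.
from datetime import timedelta
from itertools import count

from tenacity import RetryError, TryAgain, retry
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_fixed


def wait_until(read_status, success=("running",),
               timeout: timedelta = timedelta(seconds=5),
               interval: timedelta = timedelta(seconds=1)) -> str:
    @retry(wait=wait_fixed(interval), stop=stop_after_delay(timeout))
    def poll_status() -> str:
        status = read_status()
        if status not in success:
            raise TryAgain          # keep polling until the stop condition hits
        return status

    return poll_status()            # raises RetryError once the timeout expires


if __name__ == "__main__":
    attempts = count()
    # Stub: reports "creating" twice, then "running".
    print(wait_until(lambda: "creating" if next(attempts) < 2 else "running"))

    try:
        wait_until(lambda: "creating", timeout=timedelta(seconds=2))
    except RetryError:
        print("timed out while the database was still creating")
```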