From db74636c492f031c163e3af503646da1e6948c23 Mon Sep 17 00:00:00 2001 From: Mikhail Beck Date: Thu, 23 May 2024 07:58:23 +0100 Subject: [PATCH] Refactoring/7 use new bfs interface (#9) * Add documentation build folder to .gitignore * #7 Moved to the bucketfs PathLike interface * #7 Moved to the bucketfs PathLike interface * #7 Fixe the problem with archive extension * #7 Added an integration test for SaaS * #7 Added an integration test for SaaS * #7 Disabled all integration tests but SaaS * Update checks.yml * Update checks.yml * Update checks.yml * Update checks.yml * Update checks.yml * Update checks.yml * #7 Using dummy SaaS secrets * Update checks.yml * Update ci.yml * Update pr-merge.yml * Update ci-cd.yml * Update checks.yml * #7 Fixed operational_saas_database_id fixture * #7 Made a pause before the udf validation * #7 Made a bigger pause before the udf validation * #7 Trying a new TB feature * #7 Trying a new TB feature * #7 Running tests without SaaS * Running test with Saas [run-saas-tests] * #7 Running all checks without SaaS * #7 Running all checks with SaaS [run-saas-tests] * #7 Addressed the review comments [run-saas-tests] * #7 A new connection is required [run-saas-tests] * #7 Addressed review issues [run-saas-tests] * #7 Updated dependencies [run-saas-tests] --------- Co-authored-by: Christoph Kuhnke --- .github/workflows/checks.yml | 19 +- .github/workflows/ci-cd.yml | 1 + .github/workflows/ci.yml | 1 + .github/workflows/pr-merge.yml | 1 + doc/changes/unreleased.md | 4 + .../deployment/language_container_deployer.py | 118 ++++-- .../language_container_deployer_cli.py | 49 ++- poetry.lock | 368 +++++++++++++++--- pyproject.toml | 11 +- test/integration/conftest.py | 62 +++ .../test_language_container_deployer.py | 21 +- .../test_language_container_deployer_cli.py | 4 + .../test_language_container_deployer_saas.py | 67 ++++ ...st_language_container_deployer_saas_cli.py | 103 +++++ test/integration/test_placeholder.py | 2 - .../test_language_container_deployer.py | 59 +-- 16 files changed, 743 insertions(+), 147 deletions(-) create mode 100644 test/integration/test_language_container_deployer_saas.py create mode 100644 test/integration/test_language_container_deployer_saas_cli.py delete mode 100644 test/integration/test_placeholder.py diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index adfb429..c156fb7 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -3,12 +3,12 @@ name: Checks on: workflow_call jobs: - version-check-job: name: Version Check runs-on: ubuntu-latest steps: + - name: SCM Checkout uses: actions/checkout@v3 with: @@ -103,11 +103,26 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Calculate Test Coverage + - name: Calculate Test Coverage with SaaS + if: "contains(github.event.head_commit.message, '[run-saas-tests]') && (matrix.python-version == '3.10')" + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} run: poetry run nox -s coverage -- -- --db-version ${{ matrix.exasol-version }} + - name: Calculate Test Coverage without SaaS + if: "!contains(github.event.head_commit.message, '[run-saas-tests]') || (matrix.python-version != '3.10')" + run: poetry run nox -s coverage -- -- -m "not saas" --db-version ${{ matrix.exasol-version }} + - name: Upload Artifacts uses: actions/upload-artifact@v3 with: name: .coverage path: .coverage + + - 
name: Fail if SaaS tests are not activated + if: "!contains(github.event.head_commit.message, '[run-saas-tests]') && (matrix.python-version == '3.10')" + run: | + echo "Failed because the SaaS tests are not activated" + exit 1 diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 5421955..0c9bb23 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -15,6 +15,7 @@ jobs: name: Checks needs: [ check-tag-version-job ] uses: ./.github/workflows/checks.yml + secrets: inherit cd-job: name: Continues Delivery diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f0dc9d1..8e5dc15 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,6 +18,7 @@ jobs: ci-job: name: Checks uses: ./.github/workflows/checks.yml + secrets: inherit metrics: needs: [ ci-job ] diff --git a/.github/workflows/pr-merge.yml b/.github/workflows/pr-merge.yml index 7a4fc86..99139d6 100644 --- a/.github/workflows/pr-merge.yml +++ b/.github/workflows/pr-merge.yml @@ -11,6 +11,7 @@ jobs: ci-job: name: Checks uses: ./.github/workflows/checks.yml + secrets: inherit publish-docs: name: Publish Documentation diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index cbfad72..22eec26 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -3,3 +3,7 @@ ## Added - Integration tests for the LanguageContainerDeployer class and the cli function - language_container_deployer_main. + + - Started using the bucket-fs PathLike interface. + + - Added support for the SaaS backend diff --git a/exasol/python_extension_common/deployment/language_container_deployer.py b/exasol/python_extension_common/deployment/language_container_deployer.py index 048ca66..abd52ab 100644 --- a/exasol/python_extension_common/deployment/language_container_deployer.py +++ b/exasol/python_extension_common/deployment/language_container_deployer.py @@ -7,27 +7,13 @@ import ssl import requests # type: ignore import pyexasol # type: ignore -from exasol_bucketfs_utils_python.bucketfs_location import BucketFSLocation # type: ignore -from exasol_bucketfs_utils_python.bucket_config import BucketConfig, BucketFSConfig # type: ignore -from exasol_bucketfs_utils_python.bucketfs_connection_config import BucketFSConnectionConfig # type: ignore +import exasol.bucketfs as bfs # type: ignore +from exasol.saas.client.api_access import get_connection_params # type: ignore -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) -def create_bucketfs_location( - bucketfs_name: str, bucketfs_host: str, bucketfs_port: int, - bucketfs_use_https: bool, bucketfs_user: str, bucketfs_password: str, - bucket: str, path_in_bucket: str) -> BucketFSLocation: - _bucketfs_connection = BucketFSConnectionConfig( - host=bucketfs_host, port=bucketfs_port, user=bucketfs_user, - pwd=bucketfs_password, is_https=bucketfs_use_https) - _bucketfs_config = BucketFSConfig( - bucketfs_name=bucketfs_name, connection_config=_bucketfs_connection) - _bucket_config = BucketConfig( - bucket_name=bucket, bucketfs_config=_bucketfs_config) - return BucketFSLocation( - bucket_config=_bucket_config, - base_path=PurePosixPath(path_in_bucket)) +ARCHIVE_EXTENSIONS = [".tar.gz", ".tgz", ".zip", ".tar"] def get_websocket_sslopt(use_ssl_cert_validation: bool = True, @@ -87,14 +73,32 @@ def get_language_settings(pyexasol_conn: pyexasol.ExaConnection, alter_type: Lan return result[0][0] +def get_udf_path(bucket_base_path: bfs.path.PathLike, bucket_file: str) -> PurePosixPath: + """ + Returns the path of the
specified file in a bucket, as it's seen from a UDF. For known types of archives, the archive extension is removed from the file name. + + bucket_base_path - Base directory in the bucket + bucket_file - File path in the bucket, relative to the base directory. + """ + + for extension in ARCHIVE_EXTENSIONS: + if bucket_file.endswith(extension): + bucket_file = bucket_file[: -len(extension)] + break + + file_path = bucket_base_path / bucket_file + return PurePosixPath(file_path.as_udf_path()) + + class LanguageContainerDeployer: def __init__(self, pyexasol_connection: pyexasol.ExaConnection, language_alias: str, - bucketfs_location: BucketFSLocation) -> None: + bucketfs_path: bfs.path.PathLike) -> None: - self._bucketfs_location = bucketfs_location + self._bucketfs_path = bucketfs_path self._language_alias = language_alias self._pyexasol_conn = pyexasol_connection logger.debug("Init %s", LanguageContainerDeployer.__name__) @@ -177,8 +181,8 @@ def upload_container(self, container_file: Path, raise RuntimeError(f"Container file {container_file} " f"is not a file.") with open(container_file, "br") as f: - self._bucketfs_location.upload_fileobj_to_bucketfs( - fileobj=f, bucket_file_path=bucket_file_path) + file_path = self._bucketfs_path / bucket_file_path + file_path.write(f) logging.debug("Container is uploaded to bucketfs") def activate_container(self, bucket_file_path: str, @@ -209,7 +213,7 @@ def generate_activation_command(self, bucket_file_path: str, allow_override - If True the activation of a language container with the same alias will be overriden, otherwise a RuntimeException will be thrown. """ - path_in_udf = self._bucketfs_location.generate_bucket_udf_path(bucket_file_path) + path_in_udf = get_udf_path(self._bucketfs_path, bucket_file_path) new_settings = \ self._update_previous_language_settings(alter_type, allow_override, path_in_udf) alter_command = \ @@ -233,7 +237,7 @@ def get_language_definition(self, bucket_file_path: str): bucket_file_path - Path within the designated bucket where the container is uploaded. """ - path_in_udf = self._bucketfs_location.generate_bucket_udf_path(bucket_file_path) + path_in_udf = get_udf_path(self._bucketfs_path, bucket_file_path) result = self._generate_new_language_settings(path_in_udf=path_in_udf, prev_lang_aliases=[]) return result @@ -265,27 +269,61 @@ def _check_if_requested_language_alias_already_exists( raise RuntimeError(warning_message) @classmethod - def create(cls, bucketfs_name: str, bucketfs_host: str, bucketfs_port: int, - bucketfs_use_https: bool, bucketfs_user: str, - bucketfs_password: str, bucket: str, path_in_bucket: str, - dsn: str, db_user: str, db_password: str, language_alias: str, + def create(cls, + language_alias: str, dsn: Optional[str] = None, + db_user: Optional[str] = None, db_password: Optional[str] = None, + bucketfs_host: Optional[str] = None, bucketfs_port: Optional[int] = None, + bucketfs_name: Optional[str] = None, bucket: Optional[str] = None, + bucketfs_user: Optional[str] = None, bucketfs_password: Optional[str] = None, + bucketfs_use_https: bool = True, + saas_url: Optional[str] = None, + saas_account_id: Optional[str] = None, saas_database_id: Optional[str] = None, + saas_token: Optional[str] = None, + path_in_bucket: str = '', use_ssl_cert_validation: bool = True, ssl_trusted_ca: Optional[str] = None, ssl_client_certificate: Optional[str] = None, ssl_private_key: Optional[str] = None) -> "LanguageContainerDeployer": + # Infer where the database is - on-prem or SaaS.
+ if all((dsn, db_user, db_password, bucketfs_host, bucketfs_port, + bucketfs_name, bucket, bucketfs_user, bucketfs_password)): + connection_params = {'dsn': dsn, 'user': db_user, 'password': db_password} + bfs_url = (f"{'https' if bucketfs_use_https else 'http'}://" + f"{bucketfs_host}:{bucketfs_port}") + verify = ssl_trusted_ca or use_ssl_cert_validation + bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.onprem, + url=bfs_url, + username=bucketfs_user, + password=bucketfs_password, + service_name=bucketfs_name, + bucket_name=bucket, + verify=verify, + path=path_in_bucket) + + elif all((saas_url, saas_account_id, saas_database_id, saas_token)): + connection_params = get_connection_params(host=saas_url, + account_id=saas_account_id, + database_id=saas_database_id, + pat=saas_token) + bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_url, + account_id=saas_account_id, + database_id=saas_database_id, + pat=saas_token, + path=path_in_bucket) + else: + raise ValueError('Incomplete parameter list. ' + 'Please either provide the parameters [dsn, db_user, ' + 'db_password, bucketfs_host, bucketfs_port, bucketfs_name, ' + 'bucket, bucketfs_user, bucketfs_password] for an On-Prem ' + 'database or [saas_url, saas_account_id, saas_database_id, ' + 'saas_token] for a SaaS database.') + websocket_sslopt = get_websocket_sslopt(use_ssl_cert_validation, ssl_trusted_ca, ssl_client_certificate, ssl_private_key) - pyexasol_conn = pyexasol.connect( - dsn=dsn, - user=db_user, - password=db_password, - encryption=True, - websocket_sslopt=websocket_sslopt - ) - - bucketfs_location = create_bucketfs_location( - bucketfs_name, bucketfs_host, bucketfs_port, bucketfs_use_https, - bucketfs_user, bucketfs_password, bucket, path_in_bucket) + pyexasol_conn = pyexasol.connect(**connection_params, + encryption=True, + websocket_sslopt=websocket_sslopt) - return cls(pyexasol_conn, language_alias, bucketfs_location) + return cls(pyexasol_conn, language_alias, bucketfs_path) diff --git a/exasol/python_extension_common/deployment/language_container_deployer_cli.py b/exasol/python_extension_common/deployment/language_container_deployer_cli.py index 2fc1c65..6a91c38 100644 --- a/exasol/python_extension_common/deployment/language_container_deployer_cli.py +++ b/exasol/python_extension_common/deployment/language_container_deployer_cli.py @@ -8,6 +8,9 @@ DB_PASSWORD_ENVIRONMENT_VARIABLE = "DB_PASSWORD" BUCKETFS_PASSWORD_ENVIRONMENT_VARIABLE = "BUCKETFS_PASSWORD" +SAAS_ACCOUNT_ID_ENVIRONMENT_VARIABLE = "SAAS_ACCOUNT_ID" +SAAS_DATABASE_ID_ENVIRONMENT_VARIABLE = "SAAS_DATABASE_ID" +SAAS_TOKEN_ENVIRONMENT_VARIABLE = "SAAS_TOKEN" class CustomizableParameters(Enum): @@ -80,24 +83,32 @@ def clear_formatters(self): @click.command(name="language-container") -@click.option('--bucketfs-name', type=str, required=True) -@click.option('--bucketfs-host', type=str, required=True) -@click.option('--bucketfs-port', type=int, required=True) +@click.option('--bucketfs-name', type=str) +@click.option('--bucketfs-host', type=str) +@click.option('--bucketfs-port', type=int) @click.option('--bucketfs-use-https', type=bool, default=False) -@click.option('--bucketfs-user', type=str, required=True, default="w") -@click.option('--bucketfs-password', prompt='bucketFS password', hide_input=True, - default=lambda: os.environ.get(BUCKETFS_PASSWORD_ENVIRONMENT_VARIABLE, "")) -@click.option('--bucket', type=str, required=True) -@click.option('--path-in-bucket', type=str, required=True, default=None)
+@click.option('--bucketfs-user', type=str) +@click.option('--bucketfs-password', type=str, + default=lambda: os.environ.get(BUCKETFS_PASSWORD_ENVIRONMENT_VARIABLE)) +@click.option('--bucket', type=str) +@click.option('--saas-url', type=str, + default='https://cloud.exasol.com') +@click.option('--saas-account-id', type=str, + default=lambda: os.environ.get(SAAS_ACCOUNT_ID_ENVIRONMENT_VARIABLE)) +@click.option('--saas-database-id', type=str, + default=lambda: os.environ.get(SAAS_DATABASE_ID_ENVIRONMENT_VARIABLE)) +@click.option('--saas-token', type=str, + default=lambda: os.environ.get(SAAS_TOKEN_ENVIRONMENT_VARIABLE)) +@click.option('--path-in-bucket', type=str) @click.option('--container-file', - type=click.Path(exists=True, file_okay=True), default=None) -@click.option('--version', type=str, default=None, expose_value=False, + type=click.Path(exists=True, file_okay=True)) +@click.option('--version', type=str, expose_value=False, callback=slc_parameter_formatters) -@click.option('--dsn', type=str, required=True) -@click.option('--db-user', type=str, required=True) -@click.option('--db-pass', prompt='db password', hide_input=True, - default=lambda: os.environ.get(DB_PASSWORD_ENVIRONMENT_VARIABLE, "")) -@click.option('--language-alias', type=str, default="PYTHON3_TE") +@click.option('--dsn', type=str) +@click.option('--db-user', type=str) +@click.option('--db-pass', + default=lambda: os.environ.get(DB_PASSWORD_ENVIRONMENT_VARIABLE)) +@click.option('--language-alias', type=str, default="PYTHON3_EXT") @click.option('--ssl-cert-path', type=str, default="") @click.option('--ssl-client-cert-path', type=str, default="") @click.option('--ssl-client-private-key', type=str, default="") @@ -113,6 +124,10 @@ def language_container_deployer_main( bucketfs_user: str, bucketfs_password: str, bucket: str, + saas_url: str, + saas_account_id: str, + saas_database_id: str, + saas_token: str, path_in_bucket: str, container_file: str, dsn: str, @@ -137,6 +152,10 @@ def language_container_deployer_main( bucketfs_user=bucketfs_user, bucketfs_password=bucketfs_password, bucket=bucket, + saas_url=saas_url, + saas_account_id=saas_account_id, + saas_database_id=saas_database_id, + saas_token=saas_token, path_in_bucket=path_in_bucket, dsn=dsn, db_user=db_user, diff --git a/poetry.lock b/poetry.lock index f9ed7df..db3d654 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,35 +11,75 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = 
"argcomplete" -version = "2.1.2" +version = "3.3.0" description = "Bash tab completion for argparse" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "argcomplete-2.1.2-py3-none-any.whl", hash = "sha256:4ba9cdaa28c361d251edce884cd50b4b1215d65cdc881bd204426cdde9f52731"}, - {file = "argcomplete-2.1.2.tar.gz", hash = "sha256:fc82ef070c607b1559b5c720529d63b54d9dcf2dcfc2632b10e6372314a34457"}, + {file = "argcomplete-3.3.0-py3-none-any.whl", hash = "sha256:c168c3723482c031df3c207d4ba8fa702717ccb9fc0bfe4117166c1f537b4a54"}, + {file = "argcomplete-3.3.0.tar.gz", hash = "sha256:fd03ff4a5b9e6580569d34b273f741e85cd9e072f3feeeee3eba4891c70eda62"}, ] [package.extras] -lint = ["flake8", "mypy"] -test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "astroid" -version = "3.2.0" +version = "3.2.2" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.0-py3-none-any.whl", hash = "sha256:16ee8ca5c75ac828783028cc1f967777f0e507c6886a295ad143e0f405b975a2"}, - {file = "astroid-3.2.0.tar.gz", hash = "sha256:f7f829f8506ade59f1b3c6c93d8fac5b1ebc721685fa9af23e9794daf1d450a3"}, + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, ] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "babel" version = "2.15.0" @@ -596,16 +636,19 @@ files = [ [[package]] name = "exasol-bucketfs" -version = "0.9.0" +version = "0.10.0" description = "BucketFS utilities for the Python programming language" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "exasol_bucketfs-0.9.0-py3-none-any.whl", hash = "sha256:7264ff60a87ecd709595babf90de99bf46e1e81b39041069c6008c4393eae8a8"}, - {file = "exasol_bucketfs-0.9.0.tar.gz", hash = "sha256:209689248cd0212c92f97555adac0a0a1a741346e33abf277341a3f9b3c80335"}, + {file = "exasol_bucketfs-0.10.0-py3-none-any.whl", hash = "sha256:4f5aa81c31c5e03f19daa04d8b455ed09740f9e82bc53f3f9cb47db025146625"}, + {file = "exasol_bucketfs-0.10.0.tar.gz", hash = "sha256:033ee923728037af4d7771d9c6855e9eed2389d842c98a8456f937c917c395f8"}, ] [package.dependencies] +attrs = ">=23.2.0" +exasol-saas-api = ">=0.3.0" +httpx = ">=0.27.0" joblib = ">=1.0.1" requests = ">=2.24.0" typeguard = "4.0.0" @@ -642,6 +685,26 @@ requests = ">=2.21.0" simplejson = 
">=3.16.0" "stopwatch.py" = ">=1.0.0" +[[package]] +name = "exasol-saas-api" +version = "0.6.0" +description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" +optional = false +python-versions = "<4.0,>=3.8.0" +files = [ + {file = "exasol_saas_api-0.6.0-py3-none-any.whl", hash = "sha256:d6ff6501e10e97352459cd853f47053ed3affcc6d7c733debbe80b3f8c709aaa"}, + {file = "exasol_saas_api-0.6.0.tar.gz", hash = "sha256:195ad5aaf15be270838e08b2e4a9fddb7981faf33ee00ed68042c5707d90612f"}, +] + +[package.dependencies] +attrs = ">=21.3.0" +httpx = ">=0.20.0,<0.28.0" +ifaddr = ">=0.2.0,<0.3.0" +python-dateutil = ">=2.8.0,<3.0.0" +requests = ">=2.31.0,<3.0.0" +tenacity = ">=8.2.3,<9.0.0" +types-requests = ">=2.31.0.6,<3.0.0.0" + [[package]] name = "exasol-script-languages-container-tool" version = "0.18.3" @@ -663,30 +726,34 @@ networkx = "2.8.2" [[package]] name = "exasol-toolbox" -version = "0.8.0" +version = "0.12.0" description = "" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "exasol_toolbox-0.8.0-py3-none-any.whl", hash = "sha256:519c9d738df097d52ef82ab9631dd5dd29d94c82ee46114e4603e9ae0bab8afc"}, - {file = "exasol_toolbox-0.8.0.tar.gz", hash = "sha256:6a4c5427e5bd972c5b70b9f5dbb0f50768065959a4905e4a9b09355ce1361042"}, + {file = "exasol_toolbox-0.12.0-py3-none-any.whl", hash = "sha256:663ef58c6cebab202ec180159564bac903b99d5d373f42149e5b990e2a390c8e"}, + {file = "exasol_toolbox-0.12.0.tar.gz", hash = "sha256:1529bf533d1f4ae7288d750b91dfabe8aaf0dcd6c7919540259780caef2b4794"}, ] [package.dependencies] black = ">=23.1.0,<24.0.0" coverage = ">=6.4.4,<8.0.0" -furo = ">=2022.9.15,<2023.0.0" +furo = ">=2022.9.15" importlib-resources = ">=5.12.0" isort = ">=5.12.0,<6.0.0" mypy = ">=0.971" -nox = ">=2022.8.7,<2023.0.0" +myst-parser = ">=2.0.0,<4" +nox = ">=2022.8.7" +pluggy = ">=1.5.0,<2.0.0" pre-commit = ">=3.1.1,<4.0.0" -prysk = ">=0.15.1" +prysk = {version = ">=0.17.0,<0.18.0", extras = ["pytest-plugin"]} pylint = ">=2.15.4" pytest = ">=7.2.2,<8.0.0" pyupgrade = ">=2.38.2,<4.0.0" +shibuya = ">=2024.5.14,<2025.0.0" sphinx = ">=5.3,<7.0" sphinx-copybutton = ">=0.5.0,<0.6.0" +sphinx-design = ">=0.5.0,<0.6.0" typer = {version = ">=0.7.0", extras = ["all"]} [[package]] @@ -741,20 +808,20 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "furo" -version = "2022.12.7" +version = "2024.5.6" description = "A clean customisable Sphinx documentation theme." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "furo-2022.12.7-py3-none-any.whl", hash = "sha256:7cb76c12a25ef65db85ab0743df907573d03027a33631f17d267e598ebb191f7"}, - {file = "furo-2022.12.7.tar.gz", hash = "sha256:d8008f8efbe7587a97ba533c8b2df1f9c21ee9b3e5cad0d27f61193d38b1a986"}, + {file = "furo-2024.5.6-py3-none-any.whl", hash = "sha256:490a00d08c0a37ecc90de03ae9227e8eb5d6f7f750edf9807f398a2bdf2358de"}, + {file = "furo-2024.5.6.tar.gz", hash = "sha256:81f205a6605ebccbb883350432b4831c0196dd3d1bc92f61e1f459045b3d2b0b"}, ] [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" -sphinx = ">=5.0,<7.0" -sphinx-basic-ng = "*" +sphinx = ">=6.0,<8.0" +sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "gitdb" @@ -788,6 +855,62 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "humanfriendly" version = "10.0" @@ -827,6 +950,17 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "ifaddr" +version = "0.2.0" +description = "Cross-platform network interface and IP address enumeration library" +optional = false +python-versions = "*" +files = [ + {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, + {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, +] + [[package]] name = "imagesize" version = "1.4.1" @@ -968,12 +1102,12 @@ files = [ [[package]] name = "luigi" -version = "3.5.0" +version = "3.5.1" description = "Workflow mgmgt + task scheduling + dependency resolution." optional = false python-versions = "*" files = [ - {file = "luigi-3.5.0.tar.gz", hash = "sha256:d3ede04966655c13bc4f473f6390268c62e83c4c4540d78936c4f12496e4f128"}, + {file = "luigi-3.5.1.tar.gz", hash = "sha256:fc790b2747515dd19c673efbb8e4c9ace5f4c5cdc31f8e7f93dc667deb2ec6c8"}, ] [package.dependencies] @@ -1091,6 +1225,25 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mdit-py-plugins" +version = "0.4.1" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, + {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "mdurl" version = "0.1.2" @@ -1160,6 +1313,32 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "myst-parser" +version = "3.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, +] + +[package.dependencies] +docutils = ">=0.18,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme 
(>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + [[package]] name = "netaddr" version = "1.2.1" @@ -1208,23 +1387,25 @@ setuptools = "*" [[package]] name = "nox" -version = "2022.11.21" +version = "2024.4.15" description = "Flexible test automation." optional = false python-versions = ">=3.7" files = [ - {file = "nox-2022.11.21-py3-none-any.whl", hash = "sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb"}, - {file = "nox-2022.11.21.tar.gz", hash = "sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684"}, + {file = "nox-2024.4.15-py3-none-any.whl", hash = "sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565"}, + {file = "nox-2024.4.15.tar.gz", hash = "sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f"}, ] [package.dependencies] -argcomplete = ">=1.9.4,<3.0" +argcomplete = ">=1.9.4,<4.0" colorlog = ">=2.6.1,<7.0.0" packaging = ">=20.9" -virtualenv = ">=14" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.14.1" [package.extras] tox-to-nox = ["jinja2", "tox"] +uv = ["uv (>=0.1.6)"] [[package]] name = "packaging" @@ -1271,13 +1452,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.1" +version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] @@ -1339,16 +1520,17 @@ virtualenv = ">=20.10.0" [[package]] name = "prysk" -version = "0.20.0" +version = "0.17.0" description = "Functional tests for command line applications" optional = false -python-versions = "<4.0.0,>=3.8" +python-versions = ">=3.8,<4.0.0" files = [ - {file = "prysk-0.20.0-py3-none-any.whl", hash = "sha256:3758f59febe1ff27710c8ba69a8edad42286050d041ed8df519fc4bbeea41133"}, - {file = "prysk-0.20.0.tar.gz", hash = "sha256:3499d24c9c8d534754d3915218cb2ab59cf59a8d6f37acfb68dc582650e67e33"}, + {file = "prysk-0.17.0-py3-none-any.whl", hash = "sha256:c2e0ce69ede821e5a7e03f576c51e2a35000c570f6e22cf7c13daec1b3978832"}, + {file = "prysk-0.17.0.tar.gz", hash = "sha256:0a500bb9ff742eca878d5802bad9fcfd7ba1c6bbae64b2a2ff96bff94d4f8ad8"}, ] [package.dependencies] +pytest-prysk = {version = ">=0.2.0,<0.3.0", optional = true, markers = "extra == \"pytest-plugin\""} rich = ">=13.3.1,<14.0.0" [package.extras] @@ -1435,17 +1617,17 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.2.0" +version = "3.2.2" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.0-py3-none-any.whl", hash = "sha256:9f20c05398520474dac03d7abb21ab93181f91d4c110e1e0b32bc0d016c34fa4"}, - {file = "pylint-3.2.0.tar.gz", hash = "sha256:ad8baf17c8ea5502f23ae38d7c1b7ec78bd865ce34af9a0b986282e2611a8ff2"}, + {file = "pylint-3.2.2-py3-none-any.whl", hash = "sha256:3f8788ab20bb8383e06dd2233e50f8e08949cfd9574804564803441a4946eab4"}, + {file = "pylint-3.2.2.tar.gz", hash = "sha256:d068ca1dfd735fb92a07d33cb8f288adc0f6bc1287a139ca2425366f7cbe38f8"}, ] [package.dependencies] -astroid = ">=3.2.0,<=3.3.0-dev0" +astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1554,6 +1736,21 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-prysk" +version = "0.2.0" +description = "Pytest plugin for prysk" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pytest_prysk-0.2.0-py3-none-any.whl", hash = "sha256:3180a9d3a6634e6e70107b2eed2a6a7420630b14ba2036598ef690f9b71be79f"}, + {file = "pytest_prysk-0.2.0.tar.gz", hash = "sha256:488d1f77e35beec9cad13e11368dcc5d09555ec31a4d6a3f9d901e78bbeeb2d1"}, +] + +[package.dependencies] +prysk = ">=0.15.0" +pytest = ">=7.3.2,<8.0.0" + [[package]] name = "python-daemon" version = "3.0.1" @@ -1751,19 +1948,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = 
"setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shellingham" @@ -1776,6 +1972,20 @@ files = [ {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, ] +[[package]] +name = "shibuya" +version = "2024.5.15" +description = "A clean, responsive, and customizable Sphinx documentation theme with light/dark mode." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "shibuya-2024.5.15-py3-none-any.whl", hash = "sha256:85a2338b6a900ade614d1f15533604672a9e55826ecc86617090b25fb4f05f50"}, + {file = "shibuya-2024.5.15.tar.gz", hash = "sha256:4053a79f97debf07de154812681aa86639c9eaaa845fa87f85611ac82b8f6019"}, +] + +[package.dependencies] +Sphinx = "*" + [[package]] name = "simplejson" version = "3.19.2" @@ -1905,6 +2115,17 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1997,6 +2218,29 @@ sphinx = ">=1.8" code-style = ["pre-commit (==2.12.1)"] rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] +[[package]] +name = "sphinx-design" +version = "0.5.0" +description = "A sphinx extension for designing beautiful, view size responsive web components." +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, + {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, +] + +[package.dependencies] +sphinx = ">=5,<8" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=1,<3)"] +testing = ["myst-parser (>=1,<3)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2023.7.0,<2023.8.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.13.0,<0.14.0)"] +theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"] +theme-sbt = ["sphinx-book-theme (>=1.0,<2.0)"] + [[package]] name = "sphinxcontrib-applehelp" version = "1.0.4" @@ -2201,6 +2445,20 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-requests" +version = "2.32.0.20240521" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20240521.tar.gz", hash = "sha256:c5c4a0ae95aad51f1bf6dae9eed04a78f7f2575d4b171da37b622e08b93eb5d3"}, + {file = "types_requests-2.32.0.20240521-py3-none-any.whl", hash = "sha256:ab728ba43ffb073db31f21202ecb97db8753ded4a9dc49cb480d8a5350c5c421"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.11.0" @@ -2346,20 +2604,20 @@ files = [ [[package]] name = "zipp" -version = "3.18.1" +version = "3.18.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.18.2-py3-none-any.whl", hash = "sha256:dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e"}, + {file = "zipp-3.18.2.tar.gz", hash = "sha256:6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059"}, ] [package.extras] docs = ["furo", 
"jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.0,<4.0" -content-hash = "8c6e24ab681240b5efbb27467151b8c3eef1568fe597db5b7c1e6971c0c9e78c" +content-hash = "6dbd36baea42a138ca5cd8324193a426b5f8abce16ae802ab0ac9906f736f9e7" diff --git a/pyproject.toml b/pyproject.toml index 34501d1..979ed2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,12 +10,14 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.8.0,<4.0" pyexasol = "^0.25.0" -exasol-bucketfs = "^0.9.0" +exasol-bucketfs = ">=0.10.0" click = "^8.0.4" +exasol-saas-api = ">=0.6.0" +requests = "<2.32.0" [tool.poetry.group.dev.dependencies] pytest = "^7.2.0" -exasol-toolbox = "^0.8.0" +exasol-toolbox = ">=0.12.0" exasol-script-languages-container-tool = "^0.18.2" [build-system] @@ -62,3 +64,8 @@ module = [ ] ignore_errors = true ignore_missing_imports = true + +[tool.pytest.ini_options] +markers = [ + "saas: integration test that creates a db in SaaS.", +] diff --git a/test/integration/conftest.py b/test/integration/conftest.py index 52eada4..d5ecfa3 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -1,7 +1,22 @@ +from __future__ import annotations +from typing import Any +import os import pytest import click import requests +from exasol.saas.client.api_access import ( + create_saas_client, + timestamp_name, + OpenApiAccess, + get_connection_params +) +from exasol.saas.client.openapi.models import CreateAllowedIP +from exasol.saas.client.openapi.api.security.add_allowed_ip import sync as add_allowed_ip +from exasol.saas.client.openapi.api.security.delete_allowed_ip import sync_detailed as delete_allowed_ip +from exasol.saas.client.openapi.api.clusters.list_clusters import sync as list_clusters +from exasol.saas.client.openapi.api.clusters.get_cluster_connection import sync as get_cluster_connection + from exasol.python_extension_common.deployment.language_container_deployer_cli import ( language_container_deployer_main, slc_parameter_formatters, CustomizableParameters) @@ -53,3 +68,50 @@ def container_path(tmpdir_factory, container_url, container_name) -> str: with open(slc_path, 'wb') as f: f.write(response.content) return slc_path + + +def _env(var: str) -> str: + result = os.environ.get(var) + if result: + return result + raise RuntimeError(f"Environment variable {var} is empty.") + + +@pytest.fixture(scope="session") +def saas_host() -> str: + return _env("SAAS_HOST") + + +@pytest.fixture(scope="session") +def saas_token() -> str: + return _env("SAAS_PAT") + + +@pytest.fixture(scope="session") +def saas_account_id() -> str: + return _env("SAAS_ACCOUNT_ID") + + +@pytest.fixture(scope="session") +def api_access(saas_host, saas_token, saas_account_id) -> OpenApiAccess: + with create_saas_client(saas_host, saas_token) as client: + yield OpenApiAccess(client, saas_account_id) + + +@pytest.fixture(scope="session") +def operational_saas_database_id(api_access) -> str: + database_name = timestamp_name('PEC') + with 
api_access.database(database_name) as db: + api_access.wait_until_running(db.id) + yield db.id + + +@pytest.fixture(scope="session") +def saas_connection_params(saas_host, saas_token, saas_account_id, operational_saas_database_id, + api_access) -> dict[str, Any]: + with api_access.allowed_ip(): + connection_params = get_connection_params(host=saas_host, + account_id=saas_account_id, + database_id=operational_saas_database_id, + pat=saas_token) + yield connection_params diff --git a/test/integration/test_language_container_deployer.py b/test/integration/test_language_container_deployer.py index a4588ce..5aa0237 100644 --- a/test/integration/test_language_container_deployer.py +++ b/test/integration/test_language_container_deployer.py @@ -6,7 +6,7 @@ from pyexasol import ExaConnection from pytest_itde import config -from exasol_bucketfs_utils_python.bucketfs_factory import BucketFSFactory +import exasol.bucketfs as bfs from exasol.python_extension_common.deployment.language_container_deployer import ( LanguageContainerDeployer, LanguageActivationLevel) @@ -21,14 +21,17 @@ def create_container_deployer(language_alias: str, pyexasol_connection: ExaConnection, bucketfs_config: config.BucketFs) -> LanguageContainerDeployer: - bucket_fs_factory = BucketFSFactory() - bucketfs_location = bucket_fs_factory.create_bucketfs_location( - url=f"{bucketfs_config.url}/default/container;bfsdefault", - user=f"{bucketfs_config.username}", - pwd=f"{bucketfs_config.password}", - base_path=None) + + bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.onprem, + url=bucketfs_config.url, + username=bucketfs_config.username, + password=bucketfs_config.password, + service_name="bfsdefault", + bucket_name="default", + verify=False, + path="container") return LanguageContainerDeployer( - pyexasol_connection, language_alias, bucketfs_location) + pyexasol_connection, language_alias, bucketfs_path) def test_language_container_deployer( @@ -46,6 +49,8 @@ def test_language_container_deployer( pyexasol_connection=pyexasol_connection, bucketfs_config=itde.bucketfs) deployer.run(container_file=Path(container_path), alter_system=True, allow_override=True) + # In order to check that the uploaded container works we need a new pyexasol connection. + # The system level activation of the language container didn't affect pre-existing sessions. new_connection = stack.enter_context(connection_factory(itde.db)) assert_udf_running(new_connection, TEST_LANGUAGE_ALIAS, TEST_SCHEMA) diff --git a/test/integration/test_language_container_deployer_cli.py b/test/integration/test_language_container_deployer_cli.py index 12d6bb3..fa65056 100644 --- a/test/integration/test_language_container_deployer_cli.py +++ b/test/integration/test_language_container_deployer_cli.py @@ -1,6 +1,7 @@ from typing import Optional, Callable from contextlib import ExitStack +import pytest from urllib.parse import urlparse from click.testing import CliRunner from pyexasol import ExaConnection, ExaConnectionFailedError @@ -76,6 +77,9 @@ def test_language_container_deployer_cli_with_container_file( assert result.exit_code == 0 assert result.exception is None assert result.stdout == "" + # In order to check that the uploaded container works we need a new pyexasol connection. + # The deployer should have activated the language container at the system level but that would + # not affect pre-existing sessions. 
new_connection = stack.enter_context(connection_factory(itde.db)) assert_udf_running(new_connection, TEST_LANGUAGE_ALIAS, TEST_SCHEMA) diff --git a/test/integration/test_language_container_deployer_saas.py b/test/integration/test_language_container_deployer_saas.py new file mode 100644 index 0000000..d5345d7 --- /dev/null +++ b/test/integration/test_language_container_deployer_saas.py @@ -0,0 +1,67 @@ +from __future__ import annotations +from typing import Any +from contextlib import ExitStack +from pathlib import Path +import time + +import pytest +import pyexasol +import exasol.bucketfs as bfs + +from exasol.python_extension_common.deployment.language_container_deployer import ( + LanguageContainerDeployer) + +from test.utils.revert_language_settings import revert_language_settings +from test.utils.db_utils import (create_schema, assert_udf_running) + +TEST_SCHEMA = "PEC_DEPLOYER_TESTS" +TEST_LANGUAGE_ALIAS = "PYTHON3_PEC_TESTS" + + +def create_container_deployer(language_alias: str, + pyexasol_connection: pyexasol.ExaConnection, + url: str, + account_id: str, + database_id: str, + token: str) -> LanguageContainerDeployer: + + bucketfs_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=url, + account_id=account_id, + database_id=database_id, + pat=token, + path="container") + return LanguageContainerDeployer( + pyexasol_connection, language_alias, bucketfs_path) + + +@pytest.mark.saas +def test_language_container_deployer( + saas_host: str, + saas_token: str, + saas_account_id: str, + operational_saas_database_id: str, + saas_connection_params: dict[str, Any], + container_path: str): + """ + Tests the deployment of a container in one call, including the activation at the System level. + """ + with ExitStack() as stack: + pyexasol_connection = stack.enter_context(pyexasol.connect(**saas_connection_params, compression=True)) + stack.enter_context(revert_language_settings(pyexasol_connection)) + create_schema(pyexasol_connection, TEST_SCHEMA) + deployer = create_container_deployer(language_alias=TEST_LANGUAGE_ALIAS, + pyexasol_connection=pyexasol_connection, + url=saas_host, + account_id=saas_account_id, + database_id=operational_saas_database_id, + token=saas_token) + deployer.run(container_file=Path(container_path), alter_system=True, allow_override=True) + + # Need to give the SaaS BucketFS some time to digest the language container. + # The required time is somewhere between 20 seconds and 5 minutes. + time.sleep(300.) + # In order to check that the uploaded container works we need a new pyexasol connection. + # The system level activation of the language container didn't affect pre-existing sessions. 
+ new_connection = stack.enter_context(pyexasol.connect(**saas_connection_params, compression=True)) + assert_udf_running(new_connection, TEST_LANGUAGE_ALIAS, TEST_SCHEMA) diff --git a/test/integration/test_language_container_deployer_saas_cli.py b/test/integration/test_language_container_deployer_saas_cli.py new file mode 100644 index 0000000..0adbb0c --- /dev/null +++ b/test/integration/test_language_container_deployer_saas_cli.py @@ -0,0 +1,103 @@ +from __future__ import annotations +from typing import Optional, Any +from contextlib import ExitStack +import time +import os + +import pytest +from click.testing import CliRunner + +import pyexasol +from exasol.python_extension_common.deployment.language_container_deployer_cli import ( + SAAS_ACCOUNT_ID_ENVIRONMENT_VARIABLE, + SAAS_DATABASE_ID_ENVIRONMENT_VARIABLE, + SAAS_TOKEN_ENVIRONMENT_VARIABLE, +) + +from test.utils.revert_language_settings import revert_language_settings +from test.utils.db_utils import (create_schema, assert_udf_running) + + +TEST_SCHEMA = "PEC_DEPLOYER_TESTS_CLI" +TEST_LANGUAGE_ALIAS = "PYTHON3_PEC_TESTS_CLI" + + +def call_language_definition_deployer_cli(func, + language_alias: str, + url: str, + account_id: str, + database_id: str, + token: str, + connection_params: dict[str, Any], + container_path: Optional[str] = None, + version: Optional[str] = None, + use_ssl_cert_validation: bool = False): + + os.environ[SAAS_ACCOUNT_ID_ENVIRONMENT_VARIABLE] = account_id + os.environ[SAAS_DATABASE_ID_ENVIRONMENT_VARIABLE] = database_id + os.environ[SAAS_TOKEN_ENVIRONMENT_VARIABLE] = token + + args_list = [ + "language-container", + "--saas-url", url, + "--path-in-bucket", "container", + "--dsn", connection_params['dsn'], + "--db-user", connection_params['user'], + "--db-pass", connection_params['password'], + "--language-alias", language_alias + ] + if use_ssl_cert_validation: + args_list += [ + "--use-ssl-cert-validation" + ] + else: + args_list += [ + "--no-use-ssl-cert-validation" + ] + if version: + args_list += [ + "--version", version, + ] + if container_path: + args_list += [ + "--container-file", container_path, + ] + runner = CliRunner() + result = runner.invoke(func, args_list) + return result + + +@pytest.mark.saas +def test_language_container_deployer_cli_with_container_file( + saas_host: str, + saas_token: str, + saas_account_id: str, + operational_saas_database_id: str, + saas_connection_params: dict[str, Any], + container_path: str, + main_func +): + with ExitStack() as stack: + pyexasol_connection = stack.enter_context(pyexasol.connect(**saas_connection_params, compression=True)) + stack.enter_context(revert_language_settings(pyexasol_connection)) + create_schema(pyexasol_connection, TEST_SCHEMA) + result = call_language_definition_deployer_cli(main_func, + container_path=container_path, + language_alias=TEST_LANGUAGE_ALIAS, + url=saas_host, + account_id=saas_account_id, + database_id=operational_saas_database_id, + token=saas_token, + connection_params=saas_connection_params) + assert result.exit_code == 0 + assert result.exception is None + assert result.stdout == "" + + # Need to give the SaaS BucketFS some time to digest the language container. + # The required time is somewhere between 20 seconds and 5 minutes. + time.sleep(300.) + # In order to check that the uploaded container works we need a new pyexasol connection. + # The deployer should have activated the language container at the system level but that would + # not affect pre-existing sessions. 
+ new_connection = stack.enter_context(pyexasol.connect(**saas_connection_params, compression=True)) + assert_udf_running(new_connection, TEST_LANGUAGE_ALIAS, TEST_SCHEMA) diff --git a/test/integration/test_placeholder.py b/test/integration/test_placeholder.py deleted file mode 100644 index 201975f..0000000 --- a/test/integration/test_placeholder.py +++ /dev/null @@ -1,2 +0,0 @@ -def test_placeholder(): - pass diff --git a/test/unit/deployment/test_language_container_deployer.py b/test/unit/deployment/test_language_container_deployer.py index bb2a135..099dd69 100644 --- a/test/unit/deployment/test_language_container_deployer.py +++ b/test/unit/deployment/test_language_container_deployer.py @@ -2,7 +2,7 @@ from unittest.mock import create_autospec, MagicMock, patch import pytest -from exasol_bucketfs_utils_python.bucketfs_location import BucketFSLocation +import exasol.bucketfs as bfs from pyexasol import ExaConnection from exasol.python_extension_common.deployment.language_container_deployer import ( @@ -11,7 +11,7 @@ @pytest.fixture(scope='module') def container_file_name() -> str: - return 'container_xyz.tag.gz' + return 'container_xyz.tar.gz' @pytest.fixture(scope='module') @@ -34,18 +34,11 @@ def mock_pyexasol_conn() -> ExaConnection: return create_autospec(ExaConnection) -@pytest.fixture(scope='module') -def mock_bfs_location(container_bfs_path) -> BucketFSLocation: - mock_loc = create_autospec(BucketFSLocation) - mock_loc.generate_bucket_udf_path.return_value = PurePosixPath(f'/buckets/{container_bfs_path}') - return mock_loc - - @pytest.fixture -def container_deployer(mock_pyexasol_conn, mock_bfs_location, language_alias) -> LanguageContainerDeployer: +def container_deployer(mock_pyexasol_conn, language_alias) -> LanguageContainerDeployer: deployer = LanguageContainerDeployer(pyexasol_connection=mock_pyexasol_conn, language_alias=language_alias, - bucketfs_location=mock_bfs_location) + bucketfs_path=create_autospec(bfs.path.PathLike)) deployer.upload_container = MagicMock() deployer.activate_container = MagicMock() @@ -53,30 +46,39 @@ def container_deployer(mock_pyexasol_conn, mock_bfs_location, language_alias) -> def test_slc_deployer_deploy(container_deployer, container_file_name, container_file_path): - container_deployer.run(container_file=container_file_path, bucket_file_path=container_file_name, alter_system=True, + container_deployer.run(container_file=container_file_path, + bucket_file_path=container_file_name, + alter_system=True, allow_override=True) - container_deployer.upload_container.assert_called_once_with(container_file_path, container_file_name) - container_deployer.activate_container.assert_called_once_with(container_file_name, LanguageActivationLevel.System, + container_deployer.upload_container.assert_called_once_with(container_file_path, + container_file_name) + container_deployer.activate_container.assert_called_once_with(container_file_name, + LanguageActivationLevel.System, True) def test_slc_deployer_upload(container_deployer, container_file_name, container_file_path): container_deployer.run(container_file=container_file_path, alter_system=False) - container_deployer.upload_container.assert_called_once_with(container_file_path, container_file_name) + container_deployer.upload_container.assert_called_once_with(container_file_path, + container_file_name) container_deployer.activate_container.assert_not_called() def test_slc_deployer_activate(container_deployer, container_file_name): container_deployer.run(bucket_file_path=container_file_name, 
alter_system=True, allow_override=True) container_deployer.upload_container.assert_not_called() - container_deployer.activate_container.assert_called_once_with(container_file_name, LanguageActivationLevel.System, + container_deployer.activate_container.assert_called_once_with(container_file_name, + LanguageActivationLevel.System, True) +@patch('exasol.python_extension_common.deployment.language_container_deployer.get_udf_path') @patch('exasol.python_extension_common.deployment.language_container_deployer.get_language_settings') -def test_slc_deployer_generate_activation_command(mock_lang_settings, container_deployer, language_alias, +def test_slc_deployer_generate_activation_command(mock_lang_settings, mock_udf_path, + container_deployer, language_alias, container_file_name, container_bfs_path): mock_lang_settings.return_value = 'R=builtin_r JAVA=builtin_java PYTHON3=builtin_python3' + mock_udf_path.return_value = PurePosixPath(f'/buckets/{container_bfs_path}') alter_type = LanguageActivationLevel.Session expected_command = f"ALTER {alter_type.value.upper()} SET SCRIPT_LANGUAGES='" \ @@ -88,14 +90,17 @@ def test_slc_deployer_generate_activation_command(mock_lang_settings, container_ assert command == expected_command +@patch('exasol.python_extension_common.deployment.language_container_deployer.get_udf_path') @patch('exasol.python_extension_common.deployment.language_container_deployer.get_language_settings') -def test_slc_deployer_generate_activation_command_override(mock_lang_settings, container_deployer, language_alias, +def test_slc_deployer_generate_activation_command_override(mock_lang_settings, mock_udf_path, + container_deployer, language_alias, container_file_name, container_bfs_path): current_bfs_path = 'bfsdefault/default/container_abc' mock_lang_settings.return_value = \ 'R=builtin_r JAVA=builtin_java PYTHON3=builtin_python3 ' \ f'{language_alias}=localzmq+protobuf:///{current_bfs_path}?' \ f'lang=python#/buckets/{current_bfs_path}/exaudf/exaudfclient_py3' + mock_udf_path.return_value = PurePosixPath(f'/buckets/{container_bfs_path}') alter_type = LanguageActivationLevel.Session expected_command = f"ALTER {alter_type.value.upper()} SET SCRIPT_LANGUAGES='" \ @@ -103,26 +108,34 @@ def test_slc_deployer_generate_activation_command_override(mock_lang_settings, c f"{language_alias}=localzmq+protobuf:///{container_bfs_path}?" \ f"lang=python#/buckets/{container_bfs_path}/exaudf/exaudfclient_py3';" - command = container_deployer.generate_activation_command(container_file_name, alter_type, allow_override=True) + command = container_deployer.generate_activation_command(container_file_name, alter_type, + allow_override=True) assert command == expected_command +@patch('exasol.python_extension_common.deployment.language_container_deployer.get_udf_path') @patch('exasol.python_extension_common.deployment.language_container_deployer.get_language_settings') -def test_slc_deployer_generate_activation_command_failure(mock_lang_settings, container_deployer, language_alias, - container_file_name): +def test_slc_deployer_generate_activation_command_failure(mock_lang_settings, mock_udf_path, + container_deployer, language_alias, + container_file_name, container_bfs_path): current_bfs_path = 'bfsdefault/default/container_abc' mock_lang_settings.return_value = \ 'R=builtin_r JAVA=builtin_java PYTHON3=builtin_python3 ' \ f'{language_alias}=localzmq+protobuf:///{current_bfs_path}?' 
\ f'lang=python#/buckets/{current_bfs_path}/exaudf/exaudfclient_py3' + mock_udf_path.return_value = PurePosixPath(f'/buckets/{container_bfs_path}') with pytest.raises(RuntimeError): - container_deployer.generate_activation_command(container_file_name, LanguageActivationLevel.Session, + container_deployer.generate_activation_command(container_file_name, + LanguageActivationLevel.Session, allow_override=False) -def test_slc_deployer_get_language_definition(container_deployer, language_alias, +@patch('exasol.python_extension_common.deployment.language_container_deployer.get_udf_path') +def test_slc_deployer_get_language_definition(mock_udf_path, + container_deployer, language_alias, container_file_name, container_bfs_path): + mock_udf_path.return_value = PurePosixPath(f'/buckets/{container_bfs_path}') expected_command = f"{language_alias}=localzmq+protobuf:///{container_bfs_path}?" \ f"lang=python#/buckets/{container_bfs_path}/exaudf/exaudfclient_py3"
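
For reference, a minimal usage sketch of the refactored LanguageContainerDeployer.create() factory introduced in this patch, assuming the signature shown in the diff above; the host, credential, and ID values are placeholders, not values taken from this change, and the defaults mirror the CLI defaults and the integration tests:

    from pathlib import Path
    from exasol.python_extension_common.deployment.language_container_deployer import (
        LanguageContainerDeployer)

    # On-Prem backend: the complete on-prem parameter group must be supplied.
    deployer = LanguageContainerDeployer.create(
        language_alias="PYTHON3_EXT",
        dsn="<db-host>:8563", db_user="sys", db_password="<db-password>",
        bucketfs_host="<db-host>", bucketfs_port=2580,
        bucketfs_name="bfsdefault", bucket="default",
        bucketfs_user="w", bucketfs_password="<bucketfs-password>",
        path_in_bucket="container")

    # SaaS backend: the complete SaaS parameter group must be supplied instead.
    deployer = LanguageContainerDeployer.create(
        language_alias="PYTHON3_EXT",
        saas_url="https://cloud.exasol.com",
        saas_account_id="<account-id>",
        saas_database_id="<database-id>",
        saas_token="<personal-access-token>",
        path_in_bucket="container")

    # Upload the container and activate it at the system level, as in the integration tests.
    deployer.run(container_file=Path("<container-file>.tar.gz"),
                 alter_system=True, allow_override=True)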