From 679df6ca5352d5511f50b9916b272668a6eee74c Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:30:02 -0300 Subject: [PATCH 1/8] #411: Removed task UploadFileToBucketFS --- .../database_setup/upload_file_to_db.py | 213 ------------------ test/integration/test_bucketfs_upload.py | 174 -------------- 2 files changed, 387 deletions(-) delete mode 100644 exasol_integration_test_docker_environment/lib/test_environment/database_setup/upload_file_to_db.py delete mode 100644 test/integration/test_bucketfs_upload.py diff --git a/exasol_integration_test_docker_environment/lib/test_environment/database_setup/upload_file_to_db.py b/exasol_integration_test_docker_environment/lib/test_environment/database_setup/upload_file_to_db.py deleted file mode 100644 index 82534a14c..000000000 --- a/exasol_integration_test_docker_environment/lib/test_environment/database_setup/upload_file_to_db.py +++ /dev/null @@ -1,213 +0,0 @@ -import dataclasses -from pathlib import Path -from typing import Tuple - -import luigi -from docker.models.containers import Container -from exasol_bucketfs_utils_python import list_files, upload -from exasol_bucketfs_utils_python.bucket_config import BucketConfig -from exasol_bucketfs_utils_python.bucketfs_config import BucketFSConfig -from exasol_bucketfs_utils_python.bucketfs_connection_config import BucketFSConnectionConfig - -# TODO add timeout, because sometimes the upload stucks -from exasol_integration_test_docker_environment.abstract_method_exception import AbstractMethodException -from exasol_integration_test_docker_environment.lib.base.docker_base_task import DockerBaseTask -from exasol_integration_test_docker_environment.lib.base.json_pickle_parameter import JsonPickleParameter -from exasol_integration_test_docker_environment.lib.base.still_running_logger import StillRunningLogger, \ - StillRunningLoggerThread -from exasol_integration_test_docker_environment.lib.data.environment_info \ - import EnvironmentInfo -from exasol_integration_test_docker_environment \ - .lib.test_environment.database_setup.docker_db_log_based_bucket_sync_checker \ - import DockerDBLogBasedBucketFSSyncChecker -from exasol_integration_test_docker_environment \ - .lib.test_environment.database_setup.time_based_bucketfs_sync_waiter \ - import TimeBasedBucketFSSyncWaiter -from exasol_integration_test_docker_environment \ - .lib.base.db_os_executor import ( - DbOsExecutor, - DbOsExecFactory, - ) - - -@dataclasses.dataclass -class UploadResult: - upload_target: str - reused: bool - - -class UploadFileToBucketFS(DockerBaseTask): - environment_name = luigi.Parameter() - test_environment_info = JsonPickleParameter( - EnvironmentInfo, significant=False) # type: EnvironmentInfo - reuse_uploaded = luigi.BoolParameter(False, significant=False) - bucketfs_write_password = luigi.Parameter( - significant=False, visibility=luigi.parameter.ParameterVisibility.HIDDEN) - executor_factory=JsonPickleParameter(DbOsExecFactory, significant=False) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._database_info = self.test_environment_info.database_info - - def run_task(self): - file_to_upload = self.get_file_to_upload() - upload_target = self.get_upload_target() - pattern_to_wait_for = self.get_pattern_to_wait_for() - log_file = self.get_log_file() - sync_time_estimation = self.get_sync_time_estimation() - - with self._get_docker_client() as docker_client: - if self._database_info.container_info is not None: - 
database_container = docker_client.containers.get( - self._database_info.container_info.container_name) - else: - database_container = None - if not self.should_be_reused(upload_target): - with self.executor_factory.executor() as executor: - executor.prepare() - self.upload_and_wait( - database_container, - file_to_upload, - upload_target, - log_file, - pattern_to_wait_for, - sync_time_estimation, - db_os_executor=executor, - ) - self.return_object(UploadResult( - upload_target=upload_target, - reused=False - )) - else: - self.logger.warning("Reusing uploaded target %s instead of file %s", - upload_target, file_to_upload) - self.write_logs("Reusing") - self.return_object(UploadResult( - upload_target=upload_target, - reused=True - )) - - def upload_and_wait( - self, - database_container, - file_to_upload: str, - upload_target: str, - log_file: str, - pattern_to_wait_for: str, - sync_time_estimation: int, - db_os_executor: DbOsExecutor, - ): - still_running_logger = StillRunningLogger( - self.logger, - f"file upload of {file_to_upload} to {upload_target}", - ) - thread = StillRunningLoggerThread(still_running_logger) - thread.start() - sync_checker = self.get_sync_checker( - database_container, - sync_time_estimation, - log_file, - pattern_to_wait_for, - db_os_executor=db_os_executor, - ) - sync_checker.prepare_upload() - try: - output = self.upload_file( - file_to_upload=file_to_upload, - upload_target=upload_target, - ) - sync_checker.wait_for_bucketfs_sync() - self.write_logs(output) - finally: - thread.stop() - thread.join() - - def get_sync_checker( - self, - database_container: Container, - sync_time_estimation: int, - log_file: str, - pattern_to_wait_for: str, - db_os_executor: DbOsExecutor, - ): - if database_container is not None: - return DockerDBLogBasedBucketFSSyncChecker( - database_container=database_container, - log_file_to_check=log_file, - pattern_to_wait_for=pattern_to_wait_for, - logger=self.logger, - bucketfs_write_password=str(self.bucketfs_write_password), - executor=db_os_executor, - ) - else: - return TimeBasedBucketFSSyncWaiter(sync_time_estimation) - - def should_be_reused(self, upload_target: str): - return self.reuse_uploaded and self.exist_file_in_bucketfs(upload_target) - - @staticmethod - def split_upload_target(upload_target: str) -> Tuple[str, str, str]: - upload_parts = upload_target.split("/") - bucket_name = upload_parts[0] - path_in_bucket = "/".join(upload_parts[1:-1]) - file_in_bucket = upload_parts[-1] - return bucket_name, path_in_bucket, file_in_bucket - - def exist_file_in_bucketfs(self, upload_target: str) -> bool: - self.logger.info("Check if file %s exist in bucketfs", upload_target) - bucket_name, path_in_bucket, file_in_bucket = self.split_upload_target(upload_target) - - bucket_config = self.generate_bucket_config(bucket_name) - try: - files = list_files.list_files_in_bucketfs( - bucket_config=bucket_config, - bucket_file_path=path_in_bucket) - return file_in_bucket in files - except FileNotFoundError as ex: - return False - - def generate_bucket_config(self, bucket_name: str) -> BucketConfig: - connection_config = BucketFSConnectionConfig( - host=self._database_info.host, port=int(self._database_info.ports.bucketfs), - user="w", pwd=str(self.bucketfs_write_password), - is_https=False) - bucketfs_config = BucketFSConfig( - connection_config=connection_config, - bucketfs_name="bfsdefault") - bucket_config = BucketConfig( - bucket_name=bucket_name, - bucketfs_config=bucketfs_config) - return bucket_config - - def upload_file(self, 
file_to_upload: str, upload_target: str): - self.logger.info("upload file %s to %s", - file_to_upload, upload_target) - bucket_name, path_in_bucket, file_in_bucket = self.split_upload_target(upload_target) - bucket_config = self.generate_bucket_config(bucket_name) - upload.upload_file_to_bucketfs( - bucket_config=bucket_config, - bucket_file_path=f"{path_in_bucket}/{file_in_bucket}", - local_file_path=Path(file_to_upload) - ) - return f"File '{file_to_upload}' to '{upload_target}'" - - def write_logs(self, output): - log_file = Path(self.get_log_path(), "log") - with log_file.open("w") as file: - file.write(output) - - def get_log_file(self) -> str: - raise AbstractMethodException() - - def get_pattern_to_wait_for(self) -> str: - raise AbstractMethodException() - - def get_file_to_upload(self) -> str: - raise AbstractMethodException() - - def get_upload_target(self) -> str: - raise AbstractMethodException() - - def get_sync_time_estimation(self) -> int: - """Estimated time in seconds which the bucketfs needs to extract and sync a uploaded file""" - raise AbstractMethodException() diff --git a/test/integration/test_bucketfs_upload.py b/test/integration/test_bucketfs_upload.py deleted file mode 100644 index a4a8cf158..000000000 --- a/test/integration/test_bucketfs_upload.py +++ /dev/null @@ -1,174 +0,0 @@ -import luigi -import os -import pytest - -from typing import List, Optional - -from exasol_integration_test_docker_environment \ - .testing.api_test_environment import ApiTestEnvironment -from dataclasses import dataclass -from exasol.bucketfs import Service, Bucket, as_string -from exasol_integration_test_docker_environment \ - .lib.test_environment.database_setup.upload_file_to_db \ - import ( - UploadFileToBucketFS, - UploadResult, - ) -from exasol_integration_test_docker_environment.lib.api.common import ( - generate_root_task, - run_task, -) -from exasol_integration_test_docker_environment \ - .lib.data.environment_info import EnvironmentInfo -from exasol_integration_test_docker_environment \ - .lib.data.database_info import DatabaseInfo -from exasol_integration_test_docker_environment \ - .lib.test_environment.parameter \ - .docker_db_test_environment_parameter import DbOsAccess -from exasol_integration_test_docker_environment \ - .lib.base.db_os_executor import ( - DbOsExecFactory, - DockerExecFactory, - SshExecFactory, - ) -from test.integration.helpers import get_executor_factory - -BUCKET_NAME = "default" - - -def bucketfs_path(path: str, relative: bool = False) -> str: - parent = "upload_test" - suffix = f"{parent}/{path}" - if relative: - return suffix - return f"{BUCKET_NAME}/{suffix}" - - -class ArgumentError(Exception): - """Invalid arguments to BucketFsAccess.upload()""" - - -class BucketFsAccess: - class FileUploadTask(UploadFileToBucketFS): - local_path = luigi.Parameter() - target = luigi.Parameter() - - def get_log_file(self) -> str: - return "/exa/logs/cored/*bucketfsd*" - - def get_pattern_to_wait_for(self) -> str: - filename = os.path.basename(self.local_path) - return f"{filename}.*linked" - - def get_file_to_upload(self) -> str: - return str(self.local_path) - - def get_upload_target(self) -> str: - return self.target - - def get_sync_time_estimation(self) -> int: - """Estimated time in seconds which the bucketfs needs to extract and sync a uploaded file""" - return 10 - - def __init__(self, environment: ApiTestEnvironment, executor_factory: DbOsExecFactory): - self.environment = environment - self.executor_factory = executor_factory - - def _get_bucket(self) -> 
Bucket: - db_info = self.environment.environment_info.database_info - url = f"http://{db_info.host}:{db_info.ports.bucketfs}" - credentials = { BUCKET_NAME: { - "username": self.environment.bucketfs_username, - "password": self.environment.bucketfs_password - } } - bucketfs = Service(url, credentials) - return bucketfs.buckets[BUCKET_NAME] - - def list(self) -> List[str]: - return self._get_bucket().files - - def upload(self, - local_path: str, - relative: Optional[str] = None, - target: Optional[str] = None, - reuse: bool = False) -> UploadResult: - if not (relative or target): - raise ArgumentError("Either relative or target must be specified.") - if relative: - local_path = f"{local_path}/{relative}" - target = bucketfs_path(target or relative) - task_creator = lambda: generate_root_task( - task_class=self.FileUploadTask, - local_path=local_path, - target=target, - environment_name=self.environment.name, - test_environment_info=self.environment.environment_info, - bucketfs_write_password=self.environment.bucketfs_password, - reuse_uploaded=reuse, - executor_factory=self.executor_factory, - ) - result = run_task(task_creator=task_creator, log_level="INFO") - return result - - def download(self, relative: str) -> str: - path = bucketfs_path(path=relative, relative=True) - return as_string(self._get_bucket().download(path)) - - -class UploadValidator: - def __init__(self, tmp_path: str, bucketfs: BucketFsAccess, reuse: bool): - self.tmp_path = tmp_path - self.bucketfs = bucketfs - self.reuse = reuse - self.filename = None - self.actual_result = None - - def upload(self, filename: str, content: str): - with open(f"{self.tmp_path}/{filename}", "w") as f: - f.write(content) - self.filename = filename - self.actual_result = self.bucketfs.upload( - self.tmp_path, - relative=filename, - reuse=self.reuse, - ) - return self - - def validate(self, expected_content: str, expected_reuse: bool): - expected_result = UploadResult( - upload_target=bucketfs_path(self.filename), - reused=expected_reuse, - ) - assert self.actual_result == expected_result - assert bucketfs_path(self.filename, relative=True) in self.bucketfs.list() - assert expected_content == self.bucketfs.download(self.filename) - - -@pytest.mark.parametrize("db_os_access", [DbOsAccess.DOCKER_EXEC, DbOsAccess.SSH]) -def test_upload_without_reuse(api_database, tmp_path, db_os_access): - params = { "db_os_access": db_os_access.name } - with api_database(additional_parameters=params) as db: - dbinfo = db.environment_info.database_info - executor_factory = get_executor_factory(dbinfo, db_os_access) - bucketfs = BucketFsAccess(db, executor_factory) - filename = "sample-file.txt" - validator = UploadValidator(tmp_path, bucketfs, reuse=False) - validator.upload(filename, "old content") \ - .validate("old content", expected_reuse=False) - validator.upload(filename, "new content") \ - .validate("new content", expected_reuse=False) - - -@pytest.mark.parametrize("db_os_access", [DbOsAccess.DOCKER_EXEC, DbOsAccess.SSH]) -def test_upload_with_reuse(api_database, tmp_path, db_os_access): - params = { "db_os_access": db_os_access.name } - with api_database(additional_parameters=params) as db: - dbinfo = db.environment_info.database_info - executor_factory = get_executor_factory(dbinfo, db_os_access) - bucketfs = BucketFsAccess(db, executor_factory) - filename = "sample-file.txt" - validator = UploadValidator(tmp_path, bucketfs, reuse=True) - validator.upload(filename, "old content") \ - .validate("old content", expected_reuse=False) - 
validator.upload(filename, "new content") \ - .validate("old content", expected_reuse=True) From 099dca0efe398698418e078b23f6fbcbf366abbd Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 12:58:25 -0300 Subject: [PATCH 2/8] Removed exasol-bucketfs from dependencies --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 68708cbc6..33a14f9eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,6 @@ requests = ">=2.21.0" simplejson = ">=3.16.0" importlib_resources = ">=5.4.0" #Needed to copy resource files, can be removed after upgrading to Python 3.9 (see https://docs.python.org/3.11/library/importlib.resources.html#importlib.resources.files) "stopwatch.py" = ">=1.0.0" -exasol-bucketfs = ">=0.6.0,<2.0.0" fabric = "^3.0.1" portalocker = "^2.7.0" exasol-error-reporting = ">=0.4.0" From b68f2c9bc310b8897559a304c6818d55e604dddb Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:01:10 -0300 Subject: [PATCH 3/8] Updated changelog --- doc/changes/unreleased.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index 6b7263c65..01bae738c 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -9,3 +9,4 @@ Code name: * #119: Refactored `pkg_resources` usage to `importlib.resources` * #420: Added file `py.typed` to enable mypy to find project specific types * #418: Use exasol/python-toolbox +* #411: Removed usage of exasol-bucketfs From ff0b089e0a67e095e202ed50436f0685060b9123 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:02:19 -0300 Subject: [PATCH 4/8] [run all tests] From 12a1a795b6a996e6d1f2b7adf6d46baed18cdc25 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:03:51 -0300 Subject: [PATCH 5/8] Updated poetry.lock --- poetry.lock | 205 +--------------------------------------------------- 1 file changed, 1 insertion(+), 204 deletions(-) diff --git a/poetry.lock b/poetry.lock index effca324d..e0daa45c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,28 +11,6 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] -[[package]] -name = "anyio" -version = "4.6.2.post1" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] -trio = ["trio (>=0.26.1)"] - [[package]] name = "argcomplete" version = "2.1.2" @@ -62,25 +40,6 @@ files = [ 
[package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - [[package]] name = "babel" version = "2.16.0" @@ -693,25 +652,6 @@ files = [ {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] -[[package]] -name = "exasol-bucketfs" -version = "0.13.0" -description = "BucketFS utilities for the Python programming language" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "exasol_bucketfs-0.13.0-py3-none-any.whl", hash = "sha256:df661fe184c61c4b89289c4aa88ffed0f8de74f5f934ff2dc1ecb19fc892bf04"}, - {file = "exasol_bucketfs-0.13.0.tar.gz", hash = "sha256:55318de6ae18fb87670f61b2a0824204d22ff63c755c65be17c4abcc4b753137"}, -] - -[package.dependencies] -attrs = ">=23.2.0" -exasol-saas-api = ">=0.3.0" -httpx = ">=0.27.0" -joblib = ">=1.0.1" -requests = ">=2.24.0" -typeguard = ">=4.3.0" - [[package]] name = "exasol-error-reporting" version = "0.5.0" @@ -723,26 +663,6 @@ files = [ {file = "exasol_error_reporting-0.5.0.tar.gz", hash = "sha256:5051f9a0dc9dbfc2dfa808d6b88c7595fa2688aa11a2b51c0895db00fe3e41bb"}, ] -[[package]] -name = "exasol-saas-api" -version = "0.7.0" -description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" -optional = false -python-versions = "<4.0,>=3.8.0" -files = [ - {file = "exasol_saas_api-0.7.0-py3-none-any.whl", hash = "sha256:7ffe1a05aa419099bcafa3984af5f750dc2234c8b18170ccda5336b95bac7c09"}, - {file = "exasol_saas_api-0.7.0.tar.gz", hash = "sha256:8d69780cdc876dc206797fea5b2f964a06248f0a087b611ae06ac3646f84a846"}, -] - -[package.dependencies] -attrs = ">=21.3.0" -httpx = ">=0.20.0,<0.28.0" -ifaddr = ">=0.2.0,<0.3.0" -python-dateutil = ">=2.8.0,<3.0.0" -requests = ">=2.31.0,<3.0.0" -tenacity = ">=8.2.3,<9.0.0" -types-requests = ">=2.31.0.6,<3.0.0.0" - [[package]] name = "exasol-toolbox" version = "0.18.0" @@ -981,63 +901,6 @@ files = [ [package.dependencies] typing-extensions = ">=3.10.0.0" -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = 
"h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.27.2" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - [[package]] name = "humanfriendly" version = "10.0" @@ -1080,17 +943,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "ifaddr" -version = "0.2.0" -description = "Cross-platform network interface and IP address enumeration library" -optional = false -python-versions = "*" -files = [ - {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, - {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, -] - [[package]] name = "imagesize" version = "1.4.1" @@ -1217,17 +1069,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - [[package]] name = "jsonpickle" version = "4.0.0" @@ -2304,17 +2145,6 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - [[package]] name = "snowballstemmer" version = "2.2.0" @@ -2649,25 +2479,6 @@ files = [ {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, ] -[[package]] -name = "typeguard" -version = "4.4.1" -description = "Run-time type checker for Python" -optional = false 
-python-versions = ">=3.9" -files = [ - {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, - {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -typing-extensions = ">=4.10.0" - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] -test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] - [[package]] name = "typer" version = "0.13.1" @@ -2685,20 +2496,6 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" -[[package]] -name = "types-requests" -version = "2.32.0.20241016" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, - {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, -] - -[package.dependencies] -urllib3 = ">=2" - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2864,4 +2661,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "2dfe93393fead0ac050db6d9a3918897ec84234b90d77b35d3944ce238fdb7b7" +content-hash = "b504bec7004069302256e95f1c7bfcfd2588752d63f14e4b04a13710290b94e2" From 28ce82f0d47ee26c007c964744bdfdfcc7e03e76 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:03:54 -0300 Subject: [PATCH 6/8] [run all tests] From 06201705e0b0cadf87e8b5b6c35ec4b2ccb636ec Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 13:16:33 -0300 Subject: [PATCH 7/8] Added Pypi pkg joblib as dev dependency --- poetry.lock | 13 ++++++++++++- pyproject.toml | 1 + 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index e0daa45c4..fdd897b4a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1069,6 +1069,17 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "joblib" +version = "1.4.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + [[package]] name = "jsonpickle" version = "4.0.0" @@ -2661,4 +2672,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4" -content-hash = "b504bec7004069302256e95f1c7bfcfd2588752d63f14e4b04a13710290b94e2" +content-hash = "9f6e5fbdba70c995c263be2914d9e456523151ef0102066a6543c59147c3c5de" diff --git a/pyproject.toml b/pyproject.toml index 33a14f9eb..f2e3f34bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ nox = "^2022.1.7" mypy = "^1.1.1" pyexasol = "^0.25.2" exasol-toolbox = ">=0.18.0" +joblib = "^1.4.2" [tool.poetry.scripts] itde = 'exasol_integration_test_docker_environment.main:main' From 14ed35ea3036d71b4e5bdeb1f3ab96307a4576d8 Mon Sep 17 00:00:00 2001 From: Thomas Ubensee <34603111+tomuben@users.noreply.github.com> Date: Tue, 19 Nov 2024 
13:16:37 -0300 Subject: [PATCH 8/8] [run all tests]
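
For reference: the integration test deleted in PATCH 1/8 already imported the newer exasol.bucketfs API directly (Service, Bucket, as_string), while the removed luigi task UploadFileToBucketFS still went through the legacy exasol_bucketfs_utils_python helpers. Downstream users who relied on the removed task can perform the same upload, listing, and download against BucketFS with the exasol-bucketfs package on their own. The following is a minimal sketch, not part of the patch series: the URL and password are invented placeholders, and Bucket.upload is assumed from the exasol-bucketfs package (it does not appear in the deleted code); bucket name "default", path prefix "upload_test", and write user "w" are taken from the removed test and generate_bucket_config.

    from pathlib import Path

    from exasol.bucketfs import Service, as_string

    BUCKET_NAME = "default"              # bucket name used by the deleted test
    url = "http://localhost:2580"        # placeholder host and BucketFS port
    credentials = {BUCKET_NAME: {"username": "w", "password": "write"}}  # placeholder password

    # Connect to the BucketFS service and pick the target bucket,
    # mirroring BucketFsAccess._get_bucket() from the deleted test.
    service = Service(url, credentials)
    bucket = service.buckets[BUCKET_NAME]

    # Upload a local file under the same "upload_test" prefix the test used.
    local_file = Path("sample-file.txt")
    target = f"upload_test/{local_file.name}"
    bucket.upload(target, local_file.read_bytes())

    # Verify the upload via the bucket listing, then download it back as a string.
    assert target in bucket.files
    content = as_string(bucket.download(target))

Unlike the removed task, this sketch does not wait for BucketFS to extract and synchronize the uploaded file across nodes (the role of DockerDBLogBasedBucketFSSyncChecker and TimeBasedBucketFSSyncWaiter in the deleted code); callers that need that guarantee must still poll or wait themselves.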