diff --git a/exasol/python_extension_common/cli/bucketfs_conn_object_cli.py b/exasol/python_extension_common/cli/bucketfs_conn_object_cli.py
new file mode 100644
index 0000000..60a7b34
--- /dev/null
+++ b/exasol/python_extension_common/cli/bucketfs_conn_object_cli.py
@@ -0,0 +1,10 @@
+from exasol.python_extension_common.connections.bucketfs_location import (
+    create_bucketfs_conn_object)
+
+
+class BucketfsConnObjectCli:
+    def __init__(self, conn_name_arg: str):
+        self._conn_name_arg = conn_name_arg
+
+    def __call__(self, **kwargs):
+        create_bucketfs_conn_object(conn_name=self._conn_name_arg, **kwargs)
diff --git a/exasol/python_extension_common/connections/bucketfs_location.py b/exasol/python_extension_common/connections/bucketfs_location.py
index dce06ed..bd59b07 100644
--- a/exasol/python_extension_common/connections/bucketfs_location.py
+++ b/exasol/python_extension_common/connections/bucketfs_location.py
@@ -9,20 +9,25 @@
 from exasol.python_extension_common.connections.pyexasol_connection import open_pyexasol_connection
 
 
-class DBType(Enum):
+class _Backend(Enum):
     onprem = auto()
     saas = auto()
 
 
-def _infer_db_type(bfs_params: dict[str, Any]) -> DBType:
+def _infer_backend(bfs_params: dict[str, Any]) -> _Backend:
+    """
+    Infers the backend from the provided dictionary of CLI parameters.
+    Raises a ValueError if the parameters are insufficient to access the BucketFS
+    on either of the backends.
+    """
     if check_params([StdParams.bucketfs_host, StdParams.bucketfs_port,
                      StdParams.bucket, StdParams.bucketfs_user,
                      StdParams.bucketfs_password], bfs_params):
-        return DBType.onprem
+        return _Backend.onprem
     elif check_params([StdParams.saas_url, StdParams.saas_account_id, StdParams.saas_token,
                        [StdParams.saas_database_id, StdParams.saas_database_name]],
                       bfs_params):
-        return DBType.saas
+        return _Backend.saas
 
     raise ValueError(
         'Incomplete parameter list. Please either provide the parameters ['
@@ -37,6 +42,10 @@ def _infer_db_type(bfs_params: dict[str, Any]) -> DBType:
 
 
 def _convert_onprem_bfs_params(bfs_params: dict[str, Any]) -> dict[str, Any]:
+    """
+    Converts On-Prem BucketFS parameters from the CLI format to the format expected
+    by exasol.bucketfs.path.build_path.
+    """
     net_service = ('https'
                    if bfs_params.get(StdParams.bucketfs_use_https.name, True)
                    else 'http')
@@ -56,6 +65,10 @@ def _convert_onprem_bfs_params(bfs_params: dict[str, Any]) -> dict[str, Any]:
 
 
 def _convert_saas_bfs_params(bfs_params: dict[str, Any]) -> dict[str, Any]:
+    """
+    Converts SaaS BucketFS parameters from the CLI format to the format expected
+    by exasol.bucketfs.path.build_path.
+    """
     saas_url = bfs_params[StdParams.saas_url.name]
     saas_account_id = bfs_params[StdParams.saas_account_id.name]
@@ -77,29 +90,29 @@ def _convert_saas_bfs_params(bfs_params: dict[str, Any]) -> dict[str, Any]:
     }
 
 
-def _to_json_str(bucketfs_params: dict[str, Any], selected: list[str]) -> str:
-    filtered_kwargs = {k: v for k, v in bucketfs_params.items()
-                       if (k in selected) and (v is not None)}
-    return json.dumps(filtered_kwargs)
-
-
 def create_bucketfs_location(**kwargs) -> bfs.path.PathLike:
     """
     Creates a BucketFS PathLike object using the data provided in the kwargs. These
-    can be parameters for the BucketFS either On-Prem or SaaS database. The parameters
-    should correspond to the CLI options defined in the cli/std_options.py.
+    can be the BucketFS parameters of either an On-Prem or a SaaS database. The input
+    parameters should correspond to the CLI options defined in cli/std_options.py.
 
     Raises a ValueError if the provided parameters are insufficient for either
     On-Prem or SaaS cases.
     """
-    db_type = _infer_db_type(kwargs)
-    if db_type == DBType.onprem:
+    db_type = _infer_backend(kwargs)
+    if db_type == _Backend.onprem:
         return bfs.path.build_path(**_convert_onprem_bfs_params(kwargs))
     else:
         return bfs.path.build_path(**_convert_saas_bfs_params(kwargs))
 
 
+def _to_json_str(bucketfs_params: dict[str, Any], selected: list[str]) -> str:
+    filtered_kwargs = {k: v for k, v in bucketfs_params.items()
+                       if (k in selected) and (v is not None)}
+    return json.dumps(filtered_kwargs)
+
+
 def _write_bucketfs_conn_object(pyexasol_connection: pyexasol.ExaConnection,
                                 conn_name: str,
                                 conn_to: str,
@@ -116,6 +129,18 @@ def _write_bucketfs_conn_object(pyexasol_connection: pyexasol.ExaConnection,
 def create_bucketfs_conn_object_onprem(pyexasol_connection: pyexasol.ExaConnection,
                                        conn_name: str,
                                        bucketfs_params: dict[str, Any]) -> None:
+    """
+    Creates a connection object in the database, encapsulating the BucketFS parameters
+    for an On-Prem backend.
+
+    Parameters:
+        pyexasol_connection:
+            DB connection.
+        conn_name:
+            Name for the connection object.
+        bucketfs_params:
+            On-Prem BucketFS parameters in the format expected by exasol.bucketfs.path.build_path.
+    """
     conn_to = _to_json_str(bucketfs_params, [
         'backend', 'url', 'service_name', 'bucket_name', 'path', 'verify'])
     conn_user = _to_json_str(bucketfs_params, ['username'])
@@ -128,6 +153,18 @@ def create_bucketfs_conn_object_onprem(pyexasol_connection: pyexasol.ExaConnecti
 def create_bucketfs_conn_object_saas(pyexasol_connection: pyexasol.ExaConnection,
                                      conn_name: str,
                                      bucketfs_params: dict[str, Any]) -> None:
+    """
+    Creates a connection object in the database, encapsulating the BucketFS parameters
+    for a SaaS backend.
+
+    Parameters:
+        pyexasol_connection:
+            DB connection.
+        conn_name:
+            Name for the connection object.
+        bucketfs_params:
+            SaaS BucketFS parameters in the format expected by exasol.bucketfs.path.build_path.
+    """
     conn_to = _to_json_str(bucketfs_params, ['backend', 'url', 'path'])
     conn_user = _to_json_str(bucketfs_params, ['account_id', 'database_id'])
     conn_password = _to_json_str(bucketfs_params, ['pat'])
@@ -138,10 +175,16 @@ def create_bucketfs_conn_object_saas(pyexasol_connection: pyexasol.ExaConnection
 
 def create_bucketfs_conn_object(conn_name: str, **kwargs) -> None:
     """
+    Creates a connection object in the database, encapsulating the provided BucketFS
+    parameters. These can be the parameters of either an On-Prem or a SaaS database.
+    They should correspond to the CLI options defined in cli/std_options.py.
+
+    Raises a ValueError if the provided parameters are insufficient for either
+    On-Prem or SaaS cases.
     """
     with open_pyexasol_connection(**kwargs) as pyexasol_connection:
-        db_type = _infer_db_type(kwargs)
-        if db_type == DBType.onprem:
+        db_type = _infer_backend(kwargs)
+        if db_type == _Backend.onprem:
             create_bucketfs_conn_object_onprem(pyexasol_connection, conn_name,
                                                _convert_onprem_bfs_params(kwargs))
         else:
@@ -151,7 +194,8 @@ def create_bucketfs_conn_object(conn_name: str, **kwargs) -> None:
     """
-    Create BucketFS PathLike object using data contained in the provided connection object.
+    Creates a BucketFS PathLike object using data contained in the provided connection
+    object.
""" bfs_params = json.loads(conn_obj.address) diff --git a/test/integration/connections/test_bucketfs_location.py b/test/integration/connections/test_bucketfs_location.py index 8f1b284..302a58f 100644 --- a/test/integration/connections/test_bucketfs_location.py +++ b/test/integration/connections/test_bucketfs_location.py @@ -1,28 +1,21 @@ +from typing import Any from urllib.parse import urlparse import pytest import exasol.bucketfs as bfs from exasol.python_extension_common.cli.std_options import StdParams -from exasol.python_extension_common.connections.bucketfs_location import create_bucketfs_location +from exasol.python_extension_common.connections.bucketfs_location import ( + create_bucketfs_location, + create_bucketfs_conn_object, + create_bucketfs_location_from_conn_object) -def validate_bfs_path(bfs_path: bfs.path.PathLike) -> None: - file_content = b'A rose by any other name would smell as sweet.' - bfs_path.write(file_content) - data_back = b''.join(bfs_path.read()) - bfs_path.rm() - assert data_back == file_content - - -def test_create_bucketfs_location_onprem(use_onprem, - backend_aware_onprem_database, - bucketfs_config): - if not use_onprem: - pytest.skip("The test is not configured to use ITDE.") - +@pytest.fixture(scope='session') +def onprem_params(backend_aware_onprem_database, + bucketfs_config) -> dict[str, Any]: parsed_url = urlparse(bucketfs_config.url) host, port = parsed_url.netloc.split(":") - kwargs = { + return { StdParams.bucketfs_host.name: host, StdParams.bucketfs_port.name: port, StdParams.bucketfs_use_https.name: parsed_url.scheme.lower() == 'https', @@ -33,46 +26,63 @@ def test_create_bucketfs_location_onprem(use_onprem, StdParams.use_ssl_cert_validation.name: False, StdParams.path_in_bucket.name: 'test_path' } - bfs_path = create_bucketfs_location(**kwargs) - validate_bfs_path(bfs_path) -def test_create_bucketfs_location_saas_db_id(use_saas, - saas_host, - saas_pat, - saas_account_id, - backend_aware_saas_database_id): - if not use_saas: - pytest.skip("The test is not configured to use SaaS.") - - kwargs = { +@pytest.fixture(scope='session') +def saas_params_with_id(saas_host, + saas_pat, + saas_account_id, + backend_aware_saas_database_id) -> dict[str, Any]: + return { StdParams.saas_url.name: saas_host, StdParams.saas_account_id.name: saas_account_id, StdParams.saas_database_id.name: backend_aware_saas_database_id, StdParams.saas_token.name: saas_pat, StdParams.path_in_bucket.name: 'test_path' } - bfs_path = create_bucketfs_location(**kwargs) + + +@pytest.fixture(scope='session') +def saas_params_with_name(saas_params_with_id, + database_name): + saas_params = dict(saas_params_with_id) + saas_params.pop(StdParams.saas_database_id.name) + saas_params[StdParams.saas_database_name.name] = database_name + return saas_params + + +def validate_bfs_path(bfs_path: bfs.path.PathLike) -> None: + file_content = b'A rose by any other name would smell as sweet.' 
+    bfs_path.write(file_content)
+    data_back = b''.join(bfs_path.read())
+    bfs_path.rm()
+    assert data_back == file_content
+
+
+def test_create_bucketfs_location_onprem(use_onprem,
+                                         onprem_params):
+    if not use_onprem:
+        pytest.skip("The test is not configured to use ITDE.")
+
+    bfs_path = create_bucketfs_location(**onprem_params)
+    validate_bfs_path(bfs_path)
+
+
+def test_create_bucketfs_location_saas_db_id(use_saas,
+                                             saas_params_with_id):
+    if not use_saas:
+        pytest.skip("The test is not configured to use SaaS.")
+
+    bfs_path = create_bucketfs_location(**saas_params_with_id)
     validate_bfs_path(bfs_path)
 
 
 def test_create_bucketfs_location_saas_db_name(use_saas,
-                                               saas_host,
-                                               saas_pat,
-                                               saas_account_id,
-                                               backend_aware_saas_database_id,
-                                               database_name):
+                                               saas_params_with_name):
     if not use_saas:
         pytest.skip("The test is not configured to use SaaS.")
 
-    kwargs = {
-        StdParams.saas_url.name: saas_host,
-        StdParams.saas_account_id.name: saas_account_id,
-        StdParams.saas_database_name.name: database_name,
-        StdParams.saas_token.name: saas_pat,
-        StdParams.path_in_bucket.name: 'test_path'
-    }
-    bfs_path = create_bucketfs_location(**kwargs)
+    bfs_path = create_bucketfs_location(**saas_params_with_name)
     validate_bfs_path(bfs_path)
 
 
@@ -84,3 +94,14 @@ def test_create_bucketfs_location_error():
     }
     with pytest.raises(ValueError):
         create_bucketfs_location(**kwargs)
+
+
+def test_create_bucketfs_conn_object_onprem(use_onprem,
+                                            onprem_params):
+    if not use_onprem:
+        pytest.skip("The test is not configured to use ITDE.")
+
+    conn_name = 'ONPREM_TEST_BFS'
+    create_bucketfs_conn_object(conn_name=conn_name, **onprem_params)
+    # bfs_path = create_bucketfs_location_from_conn_object()
+    # validate_bfs_path( bfs_path)
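
Usage sketch (not part of the patch): the snippet below mirrors the integration tests above and shows how the new entry points are intended to be called. The concrete values (host, port, bucket, credentials, path in bucket and the connection object name) are hypothetical placeholders, and depending on the environment the DB connection options accepted by open_pyexasol_connection may additionally have to be passed to create_bucketfs_conn_object.

```python
from exasol.python_extension_common.cli.std_options import StdParams
from exasol.python_extension_common.connections.bucketfs_location import (
    create_bucketfs_location,
    create_bucketfs_conn_object)

# CLI-style keyword arguments for an On-Prem BucketFS, keyed by the StdParams
# option names. All concrete values here are placeholders for illustration.
onprem_kwargs = {
    StdParams.bucketfs_host.name: 'localhost',
    StdParams.bucketfs_port.name: 2580,
    StdParams.bucketfs_use_https.name: False,
    StdParams.bucket.name: 'default',
    StdParams.bucketfs_user.name: 'w',
    StdParams.bucketfs_password.name: 'bucketfs_password',
    StdParams.use_ssl_cert_validation.name: False,
    StdParams.path_in_bucket.name: 'my_path',
}

# Build a BucketFS PathLike object and use it like a path, as in validate_bfs_path.
bfs_path = create_bucketfs_location(**onprem_kwargs)
bfs_path.write(b'hello')
data_back = b''.join(bfs_path.read())
bfs_path.rm()

# Store the same parameters as a named connection object in the database.
# The DB connection itself is opened internally via open_pyexasol_connection.
create_bucketfs_conn_object(conn_name='MY_BFS_CONNECTION', **onprem_kwargs)
```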