diff --git a/ocs_ci/ocs/constants.py b/ocs_ci/ocs/constants.py index 38d87a0258e..705b06076d4 100644 --- a/ocs_ci/ocs/constants.py +++ b/ocs_ci/ocs/constants.py @@ -978,6 +978,7 @@ # Platforms AWS_PLATFORM = "aws" AZURE_PLATFORM = "azure" +AZURE_WITH_LOGS_PLATFORM = "azure-with-logs" GCP_PLATFORM = "gcp" VSPHERE_PLATFORM = "vsphere" BAREMETAL_PLATFORM = "baremetal" @@ -1863,7 +1864,7 @@ PRODUCTION_JOBS_PREFIX = ["jnk"] # Cloud Manager available platforms -CLOUD_MNGR_PLATFORMS = ["AWS", "GCP", "AZURE", "IBMCOS"] +CLOUD_MNGR_PLATFORMS = ["AWS", "GCP", "AZURE", "AZURE_WITH_LOGS", "IBMCOS"] # Vault related configurations VAULT_VERSION_INFO_URL = "https://github.com/hashicorp/vault/releases/latest" diff --git a/ocs_ci/ocs/resources/cloud_manager.py b/ocs_ci/ocs/resources/cloud_manager.py index 94f3a44852a..054fa53959a 100644 --- a/ocs_ci/ocs/resources/cloud_manager.py +++ b/ocs_ci/ocs/resources/cloud_manager.py @@ -40,6 +40,7 @@ def __init__(self): "AWS": S3Client, "GCP": GoogleClient, "AZURE": AzureClient, + "AZURE_WITH_LOGS": AzureWithLogsClient, "IBMCOS": S3Client, "RGW": S3Client, } @@ -565,3 +566,67 @@ def create_azure_secret(self): ).decode("ascii") return create_resource(**bs_secret_data) + + +class AzureWithLogsClient(AzureClient): + """ + Implementation of an Azure Client using the Azure API + to an existing storage account with bucket logs enabled + + """ + + def __init__( + self, account_name=None, credential=None, auth_dict=None, *args, **kwargs + ): + if auth_dict: + self.tenant_id = auth_dict.get("TENANT_ID") + self.app_id = auth_dict.get("APPLICATION_ID") + self.app_secret = auth_dict.get("APPLICATION_SECRET") + self.logs_analytics_workspace_id = auth_dict.get( + "LOGS_ANALYTICS_WORKSPACE_ID" + ) + super().__init__( + account_name=account_name, + credential=credential, + auth_dict=auth_dict, + *args, + **kwargs, + ) + + def create_azure_secret(self): + """ + Create a Kubernetes secret to allow NooBaa to create Azure-based backingstores + + Note that this 
method overrides the parent method to include the + additional fields that are needed for the bucket logs feature + + """ + bs_secret_data = templating.load_yaml(constants.MCG_BACKINGSTORE_SECRET_YAML) + bs_secret_data["metadata"]["name"] = create_unique_resource_name( + "cldmgr-azure-logs", "secret" + ) + bs_secret_data["metadata"]["namespace"] = config.ENV_DATA["cluster_namespace"] + bs_secret_data["data"]["AccountKey"] = base64.urlsafe_b64encode( + self.credential.encode("UTF-8") + ).decode("ascii") + bs_secret_data["data"]["AccountName"] = base64.urlsafe_b64encode( + self.account_name.encode("UTF-8") + ).decode("ascii") + + # Note that the following encodings are in plain base64, and not urlsafe. + # This is because the urlsafe encoding for these credentials might contain + # characters that are not accepted when creating the secret. + bs_secret_data["data"]["TenantID"] = base64.b64encode( + self.tenant_id.encode("UTF-8") + ).decode("ascii") + bs_secret_data["data"]["ApplicationID"] = base64.b64encode( + self.app_id.encode("UTF-8") + ).decode("ascii") + bs_secret_data["data"]["ApplicationSecret"] = base64.b64encode( + self.app_secret.encode("UTF-8") + ).decode("ascii") + bs_secret_data["data"]["LogsAnalyticsWorkspaceID"] = base64.b64encode( + self.logs_analytics_workspace_id.encode("UTF-8") + ).decode("ascii") + + return create_resource(**bs_secret_data) diff --git a/ocs_ci/ocs/resources/cloud_uls.py b/ocs_ci/ocs/resources/cloud_uls.py index 021a9267a1f..bc91699dd1d 100644 --- a/ocs_ci/ocs/resources/cloud_uls.py +++ b/ocs_ci/ocs/resources/cloud_uls.py @@ -25,6 +25,7 @@ def cloud_uls_factory(request, cld_mgr): "aws": set(), "gcp": set(), "azure": set(), + "azure-with-logs": set(), "ibmcos": set(), "rgw": set(), } @@ -33,6 +34,7 @@ def cloud_uls_factory(request, cld_mgr): "aws": cld_mgr.aws_client, "gcp": cld_mgr.gcp_client, "azure": cld_mgr.azure_client, + "azure-with-logs": cld_mgr.azure_with_logs_client, "ibmcos": cld_mgr.ibmcos_client, } except AttributeError 
as e: @@ -66,6 +68,7 @@ def _create_uls(uls_dict): "aws": set(), "gcp": set(), "azure": set(), + "azure-with-logs": set(), "ibmcos": set(), "rgw": set(), } diff --git a/ocs_ci/ocs/resources/mcg_replication_policy.py b/ocs_ci/ocs/resources/mcg_replication_policy.py index e7a54fb7aff..d18300a09dd 100644 --- a/ocs_ci/ocs/resources/mcg_replication_policy.py +++ b/ocs_ci/ocs/resources/mcg_replication_policy.py @@ -1,11 +1,10 @@ +from abc import ABC, abstractmethod import uuid class McgReplicationPolicy: """ - A class representing an MCG bucket replication policy. - - This class handles the parsing of the relevant parameters to a dictionary that matches the expected JSON structure. + A class to handle the MCG bucket replication policy JSON structure. """ @@ -29,9 +28,9 @@ def __str__(self) -> str: return str(self.to_dict()) -class LogBasedReplicationPolicy(McgReplicationPolicy): +class LogBasedReplicationPolicy(McgReplicationPolicy, ABC): """ - A subclass of ReplicationPolicy that includes log-based replication information. + An abstract subclass of ReplicationPolicy that includes log-based replication information. """ @@ -39,23 +38,64 @@ def __init__( self, destination_bucket, sync_deletions=False, - logs_bucket="", prefix="", - logs_location_prefix="", ): super().__init__(destination_bucket, prefix) self.sync_deletions = sync_deletions + + @abstractmethod + def to_dict(self): + dict = super().to_dict() + dict["rules"][0]["sync_deletions"] = self.sync_deletions + dict["log_replication_info"] = {} + + return dict + + +class AwsLogBasedReplicationPolicy(LogBasedReplicationPolicy): + """ + A class to handle the AWS log-based bucket replication policy JSON structure. 
+ + """ + + def __init__( + self, + destination_bucket, + sync_deletions=False, + logs_bucket="", + prefix="", + logs_location_prefix="", + ): + super().__init__(destination_bucket, sync_deletions, prefix) self.logs_bucket = logs_bucket self.logs_location_prefix = logs_location_prefix def to_dict(self): dict = super().to_dict() - dict["rules"][0]["sync_deletions"] = self.sync_deletions - dict["log_replication_info"] = { - "logs_location": { - "logs_bucket": self.logs_bucket, - "prefix": self.logs_location_prefix, - } + dict["log_replication_info"]["logs_location"] = { + "logs_bucket": self.logs_bucket, + "prefix": self.logs_location_prefix, } return dict + + +class AzureLogBasedReplicationPolicy(LogBasedReplicationPolicy): + """ + A class to handle the Azure log-based bucket replication policy JSON structure. + + """ + + def __init__( + self, + destination_bucket, + sync_deletions=False, + prefix="", + ): + super().__init__(destination_bucket, sync_deletions, prefix) + + def to_dict(self): + dict = super().to_dict() + dict["log_replication_info"]["endpoint_type"] = "AZURE" + + return dict diff --git a/ocs_ci/ocs/resources/namespacestore.py b/ocs_ci/ocs/resources/namespacestore.py index adf3f8ed287..c804f33f157 100644 --- a/ocs_ci/ocs/resources/namespacestore.py +++ b/ocs_ci/ocs/resources/namespacestore.py @@ -226,6 +226,11 @@ def cli_create_namespacestore( f"--account-name {get_attr_chain(cld_mgr, 'azure_client.account_name')} " f"--target-blob-container {uls_name}" ), + constants.AZURE_WITH_LOGS_PLATFORM: lambda: ( + f"azure-blob {nss_name} " + f"--secret-name {get_attr_chain(cld_mgr, 'azure_with_logs_client.secret.name')} " + f"--target-blob-container {uls_name}" + ), constants.RGW_PLATFORM: lambda: ( f"s3-compatible {nss_name} " f"--endpoint {get_attr_chain(cld_mgr, 'rgw_client.endpoint')} " @@ -301,6 +306,18 @@ def oc_create_namespacestore( }, }, }, + constants.AZURE_WITH_LOGS_PLATFORM: lambda: { + "type": "azure-blob", + "azureBlob": { + "targetBlobContainer": 
uls_name, + "secret": { + "name": get_attr_chain( + cld_mgr, "azure_with_logs_client.secret.name" + ), + "namespace": nss_data["metadata"]["namespace"], + }, + }, + }, constants.RGW_PLATFORM: lambda: { "type": "s3-compatible", "s3Compatible": { diff --git a/tests/libtest/test_azure_with_logs_infra.py b/tests/libtest/test_azure_with_logs_infra.py new file mode 100644 index 00000000000..7d3f6c87590 --- /dev/null +++ b/tests/libtest/test_azure_with_logs_infra.py @@ -0,0 +1,26 @@ +from ocs_ci.ocs import constants +from ocs_ci.framework.pytest_customization.marks import libtest +from ocs_ci.ocs.resources.mcg_replication_policy import AzureLogBasedReplicationPolicy +from ocs_ci.ocs.bucket_utils import bucket_read_api + + +@libtest +def test_azure_logs_based_repli_setup(bucket_factory, mcg_obj_session): + target_bucket = bucket_factory()[0].name + bucketclass_dict = { + "interface": "OC", + "namespace_policy_dict": { + "type": "Single", + "namespacestore_dict": {constants.AZURE_WITH_LOGS_PLATFORM: [(1, None)]}, + }, + } + replication_policy = AzureLogBasedReplicationPolicy( + destination_bucket=target_bucket, + sync_deletions=True, + ) + source_bucket = bucket_factory( + bucketclass=bucketclass_dict, replication_policy=replication_policy + )[0].name + + response = bucket_read_api(mcg_obj_session, source_bucket) + assert "replication_policy_id" in response diff --git a/tests/manage/mcg/test_log_based_bucket_replication.py b/tests/manage/mcg/test_log_based_bucket_replication.py index 285376c83b7..91ddf8ab240 100644 --- a/tests/manage/mcg/test_log_based_bucket_replication.py +++ b/tests/manage/mcg/test_log_based_bucket_replication.py @@ -22,7 +22,7 @@ tier4b, ) from ocs_ci.ocs.resources.pod import get_noobaa_pods, get_pod_node -from ocs_ci.ocs.resources.mcg_replication_policy import LogBasedReplicationPolicy +from ocs_ci.ocs.resources.mcg_replication_policy import AwsLogBasedReplicationPolicy from ocs_ci.ocs.scale_noobaa_lib import noobaa_running_node_restart logger = 
logging.getLogger(__name__) @@ -98,7 +98,7 @@ def log_based_replication_setup( platform=constants.AWS_PLATFORM, region=self.DEFAULT_AWS_REGION, ) - replication_policy = LogBasedReplicationPolicy( + replication_policy = AwsLogBasedReplicationPolicy( destination_bucket=target_bucket.name, sync_deletions=True, logs_bucket=mockup_logger.logs_bucket_uls_name, @@ -218,7 +218,7 @@ def test_patch_deletion_sync_to_existing_bucket( platform=constants.AWS_PLATFORM, region=self.DEFAULT_AWS_REGION, ) - replication_policy = LogBasedReplicationPolicy( + replication_policy = AwsLogBasedReplicationPolicy( destination_bucket=target_bucket.name, sync_deletions=True, logs_bucket=mockup_logger.logs_bucket_uls_name,