Skip to content

Commit

Permalink
Move NFSv3 accounts to a component resource
Browse files Browse the repository at this point in the history
Diagnostic settings are created as part of the component.
  • Loading branch information
JimMadge committed Nov 28, 2024
1 parent 168ca24 commit 7887e09
Show file tree
Hide file tree
Showing 8 changed files with 191 additions and 145 deletions.
6 changes: 4 additions & 2 deletions data_safe_haven/infrastructure/components/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
MicrosoftSQLDatabaseProps,
NFSV3BlobContainerComponent,
NFSV3BlobContainerProps,
NFSV3StorageAccountComponent,
NFSV3StorageAccountProps,
PostgresqlDatabaseComponent,
PostgresqlDatabaseProps,
VMComponent,
Expand All @@ -23,7 +25,6 @@
)
from .wrapped import (
WrappedLogAnalyticsWorkspace,
WrappedNFSV3StorageAccount,
)

__all__ = [
Expand All @@ -41,11 +42,12 @@
"MicrosoftSQLDatabaseProps",
"NFSV3BlobContainerComponent",
"NFSV3BlobContainerProps",
"NFSV3StorageAccountComponent",
"NFSV3StorageAccountProps",
"PostgresqlDatabaseComponent",
"PostgresqlDatabaseProps",
"SSLCertificate",
"SSLCertificateProps",
"VMComponent",
"WrappedLogAnalyticsWorkspace",
"WrappedNFSV3StorageAccount",
]
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
MicrosoftSQLDatabaseProps,
)
from .nfsv3_blob_container import NFSV3BlobContainerComponent, NFSV3BlobContainerProps
from .nfsv3_storage_account import (
NFSV3StorageAccountComponent,
NFSV3StorageAccountProps,
)
from .postgresql_database import PostgresqlDatabaseComponent, PostgresqlDatabaseProps
from .virtual_machine import LinuxVMComponentProps, VMComponent

Expand All @@ -23,6 +27,8 @@
"MicrosoftSQLDatabaseProps",
"NFSV3BlobContainerComponent",
"NFSV3BlobContainerProps",
"NFSV3StorageAccountComponent",
"NFSV3StorageAccountProps",
"PostgresqlDatabaseComponent",
"PostgresqlDatabaseProps",
"VMComponent",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
from collections.abc import Mapping, Sequence

from pulumi import ComponentResource, Input, Output, ResourceOptions
from pulumi_azure_native import insights, storage

from data_safe_haven.external import AzureIPv4Range
from data_safe_haven.infrastructure.components.wrapped import (
WrappedLogAnalyticsWorkspace,
)
from data_safe_haven.types import AzureServiceTag


class NFSV3StorageAccountProps:
    """Bag of inputs consumed by NFSV3StorageAccountComponent.

    Args:
        account_name: Name of the storage account (Azure limit: 24 chars).
        allowed_ip_addresses: CIDR ranges permitted through the account
            firewall, or None when access is governed by a service tag.
        allowed_service_tag: Azure service tag granting network access,
            or None when explicit IP ranges are used.
        location: Azure region to deploy into.
        log_analytics_workspace: Workspace that receives diagnostic logs.
        resource_group_name: Resource group to contain the account.
        subnet_id: Subnet whose traffic is allowed via a VNet rule.
    """

    def __init__(
        self,
        account_name: Input[str],
        allowed_ip_addresses: Input[Sequence[str]] | None,
        allowed_service_tag: AzureServiceTag | None,
        location: Input[str],
        log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace],
        resource_group_name: Input[str],
        subnet_id: Input[str],
    ):
        # Identity and placement
        self.account_name = account_name
        self.location = location
        self.resource_group_name = resource_group_name
        # Network access controls
        self.allowed_ip_addresses = allowed_ip_addresses
        self.allowed_service_tag = allowed_service_tag
        self.subnet_id = subnet_id
        # Monitoring
        self.log_analytics_workspace = log_analytics_workspace


class NFSV3StorageAccountComponent(ComponentResource):
    """Storage account configured for NFSv3 blob access.

    Deploys a BlockBlobStorage account with hierarchical namespace and
    NFSv3 enabled, restricts network access to the given subnet plus any
    allowed IP ranges (or opens it when the Internet service tag is
    given), and attaches a diagnostic setting that forwards blob logs and
    transaction metrics to the supplied Log Analytics workspace.
    """

    # Microsoft-managed, account-scoped encryption for blob and file services.
    encryption_args = storage.EncryptionArgs(
        key_source=storage.KeySource.MICROSOFT_STORAGE,
        services=storage.EncryptionServicesArgs(
            blob=storage.EncryptionServiceArgs(
                enabled=True, key_type=storage.KeyType.ACCOUNT
            ),
            file=storage.EncryptionServiceArgs(
                enabled=True, key_type=storage.KeyType.ACCOUNT
            ),
        ),
    )

    def __init__(
        self,
        name: str,
        props: NFSV3StorageAccountProps,
        opts: ResourceOptions | None = None,
        tags: Input[Mapping[str, Input[str]]] | None = None,
    ):
        super().__init__("dsh:sre:NFSV3StorageAccountComponent", name, {}, opts)
        child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self))
        child_tags = {"component": "data"} | (tags if tags else {})

        if props.allowed_service_tag == AzureServiceTag.INTERNET:
            # Internet tag means open access: no per-IP rules needed.
            default_action = storage.DefaultAction.ALLOW
            ip_rules = []
        else:
            # Deny by default; allow-list each IP expanded from the CIDR ranges.
            default_action = storage.DefaultAction.DENY
            ip_rules = Output.from_input(props.allowed_ip_addresses).apply(
                lambda ip_ranges: [
                    storage.IPRuleArgs(
                        action=storage.Action.ALLOW,
                        i_p_address_or_range=str(ip_address),
                    )
                    # 'or []' guards against allowed_ip_addresses=None, which
                    # would otherwise raise TypeError in sorted()
                    for ip_range in sorted(ip_ranges or [])
                    for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips()
                ]
            )

        # Deploy storage account
        self.storage_account = storage.StorageAccount(
            self._name,
            account_name=props.account_name,
            allow_blob_public_access=False,
            enable_https_traffic_only=True,
            enable_nfs_v3=True,
            encryption=self.encryption_args,
            # Hierarchical namespace is required for NFSv3 support
            is_hns_enabled=True,
            kind=storage.Kind.BLOCK_BLOB_STORAGE,
            location=props.location,
            minimum_tls_version=storage.MinimumTlsVersion.TLS1_2,
            network_rule_set=storage.NetworkRuleSetArgs(
                bypass=storage.Bypass.AZURE_SERVICES,
                default_action=default_action,
                ip_rules=ip_rules,
                virtual_network_rules=[
                    storage.VirtualNetworkRuleArgs(
                        virtual_network_resource_id=props.subnet_id,
                    )
                ],
            ),
            public_network_access=storage.PublicNetworkAccess.ENABLED,
            resource_group_name=props.resource_group_name,
            sku=storage.SkuArgs(name=storage.SkuName.PREMIUM_ZRS),
            opts=child_opts,
            tags=child_tags,
        )

        # Add diagnostic setting for blobs
        insights.DiagnosticSetting(
            f"{self.storage_account._name}_diagnostic_setting",
            name=f"{self.storage_account._name}_diagnostic_setting",
            log_analytics_destination_type="Dedicated",
            logs=[
                {
                    "category_group": "allLogs",
                    "enabled": True,
                    "retention_policy": {
                        "days": 0,
                        "enabled": False,
                    },
                },
                {
                    "category_group": "audit",
                    "enabled": True,
                    "retention_policy": {
                        "days": 0,
                        "enabled": False,
                    },
                },
            ],
            metrics=[
                {
                    "category": "Transaction",
                    "enabled": True,
                    "retention_policy": {
                        "days": 0,
                        "enabled": False,
                    },
                }
            ],
            resource_uri=self.storage_account.id.apply(
                # This is the URI of the blobServices resource which is
                # automatically created.
                lambda resource_id: resource_id
                + "/blobServices/default"
            ),
            workspace_id=props.log_analytics_workspace.id,
        )

        self.register_outputs({})
2 changes: 0 additions & 2 deletions data_safe_haven/infrastructure/components/wrapped/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
from .log_analytics_workspace import WrappedLogAnalyticsWorkspace
from .nfsv3_storage_account import WrappedNFSV3StorageAccount

__all__ = [
"WrappedLogAnalyticsWorkspace",
"WrappedNFSV3StorageAccount",
]

This file was deleted.

1 change: 1 addition & 0 deletions data_safe_haven/infrastructure/programs/declarative_sre.py
Original file line number Diff line number Diff line change
Expand Up @@ -377,6 +377,7 @@ def __call__(self) -> None:
ldap_user_filter=ldap_user_filter,
ldap_user_search_base=ldap_user_search_base,
location=self.config.azure.location,
log_analytics_workspace=monitoring.log_analytics,
resource_group=resource_group,
software_repository_hostname=user_services.software_repositories.hostname,
subnet_desired_state=networking.subnet_desired_state,
Expand Down
70 changes: 18 additions & 52 deletions data_safe_haven/infrastructure/programs/sre/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from pulumi import ComponentResource, Input, Output, ResourceOptions
from pulumi_azure_native import (
authorization,
insights,
keyvault,
managedidentity,
network,
Expand All @@ -32,10 +31,11 @@
from data_safe_haven.infrastructure.components import (
NFSV3BlobContainerComponent,
NFSV3BlobContainerProps,
NFSV3StorageAccountComponent,
NFSV3StorageAccountProps,
SSLCertificate,
SSLCertificateProps,
WrappedLogAnalyticsWorkspace,
WrappedNFSV3StorageAccount,
)
from data_safe_haven.types import AzureDnsZoneNames, AzureServiceTag

Expand Down Expand Up @@ -471,20 +471,26 @@ def __init__(
# Deploy sensitive data blob storage account
# - This holds the /mnt/input and /mnt/output containers that are mounted by workspaces
# - Azure blobs have worse NFS support but can be accessed with Azure Storage Explorer
storage_account_data_private_sensitive = WrappedNFSV3StorageAccount(
component_data_private_sensitive = NFSV3StorageAccountComponent(
f"{self._name}_storage_account_data_private_sensitive",
# Storage account names have a maximum of 24 characters
account_name=alphanumeric(
f"{''.join(truncate_tokens(stack_name.split('-'), 11))}sensitivedata{sha256hash(self._name)}"
)[:24],
allowed_ip_addresses=data_private_sensitive_ip_addresses,
allowed_service_tag=data_private_sensitive_service_tag,
location=props.location,
subnet_id=props.subnet_data_private_id,
resource_group_name=props.resource_group_name,
NFSV3StorageAccountProps(
# Storage account names have a maximum of 24 characters
account_name=alphanumeric(
f"{''.join(truncate_tokens(stack_name.split('-'), 11))}sensitivedata{sha256hash(self._name)}"
)[:24],
allowed_ip_addresses=data_private_sensitive_ip_addresses,
allowed_service_tag=data_private_sensitive_service_tag,
location=props.location,
log_analytics_workspace=props.log_analytics_workspace,
subnet_id=props.subnet_data_private_id,
resource_group_name=props.resource_group_name,
),
opts=child_opts,
tags=child_tags,
)
storage_account_data_private_sensitive = (
component_data_private_sensitive.storage_account
)
# Deploy storage containers
NFSV3BlobContainerComponent(
f"{self._name}_blob_egress",
Expand Down Expand Up @@ -516,46 +522,6 @@ def __init__(
subscription_name=props.subscription_name,
),
)
# Add diagnostic setting for blobs
insights.DiagnosticSetting(
f"{storage_account_data_private_sensitive._name}_diagnostic_setting",
name=f"{storage_account_data_private_sensitive._name}_diagnostic_setting",
log_analytics_destination_type="Dedicated",
logs=[
{
"category_group": "allLogs",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
{
"category_group": "audit",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
},
],
metrics=[
{
"category": "Transaction",
"enabled": True,
"retention_policy": {
"days": 0,
"enabled": False,
},
}
],
resource_uri=storage_account_data_private_sensitive.id.apply(
# This is the URI of the blobServices resource which is automatically
# created.
lambda resource_id: resource_id + "/blobServices/default"
),
workspace_id=props.log_analytics_workspace.id,
)
# Set up a private endpoint for the sensitive data storage account
storage_account_data_private_sensitive_endpoint = network.PrivateEndpoint(
f"{storage_account_data_private_sensitive._name}_private_endpoint",
Expand Down
Loading

0 comments on commit 7887e09

Please sign in to comment.