diff --git a/.hatch/requirements-docs.txt b/.hatch/requirements-docs.txt index bb76c36f54..9f3cdcf9fa 100644 --- a/.hatch/requirements-docs.txt +++ b/.hatch/requirements-docs.txt @@ -1,7 +1,7 @@ # # This file is autogenerated by hatch-pip-compile with Python 3.12 # -# - emoji==2.13.2 +# - emoji==2.14.0 # - myst-parser==4.0.0 # - pydata-sphinx-theme==0.15.4 # - sphinx-togglebutton==0.3.2 @@ -28,7 +28,7 @@ docutils==0.21.2 # pydata-sphinx-theme # sphinx # sphinx-togglebutton -emoji==2.13.2 +emoji==2.14.0 # via hatch.envs.docs idna==3.10 # via requests diff --git a/.hatch/requirements-lint.txt b/.hatch/requirements-lint.txt index ee7edc0299..0a0e48f6c4 100644 --- a/.hatch/requirements-lint.txt +++ b/.hatch/requirements-lint.txt @@ -7,10 +7,10 @@ # - mypy==1.11.2 # - pandas-stubs==2.2.2.240909 # - pydantic==2.9.2 -# - ruff==0.6.8 +# - ruff==0.6.9 # - types-appdirs==1.4.3.5 # - types-chevron==0.14.2.20240310 -# - types-pytz==2024.2.0.20240913 +# - types-pytz==2024.2.0.20241003 # - types-pyyaml==6.0.12.20240917 # - types-requests==2.32.0.20240914 # @@ -135,7 +135,7 @@ mypy-extensions==1.0.0 # via # black # mypy -numpy==2.1.1 +numpy==2.1.2 # via pandas-stubs onigurumacffi==1.3.0 # via ansible-navigator @@ -222,7 +222,7 @@ referencing==0.35.1 # jsonschema-specifications resolvelib==1.0.1 # via ansible-core -rich==13.8.1 +rich==13.9.2 # via # ansible-lint # enrich @@ -235,14 +235,14 @@ ruamel-yaml==0.18.6 # via ansible-lint ruamel-yaml-clib==0.2.8 # via ruamel-yaml -ruff==0.6.8 +ruff==0.6.9 # via hatch.envs.lint subprocess-tee==0.4.2 # via # ansible-compat # ansible-dev-environment # ansible-lint -tox==4.21.0 +tox==4.21.2 # via tox-ansible tox-ansible==24.9.0 # via ansible-dev-tools @@ -250,7 +250,7 @@ types-appdirs==1.4.3.5 # via hatch.envs.lint types-chevron==0.14.2.20240310 # via hatch.envs.lint -types-pytz==2024.2.0.20240913 +types-pytz==2024.2.0.20241003 # via # hatch.envs.lint # pandas-stubs diff --git a/.hatch/requirements-test.txt b/.hatch/requirements-test.txt index c3f079ada4..a9397a2ba3 100644 --- a/.hatch/requirements-test.txt +++ b/.hatch/requirements-test.txt @@ -1,7 +1,7 @@ # # This file is autogenerated by hatch-pip-compile with Python 3.12 # -# [constraints] .hatch/requirements.txt (SHA256: 69488923545009ea59b421629909ae529ee7ea3a4a3780b3a967123c9a3c7251) +# [constraints] .hatch/requirements.txt (SHA256: f892a9714607641735b83f480e2c234b2ab8e1dffd2d59ad4188c887c06b24de) # # - appdirs==1.4.4 # - azure-core==1.31.0 @@ -23,16 +23,16 @@ # - chevron==0.14.0 # - cryptography==43.0.1 # - fqdn==1.5.1 -# - psycopg==3.2.3 -# - pulumi-azure-native==2.63.0 +# - psycopg[binary]==3.2.3 +# - pulumi-azure-native==2.64.3 # - pulumi-azuread==5.53.4 # - pulumi-random==4.16.6 -# - pulumi==3.134.1 +# - pulumi==3.135.1 # - pydantic==2.9.2 # - pyjwt[crypto]==2.9.0 # - pytz==2024.2 # - pyyaml==6.0.2 -# - rich==13.8.1 +# - rich==13.9.2 # - simple-acme-dns==3.1.0 # - typer==0.12.5 # - websocket-client==1.8.0 @@ -291,14 +291,18 @@ psycopg==3.2.3 # via # -c .hatch/requirements.txt # hatch.envs.test -pulumi==3.134.1 +psycopg-binary==3.2.3 + # via + # -c .hatch/requirements.txt + # psycopg +pulumi==3.135.1 # via # -c .hatch/requirements.txt # hatch.envs.test # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.63.0 +pulumi-azure-native==2.64.3 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -374,7 +378,7 @@ requests-oauthlib==2.0.0 # via # -c .hatch/requirements.txt # msrest -rich==13.8.1 +rich==13.9.2 # via # -c .hatch/requirements.txt # hatch.envs.test diff --git 
a/.hatch/requirements.txt b/.hatch/requirements.txt index b89c709d70..3d77a909db 100644 --- a/.hatch/requirements.txt +++ b/.hatch/requirements.txt @@ -21,16 +21,16 @@ # - chevron==0.14.0 # - cryptography==43.0.1 # - fqdn==1.5.1 -# - psycopg==3.2.3 -# - pulumi-azure-native==2.63.0 +# - psycopg[binary]==3.2.3 +# - pulumi-azure-native==2.64.3 # - pulumi-azuread==5.53.4 # - pulumi-random==4.16.6 -# - pulumi==3.134.1 +# - pulumi==3.135.1 # - pydantic==2.9.2 # - pyjwt[crypto]==2.9.0 # - pytz==2024.2 # - pyyaml==6.0.2 -# - rich==13.8.1 +# - rich==13.9.2 # - simple-acme-dns==3.1.0 # - typer==0.12.5 # - websocket-client==1.8.0 @@ -190,13 +190,15 @@ protobuf==4.25.5 # via pulumi psycopg==3.2.3 # via hatch.envs.default -pulumi==3.134.1 +psycopg-binary==3.2.3 + # via psycopg +pulumi==3.135.1 # via # hatch.envs.default # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.63.0 +pulumi-azure-native==2.64.3 # via hatch.envs.default pulumi-azuread==5.53.4 # via hatch.envs.default @@ -238,7 +240,7 @@ requests==2.32.3 # requests-oauthlib requests-oauthlib==2.0.0 # via msrest -rich==13.8.1 +rich==13.9.2 # via # hatch.envs.default # typer diff --git a/data_safe_haven/commands/sre.py b/data_safe_haven/commands/sre.py index 8f7d5c8d8c..d1f32ea278 100644 --- a/data_safe_haven/commands/sre.py +++ b/data_safe_haven/commands/sre.py @@ -6,7 +6,7 @@ from data_safe_haven.config import ContextManager, DSHPulumiConfig, SHMConfig, SREConfig from data_safe_haven.exceptions import DataSafeHavenConfigError, DataSafeHavenError -from data_safe_haven.external import GraphApi +from data_safe_haven.external import AzureSdk, GraphApi from data_safe_haven.functions import current_ip_address, ip_address_in_list from data_safe_haven.infrastructure import SREProjectManager from data_safe_haven.logging import get_logger @@ -77,12 +77,23 @@ def deploy( sre_config.azure.subscription_id, replace=False, ) - logger.info( - f"SRE will be deployed to subscription '[green]{sre_config.azure.subscription_id}[/]'" - ) stack.add_option( "azure-native:tenantId", sre_config.azure.tenant_id, replace=False ) + # Get SRE subscription name + azure_sdk = AzureSdk(subscription_name=context.subscription_name) + sre_subscription_name = azure_sdk.get_subscription_name( + sre_config.azure.subscription_id + ) + stack.add_option( + "sre-subscription-name", + sre_subscription_name, + replace=True, + ) + logger.info( + f"SRE will be deployed to subscription '[green]{sre_subscription_name}[/]'" + f" ('[bold]{sre_config.azure.subscription_id}[/]')" + ) # Set Entra options application = graph_api.get_application_by_name(context.entra_application_name) if not application: @@ -114,7 +125,18 @@ def deploy( shm_config.shm.fqdn, replace=True, ) - logger.info(f"SRE will be registered in shm '[green]{shm_config.shm.fqdn}[/]'") + stack.add_option( + "shm-location", + shm_config.azure.location, + replace=True, + ) + stack.add_option( + "shm-subscription-id", + shm_config.azure.subscription_id, + replace=True, + ) + logger.info(f"SRE will be registered in SHM '[green]{shm_config.shm.fqdn}[/]'") + logger.info(f"SHM subscription '[green]{shm_config.azure.subscription_id}[/]'") # Deploy Azure infrastructure with Pulumi try: @@ -129,7 +151,7 @@ def deploy( location=sre_config.azure.location, sre_name=sre_config.name, sre_stack=stack, - subscription_name=context.subscription_name, + subscription_name=sre_subscription_name, timezone=sre_config.sre.timezone, ) manager.run() diff --git a/data_safe_haven/commands/users.py b/data_safe_haven/commands/users.py 
index e250bc9fd5..fe413fa781 100644 --- a/data_safe_haven/commands/users.py +++ b/data_safe_haven/commands/users.py @@ -83,6 +83,10 @@ def list_users( # Load Pulumi config pulumi_config = DSHPulumiConfig.from_remote(context) + if sre not in pulumi_config.project_names: + msg = f"Could not load Pulumi settings for '{sre}'. Is the SRE deployed?" + logger.error(msg) + raise typer.Exit(1) # List users from all sources users = UserHandler(context, graph_api) users.list(sre, pulumi_config) diff --git a/data_safe_haven/external/api/azure_sdk.py b/data_safe_haven/external/api/azure_sdk.py index 6b64fcb6dc..1792988348 100644 --- a/data_safe_haven/external/api/azure_sdk.py +++ b/data_safe_haven/external/api/azure_sdk.py @@ -436,7 +436,11 @@ def ensure_keyvault( ) return key_vaults[0] except AzureError as exc: - msg = f"Failed to create key vault {key_vault_name}. Check if a key vault with the same name already exists in a deleted state." + msg = f"Failed to create key vault {key_vault_name}." + if "MissingSubscriptionRegistration" in exc.message: + msg += " Subscription is not registered to use the key vault resource provider. See https://learn.microsoft.com/en-us/azure/azure-resource-manager/troubleshooting/error-register-resource-provider" + else: + msg += " Check if a key vault with the same name already exists in a deleted state." raise DataSafeHavenAzureError(msg) from exc def ensure_keyvault_key( @@ -774,6 +778,21 @@ def get_subscription(self, subscription_name: str) -> Subscription: msg = f"Could not find subscription '{subscription_name}'" raise DataSafeHavenValueError(msg) + def get_subscription_name(self, subscription_id: str) -> str: + """Get an Azure subscription name by id.""" + try: + subscription_client = SubscriptionClient(self.credential()) + subscription = subscription_client.subscriptions.get(subscription_id) + except ClientAuthenticationError as exc: + msg = "Failed to authenticate with Azure API." + raise DataSafeHavenAzureAPIAuthenticationError(msg) from exc + except AzureError as exc: + msg = f"Failed to get name of subscription {subscription_id}." 
+ raise DataSafeHavenAzureError(msg) from exc + + subscription_name: str = subscription.display_name + return subscription_name + def import_keyvault_certificate( self, certificate_name: str, diff --git a/data_safe_haven/infrastructure/programs/declarative_sre.py b/data_safe_haven/infrastructure/programs/declarative_sre.py index 0aa7215767..ce678dbb4a 100644 --- a/data_safe_haven/infrastructure/programs/declarative_sre.py +++ b/data_safe_haven/infrastructure/programs/declarative_sre.py @@ -54,6 +54,9 @@ def __call__(self) -> None: shm_admin_group_id = self.pulumi_opts.require("shm-admin-group-id") shm_entra_tenant_id = self.pulumi_opts.require("shm-entra-tenant-id") shm_fqdn = self.pulumi_opts.require("shm-fqdn") + shm_location = self.pulumi_opts.require("shm-location") + shm_subscription_id = self.pulumi_opts.require("shm-subscription-id") + sre_subscription_name = self.pulumi_opts.require("sre-subscription-name") # Construct DockerHubCredentials dockerhub_credentials = DockerHubCredentials( @@ -149,7 +152,9 @@ def __call__(self) -> None: location=self.config.azure.location, resource_group_name=resource_group.name, shm_fqdn=shm_fqdn, + shm_location=shm_location, shm_resource_group_name=self.context.resource_group_name, + shm_subscription_id=shm_subscription_id, shm_zone_name=shm_fqdn, sre_name=self.config.name, user_public_ip_ranges=self.config.sre.research_user_ip_addresses, @@ -197,7 +202,7 @@ def __call__(self) -> None: subnet_data_configuration=networking.subnet_data_configuration, subnet_data_private=networking.subnet_data_private, subscription_id=self.config.azure.subscription_id, - subscription_name=self.context.subscription_name, + subscription_name=sre_subscription_name, tenant_id=self.config.azure.tenant_id, ), tags=self.tags, @@ -369,7 +374,7 @@ def __call__(self) -> None: resource_group=resource_group, software_repository_hostname=user_services.software_repositories.hostname, subnet_desired_state=networking.subnet_desired_state, - subscription_name=self.context.subscription_name, + subscription_name=sre_subscription_name, ), ) @@ -390,7 +395,7 @@ def __call__(self) -> None: storage_account_data_private_user_name=data.storage_account_data_private_user_name, storage_account_data_private_sensitive_name=data.storage_account_data_private_sensitive_name, subnet_workspaces=networking.subnet_workspaces, - subscription_name=self.context.subscription_name, + subscription_name=sre_subscription_name, virtual_network=networking.virtual_network, vm_details=list(enumerate(self.config.sre.workspace_skus)), ), diff --git a/data_safe_haven/infrastructure/programs/sre/networking.py b/data_safe_haven/infrastructure/programs/sre/networking.py index c11d89bc17..42e1345c2d 100644 --- a/data_safe_haven/infrastructure/programs/sre/networking.py +++ b/data_safe_haven/infrastructure/programs/sre/networking.py @@ -2,8 +2,8 @@ from collections.abc import Mapping -from pulumi import ComponentResource, Input, Output, ResourceOptions -from pulumi_azure_native import network +from pulumi import ComponentResource, Input, InvokeOptions, Output, ResourceOptions +from pulumi_azure_native import network, provider from data_safe_haven.functions import alphanumeric, replace_separators from data_safe_haven.infrastructure.common import ( @@ -26,7 +26,9 @@ def __init__( location: Input[str], resource_group_name: Input[str], shm_fqdn: Input[str], + shm_location: Input[str], shm_resource_group_name: Input[str], + shm_subscription_id: Input[str], shm_zone_name: Input[str], sre_name: Input[str], user_public_ip_ranges: 
Input[list[str]], @@ -43,7 +45,9 @@ def __init__( self.location = location self.resource_group_name = resource_group_name self.shm_fqdn = shm_fqdn + self.shm_location = shm_location self.shm_resource_group_name = shm_resource_group_name + self.shm_subscription_id = shm_subscription_id self.shm_zone_name = shm_zone_name self.sre_name = sre_name self.user_public_ip_ranges = user_public_ip_ranges @@ -1834,6 +1838,13 @@ def __init__( ) # Define SRE DNS zone + shm_provider = provider.Provider( + "shm_provider", + provider.ProviderArgs( + location=props.shm_location, + subscription_id=props.shm_subscription_id, + ), + ) shm_dns_zone = Output.all( resource_group_name=props.shm_resource_group_name, zone_name=props.shm_zone_name, @@ -1841,6 +1852,9 @@ def __init__( lambda kwargs: network.get_zone( resource_group_name=kwargs["resource_group_name"], zone_name=kwargs["zone_name"], + opts=InvokeOptions( + provider=shm_provider, + ), ) ) sre_subdomain = Output.from_input(props.sre_name).apply( @@ -1867,7 +1881,11 @@ def __init__( ttl=3600, zone_name=shm_dns_zone.name, opts=ResourceOptions.merge( - child_opts, ResourceOptions(parent=sre_dns_zone) + child_opts, + ResourceOptions( + parent=sre_dns_zone, + provider=shm_provider, + ), ), ) network.RecordSet( diff --git a/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/run_all_tests.bats b/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/run_all_tests.bats index bc73d824f7..5ce9692c67 100644 --- a/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/run_all_tests.bats +++ b/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/run_all_tests.bats @@ -42,20 +42,24 @@ check_db_credentials() { # Mounted drives # -------------- -@test "Mounted drives (/data)" { - run bash test_mounted_drives.sh -d data +@test "Mounted drives (/mnt/input)" { + run bash test_mounted_drives.sh -d mnt/input [ "$status" -eq 0 ] } @test "Mounted drives (/home)" { run bash test_mounted_drives.sh -d home [ "$status" -eq 0 ] } -@test "Mounted drives (/output)" { - run bash test_mounted_drives.sh -d output +@test "Mounted drives (/mnt/output)" { + run bash test_mounted_drives.sh -d mnt/output [ "$status" -eq 0 ] } -@test "Mounted drives (/shared)" { - run bash test_mounted_drives.sh -d shared +@test "Mounted drives (/mnt/shared)" { + run bash test_mounted_drives.sh -d mnt/shared + [ "$status" -eq 0 ] +} +@test "Mounted drives (/var/local/ansible)" { + run bash test_mounted_drives.sh -d var/local/ansible [ "$status" -eq 0 ] } diff --git a/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/test_mounted_drives.sh b/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/test_mounted_drives.sh index a1812934b9..c74a7b4b48 100644 --- a/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/test_mounted_drives.sh +++ b/data_safe_haven/resources/workspace/ansible/files/usr/local/smoke_tests/test_mounted_drives.sh @@ -26,7 +26,7 @@ CAN_DELETE="$([[ "$(touch "${directory_path}/${testfile}" 2>&1 1>/dev/null && rm # Check that permissions are as expected for each directory case "$directory" in - data) + mnt/input) if [ "$CAN_CREATE" = 1 ]; then echo "Able to create files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_WRITE" = 1 ]; then echo "Able to write files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_DELETE" = 1 ]; then echo "Able to delete files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi @@ -38,18 +38,24 @@ case 
"$directory" in if [ "$CAN_DELETE" = 0 ]; then echo "Unable to delete files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi ;; - output) + mnt/output) if [ "$CAN_CREATE" = 0 ]; then echo "Unable to create files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_WRITE" = 0 ]; then echo "Unable to write files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_DELETE" = 0 ]; then echo "Unable to delete files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi ;; - shared) + mnt/shared) if [ "$CAN_CREATE" = 0 ]; then echo "Unable to create files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_WRITE" = 0 ]; then echo "Unable to write files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi if [ "$CAN_DELETE" = 0 ]; then echo "Unable to delete files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi ;; + var/local/ansible) + if [ "$CAN_CREATE" = 1 ]; then echo "Able to create files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi + if [ "$CAN_WRITE" = 1 ]; then echo "Able to write files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi + if [ "$CAN_DELETE" = 1 ]; then echo "Able to delete files in ${directory_path}!"; nfailed=$((nfailed + 1)); fi + ;; + *) echo "Usage: $0 -d [directory]" exit 1 diff --git a/data_safe_haven/resources/workspace/ansible/host_vars/localhost.yaml b/data_safe_haven/resources/workspace/ansible/host_vars/localhost.yaml index f6900fcd7c..55ee5e6340 100644 --- a/data_safe_haven/resources/workspace/ansible/host_vars/localhost.yaml +++ b/data_safe_haven/resources/workspace/ansible/host_vars/localhost.yaml @@ -145,7 +145,6 @@ deb_packages: snap_packages: - name: codium classic: true - - name: dbeaver-ce - classic: false + - name: beekeeper-studio - name: pycharm-community classic: true diff --git a/data_safe_haven/resources/workspace/ansible/tasks/packages.yaml b/data_safe_haven/resources/workspace/ansible/tasks/packages.yaml index 9b01bfa6c8..584e3a1b24 100644 --- a/data_safe_haven/resources/workspace/ansible/tasks/packages.yaml +++ b/data_safe_haven/resources/workspace/ansible/tasks/packages.yaml @@ -29,7 +29,7 @@ tags: snap community.general.snap: name: "{{ item.name }}" - classic: "{{ item.classic }}" + classic: "{{ item.classic | default('false') }}" state: present loop: "{{ snap_packages }}" diff --git a/data_safe_haven/resources/workspace/workspace.cloud_init.mustache.yaml b/data_safe_haven/resources/workspace/workspace.cloud_init.mustache.yaml index 5694f1c88c..5c300b45a5 100644 --- a/data_safe_haven/resources/workspace/workspace.cloud_init.mustache.yaml +++ b/data_safe_haven/resources/workspace/workspace.cloud_init.mustache.yaml @@ -37,6 +37,8 @@ write_files: popd mounts: + # Mount ephemeral storage at resource instead of default /mnt + - [ephemeral0, /mnt/scratch] # Desired state configuration is in a blob container mounted as NFSv3 - ["{{storage_account_desired_state_name}}.blob.core.windows.net:/{{storage_account_desired_state_name}}/desiredstate", /var/local/ansible, nfs, "ro,_netdev,sec=sys,vers=3,nolock,proto=tcp"] # Secure data is in a blob container mounted as NFSv3 @@ -69,9 +71,6 @@ package_update: true package_upgrade: true runcmd: - # Restart services - - systemctl restart nslcd - # Mount all external volumes # -------------------------- - echo ">=== Mounting all external volumes... ===<" @@ -97,3 +96,6 @@ runcmd: - while (! test -f /var/local/ansible/vars/pulumi_vars.yaml) do sleep 5; done - echo ">=== Running initial desired state configuration... 
===<" - systemctl start desired-state + + # Restart services + - systemctl restart nslcd diff --git a/docs/source/deployment/deploy_sre.md b/docs/source/deployment/deploy_sre.md index be080908f2..d4161ea579 100644 --- a/docs/source/deployment/deploy_sre.md +++ b/docs/source/deployment/deploy_sre.md @@ -69,11 +69,56 @@ sre: :::: +:::{hint} +See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/) for a full list of valid Azure VM SKUs. +::: + :::{important} -All VM SKUs you want to deploy must support premium SSDs. +All VM SKUs you deploy must support premium SSDs. +- SKUs that support premium SSDs have a lower case 's' in their name. +- See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/vm-naming-conventions) for a full naming convention explanation. - See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/disks-types#premium-ssds) for more details on premium SSD support. -- See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/) for a full list of valid SKUs + +::: + +:::{important} +All VM SKUs you deploy must have CPUs with the `x86_64` architecture. + +- SKUs with a lower case 'p' in their name have the ARM architecture and should not be used. +- See [here](https://learn.microsoft.com/en-us/azure/virtual-machines/vm-naming-conventions) for a full naming convention explanation. + +::: + +:::{important} +The antivirus process running on each workspace consumes around 1.3 GiB of memory at idle. +This usage will roughly double for a short period each day while its database is updated. + +You should take this into account when choosing a VM size and pick an SKU with enough memory overhead for your workload and the antivirus service. +::: + +:::{important} +Only GPUs supported by CUDA and the Nvidia GPU drivers can be used. +['N' series](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/overview#gpu-accelerated) SKUs feature GPUs. +The NC and ND families are recommended as they feature GPUs designed for general purpose computation rather than graphics processing. + +There is no key to distinguish SKUs with Nvidia GPUs; however, newer SKUs contain the name of the accelerator. +::: + +:::{hint} +Picking a good VM size depends on many variables. +You should think about your expected use case and what kind of resources you need. + +As some general recommendations, + +- For general purpose use, the D family gives decent performance and a good balance of CPU and memory. + The [Dsv6 series](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes/general-purpose/dsv6-series#sizes-in-series) is a good starting point and can be scaled from 2 CPUs and 8 GB RAM to 128 CPUs and 512 GB RAM. + - `Standard_D8s_v6` should give reasonable performance for a single concurrent user. +- For GPU-accelerated work, the NC family provides Nvidia GPUs and a good balance of CPU and memory. + In order of increasing throughput, the `NCv3` series features Nvidia V100 GPUs, the `NC_A100_v4` series features Nvidia A100 GPUs, and the `NCads_H100_v5` series features Nvidia H100 GPUs. + - `Standard_NC6s_v3` should give reasonable performance for a single concurrent user with AI/ML workloads. + Scaling up in the same series (for example `Standard_NC12s_v3`) gives more accelerators of the same type. + Alternatively, a series with more recent GPUs should give better performance. 
::: diff --git a/docs/source/roles/researcher/images/db_beekeeper_studio_mssql.png b/docs/source/roles/researcher/images/db_beekeeper_studio_mssql.png new file mode 100644 index 0000000000..fede151a4a Binary files /dev/null and b/docs/source/roles/researcher/images/db_beekeeper_studio_mssql.png differ diff --git a/docs/source/roles/researcher/images/db_beekeeper_studio_postgres.png b/docs/source/roles/researcher/images/db_beekeeper_studio_postgres.png new file mode 100644 index 0000000000..473f40cda2 Binary files /dev/null and b/docs/source/roles/researcher/images/db_beekeeper_studio_postgres.png differ diff --git a/docs/source/roles/researcher/images/db_dbeaver_connect_mssql.png b/docs/source/roles/researcher/images/db_dbeaver_connect_mssql.png deleted file mode 100644 index 273d53a993..0000000000 Binary files a/docs/source/roles/researcher/images/db_dbeaver_connect_mssql.png and /dev/null differ diff --git a/docs/source/roles/researcher/images/db_dbeaver_connect_postgresql.png b/docs/source/roles/researcher/images/db_dbeaver_connect_postgresql.png deleted file mode 100644 index ac1e79cf76..0000000000 Binary files a/docs/source/roles/researcher/images/db_dbeaver_connect_postgresql.png and /dev/null differ diff --git a/docs/source/roles/researcher/images/db_dbeaver_driver_download.png b/docs/source/roles/researcher/images/db_dbeaver_driver_download.png deleted file mode 100644 index a2225657b1..0000000000 Binary files a/docs/source/roles/researcher/images/db_dbeaver_driver_download.png and /dev/null differ diff --git a/docs/source/roles/researcher/images/db_dbeaver_select_mssql.png b/docs/source/roles/researcher/images/db_dbeaver_select_mssql.png deleted file mode 100644 index ea5b7e9e41..0000000000 Binary files a/docs/source/roles/researcher/images/db_dbeaver_select_mssql.png and /dev/null differ diff --git a/docs/source/roles/researcher/images/db_dbeaver_select_postgresql.png b/docs/source/roles/researcher/images/db_dbeaver_select_postgresql.png deleted file mode 100644 index 75f0d019d3..0000000000 Binary files a/docs/source/roles/researcher/images/db_dbeaver_select_postgresql.png and /dev/null differ diff --git a/docs/source/roles/researcher/snippets/software_database.partial.md b/docs/source/roles/researcher/snippets/software_database.partial.md index e47f443c17..d76766476e 100644 --- a/docs/source/roles/researcher/snippets/software_database.partial.md +++ b/docs/source/roles/researcher/snippets/software_database.partial.md @@ -1,3 +1,3 @@ -- `DBeaver` desktop database management software +- `Beekeeper Studio` desktop database management software - `psql` a command line PostgreSQL client - `unixodbc-dev` driver for interacting with Microsoft SQL databases diff --git a/docs/source/roles/researcher/using_the_sre.md b/docs/source/roles/researcher/using_the_sre.md index 41752c1c06..51a7b60abf 100644 --- a/docs/source/roles/researcher/using_the_sre.md +++ b/docs/source/roles/researcher/using_the_sre.md @@ -529,7 +529,7 @@ Official tutorials for [MSSQL](https://learn.microsoft.com/en-us/sql/sql-server/ :class: dropdown note - **Server name** : mssql._SRE\_URL_ (e.g. 
mssql.sandbox.projects.example.org) -- **Username**: databaseadmin +- **Username**: databaseadmin@shm-_SHM\_NAME_-sre-_SRE\_NAME_-db-server-mssql - **Password**: provided by your {ref}`System Manager ` - **Database name**: provided by your {ref}`System Manager ` - **Port**: 1433 @@ -547,111 +547,57 @@ Official tutorials for [MSSQL](https://learn.microsoft.com/en-us/sql/sql-server/ ::: -Examples are given below for connecting using **DBeaver**, **Python** and **R**. +Examples are given below for connecting using **Beekeeper Studio**, **Python** and **R**. The instructions for using other graphical interfaces or programming languages will be similar. -### {{bear}} Connecting using DBeaver - -#### Microsoft SQL +### {{bee}} Connecting using Beekeeper Studio -::::{admonition} 1. Create new Microsoft SQL server connection +::::{admonition} Microsoft SQL :class: dropdown note -Click on the **{guilabel}`New database connection`** button (which looks a bit like an electrical plug with a plus sign next to it) - +- Click on the **{guilabel}`+ New Connection`** button - Select **SQL Server** as the database type - - :::{image} images/db_dbeaver_select_mssql.png - :alt: DBeaver select Microsoft SQL +- Enter the connection details + - **Authentication**: Username/Password + - **Host**: as above + - **Port**: as above + - **Enable SSL**: false + - **User**: as above + - **Password**: as above + - **Domain**: empty + - **Trust Server Certificate**: true + - **Default Database**: master +- Click on **{guilabel}`Test`** to test the connection settings +- Click on **{guilabel}`Connect`** to connect to the database or enter a name and click **{guilabel}`Save`** to save the connection settings for future use + + :::{image} images/db_beekeeper_studio_mssql.png + :alt: Beekeeper Studio MS SQL connection configuration :align: center :width: 90% ::: - -:::: - -::::{admonition} 2. Provide connection details -:class: dropdown note - -- **Host**: as above -- **Database**: as above -- **Authentication**: SQL Server Authentication -- **Username**: as above -- **Password**: as above -- Tick **Show All Schemas** -- Tick **Trust server certificate** - - :::{image} images/db_dbeaver_connect_mssql.png - :alt: DBeaver connect with Microsoft SQL - :align: center - :width: 90% - ::: - -:::: - -::::{admonition} 3. Download drivers if needed -:class: dropdown note - -- After clicking finish, you may be prompted to download driver files even though they should be pre-installed. -- Click on the **{guilabel}`Download`** button if this happens. - - :::{image} images/db_dbeaver_driver_download.png - :alt: DBeaver driver download for Microsoft SQL - :align: center - :width: 90% - ::: - -- If drivers are not available contact your {ref}`System Manager ` - :::: -#### PostgreSQL - -::::{admonition} 1. Create new PostgreSQL server connection -:class: dropdown note - -Click on the **{guilabel}`New database connection`** button (which looks a bit like an electrical plug with a plus sign next to it) - -- Select **PostgreSQL** as the database type - - :::{image} images/db_dbeaver_select_postgresql.png - :alt: DBeaver select PostgreSQL - :align: center - :width: 90% - ::: - -:::: - -::::{admonition} 2. 
Provide connection details -:class: dropdown note - -- **Host**: as above -- **Database**: as above -- **Authentication**: Database Native -- **Username**: as above -- **Password**: as above - - :::{image} images/db_dbeaver_connect_postgresql.png - :alt: DBeaver connect with PostgreSQL - :align: center - :width: 90% - ::: - -:::: - -::::{admonition} 3. Download drivers if needed +::::{admonition} PostgreSQL :class: dropdown note -- After clicking finish, you may be prompted to download driver files even though they should be pre-installed. -- Click on the **{guilabel}`Download`** button if this happens. - - :::{image} images/db_dbeaver_driver_download.png - :alt: DBeaver driver download for PostgreSQL +- Click on the **{guilabel}`+ New Connection`** button +- Select **Postgres** as the database type +- Enter the connection details + - **Connection Mode**: Host and Port + - **Host**: as above + - **Port**: as above + - **Enable SSL**: false + - **User**: as above + - **Password**: as above + - **Default Database**: postgres +- Click on **{guilabel}`Test`** to test the connection settings +- Click on **{guilabel}`Connect`** to connect to the database or enter a name and click **{guilabel}`Save`** to save the connection settings for future use + + :::{image} images/db_beekeeper_studio_postgres.png + :alt: Beekeeper Studio PostgreSQL connection configuration :align: center :width: 90% ::: - -- If drivers are not available contact your {ref}`System Manager ` - :::: ### {{snake}} Connecting using Python diff --git a/pyproject.toml b/pyproject.toml index 098f8b06be..ae503bed03 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,16 +44,16 @@ dependencies = [ "chevron==0.14.0", "cryptography==43.0.1", "fqdn==1.5.1", - "psycopg==3.2.3", - "pulumi-azure-native==2.63.0", + "psycopg[binary]==3.2.3", + "pulumi-azure-native==2.64.3", "pulumi-azuread==5.53.4", "pulumi-random==4.16.6", - "pulumi==3.134.1", + "pulumi==3.135.1", "pydantic==2.9.2", "pyjwt[crypto]==2.9.0", "pytz==2024.2", "pyyaml==6.0.2", - "rich==13.8.1", + "rich==13.9.2", "simple-acme-dns==3.1.0", "typer==0.12.5", "websocket-client==1.8.0", @@ -66,7 +66,7 @@ Source = "https://github.com/alan-turing-institute/data-safe-haven" [project.optional-dependencies] docs = [ - "emoji==2.13.2", + "emoji==2.14.0", "myst-parser==4.0.0", "pydata-sphinx-theme==0.15.4", "sphinx-togglebutton==0.3.2", @@ -79,10 +79,10 @@ lint = [ "mypy==1.11.2", "pandas-stubs==2.2.2.240909", "pydantic==2.9.2", - "ruff==0.6.8", + "ruff==0.6.9", "types-appdirs==1.4.3.5", "types-chevron==0.14.2.20240310", - "types-pytz==2024.2.0.20240913", + "types-pytz==2024.2.0.20241003", "types-pyyaml==6.0.12.20240917", "types-requests==2.32.0.20240914", ] @@ -125,6 +125,7 @@ features = ["docs"] [tool.hatch.envs.docs.scripts] build = "sphinx-build -M html docs/source/ docs/build/ --fail-on-warning" +lint = "mdl --style .mdlstyle.rb ./docs/source" [tool.hatch.envs.lint] type = "pip-compile" diff --git a/tests/commands/test_sre.py b/tests/commands/test_sre.py index 5cd7404f75..f8818d20cc 100644 --- a/tests/commands/test_sre.py +++ b/tests/commands/test_sre.py @@ -12,6 +12,7 @@ class TestDeploySRE: def test_deploy( self, runner: CliRunner, + mock_azuresdk_get_subscription_name, # noqa: ARG002 mock_graph_api_token, # noqa: ARG002 mock_contextmanager_assert_context, # noqa: ARG002 mock_ip_1_2_3_4, # noqa: ARG002 @@ -31,6 +32,7 @@ def test_no_application( self, caplog: LogCaptureFixture, runner: CliRunner, + mock_azuresdk_get_subscription_name, # noqa: ARG002 mock_contextmanager_assert_context, 
# noqa: ARG002 mock_graph_api_token, # noqa: ARG002 mock_ip_1_2_3_4, # noqa: ARG002 @@ -52,6 +54,7 @@ def test_no_application_secret( runner: CliRunner, context: Context, mocker: MockerFixture, + mock_azuresdk_get_subscription_name, # noqa: ARG002 mock_graph_api_get_application_by_name, # noqa: ARG002 mock_graph_api_token, # noqa: ARG002 mock_ip_1_2_3_4, # noqa: ARG002 diff --git a/tests/commands/test_users.py b/tests/commands/test_users.py index a9feb25ee2..c1b183c922 100644 --- a/tests/commands/test_users.py +++ b/tests/commands/test_users.py @@ -26,6 +26,17 @@ def test_invalid_shm( assert result.exit_code == 1 assert "Have you deployed the SHM?" in result.stdout + def test_invalid_sre( + self, + mock_pulumi_config_from_remote, # noqa: ARG002 + mock_shm_config_from_remote, # noqa: ARG002 + runner, + ): + result = runner.invoke(users_command_group, ["list", "my_sre"]) + + assert result.exit_code == 1 + assert "Is the SRE deployed?" in result.stdout + class TestRegister: def test_invalid_shm( diff --git a/tests/conftest.py b/tests/conftest.py index 4a953c408e..4626f1061e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -206,6 +206,15 @@ def mock_azuresdk_get_subscription(mocker, request): ) +@fixture +def mock_azuresdk_get_subscription_name(mocker): + mocker.patch.object( + AzureSdk, + "get_subscription_name", + return_value="Data Safe Haven Acme", + ) + + @fixture def mock_azuresdk_get_credential(mocker): class MockCredential(TokenCredential): diff --git a/typings/pulumi/__init__.pyi b/typings/pulumi/__init__.pyi index 80ae2bf986..0b80efa17e 100644 --- a/typings/pulumi/__init__.pyi +++ b/typings/pulumi/__init__.pyi @@ -11,6 +11,7 @@ from pulumi.output import ( ) from pulumi.resource import ( ComponentResource, + InvokeOptions, Resource, ResourceOptions, export, @@ -24,6 +25,7 @@ __all__ = [ "export", "FileAsset", "Input", + "InvokeOptions", "Output", "Resource", "ResourceOptions", diff --git a/typings/pulumi_azure_native/__init__.pyi b/typings/pulumi_azure_native/__init__.pyi index 56be0a1e3a..598b0a9e6c 100644 --- a/typings/pulumi_azure_native/__init__.pyi +++ b/typings/pulumi_azure_native/__init__.pyi @@ -12,6 +12,7 @@ import pulumi_azure_native.managedidentity as managedidentity import pulumi_azure_native.network as network import pulumi_azure_native.operationalinsights as operationalinsights import pulumi_azure_native.operationsmanagement as operationsmanagement +import pulumi_azure_native.provider as provider import pulumi_azure_native.resources as resources import pulumi_azure_native.sql as sql import pulumi_azure_native.storage as storage @@ -30,6 +31,7 @@ __all__ = [ "network", "operationalinsights", "operationsmanagement", + "provider", "resources", "sql", "storage",
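
The networking change in this diff is the heart of the cross-subscription support: an explicit `azure-native` provider pinned to the SHM subscription is passed to the DNS zone lookup via `InvokeOptions` and to records created in that zone via `ResourceOptions`. Below is a minimal standalone sketch of that pattern; the subscription ID, resource group, zone name, and name-server values are illustrative placeholders rather than values from this changeset, and the record arguments beyond those shown in the diff are assumptions about the `pulumi-azure-native` `RecordSet` API.

```python
# Sketch only: run inside a Pulumi program. Placeholder values throughout;
# the pattern mirrors the provider/InvokeOptions/ResourceOptions usage in
# data_safe_haven/infrastructure/programs/sre/networking.py above.
from pulumi import InvokeOptions, ResourceOptions
from pulumi_azure_native import network, provider

# Explicit provider scoped to the SHM subscription (placeholder location and ID)
shm_provider = provider.Provider(
    "shm_provider",
    provider.ProviderArgs(
        location="uksouth",
        subscription_id="00000000-0000-0000-0000-000000000000",
    ),
)

# Reads are routed to the SHM subscription by passing the provider in InvokeOptions
shm_dns_zone = network.get_zone(
    resource_group_name="shm-example-rg",
    zone_name="example.org",
    opts=InvokeOptions(provider=shm_provider),
)

# Resources created in that zone pass the same provider through ResourceOptions
network.RecordSet(
    "sre_subdomain_ns_record",
    record_type="NS",
    relative_record_set_name="sandbox",
    resource_group_name="shm-example-rg",
    ttl=3600,
    zone_name=shm_dns_zone.name,
    ns_records=[network.NsRecordArgs(nsdname="ns1-01.azure-dns.com.")],
    opts=ResourceOptions(provider=shm_provider),
)
```

In the changeset itself the provider is additionally combined with the component's child options via `ResourceOptions.merge`, so parenting and other inherited options are preserved alongside the cross-subscription provider.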