diff --git a/.github/ISSUE_TEMPLATE/deployment_bug_report.md b/.github/ISSUE_TEMPLATE/deployment_bug_report.md index 6cf453cc13..fa569038c2 100644 --- a/.github/ISSUE_TEMPLATE/deployment_bug_report.md +++ b/.github/ISSUE_TEMPLATE/deployment_bug_report.md @@ -29,7 +29,6 @@ Before reporting a problem please check the following. Replace the empty checkbo List of packages diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md index a25064faa1..575f5c9c53 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.md +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -14,9 +14,9 @@ Before reporting a problem please check the following. Replace the empty checkbo Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deployment) section of our documentation when completing these steps. -- [ ] Consult the `data-safe-haven/VERSIONING.md` guide and determine the version number of the new release. Record it in the title of this issue. +- [ ] Consult the `data-safe-haven/VERSIONING.md` guide and determine the version number of the new release. Record it in the title of this issue - [ ] Create a release branch called e.g. `release-v0.0.1` -- [ ] Draft a changelog for the release similar to our previous releases, see https://github.com/alan-turing-institute/data-safe-haven/releases +- [ ] Draft a changelog for the release similar to our [previous releases](https://github.com/alan-turing-institute/data-safe-haven/releases) ### For patch releases only @@ -34,26 +34,27 @@ Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deplo ### For major releases only - [ ] Confirm that a third party has carried out a full penetration test evaluating: - 1. external attack surface - 1. ability to exfiltrate data from the system - 1. ability to transfer data between SREs - 1. ability to escalate privileges on the SRD. + 1. external attack surface + 1. ability to exfiltrate data from the system + 1. ability to transfer data between SREs + 1. ability to escalate privileges on the SRD. ### Update documentation - [ ] Update supported versions in `SECURITY.md` - [ ] Update pen test results in `VERSIONING.md` -## :computer: Release information +### Making the release -- **Version number:** _ -- **SHM ID:** _ -- **T2 SRE ID:** _ -- **T3 SRE ID:** _ +- [ ] Merge release branch into `latest` +- [ ] Create a tag of the form `v0.0.1` pointing to the most recent commit on `latest` (the merge that you just made) +- [ ] Publish your draft GitHub release using this tag +- [ ] Ensure docs for the latest version are built and deployed on ReadTheDocs +- [ ] Push a build to PyPI +- [ ] Announce release on communications channels ## :deciduous_tree: Deployment problems - diff --git a/.github/scripts/update_azure_data_studio.py b/.github/scripts/update_azure_data_studio.py deleted file mode 100644 index 651e85fdfc..0000000000 --- a/.github/scripts/update_azure_data_studio.py +++ /dev/null @@ -1,19 +0,0 @@ -#! 
/usr/bin/env python3 -from lxml import html -import hashlib -import requests - -remote_page = requests.get("https://docs.microsoft.com/en-us/sql/azure-data-studio/download-azure-data-studio", allow_redirects=True) -root = html.fromstring(remote_page.content) -short_link = root.xpath("//a[contains(text(), '.deb')]/@href")[0] - -remote_content = requests.get(short_link, allow_redirects=True) -sha256 = hashlib.sha256(remote_content.content).hexdigest() -version = remote_content.url.split("-")[-1].replace(".deb", "") -remote = "/".join(remote_content.url.split("/")[:-1] + ["|DEBFILE|"]) - -with open("deployment/secure_research_desktop/packages/deb-azuredatastudio.version", "w") as f_out: - f_out.write(f"hash: {sha256}\n") - f_out.write(f"version: {version}\n") - f_out.write("debfile: azuredatastudio-linux-|VERSION|.deb\n") - f_out.write(f"remote: {remote}\n") diff --git a/.github/scripts/update_dbeaver_drivers.py b/.github/scripts/update_dbeaver_drivers.py deleted file mode 100644 index 696a501858..0000000000 --- a/.github/scripts/update_dbeaver_drivers.py +++ /dev/null @@ -1,72 +0,0 @@ -#! /usr/bin/env python3 -import json -from lxml import html -from natsort import natsorted -import requests - - -def get_latest_version(url, search_text): - """ - Get latest version number of a database driver from the Maven repository. - - Fetches the HTML page at the given URL, then converts it to an lxml tree. - Numeric strings are then extracted. - Note that mostly numeric strings for some drivers contain non-numeric text, - as different driver types exist for those drivers, even where the version number is the same. - The largest (latest) version number of the driver is then returned. - - Parameters - ---------- - url : str - The URL of the Maven repository containing the driver - search_text : str - Text to search for in the repository, to distinguish the driver from other files - - Returns - ------- - list - The latest available version number of the driver - """ - - remote_page = requests.get(url, allow_redirects=True) - root = html.fromstring(remote_page.content) - return natsorted([v for v in root.xpath("//a[contains(text(), '" + search_text + "')]/@href") if v != "../"])[-1].replace("/", "") - - -drivers = [ - { - 'name': "mssql_jdbc", - 'url': "https://repo1.maven.org/maven2/com/microsoft/sqlserver/mssql-jdbc/", - 'search_text': "jre8/" - }, - { - 'name': "pgjdbc", - 'url': "https://repo1.maven.org/maven2/org/postgresql/pgjdbc-versions/", - 'search_text': "/" - }, - { - 'name': "postgresql", - 'url': "https://repo1.maven.org/maven2/org/postgresql/postgresql/", - 'search_text': "/" - }, - { - 'name': "postgis_geometry", - 'url': "https://repo1.maven.org/maven2/net/postgis/postgis-geometry/", - 'search_text': "/" - }, - { - 'name': "postgis_jdbc", - 'url': "https://repo1.maven.org/maven2/net/postgis/postgis-jdbc/", - 'search_text': "/" - }, - { - 'name': "waffle_jna", - 'url': "https://repo1.maven.org/maven2/com/github/waffle/waffle-jna/", - 'search_text': "/" - } -] - -output = {driver['name']: get_latest_version(driver['url'], driver['search_text']) for driver in drivers} - -with open("deployment/secure_research_desktop/packages/dbeaver-driver-versions.json", "w") as f_out: - f_out.writelines(json.dumps(output, indent=4, sort_keys=True)) diff --git a/.github/scripts/update_rstudio.py b/.github/scripts/update_rstudio.py deleted file mode 100644 index ee36a35e66..0000000000 --- a/.github/scripts/update_rstudio.py +++ /dev/null @@ -1,21 +0,0 @@ -#! 
/usr/bin/env python3 -from lxml import html -import hashlib -import requests - -remote_page = requests.get("https://www.rstudio.com/products/rstudio/download/", allow_redirects=True) -root = html.fromstring(remote_page.content) -short_links = [link for link in root.xpath("//a[contains(text(), '.deb')]/@href") if "debian" not in link] - -for ubuntu_version in ["focal", "jammy"]: - short_link = [link for link in short_links if ubuntu_version in link][0] - remote_content = requests.get(short_link, allow_redirects=True) - sha256 = hashlib.sha256(remote_content.content).hexdigest() - version = "-".join(remote_content.url.split("/")[-1].split("-")[1:-1]) - remote = "/".join(remote_content.url.split("/")[:-1] + ["|DEBFILE|"]) - - with open(f"deployment/secure_research_desktop/packages/deb-rstudio-{ubuntu_version}.version", "w") as f_out: - f_out.write(f"hash: {sha256}\n") - f_out.write(f"version: {version}\n") - f_out.write("debfile: rstudio-|VERSION|-amd64.deb\n") - f_out.write(f"remote: {remote}\n") diff --git a/.github/security_checklist_template.md b/.github/security_checklist_template.md deleted file mode 100644 index b963331eef..0000000000 --- a/.github/security_checklist_template.md +++ /dev/null @@ -1,167 +0,0 @@ -# Security checklist -Running on SHM/SREs deployed using commit XXXXXXX - -## Summary -+ :white_check_mark: N tests passed -- :partly_sunny: N tests partially passed (see below for more details) -- :fast_forward: N tests skipped (see below for more details) -- :x: N tests failed (see below for more details) - -## Details -Some security checks were skipped since: -- No managed device was available -- No access to a physical space with its own dedicated network was possible - -### Multifactor Authentication and Password strength -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the SRE standard user cannot access the apps - +
:camera: Verify before adding to group: Microsoft Remote Desktop: Login works but apps cannot be viewed - -
- +
:camera: Verify before adding to group: Guacamole: User is prompted to set up MFA - -
- -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that adding the **SRE standard user** to the SRE group on the domain controller does not give them access - +
:camera: Verify after adding to group: Microsoft Remote Desktop: Login works and apps can be viewed - -
- +
:camera: Verify after adding to group: Microsoft Remote Desktop: attempt to log in to DSVM Main (Desktop) fails - -
- +
:camera: Verify before adding to group: Guacamole: User is prompted to set up MFA - -
- -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** is able to successfully set up MFA - +
:camera: Verify: successfully set up MFA - -
- -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** can authenticate with MFA - +
:camera: Verify: Guacamole: respond to the MFA prompt - -
- +
:camera: Verify: Microsoft Remote Desktop: attempt to log in to DSVM Main (Desktop) and respond to the MFA prompt - -
- -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** can access the DSVM desktop - +
:camera: Verify: Microsoft Remote Desktop: connect to DSVM Main (Desktop) - -
- +
:camera: Verify: Guacamole: connect to Desktop: Ubuntu0 - -
- -### Isolated Network -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connect to the SHM DC and NPS if connected to the SHM VPN -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Fail to connect to the SHM DC and NPS if not connected to the SHM VPN -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Fail to connect to the internet from within a DSVM on the SRE network. - +
:camera: Verify: Connection fails - -
- +
:camera: Verify: that you cannot access a website using curl - -
- +
:camera: Verify: that you cannot get the IP address for a website using nslookup - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that users cannot connect between two SREs within the same SHM, even if they have access to both SREs - +
:camera: Verify: SSH connection fails - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Network rules are set appropriately to block outgoing traffic - +
:camera: Verify: access rules - -
- -### User devices -#### Tier 2: -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connection succeeds from a personal device with an allow-listed IP address -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check connection - -#### Tier 3: -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check user lacks root access -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connection succeeds from a personal device with an allow-listed IP address -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check connection with an allow-listed IP address - -#### Tiers 2+: -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Network rules permit access only from allow-listed IP addresses - +
:camera: Verify: access rules - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: All non-deployment NSGs have rules denying inbound connections from outside the Virtual Network - -### Physical security -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so connection from outside was not tested -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so connection from inside was not tested -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check the network IP ranges corresponding to the research spaces and compare against the IPs accepted by the firewall. -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so confirmation of physical measures was not tested - -### Remote connections - -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to connect as a user to the remote desktop server via SSH - +
:camera: Verify: SSH connection by FQDN fails - -
- +
:camera: Verify: SSH connection by public IP address fails - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: The remote desktop server is the only SRE resource with a public IP address - -### Copy-and-paste -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to paste local text into a DSVM -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to copy text from a DSVM -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Copy between VMs in an SRE succeeds - -### Data ingress -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** secure upload token successfully created with write-only permissions -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** token was sent using a secure, out-of-band communication channel (e.g. secure email) -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading a file from an allow-listed IP address succeeds -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** downloading a file from an allow-listed IP address fails -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading a file from a non-allowed IP address fails -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** connection during lifetime of short-duration token succeeds -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** connection after lifetime of short-duration token fails -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading different file types succeeds - -### Storage volumes and egress -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to the `/output` volume -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can only read from the `/data` volume -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to their directory in `/home` -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to the `/shared` volume -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** can see the files ready for egress -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** can download egress-ready files - -### Software Ingress -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** expected software tools are installed - +
:camera: Verify: DBeaver, RStudio, PyCharm and Visual Studio Code available - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** secure upload token successfully created with write-only permissions -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading is possible only during the token lifetime -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** uploaded files are readable and can be installed on the DSVM -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** uploaded files are readable but cannot be installed on the DSVM - -### Package mirrors - -#### Tier 2: -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Can install any packages - +
:camera: Verify: botocore can be installed - -
- -#### Tier 3: -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Can install only allow-listed packages - +
:camera: Verify: aero-calc can be installed; botocore cannot be installed - -
- -### Azure firewalls -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Admin has limited access to the internet - +
:camera: Verify: SHM DC cannot connect to google - -
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Admin can download Windows updates - +
:camera: Verify: Windows updates can be downloaded - -
diff --git a/.github/workflows/lint_code.yaml b/.github/workflows/lint_code.yaml index 7786fc4b62..4d0caed16c 100644 --- a/.github/workflows/lint_code.yaml +++ b/.github/workflows/lint_code.yaml @@ -108,7 +108,7 @@ jobs: done rm expanded.tmp - name: Lint YAML - uses: karancode/yamllint-github-action@v2.1.1 + uses: karancode/yamllint-github-action@v3.0.0 with: yamllint_strict: true yamllint_comment: false diff --git a/.hatch/requirements-lint.txt b/.hatch/requirements-lint.txt index e1ab89f54e..c09dc3ebb9 100644 --- a/.hatch/requirements-lint.txt +++ b/.hatch/requirements-lint.txt @@ -5,9 +5,9 @@ # - ansible==11.0.0 # - black==24.10.0 # - mypy==1.13.0 -# - pandas-stubs==2.2.3.241009 -# - pydantic==2.10.1 -# - ruff==0.8.0 +# - pandas-stubs==2.2.3.241126 +# - pydantic==2.10.2 +# - ruff==0.8.1 # - types-appdirs==1.4.3.5 # - types-chevron==0.14.2.20240310 # - types-pytz==2024.2.0.20241003 @@ -81,7 +81,7 @@ click-help-colors==0.9.4 # via molecule colorama==0.4.6 # via tox -cryptography==43.0.3 +cryptography==44.0.0 # via ansible-core distlib==0.3.9 # via @@ -151,7 +151,7 @@ packaging==24.2 # pytest # pytest-ansible # tox -pandas-stubs==2.2.3.241009 +pandas-stubs==2.2.3.241126 # via hatch.envs.lint parsley==1.3 # via bindep @@ -178,7 +178,7 @@ ptyprocess==0.7.0 # via pexpect pycparser==2.22 # via cffi -pydantic==2.10.1 +pydantic==2.10.2 # via hatch.envs.lint pydantic-core==2.27.1 # via pydantic @@ -186,7 +186,7 @@ pygments==2.18.0 # via rich pyproject-api==1.8.0 # via tox -pytest==8.3.3 +pytest==8.3.4 # via # pytest-ansible # pytest-xdist @@ -197,7 +197,7 @@ pytest-ansible==24.9.0 # tox-ansible pytest-xdist==3.6.1 # via tox-ansible -python-daemon==3.1.0 +python-daemon==3.1.1 # via ansible-runner python-gnupg==0.5.3 # via ansible-sign @@ -233,7 +233,7 @@ ruamel-yaml==0.18.6 # via ansible-lint ruamel-yaml-clib==0.2.12 # via ruamel-yaml -ruff==0.8.0 +ruff==0.8.1 # via hatch.envs.lint subprocess-tee==0.4.2 # via @@ -265,7 +265,7 @@ tzdata==2024.2 # via ansible-navigator urllib3==2.2.3 # via types-requests -virtualenv==20.27.1 +virtualenv==20.28.0 # via tox wcmatch==10.0 # via diff --git a/.hatch/requirements-test.txt b/.hatch/requirements-test.txt index 8c95d7dce0..ce24e1caee 100644 --- a/.hatch/requirements-test.txt +++ b/.hatch/requirements-test.txt @@ -1,7 +1,7 @@ # # This file is autogenerated by hatch-pip-compile with Python 3.12 # -# [constraints] .hatch/requirements.txt (SHA256: 3586aa93da255077aac182009c06aa28b96ec15387beec4148e3bebd2b9f8852) +# [constraints] .hatch/requirements.txt (SHA256: 9b78097f41c11566a80e32726aefa74a983ac227fce27db9adba04ae7594da1c) # # - appdirs==1.4.4 # - azure-core==1.32.0 @@ -21,25 +21,25 @@ # - azure-storage-file-datalake==12.18.0 # - azure-storage-file-share==12.20.0 # - chevron==0.14.0 -# - cryptography==43.0.3 +# - cryptography==44.0.0 # - fqdn==1.5.1 # - psycopg[binary]==3.1.19 -# - pulumi-azure-native==2.73.1 +# - pulumi-azure-native==2.74.0 # - pulumi-azuread==6.0.1 # - pulumi-random==4.16.7 -# - pulumi==3.141.0 -# - pydantic==2.10.1 -# - pyjwt[crypto]==2.10.0 +# - pulumi==3.142.0 +# - pydantic==2.10.2 +# - pyjwt[crypto]==2.10.1 # - pytz==2024.2 # - pyyaml==6.0.2 # - rich==13.9.4 # - simple-acme-dns==3.2.0 -# - typer==0.13.1 +# - typer==0.14.0 # - websocket-client==1.8.0 # - coverage==7.6.8 # - freezegun==1.5.1 # - pytest-mock==3.14.0 -# - pytest==8.3.3 +# - pytest==8.3.4 # - requests-mock==1.12.1 # @@ -182,7 +182,7 @@ click==8.1.7 # typer coverage==7.6.8 # via hatch.envs.test -cryptography==43.0.3 +cryptography==44.0.0 # via # -c .hatch/requirements.txt # 
hatch.envs.test @@ -295,14 +295,14 @@ psycopg-binary==3.1.19 # via # -c .hatch/requirements.txt # psycopg -pulumi==3.141.0 +pulumi==3.142.0 # via # -c .hatch/requirements.txt # hatch.envs.test # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.73.1 +pulumi-azure-native==2.74.0 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -318,7 +318,7 @@ pycparser==2.22 # via # -c .hatch/requirements.txt # cffi -pydantic==2.10.1 +pydantic==2.10.2 # via # -c .hatch/requirements.txt # hatch.envs.test @@ -330,12 +330,12 @@ pygments==2.18.0 # via # -c .hatch/requirements.txt # rich -pyjwt==2.10.0 +pyjwt==2.10.1 # via # -c .hatch/requirements.txt # hatch.envs.test # msal -pyopenssl==24.2.1 +pyopenssl==24.3.0 # via # -c .hatch/requirements.txt # acme @@ -344,7 +344,7 @@ pyrfc3339==2.0.1 # via # -c .hatch/requirements.txt # acme -pytest==8.3.3 +pytest==8.3.4 # via # hatch.envs.test # pytest-mock @@ -403,7 +403,7 @@ six==1.16.0 # azure-core # pulumi # python-dateutil -typer==0.13.1 +typer==0.14.0 # via # -c .hatch/requirements.txt # hatch.envs.test diff --git a/.hatch/requirements.txt b/.hatch/requirements.txt index 82ad061fc0..f2589f1f68 100644 --- a/.hatch/requirements.txt +++ b/.hatch/requirements.txt @@ -19,20 +19,20 @@ # - azure-storage-file-datalake==12.18.0 # - azure-storage-file-share==12.20.0 # - chevron==0.14.0 -# - cryptography==43.0.3 +# - cryptography==44.0.0 # - fqdn==1.5.1 # - psycopg[binary]==3.1.19 -# - pulumi-azure-native==2.73.1 +# - pulumi-azure-native==2.74.0 # - pulumi-azuread==6.0.1 # - pulumi-random==4.16.7 -# - pulumi==3.141.0 -# - pydantic==2.10.1 -# - pyjwt[crypto]==2.10.0 +# - pulumi==3.142.0 +# - pydantic==2.10.2 +# - pyjwt[crypto]==2.10.1 # - pytz==2024.2 # - pyyaml==6.0.2 # - rich==13.9.4 # - simple-acme-dns==3.2.0 -# - typer==0.13.1 +# - typer==0.14.0 # - websocket-client==1.8.0 # @@ -122,7 +122,7 @@ chevron==0.14.0 # via hatch.envs.default click==8.1.7 # via typer -cryptography==43.0.3 +cryptography==44.0.0 # via # hatch.envs.default # acme @@ -192,13 +192,13 @@ psycopg==3.1.19 # via hatch.envs.default psycopg-binary==3.1.19 # via psycopg -pulumi==3.141.0 +pulumi==3.142.0 # via # hatch.envs.default # pulumi-azure-native # pulumi-azuread # pulumi-random -pulumi-azure-native==2.73.1 +pulumi-azure-native==2.74.0 # via hatch.envs.default pulumi-azuread==6.0.1 # via hatch.envs.default @@ -206,17 +206,17 @@ pulumi-random==4.16.7 # via hatch.envs.default pycparser==2.22 # via cffi -pydantic==2.10.1 +pydantic==2.10.2 # via hatch.envs.default pydantic-core==2.27.1 # via pydantic pygments==2.18.0 # via rich -pyjwt==2.10.0 +pyjwt==2.10.1 # via # hatch.envs.default # msal -pyopenssl==24.2.1 +pyopenssl==24.3.0 # via # acme # josepy @@ -257,7 +257,7 @@ six==1.16.0 # via # azure-core # pulumi -typer==0.13.1 +typer==0.14.0 # via hatch.envs.default typing-extensions==4.12.2 # via diff --git a/data_safe_haven/infrastructure/components/__init__.py b/data_safe_haven/infrastructure/components/__init__.py index f4b93b9c3d..52043d1ad3 100644 --- a/data_safe_haven/infrastructure/components/__init__.py +++ b/data_safe_haven/infrastructure/components/__init__.py @@ -9,6 +9,8 @@ MicrosoftSQLDatabaseProps, NFSV3BlobContainerComponent, NFSV3BlobContainerProps, + NFSV3StorageAccountComponent, + NFSV3StorageAccountProps, PostgresqlDatabaseComponent, PostgresqlDatabaseProps, VMComponent, @@ -23,7 +25,6 @@ ) from .wrapped import ( WrappedLogAnalyticsWorkspace, - WrappedNFSV3StorageAccount, ) __all__ = [ @@ -41,11 +42,12 @@ "MicrosoftSQLDatabaseProps", 
"NFSV3BlobContainerComponent", "NFSV3BlobContainerProps", + "NFSV3StorageAccountComponent", + "NFSV3StorageAccountProps", "PostgresqlDatabaseComponent", "PostgresqlDatabaseProps", "SSLCertificate", "SSLCertificateProps", "VMComponent", "WrappedLogAnalyticsWorkspace", - "WrappedNFSV3StorageAccount", ] diff --git a/data_safe_haven/infrastructure/components/composite/__init__.py b/data_safe_haven/infrastructure/components/composite/__init__.py index bc09bc18a8..8e561dd73a 100644 --- a/data_safe_haven/infrastructure/components/composite/__init__.py +++ b/data_safe_haven/infrastructure/components/composite/__init__.py @@ -9,6 +9,10 @@ MicrosoftSQLDatabaseProps, ) from .nfsv3_blob_container import NFSV3BlobContainerComponent, NFSV3BlobContainerProps +from .nfsv3_storage_account import ( + NFSV3StorageAccountComponent, + NFSV3StorageAccountProps, +) from .postgresql_database import PostgresqlDatabaseComponent, PostgresqlDatabaseProps from .virtual_machine import LinuxVMComponentProps, VMComponent @@ -23,6 +27,8 @@ "MicrosoftSQLDatabaseProps", "NFSV3BlobContainerComponent", "NFSV3BlobContainerProps", + "NFSV3StorageAccountComponent", + "NFSV3StorageAccountProps", "PostgresqlDatabaseComponent", "PostgresqlDatabaseProps", "VMComponent", diff --git a/data_safe_haven/infrastructure/components/composite/nfsv3_blob_container.py b/data_safe_haven/infrastructure/components/composite/nfsv3_blob_container.py index 98564918a0..29550e9541 100644 --- a/data_safe_haven/infrastructure/components/composite/nfsv3_blob_container.py +++ b/data_safe_haven/infrastructure/components/composite/nfsv3_blob_container.py @@ -1,7 +1,7 @@ from pulumi import ComponentResource, Input, ResourceOptions from pulumi_azure_native import storage -from data_safe_haven.infrastructure.components.dynamic.blob_container_acl import ( +from data_safe_haven.infrastructure.components.dynamic import ( BlobContainerAcl, BlobContainerAclProps, ) @@ -52,6 +52,7 @@ def __init__( ResourceOptions(parent=props.storage_account), ), ) + BlobContainerAcl( f"{storage_container._name}_acl", BlobContainerAclProps( diff --git a/data_safe_haven/infrastructure/components/composite/nfsv3_storage_account.py b/data_safe_haven/infrastructure/components/composite/nfsv3_storage_account.py new file mode 100644 index 0000000000..ca003bbd3d --- /dev/null +++ b/data_safe_haven/infrastructure/components/composite/nfsv3_storage_account.py @@ -0,0 +1,144 @@ +from collections.abc import Mapping, Sequence + +from pulumi import ComponentResource, Input, Output, ResourceOptions +from pulumi_azure_native import insights, storage + +from data_safe_haven.external import AzureIPv4Range +from data_safe_haven.infrastructure.components.wrapped import ( + WrappedLogAnalyticsWorkspace, +) +from data_safe_haven.types import AzureServiceTag + + +class NFSV3StorageAccountProps: + def __init__( + self, + account_name: Input[str], + allowed_ip_addresses: Input[Sequence[str]] | None, + allowed_service_tag: AzureServiceTag | None, + location: Input[str], + log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace], + resource_group_name: Input[str], + subnet_id: Input[str], + ): + self.account_name = account_name + self.allowed_ip_addresses = allowed_ip_addresses + self.allowed_service_tag = allowed_service_tag + self.location = location + self.log_analytics_workspace = log_analytics_workspace + self.resource_group_name = resource_group_name + self.subnet_id = subnet_id + + +class NFSV3StorageAccountComponent(ComponentResource): + encryption_args = storage.EncryptionArgs( + 
key_source=storage.KeySource.MICROSOFT_STORAGE, + services=storage.EncryptionServicesArgs( + blob=storage.EncryptionServiceArgs( + enabled=True, key_type=storage.KeyType.ACCOUNT + ), + file=storage.EncryptionServiceArgs( + enabled=True, key_type=storage.KeyType.ACCOUNT + ), + ), + ) + + def __init__( + self, + name: str, + props: NFSV3StorageAccountProps, + opts: ResourceOptions | None = None, + tags: Input[Mapping[str, Input[str]]] | None = None, + ): + super().__init__("dsh:sre:NFSV3StorageAccountComponent", name, {}, opts) + child_opts = ResourceOptions.merge(opts, ResourceOptions(parent=self)) + child_tags = {"component": "data"} | (tags if tags else {}) + + if props.allowed_service_tag == AzureServiceTag.INTERNET: + default_action = storage.DefaultAction.ALLOW + ip_rules = [] + else: + default_action = storage.DefaultAction.DENY + ip_rules = Output.from_input(props.allowed_ip_addresses).apply( + lambda ip_ranges: [ + storage.IPRuleArgs( + action=storage.Action.ALLOW, + i_p_address_or_range=str(ip_address), + ) + for ip_range in sorted(ip_ranges) + for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips() + ] + ) + + # Deploy storage account + self.storage_account = storage.StorageAccount( + f"{self._name}", + account_name=props.account_name, + allow_blob_public_access=False, + enable_https_traffic_only=True, + enable_nfs_v3=True, + encryption=self.encryption_args, + is_hns_enabled=True, + kind=storage.Kind.BLOCK_BLOB_STORAGE, + location=props.location, + minimum_tls_version=storage.MinimumTlsVersion.TLS1_2, + network_rule_set=storage.NetworkRuleSetArgs( + bypass=storage.Bypass.AZURE_SERVICES, + default_action=default_action, + ip_rules=ip_rules, + virtual_network_rules=[ + storage.VirtualNetworkRuleArgs( + virtual_network_resource_id=props.subnet_id, + ) + ], + ), + public_network_access=storage.PublicNetworkAccess.ENABLED, + resource_group_name=props.resource_group_name, + sku=storage.SkuArgs(name=storage.SkuName.PREMIUM_ZRS), + opts=child_opts, + tags=child_tags, + ) + + # Add diagnostic setting for blobs + insights.DiagnosticSetting( + f"{self.storage_account._name}_diagnostic_setting", + name=f"{self.storage_account._name}_diagnostic_setting", + log_analytics_destination_type="Dedicated", + logs=[ + { + "category_group": "allLogs", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + { + "category_group": "audit", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + ], + metrics=[ + { + "category": "Transaction", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + } + ], + resource_uri=self.storage_account.id.apply( + # This is the URI of the blobServices resource which is automatically + # created. 
+ lambda resource_id: resource_id + + "/blobServices/default" + ), + workspace_id=props.log_analytics_workspace.id, + ) + + self.register_outputs({}) diff --git a/data_safe_haven/infrastructure/components/wrapped/__init__.py b/data_safe_haven/infrastructure/components/wrapped/__init__.py index b449f46859..fc5f8c8f61 100644 --- a/data_safe_haven/infrastructure/components/wrapped/__init__.py +++ b/data_safe_haven/infrastructure/components/wrapped/__init__.py @@ -1,7 +1,5 @@ from .log_analytics_workspace import WrappedLogAnalyticsWorkspace -from .nfsv3_storage_account import WrappedNFSV3StorageAccount __all__ = [ "WrappedLogAnalyticsWorkspace", - "WrappedNFSV3StorageAccount", ] diff --git a/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py b/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py deleted file mode 100644 index e259de4806..0000000000 --- a/data_safe_haven/infrastructure/components/wrapped/nfsv3_storage_account.py +++ /dev/null @@ -1,79 +0,0 @@ -from collections.abc import Mapping, Sequence - -from pulumi import Input, Output, ResourceOptions -from pulumi_azure_native import storage - -from data_safe_haven.external import AzureIPv4Range -from data_safe_haven.types import AzureServiceTag - - -class WrappedNFSV3StorageAccount(storage.StorageAccount): - encryption_args = storage.EncryptionArgs( - key_source=storage.KeySource.MICROSOFT_STORAGE, - services=storage.EncryptionServicesArgs( - blob=storage.EncryptionServiceArgs( - enabled=True, key_type=storage.KeyType.ACCOUNT - ), - file=storage.EncryptionServiceArgs( - enabled=True, key_type=storage.KeyType.ACCOUNT - ), - ), - ) - - def __init__( - self, - resource_name: str, - *, - account_name: Input[str], - allowed_ip_addresses: Input[Sequence[str]] | None, - allowed_service_tag: AzureServiceTag | None, - location: Input[str], - resource_group_name: Input[str], - subnet_id: Input[str], - opts: ResourceOptions, - tags: Input[Mapping[str, Input[str]]], - ): - if allowed_service_tag == AzureServiceTag.INTERNET: - default_action = storage.DefaultAction.ALLOW - ip_rules = [] - else: - default_action = storage.DefaultAction.DENY - ip_rules = Output.from_input(allowed_ip_addresses).apply( - lambda ip_ranges: [ - storage.IPRuleArgs( - action=storage.Action.ALLOW, - i_p_address_or_range=str(ip_address), - ) - for ip_range in sorted(ip_ranges) - for ip_address in AzureIPv4Range.from_cidr(ip_range).all_ips() - ] - ) - - self.resource_group_name_ = Output.from_input(resource_group_name) - super().__init__( - resource_name, - account_name=account_name, - allow_blob_public_access=False, - enable_https_traffic_only=True, - enable_nfs_v3=True, - encryption=self.encryption_args, - is_hns_enabled=True, - kind=storage.Kind.BLOCK_BLOB_STORAGE, - location=location, - minimum_tls_version=storage.MinimumTlsVersion.TLS1_2, - network_rule_set=storage.NetworkRuleSetArgs( - bypass=storage.Bypass.AZURE_SERVICES, - default_action=default_action, - ip_rules=ip_rules, - virtual_network_rules=[ - storage.VirtualNetworkRuleArgs( - virtual_network_resource_id=subnet_id, - ) - ], - ), - public_network_access=storage.PublicNetworkAccess.ENABLED, - resource_group_name=resource_group_name, - sku=storage.SkuArgs(name=storage.SkuName.PREMIUM_ZRS), - opts=opts, - tags=tags, - ) diff --git a/data_safe_haven/infrastructure/programs/declarative_sre.py b/data_safe_haven/infrastructure/programs/declarative_sre.py index 78467f201b..2228078c36 100644 --- a/data_safe_haven/infrastructure/programs/declarative_sre.py +++ 
b/data_safe_haven/infrastructure/programs/declarative_sre.py @@ -163,12 +163,27 @@ def __call__(self) -> None: ), ) + # Deploy monitoring + monitoring = SREMonitoringComponent( + "sre_monitoring", + self.stack_name, + SREMonitoringProps( + dns_private_zones=dns.private_zones, + location=self.config.azure.location, + resource_group_name=resource_group.name, + subnet=networking.subnet_monitoring, + timezone=self.config.sre.timezone, + ), + tags=self.tags, + ) + # Deploy SRE firewall SREFirewallComponent( "sre_firewall", self.stack_name, SREFirewallProps( location=self.config.azure.location, + log_analytics_workspace=monitoring.log_analytics, resource_group_name=resource_group.name, route_table_name=networking.route_table_name, subnet_apt_proxy_server=networking.subnet_apt_proxy_server, @@ -196,6 +211,7 @@ def __call__(self) -> None: dns_record=networking.shm_ns_record, dns_server_admin_password=dns.password_admin, location=self.config.azure.location, + log_analytics_workspace=monitoring.log_analytics, resource_group=resource_group, sre_fqdn=networking.sre_fqdn, storage_quota_gb_home=self.config.sre.storage_quota_gb.home, @@ -209,20 +225,6 @@ def __call__(self) -> None: tags=self.tags, ) - # Deploy monitoring - monitoring = SREMonitoringComponent( - "sre_monitoring", - self.stack_name, - SREMonitoringProps( - dns_private_zones=dns.private_zones, - location=self.config.azure.location, - resource_group_name=resource_group.name, - subnet=networking.subnet_monitoring, - timezone=self.config.sre.timezone, - ), - tags=self.tags, - ) - # Deploy the apt proxy server apt_proxy_server = SREAptProxyServerComponent( "sre_apt_proxy_server", @@ -376,6 +378,7 @@ def __call__(self) -> None: ldap_user_filter=ldap_user_filter, ldap_user_search_base=ldap_user_search_base, location=self.config.azure.location, + log_analytics_workspace=monitoring.log_analytics, resource_group=resource_group, software_repository_hostname=user_services.software_repositories.hostname, subnet_desired_state=networking.subnet_desired_state, diff --git a/data_safe_haven/infrastructure/programs/sre/data.py b/data_safe_haven/infrastructure/programs/sre/data.py index 711b76139f..825861c122 100644 --- a/data_safe_haven/infrastructure/programs/sre/data.py +++ b/data_safe_haven/infrastructure/programs/sre/data.py @@ -7,6 +7,7 @@ from pulumi import ComponentResource, Input, Output, ResourceOptions from pulumi_azure_native import ( authorization, + insights, keyvault, managedidentity, network, @@ -31,9 +32,11 @@ from data_safe_haven.infrastructure.components import ( NFSV3BlobContainerComponent, NFSV3BlobContainerProps, + NFSV3StorageAccountComponent, + NFSV3StorageAccountProps, SSLCertificate, SSLCertificateProps, - WrappedNFSV3StorageAccount, + WrappedLogAnalyticsWorkspace, ) from data_safe_haven.types import AzureDnsZoneNames, AzureServiceTag @@ -51,6 +54,7 @@ def __init__( dns_record: Input[network.RecordSet], dns_server_admin_password: Input[pulumi_random.RandomPassword], location: Input[str], + log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace], resource_group: Input[resources.ResourceGroup], sre_fqdn: Input[str], storage_quota_gb_home: Input[int], @@ -69,6 +73,7 @@ def __init__( self.dns_record = dns_record self.password_dns_server_admin = dns_server_admin_password self.location = location + self.log_analytics_workspace = log_analytics_workspace self.resource_group_id = Output.from_input(resource_group).apply(get_id_from_rg) self.resource_group_name = Output.from_input(resource_group).apply( get_name_from_rg @@ -421,6 +426,45 
@@ def __init__( resource_group_name=kwargs["resource_group_name"], ) ) + # Add diagnostic setting for files + insights.DiagnosticSetting( + f"{storage_account_data_configuration._name}_diagnostic_setting", + name=f"{storage_account_data_configuration._name}_diagnostic_setting", + log_analytics_destination_type="Dedicated", + logs=[ + { + "category_group": "allLogs", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + { + "category_group": "audit", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + ], + metrics=[ + { + "category": "Transaction", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + } + ], + # This is the URI of the automatically created fileService resource + resource_uri=Output.concat( + storage_account_data_configuration.id, "/fileServices/default" + ), + workspace_id=props.log_analytics_workspace.id, + ) # Set up a private endpoint for the configuration data storage account storage_account_data_configuration_private_endpoint = network.PrivateEndpoint( f"{storage_account_data_configuration._name}_private_endpoint", @@ -467,20 +511,26 @@ def __init__( # Deploy sensitive data blob storage account # - This holds the /mnt/input and /mnt/output containers that are mounted by workspaces # - Azure blobs have worse NFS support but can be accessed with Azure Storage Explorer - storage_account_data_private_sensitive = WrappedNFSV3StorageAccount( + component_data_private_sensitive = NFSV3StorageAccountComponent( f"{self._name}_storage_account_data_private_sensitive", - # Storage account names have a maximum of 24 characters - account_name=alphanumeric( - f"{''.join(truncate_tokens(stack_name.split('-'), 11))}sensitivedata{sha256hash(self._name)}" - )[:24], - allowed_ip_addresses=data_private_sensitive_ip_addresses, - allowed_service_tag=data_private_sensitive_service_tag, - location=props.location, - subnet_id=props.subnet_data_private_id, - resource_group_name=props.resource_group_name, + NFSV3StorageAccountProps( + # Storage account names have a maximum of 24 characters + account_name=alphanumeric( + f"{''.join(truncate_tokens(stack_name.split('-'), 11))}sensitivedata{sha256hash(self._name)}" + )[:24], + allowed_ip_addresses=data_private_sensitive_ip_addresses, + allowed_service_tag=data_private_sensitive_service_tag, + location=props.location, + log_analytics_workspace=props.log_analytics_workspace, + subnet_id=props.subnet_data_private_id, + resource_group_name=props.resource_group_name, + ), opts=child_opts, tags=child_tags, ) + storage_account_data_private_sensitive = ( + component_data_private_sensitive.storage_account + ) # Deploy storage containers NFSV3BlobContainerComponent( f"{self._name}_blob_egress", @@ -615,6 +665,45 @@ def __init__( opts=child_opts, tags=child_tags, ) + # Add diagnostic setting for files + insights.DiagnosticSetting( + f"{storage_account_data_private_user._name}_diagnostic_setting", + name=f"{storage_account_data_private_user._name}_diagnostic_setting", + log_analytics_destination_type="Dedicated", + logs=[ + { + "category_group": "allLogs", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + { + "category_group": "audit", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + ], + metrics=[ + { + "category": "Transaction", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + } + ], + # This is the URI of the automatically created fileService resource 
+ resource_uri=Output.concat( + storage_account_data_private_user.id, "/fileServices/default" + ), + workspace_id=props.log_analytics_workspace.id, + ) storage.FileShare( f"{storage_account_data_private_user._name}_files_home", access_tier=storage.ShareAccessTier.PREMIUM, diff --git a/data_safe_haven/infrastructure/programs/sre/desired_state.py b/data_safe_haven/infrastructure/programs/sre/desired_state.py index c4392f5210..20f4e357f1 100644 --- a/data_safe_haven/infrastructure/programs/sre/desired_state.py +++ b/data_safe_haven/infrastructure/programs/sre/desired_state.py @@ -31,7 +31,9 @@ from data_safe_haven.infrastructure.components import ( NFSV3BlobContainerComponent, NFSV3BlobContainerProps, - WrappedNFSV3StorageAccount, + NFSV3StorageAccountComponent, + NFSV3StorageAccountProps, + WrappedLogAnalyticsWorkspace, ) from data_safe_haven.resources import resources_path from data_safe_haven.types import AzureDnsZoneNames @@ -55,6 +57,7 @@ def __init__( ldap_user_filter: Input[str], ldap_user_search_base: Input[str], location: Input[str], + log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace], resource_group: Input[resources.ResourceGroup], software_repository_hostname: Input[str], subscription_name: Input[str], @@ -73,6 +76,7 @@ def __init__( self.ldap_user_filter = ldap_user_filter self.ldap_user_search_base = ldap_user_search_base self.location = location + self.log_analytics_workspace = log_analytics_workspace self.resource_group_id = Output.from_input(resource_group).apply(get_id_from_rg) self.resource_group_name = Output.from_input(resource_group).apply( get_name_from_rg @@ -102,19 +106,23 @@ def __init__( # Deploy desired state storage account # - This holds the /var/local/ansible container that is mounted by workspaces # - Azure blobs have worse NFS support but can be accessed with Azure Storage Explorer - storage_account = WrappedNFSV3StorageAccount( + storage_component = NFSV3StorageAccountComponent( f"{self._name}_storage_account", - account_name=alphanumeric( - f"{''.join(truncate_tokens(stack_name.split('-'), 11))}desiredstate{sha256hash(self._name)}" - )[:24], - allowed_ip_addresses=props.admin_ip_addresses, - allowed_service_tag=None, - location=props.location, - resource_group_name=props.resource_group_name, - subnet_id=props.subnet_desired_state_id, + NFSV3StorageAccountProps( + account_name=alphanumeric( + f"{''.join(truncate_tokens(stack_name.split('-'), 11))}desiredstate{sha256hash(self._name)}" + )[:24], + allowed_ip_addresses=props.admin_ip_addresses, + allowed_service_tag=None, + location=props.location, + log_analytics_workspace=props.log_analytics_workspace, + resource_group_name=props.resource_group_name, + subnet_id=props.subnet_desired_state_id, + ), opts=child_opts, tags=child_tags, ) + storage_account = storage_component.storage_account # Deploy desired state share container_desired_state = NFSV3BlobContainerComponent( f"{self._name}_blob_desired_state", diff --git a/data_safe_haven/infrastructure/programs/sre/firewall.py b/data_safe_haven/infrastructure/programs/sre/firewall.py index 97f7a885b7..ed831e826a 100644 --- a/data_safe_haven/infrastructure/programs/sre/firewall.py +++ b/data_safe_haven/infrastructure/programs/sre/firewall.py @@ -3,12 +3,13 @@ from collections.abc import Mapping from pulumi import ComponentResource, Input, Output, ResourceOptions -from pulumi_azure_native import network +from pulumi_azure_native import insights, network from data_safe_haven.infrastructure.common import ( get_address_prefixes_from_subnet, get_id_from_subnet, 
) +from data_safe_haven.infrastructure.components import WrappedLogAnalyticsWorkspace from data_safe_haven.types import ( FirewallPriorities, ForbiddenDomains, @@ -23,6 +24,7 @@ class SREFirewallProps: def __init__( self, location: Input[str], + log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace], resource_group_name: Input[str], route_table_name: Input[str], subnet_apt_proxy_server: Input[network.GetSubnetResult], @@ -35,6 +37,7 @@ def __init__( subnet_workspaces: Input[network.GetSubnetResult], ) -> None: self.location = location + self.log_analytics_workspace = log_analytics_workspace self.resource_group_name = resource_group_name self.route_table_name = route_table_name self.subnet_apt_proxy_server_prefixes = Output.from_input( @@ -331,6 +334,36 @@ def __init__( tags=child_tags, ) + # Add diagnostic settings for firewall + # This links the firewall to the log analytics workspace + insights.DiagnosticSetting( + f"{self._name}_firewall_diagnostic_settings", + name="firewall_diagnostic_settings", + log_analytics_destination_type="Dedicated", + logs=[ + { + "category_group": "allLogs", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + ], + metrics=[ + { + "category": "AllMetrics", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + } + ], + resource_uri=firewall.id, + workspace_id=props.log_analytics_workspace.id, + ) + # Retrieve the private IP address for the firewall private_ip_address = firewall.ip_configurations.apply( lambda cfgs: "" if not cfgs else cfgs[0].private_ip_address diff --git a/docs/source/management/logs.md b/docs/source/management/logs.md index f9a9948453..10b9bfb0e5 100644 --- a/docs/source/management/logs.md +++ b/docs/source/management/logs.md @@ -7,10 +7,71 @@ Some of these logs are ingested into a central location, an Azure [Log Analytics Each SRE has its own Log Analytics Workspace. You can view the workspaces by going to the Azure portal and navigating to [Log Analytics Workspaces](https://portal.azure.com/#browse/Microsoft.OperationalInsights%2Fworkspaces). -Select which log workspace you want to view by clicking on the workspace named `shm--sre--log`. +Select which Log Analytics Workspace you want to view by clicking on the workspace named `shm--sre--log`. The logs can be filtered using [Kusto Query Language (KQL)](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/log-query-overview). +## Storage logs + +Depending on how different parts of Data Safe Haven storage are provisioned, logs may differ. + +### Sensitive data logs + +The sensitive data containers are the [ingress and egress containers](./data.md). +Logs from these containers are ingested into the [SRE's log analytics workspace](#log-workspace). +There are two tables, + +`StorageBlobLogs` +: Events occurring on the blob containers. +: For example data being uploaded, extracted or read. + +`AzureMetrics` +: Various metrics on blob container utilisation and performance. +: This table is not reserved for the sensitive data containers and other resources may log to it. + +### Desired state data logs + +The desired state container holds the data necessary to configure virtual machines in an SRE. +Logs from the desired state container are ingested into the [SRE's log analytics workspace](#log-workspace). +There are two tables, + +`StorageBlobLogs` +: Events occurring on the blob containers. +: For example data being uploaded, extracted or read. 
+ +`AzureMetrics` +: Various metrics on blob container utilisation and performance. +: This table is not reserved for the desired state data container and other resources may log to it. + +### User data logs + +The user data file share holds the {ref}`researchers'` [home directories](https://refspecs.linuxfoundation.org/FHS_3.0/fhs/ch03s08.html), where they will store their personal data and configuration. +Logs from the share are ingested into the [SRE's log analytics workspace](#log-workspace). +There are two tables, + +`StorageFileLogs` +: NFS events occurring on the file share. +: For example data being written or directories being accessed. + +`AzureMetrics` +: Various metrics on file share utilisation and performance. +: This table is not reserved for the user data share and other resources may log to it. + +### Configuration data logs + +There are multiple configuration data file shares. +Each contains the configuration and state data for the Data Safe Haven [services deployed as containers](#container-logs). +Logs from the share are ingested into the [SRE's log analytics workspace](#log-workspace). +There are two tables, + +`StorageFileLogs` +: SMB events occurring on the file share. +: For example data being written or directories being accessed. + +`AzureMetrics` +: Various metrics on file share utilisation and performance. +: This table is not reserved for the configuration data shares and other resources may log to it. + ## Container logs Some of the Data Safe Haven infrastructure is provisioned as containers. @@ -20,8 +81,8 @@ These include, - package proxy - Gitea and Hedgedoc -Logs from all containers are ingested into the [SREs log workspace](#log-workspace). -There are two logs +Logs from all containers are ingested into the [SRE's log analytics workspace](#log-workspace). +There are two tables, `ContainerEvents_CL` : Event logs for the container instance resources such as starting, stopping, crashes and pulling images. @@ -29,3 +90,39 @@ There are two logs `ContainerInstanceLog_CL` : Container process logs. : This is where you can view the output of the containerised applications and will be useful for debugging problems. + +## Workspace logs + +Logs from all user workspaces are ingested into the [SRE's log analytics workspace](#log-workspace) using the [Azure Monitor Agent](https://learn.microsoft.com/en-us/azure/azure-monitor/agents/azure-monitor-agent-overview). + +There are three tables, + +`Perf` +: Usage statistics for individual workspaces, such as percent memory used and percent disk space used. + +`Syslog` +: [syslog](https://www.paessler.com/it-explained/syslog) events from workspaces. +: Syslog is the _de facto_ standard protocol for logging on Linux and most applications will log to it. +: These logs will be useful for debugging problems with the workspace or workspace software. + +`Heartbeat` +: Verification that the Azure Monitor Agent is present on the workspaces and is able to connect to the [log analytics workspace](#log-workspace). + +## Firewall logs + +The firewall plays a critical role in the security of a Data Safe Haven. +It filters all outbound traffic through a set of FQDN rules so that each component may only reach necessary and allowed domains. + +Logs from the firewall are ingested into the [SRE's log analytics workspace](#log-workspace). +There are three tables, + +`AZFWApplicationRule` +: Logs from the firewall's FQDN filters. +: Shows requests to the outside of the Data Safe Haven and why they have been approved or rejected.
+ +`AZFWDnsQuery` +: DNS requests handled by the firewall. + +`AzureMetrics` +: Various metrics on firewall utilisation and performance. +: This table is not reserved for the firewall and other resources may log to it. diff --git a/pyproject.toml b/pyproject.toml index cd01450d9d..8042b11eb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,20 +42,20 @@ dependencies = [ "azure-storage-file-datalake==12.18.0", "azure-storage-file-share==12.20.0", "chevron==0.14.0", - "cryptography==43.0.3", + "cryptography==44.0.0", "fqdn==1.5.1", "psycopg[binary]==3.1.19", # needed for installation on older MacOS versions - "pulumi-azure-native==2.73.1", + "pulumi-azure-native==2.74.0", "pulumi-azuread==6.0.1", "pulumi-random==4.16.7", - "pulumi==3.141.0", - "pydantic==2.10.1", - "pyjwt[crypto]==2.10.0", + "pulumi==3.142.0", + "pydantic==2.10.2", + "pyjwt[crypto]==2.10.1", "pytz==2024.2", "pyyaml==6.0.2", "rich==13.9.4", "simple-acme-dns==3.2.0", - "typer==0.13.1", + "typer==0.14.0", "websocket-client==1.8.0", ] @@ -78,9 +78,9 @@ lint = [ "ansible==11.0.0", "black==24.10.0", "mypy==1.13.0", - "pandas-stubs==2.2.3.241009", - "pydantic==2.10.1", - "ruff==0.8.0", + "pandas-stubs==2.2.3.241126", + "pydantic==2.10.2", + "ruff==0.8.1", "types-appdirs==1.4.3.5", "types-chevron==0.14.2.20240310", "types-pytz==2024.2.0.20241003", @@ -91,7 +91,7 @@ test = [ "coverage==7.6.8", "freezegun==1.5.1", "pytest-mock==3.14.0", - "pytest==8.3.3", + "pytest==8.3.4", "requests-mock==1.12.1", ]
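For reference, the Log Analytics tables documented above in `docs/source/management/logs.md` can be queried with KQL programmatically as well as through the portal. The snippet below is a minimal sketch, not part of this changeset: it assumes the `azure-identity` and `azure-monitor-query` packages (neither is a dependency declared in `pyproject.toml`), and the workspace GUID is a hypothetical placeholder for the customer ID of the SRE's Log Analytics workspace.

```python
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryStatus

# KQL query against the StorageBlobLogs table described above:
# blob uploads to the sensitive data containers over the last day.
QUERY = """
StorageBlobLogs
| where TimeGenerated > ago(1d)
| where OperationName == "PutBlob"
| project TimeGenerated, AccountName, Uri, CallerIpAddress
| order by TimeGenerated desc
"""

# Use whatever credential is available locally (Azure CLI login,
# environment variables, managed identity, ...).
client = LogsQueryClient(DefaultAzureCredential())

# Hypothetical placeholder GUID: substitute the customer ID of the
# SRE's Log Analytics workspace.
response = client.query_workspace(
    workspace_id="00000000-0000-0000-0000-000000000000",
    query=QUERY,
    timespan=timedelta(days=1),
)

if response.status == LogsQueryStatus.SUCCESS:
    for table in response.tables:
        for row in table.rows:
            print(row)
```

The same pattern applies to the other tables documented above (`StorageFileLogs`, `Syslog`, `AZFWApplicationRule`, ...); only the KQL text changes.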