From ca7483d87a3c468dc40b25d4ab5efa081a754edf Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Fri, 22 Nov 2024 09:20:16 +0000 Subject: [PATCH 01/11] Update release checklist Remove irrelevant names section, add release making steps --- .github/ISSUE_TEMPLATE/release_checklist.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md index a25064faa1..ade631052e 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.md +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -44,16 +44,16 @@ Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deplo - [ ] Update supported versions in `SECURITY.md` - [ ] Update pen test results in `VERSIONING.md` -## :computer: Release information +### Making the release -- **Version number:** _ -- **SHM ID:** _ -- **T2 SRE ID:** _ -- **T3 SRE ID:** _ +- [ ] Merge release branch into `latest` +- [ ] Push tag in the format `v0.0.1` to the merge commit into `latest` +- [ ] Ensure docs for the latest version are built and deployed on ReadTheDocs +- [ ] Push a build to PyPI +- [ ] Announce release on communications channels ## :deciduous_tree: Deployment problems - From 147939cacab07de554612a30c0c317593050e351 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Fri, 22 Nov 2024 09:21:53 +0000 Subject: [PATCH 02/11] Remove powershell from bug report template --- .github/ISSUE_TEMPLATE/deployment_bug_report.md | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/deployment_bug_report.md b/.github/ISSUE_TEMPLATE/deployment_bug_report.md index 6cf453cc13..fa569038c2 100644 --- a/.github/ISSUE_TEMPLATE/deployment_bug_report.md +++ b/.github/ISSUE_TEMPLATE/deployment_bug_report.md @@ -29,7 +29,6 @@ Before reporting a problem please check the following. Replace the empty checkbo List of packages From 8a9df7a166a21f41ec6aa66858375db29b9392b0 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Fri, 22 Nov 2024 09:22:52 +0000 Subject: [PATCH 03/11] Remove unused scripts --- .github/scripts/update_azure_data_studio.py | 19 ------ .github/scripts/update_dbeaver_drivers.py | 72 --------------------- .github/scripts/update_rstudio.py | 21 ------ 3 files changed, 112 deletions(-) delete mode 100644 .github/scripts/update_azure_data_studio.py delete mode 100644 .github/scripts/update_dbeaver_drivers.py delete mode 100644 .github/scripts/update_rstudio.py diff --git a/.github/scripts/update_azure_data_studio.py b/.github/scripts/update_azure_data_studio.py deleted file mode 100644 index 651e85fdfc..0000000000 --- a/.github/scripts/update_azure_data_studio.py +++ /dev/null @@ -1,19 +0,0 @@ -#! 
/usr/bin/env python3 -from lxml import html -import hashlib -import requests - -remote_page = requests.get("https://docs.microsoft.com/en-us/sql/azure-data-studio/download-azure-data-studio", allow_redirects=True) -root = html.fromstring(remote_page.content) -short_link = root.xpath("//a[contains(text(), '.deb')]/@href")[0] - -remote_content = requests.get(short_link, allow_redirects=True) -sha256 = hashlib.sha256(remote_content.content).hexdigest() -version = remote_content.url.split("-")[-1].replace(".deb", "") -remote = "/".join(remote_content.url.split("/")[:-1] + ["|DEBFILE|"]) - -with open("deployment/secure_research_desktop/packages/deb-azuredatastudio.version", "w") as f_out: - f_out.write(f"hash: {sha256}\n") - f_out.write(f"version: {version}\n") - f_out.write("debfile: azuredatastudio-linux-|VERSION|.deb\n") - f_out.write(f"remote: {remote}\n") diff --git a/.github/scripts/update_dbeaver_drivers.py b/.github/scripts/update_dbeaver_drivers.py deleted file mode 100644 index 696a501858..0000000000 --- a/.github/scripts/update_dbeaver_drivers.py +++ /dev/null @@ -1,72 +0,0 @@ -#! /usr/bin/env python3 -import json -from lxml import html -from natsort import natsorted -import requests - - -def get_latest_version(url, search_text): - """ - Get latest version number of a database driver from the Maven repository. - - Fetches the HTML page at the given URL, then converts it to an lxml tree. - Numeric strings are then extracted. - Note that mostly numeric strings for some drivers contain non-numeric text, - as different driver types exist for those drivers, even where the version number is the same. - The largest (latest) version number of the driver is then returned. - - Parameters - ---------- - url : str - The URL of the Maven repository containing the driver - search_text : str - Text to search for in the repository, to distinguish the driver from other files - - Returns - ------- - list - The latest available version number of the driver - """ - - remote_page = requests.get(url, allow_redirects=True) - root = html.fromstring(remote_page.content) - return natsorted([v for v in root.xpath("//a[contains(text(), '" + search_text + "')]/@href") if v != "../"])[-1].replace("/", "") - - -drivers = [ - { - 'name': "mssql_jdbc", - 'url': "https://repo1.maven.org/maven2/com/microsoft/sqlserver/mssql-jdbc/", - 'search_text': "jre8/" - }, - { - 'name': "pgjdbc", - 'url': "https://repo1.maven.org/maven2/org/postgresql/pgjdbc-versions/", - 'search_text': "/" - }, - { - 'name': "postgresql", - 'url': "https://repo1.maven.org/maven2/org/postgresql/postgresql/", - 'search_text': "/" - }, - { - 'name': "postgis_geometry", - 'url': "https://repo1.maven.org/maven2/net/postgis/postgis-geometry/", - 'search_text': "/" - }, - { - 'name': "postgis_jdbc", - 'url': "https://repo1.maven.org/maven2/net/postgis/postgis-jdbc/", - 'search_text': "/" - }, - { - 'name': "waffle_jna", - 'url': "https://repo1.maven.org/maven2/com/github/waffle/waffle-jna/", - 'search_text': "/" - } -] - -output = {driver['name']: get_latest_version(driver['url'], driver['search_text']) for driver in drivers} - -with open("deployment/secure_research_desktop/packages/dbeaver-driver-versions.json", "w") as f_out: - f_out.writelines(json.dumps(output, indent=4, sort_keys=True)) diff --git a/.github/scripts/update_rstudio.py b/.github/scripts/update_rstudio.py deleted file mode 100644 index ee36a35e66..0000000000 --- a/.github/scripts/update_rstudio.py +++ /dev/null @@ -1,21 +0,0 @@ -#! 
/usr/bin/env python3 -from lxml import html -import hashlib -import requests - -remote_page = requests.get("https://www.rstudio.com/products/rstudio/download/", allow_redirects=True) -root = html.fromstring(remote_page.content) -short_links = [link for link in root.xpath("//a[contains(text(), '.deb')]/@href") if "debian" not in link] - -for ubuntu_version in ["focal", "jammy"]: - short_link = [link for link in short_links if ubuntu_version in link][0] - remote_content = requests.get(short_link, allow_redirects=True) - sha256 = hashlib.sha256(remote_content.content).hexdigest() - version = "-".join(remote_content.url.split("/")[-1].split("-")[1:-1]) - remote = "/".join(remote_content.url.split("/")[:-1] + ["|DEBFILE|"]) - - with open(f"deployment/secure_research_desktop/packages/deb-rstudio-{ubuntu_version}.version", "w") as f_out: - f_out.write(f"hash: {sha256}\n") - f_out.write(f"version: {version}\n") - f_out.write("debfile: rstudio-|VERSION|-amd64.deb\n") - f_out.write(f"remote: {remote}\n") From d4aed3047975b0cb097528274948ea805d7b8cf9 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Fri, 22 Nov 2024 09:23:04 +0000 Subject: [PATCH 04/11] Remove security checklist template --- .github/security_checklist_template.md | 167 ------------------------- 1 file changed, 167 deletions(-) delete mode 100644 .github/security_checklist_template.md diff --git a/.github/security_checklist_template.md b/.github/security_checklist_template.md deleted file mode 100644 index b963331eef..0000000000 --- a/.github/security_checklist_template.md +++ /dev/null @@ -1,167 +0,0 @@ -# Security checklist -Running on SHM/SREs deployed using commit XXXXXXX - -## Summary -+ :white_check_mark: N tests passed -- :partly_sunny: N tests partially passed (see below for more details) -- :fast_forward: N tests skipped (see below for more details) -- :x: N tests failed (see below for more details) - -## Details -Some security checks were skipped since: -- No managed device was available -- No access to a physical space with its own dedicated network was possible - -### Multifactor Authentication and Password strength -+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the SRE standard user cannot access the apps - +
:camera: Verify before adding to group: Microsoft Remote Desktop: Login works but apps cannot be viewed
-  + :camera: Verify before adding to group: Guacamole: User is prompted to setup MFA
-
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that adding the **SRE standard user** to the SRE group on the domain controller does not give them access
-  + :camera: Verify after adding to group: Microsoft Remote Desktop: Login works and apps can be viewed
-  + :camera: Verify after adding to group: Microsoft Remote Desktop: attempt to login to DSVM Main (Desktop) fails
-  + :camera: Verify before adding to group: Guacamole: User is prompted to setup MFA
-
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** is able to successfully set up MFA
-  + :camera: Verify: successfully set up MFA
-
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** can authenticate with MFA
-  + :camera: Verify: Guacamole: respond to the MFA prompt
-  + :camera: Verify: Microsoft Remote Desktop: attempt to log in to DSVM Main (Desktop) and respond to the MFA prompt
-
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that the **SRE standard user** can access the DSVM desktop
-  + :camera: Verify: Microsoft Remote Desktop: connect to DSVM Main (Desktop)
-  + :camera: Verify: Guacamole: connect to Desktop: Ubuntu0
-
-### Isolated Network
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connect to the SHM DC and NPS if connected to the SHM VPN
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Fail to connect to the SHM DC and NPS if not connected to the SHM VPN
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Fail to connect to the internet from within a DSVM on the SRE network.
-  + :camera: Verify: Connection fails
-  + :camera: Verify: that you cannot access a website using curl
-  + :camera: Verify: that you cannot get the IP address for a website using nslookup
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check that users cannot connect between two SREs within the same SHM, even if they have access to both SREs
-  + :camera: Verify: SSH connection fails
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Network rules are set appropriately to block outgoing traffic
-  + :camera: Verify: access rules
-
-### User devices
-#### Tier 2:
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connection succeeds from a personal device with an allow-listed IP address
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check connection
-
-#### Tier 3:
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check user lacks root access
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Connection succeeds from a personal device with an allow-listed IP address
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No managed device available to check connection with an allow-listed IP address
-
-#### Tiers 2+:
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Network rules permit access only from allow-listed IP addresses
-  + :camera: Verify: access rules
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: All non-deployment NSGs have rules denying inbound connections from outside the Virtual Network
-
-### Physical security
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so connection from outside was not tested
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so connection from inside was not tested
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Check the network IP ranges corresponding to the research spaces and compare against the IPs accepted by the firewall.
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: No secure physical space available so confirmation of physical measures was not tested
-
-### Remote connections
-
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to connect as a user to the remote desktop server via SSH
-  + :camera: Verify: SSH connection by FQDN fails
-  + :camera: Verify: SSH connection by public IP address fails
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: The remote desktop server is the only SRE resource with a public IP address
-
-### Copy-and-paste
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to paste local text into a DSVM
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Unable to copy text from a DSVM
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Copy between VMs in an SRE succeeds
-
-### Data ingress
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** secure upload token successfully created with write-only permissions
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** token was sent using a secure, out-of-band communication channel (e.g. secure email)
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading a file from an allow-listed IP address succeeds
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** downloading a file from an allow-listed IP address fails
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading a file from an non-allowed IP address fails
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** connection during lifetime of short-duration token succeeds
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** connection after lifetime of short-duration token fails
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading different file types succeeds
-
-### Storage volumes and egress
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to the `/output` volume
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can only read from the `/data` volume
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to their directory in `/home`
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** can read and write to the `/shared` volume
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** can see the files ready for egress
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** can download egress-ready files
-
-### Software Ingress
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** expected software tools are installed
-  + :camera: Verify: DBeaver, RStudio, PyCharm and Visual Studio Code available
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** secure upload token successfully created with write-only permissions
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **Data Provider:** uploading is possible only during the token lifetime
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **System administrator:** uploaded files are readable and can be installed on the DSVM
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: **SRE standard user** uploaded files are readable but cannot be installed on the DSVM
-
-### Package mirrors
-
-#### Tier 2:
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Can install any packages
-  + :camera: Verify: botocore can be installed
-
-#### Tier 3:
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Can install only allow-listed packages
-  + :camera: Verify: aero-calc can be installed; botocore cannot be installed
-
-### Azure firewalls
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Admin has limited access to the internet
-  + :camera: Verify: SHM DC cannot connect to google
-+ :white_check_mark:/:partly_sunny:/:fast_forward:/:x: Admin can download Windows updates
-  + :camera: Verify: Windows updates can be downloaded
From 408b2d02fbc1a3c23c45295b0b479e0e2b449a45 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Mon, 25 Nov 2024 15:23:40 +0000 Subject: [PATCH 05/11] Add diagnostic settings for firewall --- .../programs/declarative_sre.py | 29 +++++++-------- .../infrastructure/programs/sre/firewall.py | 35 ++++++++++++++++++- 2 files changed, 49 insertions(+), 15 deletions(-) diff --git a/data_safe_haven/infrastructure/programs/declarative_sre.py b/data_safe_haven/infrastructure/programs/declarative_sre.py index 78467f201b..f69fc9cd45 100644 --- a/data_safe_haven/infrastructure/programs/declarative_sre.py +++ b/data_safe_haven/infrastructure/programs/declarative_sre.py @@ -163,12 +163,27 @@ def __call__(self) -> None: ), ) + # Deploy monitoring + monitoring = SREMonitoringComponent( + "sre_monitoring", + self.stack_name, + SREMonitoringProps( + dns_private_zones=dns.private_zones, + location=self.config.azure.location, + resource_group_name=resource_group.name, + subnet=networking.subnet_monitoring, + timezone=self.config.sre.timezone, + ), + tags=self.tags, + ) + # Deploy SRE firewall SREFirewallComponent( "sre_firewall", self.stack_name, SREFirewallProps( location=self.config.azure.location, + log_analytics_workspace=monitoring.log_analytics, resource_group_name=resource_group.name, route_table_name=networking.route_table_name, subnet_apt_proxy_server=networking.subnet_apt_proxy_server, @@ -209,20 +224,6 @@ def __call__(self) -> None: tags=self.tags, ) - # Deploy monitoring - monitoring = SREMonitoringComponent( - "sre_monitoring", - self.stack_name, - SREMonitoringProps( - dns_private_zones=dns.private_zones, - location=self.config.azure.location, - resource_group_name=resource_group.name, - subnet=networking.subnet_monitoring, - timezone=self.config.sre.timezone, - ), - tags=self.tags, - ) - # Deploy the apt proxy server apt_proxy_server = SREAptProxyServerComponent( "sre_apt_proxy_server", diff --git a/data_safe_haven/infrastructure/programs/sre/firewall.py b/data_safe_haven/infrastructure/programs/sre/firewall.py index 97f7a885b7..1f46db980b 100644 --- a/data_safe_haven/infrastructure/programs/sre/firewall.py +++ b/data_safe_haven/infrastructure/programs/sre/firewall.py @@ -3,12 +3,13 @@ from collections.abc import Mapping from pulumi import ComponentResource, Input, Output, ResourceOptions -from pulumi_azure_native import network +from pulumi_azure_native import insights, network from data_safe_haven.infrastructure.common import ( get_address_prefixes_from_subnet, get_id_from_subnet, ) +from data_safe_haven.infrastructure.components import WrappedLogAnalyticsWorkspace from data_safe_haven.types import ( FirewallPriorities, ForbiddenDomains, @@ -23,6 +24,7 @@ class SREFirewallProps: def __init__( self, location: Input[str], + log_analytics_workspace: Input[WrappedLogAnalyticsWorkspace], resource_group_name: Input[str], route_table_name: Input[str], subnet_apt_proxy_server: Input[network.GetSubnetResult], @@ -35,6 +37,7 @@ def __init__( subnet_workspaces: Input[network.GetSubnetResult], ) -> None: self.location = location + self.log_analytics_workspace = log_analytics_workspace self.resource_group_name = resource_group_name self.route_table_name = route_table_name self.subnet_apt_proxy_server_prefixes = Output.from_input( @@ -331,6 +334,36 @@ def __init__( tags=child_tags, ) + # Add diagnostic settings for firewall + # This links the firewall to the log analytics workspace + insights.DiagnosticSettings( + f"{self._name}_firewall_diagnostic_settings", + name="firewall_diagnostic_settings", + 
log_analytics_destination_type="Dedicated", + logs=[ + { + "category_group": "allLogs", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + }, + ], + metrics=[ + { + "category": "AllMetrics", + "enabled": True, + "retention_policy": { + "days": 0, + "enabled": False, + }, + } + ], + resource_uri=firewall.id, + workspace_id=props.log_analytics_workspace.workspace_id, + ) + # Retrieve the private IP address for the firewall private_ip_address = firewall.ip_configurations.apply( lambda cfgs: "" if not cfgs else cfgs[0].private_ip_address From a09802a36b77c29f8198dc8ca6a5dd08cea0ca77 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Mon, 25 Nov 2024 16:33:29 +0000 Subject: [PATCH 06/11] Correct component name --- data_safe_haven/infrastructure/programs/sre/firewall.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data_safe_haven/infrastructure/programs/sre/firewall.py b/data_safe_haven/infrastructure/programs/sre/firewall.py index 1f46db980b..4e45aff208 100644 --- a/data_safe_haven/infrastructure/programs/sre/firewall.py +++ b/data_safe_haven/infrastructure/programs/sre/firewall.py @@ -336,7 +336,7 @@ def __init__( # Add diagnostic settings for firewall # This links the firewall to the log analytics workspace - insights.DiagnosticSettings( + insights.DiagnosticSetting( f"{self._name}_firewall_diagnostic_settings", name="firewall_diagnostic_settings", log_analytics_destination_type="Dedicated", From 134404b068255d281d6567c349267361b15d6104 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Tue, 26 Nov 2024 09:41:00 +0000 Subject: [PATCH 07/11] Correct list indent --- .github/ISSUE_TEMPLATE/release_checklist.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md index ade631052e..8686c95238 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.md +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -34,10 +34,10 @@ Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deplo ### For major releases only - [ ] Confirm that a third party has carried out a full penetration test evaluating: - 1. external attack surface - 1. ability to exfiltrate data from the system - 1. ability to transfer data between SREs - 1. ability to escalate privileges on the SRD. + 1. external attack surface + 1. ability to exfiltrate data from the system + 1. ability to transfer data between SREs + 1. ability to escalate privileges on the SRD. ### Update documentation From bb0eaf2c391781bb0219ad76b427a235162f98f8 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Tue, 26 Nov 2024 09:48:49 +0000 Subject: [PATCH 08/11] Remove bare URL --- .github/ISSUE_TEMPLATE/release_checklist.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md index 8686c95238..42acf2fe51 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.md +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -14,9 +14,9 @@ Before reporting a problem please check the following. Replace the empty checkbo Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deployment) section of our documentation when completing these steps. -- [ ] Consult the `data-safe-haven/VERSIONING.md` guide and determine the version number of the new release. Record it in the title of this issue. 
+- [ ] Consult the `data-safe-haven/VERSIONING.md` guide and determine the version number of the new release. Record it in the title of this issue - [ ] Create a release branch called e.g. `release-v0.0.1` -- [ ] Draft a changelog for the release similar to our previous releases, see https://github.com/alan-turing-institute/data-safe-haven/releases +- [ ] Draft a changelog for the release similar to our [previous releases](https://github.com/alan-turing-institute/data-safe-haven/releases) ### For patch releases only From 56081d87bff505215239d3f19f8f46a0483ea28c Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Tue, 26 Nov 2024 09:49:04 +0000 Subject: [PATCH 09/11] Clarify tag and release creation --- .github/ISSUE_TEMPLATE/release_checklist.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md index 42acf2fe51..575f5c9c53 100644 --- a/.github/ISSUE_TEMPLATE/release_checklist.md +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -47,7 +47,8 @@ Refer to the [Deployment](https://data-safe-haven.readthedocs.io/en/latest/deplo ### Making the release - [ ] Merge release branch into `latest` -- [ ] Push tag in the format `v0.0.1` to the merge commit into `latest` +- [ ] Create a tag of the form `v0.0.1` pointing to the most recent commit on `latest` (the merge that you just made) +- [ ] Publish your draft GitHub release using this tag - [ ] Ensure docs for the latest version are built and deployed on ReadTheDocs - [ ] Push a build to PyPI - [ ] Announce release on communications channels From d020b3e7ed764a42167e80adba37b1d854119a70 Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Tue, 26 Nov 2024 09:58:48 +0000 Subject: [PATCH 10/11] Use full resource URI --- data_safe_haven/infrastructure/programs/sre/firewall.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data_safe_haven/infrastructure/programs/sre/firewall.py b/data_safe_haven/infrastructure/programs/sre/firewall.py index 4e45aff208..ed831e826a 100644 --- a/data_safe_haven/infrastructure/programs/sre/firewall.py +++ b/data_safe_haven/infrastructure/programs/sre/firewall.py @@ -361,7 +361,7 @@ def __init__( } ], resource_uri=firewall.id, - workspace_id=props.log_analytics_workspace.workspace_id, + workspace_id=props.log_analytics_workspace.id, ) # Retrieve the private IP address for the firewall From fa08bc3a5d324d231b7e7b759ed53117a220410f Mon Sep 17 00:00:00 2001 From: Jim Madge Date: Tue, 26 Nov 2024 10:58:31 +0000 Subject: [PATCH 11/11] Add documentation for firewall logs --- docs/source/management/logs.md | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/source/management/logs.md b/docs/source/management/logs.md index f9a9948453..5d52fe5d77 100644 --- a/docs/source/management/logs.md +++ b/docs/source/management/logs.md @@ -21,7 +21,7 @@ These include, - Gitea and Hedgedoc Logs from all containers are ingested into the [SREs log workspace](#log-workspace). -There are two logs +There are two tables, `ContainerEvents_CL` : Event logs for the container instance resources such as starting, stopping, crashes and pulling images. @@ -29,3 +29,22 @@ There are two logs `ContainerInstanceLog_CL` : Container process logs. : This is where you can view the output of the containerised applications and will be useful for debugging problems. + +## Firewall logs + +The firewall plays a critical role in the security of a Data Safe Haven. 
It filters all outbound traffic through a set of FQDN rules so that each component may only reach necessary and allowed domains.
+
+Logs from the firewall are ingested into the [SREs log workspace](#log-workspace).
+There are multiple tables,
+
+`AZFWApplicationRule`
+: Logs from the firewall's FQDN filters.
+: Shows requests to destinations outside the Data Safe Haven and why they were approved or rejected.
+
+`AZFWDnsQuery`
+: DNS requests handled by the firewall.
+
+`AzureMetrics`
+: Various metrics on firewall utilisation and performance.
+: This table is not reserved for the firewall and other resources may log to it.
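
A minimal sketch of how the firewall tables described above could be queried outside the Azure portal, assuming the `azure-identity` and `azure-monitor-query` packages; the workspace ID is a placeholder and the projected column names (`SourceIp`, `Fqdn`, `Action`, `Rule`) are assumptions about the `AZFWApplicationRule` schema, not something defined by these patches.

```python
# Sketch: list recent firewall application-rule decisions from an SRE's
# Log Analytics workspace. Assumes the caller has reader access to the
# workspace; the workspace ID below is a placeholder.
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient

client = LogsQueryClient(DefaultAzureCredential())

# KQL against the AZFWApplicationRule table; adjust the projected columns
# to match what your workspace actually exposes.
query = """
AZFWApplicationRule
| where TimeGenerated > ago(1h)
| project TimeGenerated, SourceIp, Fqdn, Action, Rule
| order by TimeGenerated desc
"""

response = client.query_workspace(
    workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder SRE workspace ID
    query=query,
    timespan=timedelta(hours=1),
)

for table in response.tables:
    for row in table.rows:
        print(row)
```

Swapping `AZFWApplicationRule` for `AZFWDnsQuery` in the query would instead show the DNS lookups handled by the firewall.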