diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 18bf05cb98..4a852d8499 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -6,7 +6,7 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ # Set package versions ARG AZURE_CLI_VERSION="2.42.0" -ARG PWSH_VERSION="7.2.6" +ARG PWSH_VERSION="7.3.6" # Set up TARGETARCH variable to use to pull the right binaries for the current architecture. ARG TARGETARCH @@ -46,7 +46,7 @@ RUN groupadd --gid $USER_GID $USERNAME \ USER $USERNAME # Install Sphinx dependencies -COPY ./docs/build/requirements.txt /build/requirements.txt +COPY ./docs/requirements.txt /build/requirements.txt RUN pip3 install -r /build/requirements.txt # Install/check needed powershell modules diff --git a/.github/ISSUE_TEMPLATE/software_package_request.md b/.github/ISSUE_TEMPLATE/software_package_request.md new file mode 100644 index 0000000000..2afdb8b800 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/software_package_request.md @@ -0,0 +1,42 @@ +--- +name: Software package request +about: Request that a new package is added to the Tier 3 allow list +title: "" +labels: "type: enhancement" +assignees: "" +--- + +## :white_check_mark: Checklist + + + +- [ ] I have searched open and closed issues for duplicates. +- [ ] This is a request for a new software package to be added to the Data Safe Haven +- [ ] The package is still missing in the [latest version](https://github.com/alan-turing-institute/data-safe-haven/releases). + +## :gift: Package details + + + +## :steam_locomotive: Why is this needed? + + diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index dd828294ba..1b2533d089 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -8,10 +8,10 @@ Replace the empty checkboxes [ ] below with checked ones [x] accordingly. --> - [ ] You have given your pull request a meaningful title (_e.g._ `Enable foobar integration` rather than `515 foobar`). -- [ ] You are targeting the **develop branch**. -- [ ] Your branch is up-to-date with the **develop branch** (you probably started your branch from `develop` but it may have changed since then). -- [ ] If-and-only-if your changes are not yet ready to merge, you have marked this pull request as a **draft** pull request and added '[WIP]' to the title. -- [ ] If-and-only-if you have changed any Powershell code, you have run the code formatter. You can do this with `./tests/AutoFormat_Powershell.ps1 -TargetPath `. +- [ ] You are targeting the appropriate branch. If you're not certain which one this is, it should be **`develop`**. +- [ ] Your branch is up-to-date with the **target branch** (it probably was when you started, but it may have changed since then). +- [ ] You have marked this pull request as a **draft** and added `'[WIP]'` to the title if needed (if you're not yet ready to merge). +- [ ] You have formatted your code using appropriate automated tools (for example `./tests/AutoFormat_Powershell.ps1 -TargetPath ` for Powershell). 
### :arrow_heading_up: Summary diff --git a/.github/scripts/update_rstudio.py b/.github/scripts/update_rstudio.py index b5b16da1c0..ee36a35e66 100644 --- a/.github/scripts/update_rstudio.py +++ b/.github/scripts/update_rstudio.py @@ -7,7 +7,7 @@ root = html.fromstring(remote_page.content) short_links = [link for link in root.xpath("//a[contains(text(), '.deb')]/@href") if "debian" not in link] -for ubuntu_version in ["bionic", "jammy"]: +for ubuntu_version in ["focal", "jammy"]: short_link = [link for link in short_links if ubuntu_version in link][0] remote_content = requests.get(short_link, allow_redirects=True) sha256 = hashlib.sha256(remote_content.content).hexdigest() diff --git a/.github/workflows/build_allow_lists.yaml b/.github/workflows/build_allow_lists.yaml index c7124bcf77..45bd2b714c 100644 --- a/.github/workflows/build_allow_lists.yaml +++ b/.github/workflows/build_allow_lists.yaml @@ -7,6 +7,7 @@ on: # yamllint disable-line rule:truthy branches: [develop] schedule: - cron: "0 0 */6 * *" # run every six days in order to keep the cache fresh + workflow_dispatch: # allow this workflow to be manually triggered # checkout needs 'contents:read' # pull request needs 'pull-requests:write' and 'contents:write' @@ -22,10 +23,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache dependencies - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: environment_configs/package_lists/dependency-cache.json key: dependencies-${{ github.sha }} # request a cache that does not yet exist @@ -49,12 +50,12 @@ jobs: - name: Get current date id: date - run: echo "::set-output name=date::$(date +'%Y-%m-%d')" + run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - name: Create pull request - if: ${{ (! env.TIMEOUT_REACHED) && (! env.ACT) }} + if: ${{ (env.TIMEOUT_REACHED == 0) && (! 
env.ACT) }} id: pull-request - uses: peter-evans/create-pull-request@dcd5fd746d53dd8de555c0f10bca6c35628be47a # This commit corresponds to tag 3.12.0 + uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # This commit corresponds to tag 4.2.4 with: commit-message: Update PyPI and CRAN allow lists committer: GitHub Actions diff --git a/.github/workflows/build_docs.yaml b/.github/workflows/build_docs.yaml deleted file mode 100644 index 7f6416933c..0000000000 --- a/.github/workflows/build_docs.yaml +++ /dev/null @@ -1,77 +0,0 @@ ---- -name: Build documentation - -# Run workflow on pushes to matching branches -on: # yamllint disable-line rule:truthy - push: - branches: [develop, latest] - -# checkout needs 'contents:read' -# deploy needs 'contents:write' -permissions: - contents: write - -jobs: - build_docs: - runs-on: ubuntu-latest - steps: - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: '3.10' - - name: Setup Ruby - uses: ruby/setup-ruby@v1 - with: - ruby-version: 3.1.3 - - name: Checkout code - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - name: Install required packages - run: | - sudo apt-get update - sudo apt-get install -y git - pip install -r docs/build/requirements.txt - - name: Create common directories - shell: bash - run: | - echo "OUTPUTDIR=$(mktemp -d)" >> $GITHUB_ENV - - name: Build all configured releases - shell: bash - run: | - python3 ./docs/build/build_all_supported_versions.py -o ${{env.OUTPUTDIR}} - - name: Install html-proofer - shell: bash - run: gem install html-proofer - - name: Run HTML proofer - shell: bash - run: | - # - allow links to "#" - # - rewrite the base URL - # - ignore links to: - # - the data-safe-haven repo (as it is private) - # - the data-classification-app repo (as it is private) - # - turing.ac.uk (as it requires a CAPTCHA) - htmlproofer ${{env.OUTPUTDIR}}/develop \ - --allow-hash-href \ - --enforce-https \ - --ignore-files "/_static/" \ - --ignore-status-codes "403,429,503" \ - --ignore-urls "/github.com\/alan-turing-institute\/data-classification-app/,/turing.ac.uk\//" \ - --swap-urls "^\/data-safe-haven:/.." - - - name: Deploy documentation to GitHub Pages - if: ${{ !env.ACT }} - uses: JamesIves/github-pages-deploy-action@ba1486788b0490a235422264426c45848eac35c6 # This commit corresponds to tag 4.4.1 - with: - branch: autodocs # The branch the action should deploy to. - folder: ${{env.OUTPUTDIR}} # The folder the action should deploy. - git-config-name: Deployment Bot # Name of the committer - git-config-email: deploy@github.com # Email of the committer - single-commit: true # Only keep one commit on the branch - - name: Finalise build if running locally - if: ${{ env.ACT }} - shell: bash - run: | - echo "Deliberately causing job to fail so that container is kept alive..." 
- exit 1 diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml new file mode 100644 index 0000000000..98942a59a5 --- /dev/null +++ b/.github/workflows/documentation.yaml @@ -0,0 +1,57 @@ +--- +name: Documentation + +# Run workflow on pushes to matching branches +on: # yamllint disable-line rule:truthy + push: + branches: [develop] + pull_request: + branches: [develop] + +jobs: + build: + name: Build + runs-on: ubuntu-latest + defaults: + run: + working-directory: ./docs/ + steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Install dependencies + run: | + pip install -r requirements.txt + - name: Sphinx build + run: | + make html SPHINXOPTS="-W" + + check_links: + name: Check links + runs-on: ubuntu-latest + needs: build + defaults: + run: + working-directory: ./docs/ + steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Install dependencies + run: | + pip install -r requirements.txt + - name: Sphinx build + # Set warnings to be treated as errors + run: | + make html SPHINXOPTS="-W" + - name: Link Checker + uses: lycheeverse/lychee-action@v1.7.0 + with: + args: --config='./.lychee.toml' --no-progress './docs/build/html/**/*.html' + fail: true # fail on broken links diff --git a/.github/workflows/lint_code.yaml b/.github/workflows/lint_code.yaml index e5646dc49e..6b5815f172 100644 --- a/.github/workflows/lint_code.yaml +++ b/.github/workflows/lint_code.yaml @@ -9,32 +9,6 @@ on: # yamllint disable-line rule:truthy branches: [develop] jobs: - lint_html: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - - name: Get tags - run: git fetch --tags origin - - name: Install requirements - shell: bash - run: | - sudo gem install html-proofer -v 4.4.1 - pip install -r docs/build/requirements.txt - - name: Generate HTML documention - shell: bash - run: | - DISABLE_VERSION_LINKS=1 make -C docs html - - name: Lint HTML - shell: bash - run: | - htmlproofer docs/_output \ - --allow-missing-href=true \ - --enforce-https=true \ - --ignore-files "/_static/" \ - --ignore-status-codes "502,503" \ - --ignore-urls "/github.com\/alan-turing-institute\/data-classification-app/,/www.turing.ac.uk/" - lint_json: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/test_code.yaml b/.github/workflows/test_code.yaml index 6ecedf9c05..a1fa737f67 100644 --- a/.github/workflows/test_code.yaml +++ b/.github/workflows/test_code.yaml @@ -31,7 +31,8 @@ jobs: - name: Install requirements shell: bash run: npm install -g markdown-link-check - - name: Test Markdown for dead links - shell: bash - run: find . 
-name "*.md" | xargs -n 1 markdown-link-check -p -c .markdownlinkcheck.json - continue-on-error: true + - name: Link Checker + uses: lycheeverse/lychee-action@v1.7.0 + with: + args: --config='./.lychee.toml' --no-progress --offline '**/*.md' --exclude-path './docs' + fail: true # fail on broken links diff --git a/.github/workflows/update_package_versions.yaml b/.github/workflows/update_package_versions.yaml index 3ef8166ff6..2a70bf9e64 100644 --- a/.github/workflows/update_package_versions.yaml +++ b/.github/workflows/update_package_versions.yaml @@ -7,6 +7,7 @@ on: # yamllint disable-line rule:truthy branches: [develop] schedule: - cron: "0 0 */7 * *" # run once per week + workflow_dispatch: # allow this workflow to be manually triggered # checkout needs 'contents:read' # pull request needs 'pull-requests:write' and 'contents:write' @@ -19,10 +20,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: '3.10' @@ -54,7 +55,7 @@ jobs: - name: Create pull request if: ${{ ! env.ACT }} id: pull-request - uses: peter-evans/create-pull-request@dcd5fd746d53dd8de555c0f10bca6c35628be47a # This commit corresponds to tag 3.12.0 + uses: peter-evans/create-pull-request@153407881ec5c347639a548ade7d8ad1d6740e38 # This commit corresponds to tag 5.0.2 with: commit-message: Update SRD package versions committer: GitHub Actions diff --git a/.gitignore b/.gitignore index b42c59f1ca..39e3f1e981 100644 --- a/.gitignore +++ b/.gitignore @@ -28,9 +28,13 @@ docs/*.pdf # ... except the tiers flowchart !docs/tiersflowchart.pdf -# Sphinx output -docs/_output +# Sphinx output and venv +docs/build +docs/venv # Files produced during testing .mustache_config.json -expanded.yaml \ No newline at end of file +expanded.yaml + +# Lychee cache +.lycheecache diff --git a/.lychee.toml b/.lychee.toml new file mode 100644 index 0000000000..14fd5a9426 --- /dev/null +++ b/.lychee.toml @@ -0,0 +1,124 @@ +############################# Display ############################# + +# Verbose program output +# Accepts log level: "error", "warn", "info", "debug", "trace" +verbose = "error" + +# Don't show interactive progress bar while checking links. +no_progress = false + +# Path to summary output file. +# output = ".config.dummy.report.md" + +############################# Cache ############################### + +# Enable link caching. This can be helpful to avoid checking the same links on +# multiple runs. +cache = true + +# Discard all cached requests older than this duration. +max_cache_age = "1d" + +############################# Runtime ############################# + +# Number of threads to utilize. +# Defaults to number of cores available to the system if omitted. +threads = 2 + +# Maximum number of allowed redirects. +max_redirects = 10 + +# Maximum number of allowed retries before a link is declared dead. +max_retries = 2 + +# Maximum number of concurrent link checks. +max_concurrency = 14 + +############################# Requests ############################ + +# User agent to send with each request. +user_agent = "curl/7.83. 1" + +# Website timeout from connect to response finished. +timeout = 20 + +# Minimum wait time in seconds between retries of failed requests. +retry_wait_time = 2 + +# Comma-separated list of accepted status codes for valid links. +accept = [200, 429] + +# Proceed for server connections considered insecure (invalid TLS). 
+insecure = false
+
+# Only test links with the given schemes (e.g. https).
+# Omit to check links with any scheme.
+# scheme = [ "https" ]
+
+# When links are available using HTTPS, treat HTTP links as errors.
+require_https = true
+
+# Request method
+method = "get"
+
+# Custom request headers
+headers = []
+
+# Remap URI matching pattern to different URI.
+# remap = [ "https://example.com http://example.invalid" ]
+
+# Base URL or website root directory to check relative URLs.
+# base = "https://example.com"
+
+# HTTP basic auth support. This will be the username and password passed to the
+# authorization HTTP header. See
+#
+# basic_auth = "user:pwd"
+
+############################# Exclusions ##########################
+
+# Skip missing input files (default is to error if they don't exist).
+skip_missing = false
+
+# Check links inside `<code>` and `<pre>` blocks as well as Markdown code
+# blocks.
+include_verbatim = false
+
+# Ignore case of paths when matching glob patterns.
+glob_ignore_case = false
+
+# Exclude URLs and mail addresses from checking (supports regex).
+# exclude = [ '.*\.github.com\.*' ]
+exclude = [
+  'github\.com',  # Requires authentication (403)
+  'turing\.ac\.uk',  # DDoS protection
+  'ipaddressguide\.com',  # 403
+  'opensource\.org',  # 403
+  'portal\.azure\.com',  # 403
+]
+
+# Exclude these filesystem paths from getting checked.
+# exclude_path = ["file/path/to/Ignore", "./other/file/path/to/Ignore"]
+exclude_path = [
+  'docs/build/html/_static/webpack-macros.html'
+]
+
+# URLs to check (supports regex). Has preference over all excludes.
+# include = [ 'gist\.github\.com.*' ]
+
+# Exclude all private IPs from checking.
+# Equivalent to setting `exclude_private`, `exclude_link_local`, and
+# `exclude_loopback` to true.
+exclude_all_private = false
+
+# Exclude private IP address ranges from checking.
+exclude_private = false
+
+# Exclude link-local IP address range from checking.
+exclude_link_local = false
+
+# Exclude loopback IP address range and localhost from checking.
+exclude_loopback = false
+
+# Exclude all mail addresses from checking.
+exclude_mail = false
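
The `exclude` list in the configuration above is regex-based: lychee skips any URL that matches one of the patterns. The following is a minimal Python sketch of that matching rule, using the patterns from the config; it is only an illustration (lychee itself is a separate tool), and the sample URLs are hypothetical.

```python
import re

# Patterns copied from the `exclude` list above; each is treated as a regex
# and any URL matching one of them is skipped.
EXCLUDE_PATTERNS = [
    r"github\.com",       # requires authentication (403)
    r"turing\.ac\.uk",    # DDoS protection
    r"ipaddressguide\.com",
    r"opensource\.org",
    r"portal\.azure\.com",
]

def is_excluded(url: str) -> bool:
    """Return True if the URL would be skipped under the config above."""
    return any(re.search(pattern, url) for pattern in EXCLUDE_PATTERNS)

# Illustrative URLs only
print(is_excluded("https://github.com/alan-turing-institute/data-safe-haven"))  # True
print(is_excluded("https://data-safe-haven.readthedocs.io"))                    # False
```
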
diff --git a/.markdownlinkcheck.json b/.markdownlinkcheck.json
deleted file mode 100644
index 0857236a82..0000000000
--- a/.markdownlinkcheck.json
+++ /dev/null
@@ -1,22 +0,0 @@
-{
-    "ignorePatterns": [
-        {
-            "_comment": "As the repository is private, without authentication these paths will return 404",
-            "pattern": "^https://github.com/alan-turing-institute/data-safe-haven"
-        },
-        {
-            "_comment": "As the repository is private, without authentication these paths will return 404",
-            "pattern": "^https://github.com/alan-turing-institute/data-classification-app"
-        },
-        {
-            "_comment": "Returns permanent direct code",
-            "pattern": "^https://www.egress.com"
-        },
-        {
-            "_comment": "Seems to return redirect codes in curl, 200 in browser. markdown-link-check does not return the http code",
-            "pattern": "^https://www.turing.ac.uk/"
-        }
-    ],
-    "retryOn429": true,
-    "aliveStatusCodes":[200, 302, 304]
-}
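
For comparison, the deleted `.markdownlinkcheck.json` combined anchored `ignorePatterns` regexes with an `aliveStatusCodes` list. Below is a hedged Python sketch of how a link would have been classified under that old configuration; it is an approximation for illustration, not the behaviour of the `markdown-link-check` tool itself, and the example URLs and status codes are made up.

```python
import re

# Patterns and status codes from the deleted .markdownlinkcheck.json above
IGNORE_PATTERNS = [
    r"^https://github.com/alan-turing-institute/data-safe-haven",
    r"^https://github.com/alan-turing-institute/data-classification-app",
    r"^https://www.egress.com",
    r"^https://www.turing.ac.uk/",
]
ALIVE_STATUS_CODES = {200, 302, 304}

def link_outcome(url: str, status_code: int) -> str:
    """Approximate how the old config would have classified a link."""
    if any(re.match(pattern, url) for pattern in IGNORE_PATTERNS):
        return "ignored"
    return "alive" if status_code in ALIVE_STATUS_CODES else "dead"

print(link_outcome("https://www.turing.ac.uk/research", 403))  # ignored
print(link_outcome("https://example.com", 301))                # dead (301 not in aliveStatusCodes)
```
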
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 0000000000..cc347e9fa2
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,21 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+version: 2
+
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+
+sphinx:
+  configuration: docs/source/conf.py
+
+formats:
+  - htmlzip
+  - pdf
+
+python:
+  install:
+    - requirements: docs/requirements.txt
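
Because `.readthedocs.yaml` is plain YAML, its settings are easy to inspect programmatically. The sketch below, which assumes PyYAML is installed, simply loads the file shown above and prints the toolchain a Read the Docs build would use; it is illustrative only and not part of the Read the Docs tooling.

```python
import yaml  # assumes PyYAML is installed

with open(".readthedocs.yaml") as handle:
    config = yaml.safe_load(handle)

print("Config version:", config["version"])                   # 2
print("Python version:", config["build"]["tools"]["python"])  # "3.11"
print("Sphinx conf:", config["sphinx"]["configuration"])      # "docs/source/conf.py"
for item in config["python"]["install"]:
    print("Requirements file:", item["requirements"])         # "docs/requirements.txt"
print("Output formats:", ", ".join(config["formats"]))        # "htmlzip, pdf"
```
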
diff --git a/CITATION.cff b/CITATION.cff
index e3ee508a7f..e8ec7a6760 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -1,7 +1,7 @@
 cff-version: 1.2.0
 message: "To acknowledge the data safe haven please use the citation and references below."
 title: "Turing Data Safe Haven"
-url: "https://alan-turing-institute.github.io/data-safe-haven/develop/index.html"
+url: "https://data-safe-haven.readthedocs.io"
 repository-code: "https://github.com/alan-turing-institute/data-safe-haven"
 authors:
   - given-names: James
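
`CITATION.cff` is also YAML, so the updated `url` and the other citation metadata can be consumed directly. The following sketch (again assuming PyYAML) builds a one-line citation from the fields shown in the hunk above; the `family-names` key is an assumption taken from the CFF 1.2.0 schema, as it is not visible in this excerpt.

```python
import yaml  # assumes PyYAML is installed

with open("CITATION.cff") as handle:
    cff = yaml.safe_load(handle)

# Join whichever name parts are present; `family-names` is assumed from the
# CFF 1.2.0 schema and does not appear in the hunk above.
authors = ", ".join(
    " ".join(filter(None, (a.get("given-names"), a.get("family-names"))))
    for a in cff.get("authors", [])
)
print(f'{authors}. "{cff["title"]}". {cff["url"]}')
```
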
diff --git a/CODEOWNERS b/CODEOWNERS
index 185dcc737d..411890bbaa 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -11,3 +11,4 @@
 # main and other branches that may be deployed from.
 # Note: /dir/ applies to directory and all subdirectories
 /deployment/      @martintoreilly @jemrobinson @JimMadge
+/docs/            @martintoreilly @jemrobinson @JimMadge @craddm @edwardchalstrey1
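
The CODEOWNERS comment above notes that `/dir/` applies to the directory and all of its subdirectories. The sketch below models just that prefix rule in Python for the two rules shown; it is a deliberate simplification and does not reproduce GitHub's full CODEOWNERS glob matching or its last-match-wins ordering.

```python
# Simplified model of the "/dir/ applies to directory and all subdirectories"
# rule noted in the CODEOWNERS comment above.
RULES = {
    "/deployment/": ["@martintoreilly", "@jemrobinson", "@JimMadge"],
    "/docs/": ["@martintoreilly", "@jemrobinson", "@JimMadge", "@craddm", "@edwardchalstrey1"],
}

def owners_for(path: str) -> list[str]:
    """Return the owners whose directory rule covers the given repository path."""
    for prefix, owners in RULES.items():
        if ("/" + path).startswith(prefix):
            return owners
    return []

print(owners_for("docs/source/conf.py"))              # docs owners, including @craddm
print(owners_for("tests/AutoFormat_Powershell.ps1"))  # [] (no rule shown here matches)
```
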
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2b204ef8fd..7e406ecc7c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -34,7 +34,7 @@ For the Data Safe Haven project, we follow a DevOps development philosophy.
 This means different things to different people, but the definition that we're using is [best summed up here](https://www.guru99.com/agile-vs-devops.html).
 
 

- +

 This is a software development method which focuses on:
@@ -87,7 +87,7 @@ In order to contribute via GitHub you'll need to set up a free account and sign
 We use the [Gitflow Workflow](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow).

- +

 This means that:
@@ -172,7 +172,7 @@ For example, `@KirstieJane` will send a notification to `Kirstie Whitaker` so sh
 Alternatively (and this is encouraged) you can use the issue to keep track of where you're up to with the task and add information about next steps and barriers.

- +

### Working in a private repository @@ -208,7 +208,7 @@ Make sure to [keep your fork up to date](https://docs.github.com/en/pull-request #### 3. Make the changes you've discussed Try to keep the changes focused. If you submit a large amount of work in all in one go it will be much more work for whomever is reviewing your pull request. [Help them help you](https://media.giphy.com/media/uRb2p09vY8lEs/giphy.gif) :wink: -If you feel tempted to "branch out" then please make a [new branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-and-deleting-branches-within-your-repository) and a [new issue][https://github.com/alan-turing-institute/data-safe-haven/issues] to go with it. +If you feel tempted to "branch out" then please make a [new branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-and-deleting-branches-within-your-repository) and a [new issue](https://github.com/alan-turing-institute/data-safe-haven/issues) to go with it. #### 4. Submit a pull request @@ -222,18 +222,11 @@ GitHub has a [nice introduction](https://docs.github.com/en/get-started/quicksta ### Making a change to the documentation -The docs, including for older releases, are available [here](https://alan-turing-institute.github.io/data-safe-haven). +The docs, including for older releases, are available [here](https://data-safe-haven.readthedocs.io). You should follow the same instructions as above to [make a change with a pull request](#making-a-change-with-a-pull-request) when editing the documentation. -To test your changes, build the docs locally by checking out your fork of the repo, navigating to the `docs` folder and `make`ing them: - -```{bash} -cd data-safe-haven/docs -make html -``` - -This will add the contents to a folder called `_output` inside `docs`. Open the index html from a browser and you should be able to navigate the docs and view your changes. +To preview your changes, you can build the docs locally. See [docs/README.md](docs/README.md). ### Who's involved in the project diff --git a/README.md b/README.md index 8ff4c7e4fd..d4cc0cdf40 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![Data Safe Haven cartoon by Scriberia for The Alan Turing Institute](docs/static/scriberia_diagram.jpg) +![Data Safe Haven cartoon by Scriberia for The Alan Turing Institute](docs/source/_static/scriberia_diagram.jpg) # :eyes: What is the Turing Data Safe Haven? @@ -6,15 +6,17 @@ The **Turing Data Safe Haven** is an open-source framework for creating secure e It provides a set of scripts and templates that will allow you to deploy, administer and use your own secure environment. It was developed as part of the Alan Turing Institute's [Data Safe Havens in the Cloud](https://www.turing.ac.uk/research/research-projects/data-safe-havens-cloud) project. 
-[![Docs](https://github.com/alan-turing-institute/data-safe-haven/actions/workflows/build_docs.yaml/badge.svg)](https://alan-turing-institute.github.io/data-safe-haven) -[![Build status](https://app.travis-ci.com/alan-turing-institute/data-safe-haven.svg?token=fmccRP1RHVJaEoiWy6QF&branch=develop)](https://app.travis-ci.com/alan-turing-institute/data-safe-haven) +[![Documentation](https://readthedocs.org/projects/data-safe-haven/badge/?version=latest)](https://data-safe-haven.readthedocs.io/en/latest/?badge=latest) +[![Lint code](https://github.com/alan-turing-institute/data-safe-haven/actions/workflows/lint_code.yaml/badge.svg)](https://github.com/alan-turing-institute/data-safe-haven/actions/workflows/lint_code.yaml) +[![Test code](https://github.com/alan-turing-institute/data-safe-haven/actions/workflows/test_code.yaml/badge.svg)](https://github.com/alan-turing-institute/data-safe-haven/actions/workflows/test_code.yaml) [![Latest version](https://img.shields.io/github/v/release/alan-turing-institute/data-safe-haven?style=flat&label=Latest&color=%234B78E6)](https://github.com/alan-turing-institute/data-safe-haven/releases) [![Slack](https://img.shields.io/badge/Join%20us!-yellow?style=flat&logo=slack&logoColor=white&labelColor=4A154B&label=Slack)](https://join.slack.com/t/turingdatasafehaven/signup) ![Licence](https://img.shields.io/github/license/alan-turing-institute/data-safe-haven) +[![Citation](https://img.shields.io/badge/citation-cite%20this%20project-informational)](https://github.com/alan-turing-institute/data-safe-haven/blob/develop/CITATION.cff) ## :family: Community & support -- Visit the [Data Safe Haven website](https://alan-turing-institute.github.io/data-safe-haven) for full documentation and useful links. +- Visit the [Data Safe Haven website](https://data-safe-haven.readthedocs.io) for full documentation and useful links. - Join our [Slack server](https://join.slack.com/t/turingdatasafehaven/shared_invite/zt-104oyd8wn-DyOufeaAQFiJDlG5dDGk~w) to ask questions, discuss features, and for general API chat. - Open a [discussion on GitHub](https://github.com/alan-turing-institute/data-safe-haven/discussions) for general questions, feature suggestions, and help with our deployment scripts. - Look through our [issues on GitHub](https://github.com/alan-turing-institute/data-safe-haven/issues) to see what we're working on and progress towards specific fixes. @@ -48,7 +50,7 @@ If you believe you have found a security vulnerability, please report it as outl We are grateful for the following support for this project: -- The Alan Turing Institute's core EPSRC funding ([EP/N510129/1](https://gow.epsrc.ukri.org/NGBOViewGrant.aspx?GrantRef=EP/N510129/1)). +- The Alan Turing Institute's core and additional EPSRC funding ([EP/N510129/1](https://gtr.ukri.org/projects?ref=EP%2FN510129%2F1), [EP/W001381/1](https://gtr.ukri.org/projects?ref=EP%2FW001381%2F1), [EP/W037211/1](https://gtr.ukri.org/projects?ref=EP%2FW037211%2F1), [EP/X03870X/1](https://gtr.ukri.org/projects?ref=EP%2FX03870X%2F1)). - The UKRI Strategic Priorities Fund - AI for Science, Engineering, Health and Government programme ([EP/T001569/1](https://gow.epsrc.ukri.org/NGBOViewGrant.aspx?GrantRef=EP/T001569/1)), particularly the "Tools, Practices and Systems" theme within that grant. - Microsoft's generous [donation of Azure credits](https://www.microsoft.com/en-us/research/blog/microsoft-accelerates-data-science-at-the-alan-turing-institute-with-5m-in-cloud-computing-credits/) to the Alan Turing Institute. 
diff --git a/SECURITY.md b/SECURITY.md index 3536430875..e5eac6d896 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -23,14 +23,16 @@ disclosures. ### Reporting -If you believe you have found a security vulnerability, please submit your report to us -using the following email: safehavendevs@turing.ac.uk +If you believe you have found a security vulnerability, please check the list of +published [security advisories](https://github.com/alan-turing-institute/data-safe-haven/security/advisories) +and, if the vulnerability you have identified is not covered by an existing advisory, use the "Report a vulnerability" button to submit a vulnerability report. -In your report please include details of: +In your report please include the details requested in the report form, including: - The area / component of the Data Safe Haven where the vulnerability can be observed. - A brief description of the type of vulnerability, for example; “unexpected outbound data access” or "privilege escalation to admin user". - Steps to reproduce. These should be a benign, non-destructive, proof of concept. This helps to ensure that the report can be triaged quickly and accurately. It also reduces the likelihood of duplicate reports, or malicious exploitation of some vulnerabilities. +- An indication of the severity of the issue. ### What to expect @@ -50,6 +52,19 @@ Once your vulnerability has been resolved, we welcome requests to disclose your report. We’d like to unify guidance to affected users, so please do continue to coordinate any public release with us. +We will generally look to publish a public security advisory on this repository's +[security advisories](https://github.com/alan-turing-institute/data-safe-haven/security/advisories) +page once a vulnerabilty has been resolved and we have given those organisations +we know of with active deployments reasonable time to patch or update their deployments. +We will credit you with reporting the vulnerability and with any other assistance +you have provided characterising and resolving it in the published security advisory. +If you would prefer not to be credited in the public security advisory, please let us know. + +In some instances we may already be aware of the reported vulnerability but not yet +have published a public security advisory. We still welcome additional reports in these +cases as they often provide additional useful information. Where multiple people have reported +the same vulnerability we will credit each of them in the public advisory when it is published. 
+ ### Guidance You must NOT: diff --git a/VERSIONING.md b/VERSIONING.md index 203a10a1ff..e072045b15 100644 --- a/VERSIONING.md +++ b/VERSIONING.md @@ -64,6 +64,7 @@ We usually deploy the latest available version of the Data Safe Haven for each o | December 2021 | DSG 2021-12 | [v3.3.1](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v3.3.1) | | December 2022 | DSG 2022-12 | [v4.0.2](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v4.0.2) | | February 2023 | DSG 2023-02 | [v4.0.3](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v4.0.3) | +| May 2023 | DSG 2023-05 | [v4.0.3](https://github.com/alan-turing-institute/data-safe-haven/releases/tag/v4.0.3) | ## Versions that have undergone formal security evaluation diff --git a/deployment/CheckRequirements.ps1 b/deployment/CheckRequirements.ps1 index f3dce22c3a..6fad3f01c4 100644 --- a/deployment/CheckRequirements.ps1 +++ b/deployment/CheckRequirements.ps1 @@ -8,26 +8,26 @@ param ( Import-Module $PSScriptRoot/common/Logging -Force -ErrorAction Stop # Requirements -$PowershellMinVersion = "7.0.0" -$PowershellMaxVersion = "7.2.8" +$PowershellSupportedVersion = "7.3.6" $ModuleVersionRequired = @{ - "Az.Accounts" = @("ge", "2.9.0") - "Az.Automation" = @("ge", "1.7.3") - "Az.Compute" = @("ge", "4.29.0") + "Az.Accounts" = @("ge", "2.11.1") + "Az.Automation" = @("ge", "1.9.0") + "Az.Compute" = @("ge", "5.3.0") "Az.DataProtection" = @("ge", "0.4.0") "Az.Dns" = @("ge", "1.1.2") - "Az.KeyVault" = @("ge", "4.6.0") - "Az.Monitor" = @("ge", "3.0.1") + "Az.KeyVault" = @("ge", "4.9.1") + "Az.Monitor" = @("ge", "4.2.0") "Az.MonitoringSolutions" = @("ge", "0.1.0") - "Az.Network" = @("ge", "4.18.0") + "Az.Network" = @("ge", "5.3.0") "Az.OperationalInsights" = @("ge", "3.1.0") "Az.PrivateDns" = @("ge", "1.0.3") "Az.RecoveryServices" = @("ge", "5.4.1") - "Az.Resources" = @("ge", "6.0.1") + "Az.Resources" = @("ge", "6.5.1") "Az.Storage" = @("ge", "4.7.0") - "Microsoft.Graph.Authentication" = @("ge", "1.10.0") - "Microsoft.Graph.Applications" = @("ge", "1.10.0") - "Microsoft.Graph.Identity.DirectoryManagement" = @("ge", "1.10.0") + "Microsoft.Graph.Authentication" = @("ge", "1.21.0") + "Microsoft.Graph.Applications" = @("ge", "1.21.0") + "Microsoft.Graph.Identity.DirectoryManagement" = @("ge", "1.21.0") + "Microsoft.Graph.Users" = @("ge", "1.21.0") "Poshstache" = @("ge", "0.1.10") "Powershell-Yaml" = @("ge", "0.4.2") } @@ -37,15 +37,16 @@ if ($IncludeDev.IsPresent) { } # Powershell version -$PowershellVersion = (Get-Host | Select-Object Version).Version -if ($PowershellVersion -gt $PowershellMaxVersion) { - Add-LogMessage -Level Fatal "Please downgrade Powershell to a minimum version of $PowershellMinVersion and a maximum of $PowershellMaxVersion (currently using $PowershellVersion)!" -} elseif ($PowershellVersion -lt $PowershellMinVersion) { - Add-LogMessage -Level Fatal "Please upgrade Powershell to a minimum version of $PowershellMinVersion and a maximum of $PowershellMaxVersion (currently using $PowershellVersion)!" +$PowershellVersion = $PSVersionTable.PSVersion +if ($PowershellVersion -ne $PowershellSupportedVersion) { + Add-LogMessage -Level Warning "Powershell version: $PowershellVersion" + Add-LogMessage -Level Warning "The currently supported version of Powershell is $PowershellSupportedVersion." + Add-LogMessage -Level Warning "In case of errors originating from Powershell code, ensure that you are running the currently supported version." 
} else { Add-LogMessage -Level Success "Powershell version: $PowershellVersion" } + # Powershell modules $RepositoryName = "PSGallery" foreach ($ModuleName in $ModuleVersionRequired.Keys) { diff --git a/deployment/administration/SHM_Expand_Allowlist_Dependencies.ps1 b/deployment/administration/SHM_Expand_Allowlist_Dependencies.ps1 index ca0ba16087..577f71bba6 100644 --- a/deployment/administration/SHM_Expand_Allowlist_Dependencies.ps1 +++ b/deployment/administration/SHM_Expand_Allowlist_Dependencies.ps1 @@ -32,11 +32,13 @@ function Test-PackageExistence { if ($Repository -eq "pypi") { # The best PyPI results come from the package JSON files $response = Invoke-RestMethod -Uri "https://pypi.org/${Repository}/${Package}/json" -MaximumRetryCount 4 -RetryIntervalSec 1 -ErrorAction Stop + if ($response -is [String]) { $response = $response | ConvertFrom-Json -AsHashtable } $versions = $response.releases | Get-Member -MemberType NoteProperty | ForEach-Object { $_.Name } $name = $response.info.name } elseif ($Repository -eq "cran") { # Use the RStudio package manager for CRAN packages $response = Invoke-RestMethod -Uri "https://packagemanager.rstudio.com/__api__/repos/${RepositoryId}/packages?name=${Package}&case_insensitive=true" -MaximumRetryCount 4 -RetryIntervalSec 1 -ErrorAction Stop + if ($response -is [String]) { $response = $response | ConvertFrom-Json -AsHashtable } $name = $response.name $response = Invoke-RestMethod -Uri "https://packagemanager.rstudio.com/__api__/repos/${RepositoryId}/packages/${name}" -MaximumRetryCount 4 -RetryIntervalSec 1 -ErrorAction Stop $versions = @($response.version) + ($response.archived | ForEach-Object { $_.version }) @@ -44,6 +46,7 @@ function Test-PackageExistence { # For other repositories we use libraries.io # As we are rate-limited to 60 requests per minute this request can fail. If it does, we retry every few seconds for 1 minute $response = Invoke-RestMethod -Uri "https://libraries.io/api/${Repository}/${Package}?api_key=${ApiKey}" -MaximumRetryCount 16 -RetryIntervalSec 4 -ErrorAction Stop + if ($response -is [String]) { $response = $response | ConvertFrom-Json -AsHashtable } $versions = $response.versions | ForEach-Object { $_.number } $name = $response.Name } @@ -93,7 +96,8 @@ function Get-Dependencies { if ($Repository -eq "pypi") { # The best PyPI results come from the package JSON files $response = Invoke-RestMethod -Uri "https://pypi.org/${Repository}/${Package}/${Version}/json" -MaximumRetryCount 4 -RetryIntervalSec 1 -ErrorAction Stop - $Cache[$Repository][$Package][$Version] = @($response.info.requires_dist | Where-Object { $_ -and ($_ -notmatch "extra ==") } | ForEach-Object { ($_ -split '[;[( ><=]')[0].Trim() } | Sort-Object -Unique) + # Add canonical names to dependencies + $Cache[$Repository][$Package][$Version] = @($response.info.requires_dist | Where-Object { $_ -and ($_ -notmatch "extra ==") } | ForEach-Object { ($_ -split '[;[( ><=!~]')[0].Trim().ToLower() } | Sort-Object -Unique) } else { # For other repositories we use libraries.io try { @@ -123,10 +127,10 @@ function Get-Dependencies { # -------------------------- $languageName = @{cran = "r"; pypi = "python" }[$Repository] $coreAllowlistPath = Join-Path $PSScriptRoot ".." ".." "environment_configs" "package_lists" "allowlist-core-${languageName}-${Repository}-tier3.list" +$extraAllowlistPath = Join-Path $PSScriptRoot ".." ".." "environment_configs" "package_lists" "allowlist-extra-${languageName}-${Repository}-tier3.list" $fullAllowlistPath = Join-Path $PSScriptRoot ".." ".." 
"environment_configs" "package_lists" "allowlist-full-${languageName}-${Repository}-tier3.list" $dependencyCachePath = Join-Path $PSScriptRoot ".." ".." "environment_configs" "package_lists" "dependency-cache.json" -$corePackageList = Get-Content $coreAllowlistPath | Sort-Object -Unique - +$corePackageList = (Get-Content $coreAllowlistPath) + (Get-Content $extraAllowlistPath) | Sort-Object -Unique # Initialise the package queue # ---------------------------- @@ -142,6 +146,7 @@ if (-not $NoCache) { if (Test-Path $dependencyCachePath -PathType Leaf) { $dependencyCache = Get-Content $dependencyCachePath | ConvertFrom-Json -AsHashtable } + if (-not $dependencyCache) { $dependencyCache = [ordered]@{} } } if ($Repository -notin $dependencyCache.Keys) { $dependencyCache[$Repository] = [ordered]@{} } if ("unavailable_packages" -notin $dependencyCache.Keys) { $dependencyCache["unavailable_packages"] = [ordered]@{} } @@ -171,6 +176,10 @@ while ($queue.Count) { # Check that the package exists and add it to the allowlist if so Add-LogMessage -Level Info "Looking for '${unverifiedName}' in ${Repository}..." $packageData = Test-PackageExistence -Repository $Repository -Package $unverifiedName -ApiKey $ApiKey -RepositoryId $RepositoryId + if (-not $($packageData.name)) { + Add-LogMessage -Level Error "Package '${unverifiedName}' could not be found!" + continue + } if ($packageData.name -cne $unverifiedName) { Add-LogMessage -Level Warning "Package '${unverifiedName}' should be '$($packageData.name)'" } diff --git a/deployment/administration/SRE_Teardown.ps1 b/deployment/administration/SRE_Teardown.ps1 index 2d1a7d54c1..96e5b47edf 100644 --- a/deployment/administration/SRE_Teardown.ps1 +++ b/deployment/administration/SRE_Teardown.ps1 @@ -39,8 +39,13 @@ if ($dryRun.IsPresent) { # Remove backup instances and policies. Without this the backup vault cannot be deleted # ------------------------------------------------------------------------------------- -Remove-DataProtectionBackupInstances -ResourceGroupName $config.sre.backup.rg -VaultName $config.sre.backup.vault.name -Remove-DataProtectionBackupDiskSnapshots -ResourceGroupName $config.sre.backup.rg +if ($dryRun.IsPresent) { + Add-LogMessage -Level Info "Backup instances from $($config.sre.backup.vault.name) would be deleted" + Add-LogMessage -Level Info "Disk snapshots from from $($config.sre.backup.rg) would be deleted" +} else { + Remove-DataProtectionBackupInstances -ResourceGroupName $config.sre.backup.rg -VaultName $config.sre.backup.vault.name + Remove-DataProtectionBackupDiskSnapshots -ResourceGroupName $config.sre.backup.rg +} # Remove SRE resource groups and the resources they contain diff --git a/deployment/common/AzureMonitor.psm1 b/deployment/common/AzureMonitor.psm1 index a0e12b62b0..ec7fc67e41 100644 --- a/deployment/common/AzureMonitor.psm1 +++ b/deployment/common/AzureMonitor.psm1 @@ -70,7 +70,7 @@ function Set-LogAnalyticsDiagnostics { [string]$WorkspaceId ) Add-LogMessage -Level Info "Enable logging for $ResourceName to log analytics workspace" - $null = Set-AzDiagnosticSetting -ResourceId $ResourceId -WorkspaceId $WorkspaceId -Enabled $true + $null = New-AzDiagnosticSetting -Name "LogToWorkspace" -ResourceId $ResourceId -WorkspaceId $WorkspaceId if ($?) 
{ Add-LogMessage -Level Success "Enabled logging for $ResourceName to log analytics workspace" } else { diff --git a/deployment/common/Configuration.psm1 b/deployment/common/Configuration.psm1 index c1090d9bb8..a77c9416d3 100644 --- a/deployment/common/Configuration.psm1 +++ b/deployment/common/Configuration.psm1 @@ -332,7 +332,7 @@ function Get-ShmConfig { ) # *-jobruntimedata-prod-su1.azure-automation.net linux = ( @("72.32.157.246", "87.238.57.227", "147.75.85.69", "217.196.149.55") + # apt.postgresql.org - @("91.189.91.38", "91.189.91.39", "185.125.190.36", "185.125.190.39") + # archive.ubuntu.com, changelogs.ubuntu.com, security.ubuntu.com + @("91.189.91.38", "91.189.91.39", "91.189.91.48", "91.189.91.49", "91.189.91.81", "91.189.91.82", "91.189.91.83", "185.125.190.17", "185.125.190.18", "185.125.190.36", "185.125.190.39") + # archive.ubuntu.com, changelogs.ubuntu.com, security.ubuntu.com $cloudFlareIpAddresses + # database.clamav.net, packages.gitlab.com and qgis.org use Cloudflare $cloudFrontIpAddresses + # packages.gitlab.com uses Cloudfront to host its Release file @("104.131.190.124") + # dbeaver.io diff --git a/deployment/common/RemoteCommands.psm1 b/deployment/common/RemoteCommands.psm1 index deebad6ea1..d5b9c2472f 100644 --- a/deployment/common/RemoteCommands.psm1 +++ b/deployment/common/RemoteCommands.psm1 @@ -1,4 +1,5 @@ Import-Module $PSScriptRoot/AzureCompute -ErrorAction Stop +Import-Module $PSScriptRoot/DataStructures -ErrorAction Stop Import-Module $PSScriptRoot/Logging -ErrorAction Stop diff --git a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-cran.mustache.yaml b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-cran.mustache.yaml index 38b7e3cb87..9bcd26b283 100644 --- a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-cran.mustache.yaml +++ b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-cran.mustache.yaml @@ -9,7 +9,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: diff --git a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-pypi.mustache.yaml b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-pypi.mustache.yaml index 8c09932601..0b38914c3b 100644 --- a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-pypi.mustache.yaml +++ b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-external-pypi.mustache.yaml @@ -9,7 +9,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: @@ -122,7 +122,7 @@ write_files: - path: "/etc/cron.d/pull-from-internet" permissions: "0644" content: | - # External update (rsync from CRAN) every 6 hours + # External update from PyPi every 6 hours 0 */6 * * * mirrordaemon ~mirrordaemon/pull_from_internet.sh - path: "/etc/cron.d/pull-then-push" @@ -313,7 +313,7 @@ runcmd: # Install bandersnatch with pip - echo ">=== Installing bandersnatch... 
===<" - - pip3 install bandersnatch==4.2.0 + - pip3 install bandersnatch==4.2.0 packaging==21.3 - echo "Using bandersnatch from '$(which bandersnatch)'" # Initialise allowlist if appropriate diff --git a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-cran.mustache.yaml b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-cran.mustache.yaml index 9f0a108f60..6c67f7d4dd 100644 --- a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-cran.mustache.yaml +++ b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-cran.mustache.yaml @@ -9,7 +9,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: diff --git a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-pypi.mustache.yaml b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-pypi.mustache.yaml index 2fad1ab1ee..3507b51dbd 100644 --- a/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-pypi.mustache.yaml +++ b/deployment/safe_haven_management_environment/cloud_init/cloud-init-repository-mirror-internal-pypi.mustache.yaml @@ -9,7 +9,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: diff --git a/deployment/safe_haven_management_environment/cloud_init/resources/configure_nexus.py b/deployment/safe_haven_management_environment/cloud_init/resources/configure_nexus.py index bb9bed8d3a..f460162799 100755 --- a/deployment/safe_haven_management_environment/cloud_init/resources/configure_nexus.py +++ b/deployment/safe_haven_management_environment/cloud_init/resources/configure_nexus.py @@ -588,20 +588,21 @@ def get_allowlists(pypi_package_file, cran_package_file): cran_allowlist = [] if pypi_package_file: - pypi_allowlist = get_allowlist(pypi_package_file) + pypi_allowlist = get_allowlist(pypi_package_file, False) if cran_package_file: - cran_allowlist = get_allowlist(cran_package_file) + cran_allowlist = get_allowlist(cran_package_file, True) return (pypi_allowlist, cran_allowlist) -def get_allowlist(allowlist_path): +def get_allowlist(allowlist_path, is_cran): """ Read list of allowed packages from a file Args: allowlist_path: Path to the allowlist file + is_cran: True if the allowlist if for CRAN, False if it is for PyPI Returns: List of the package names specified in the file @@ -609,12 +610,15 @@ def get_allowlist(allowlist_path): allowlist = [] with open(allowlist_path, "r") as allowlist_file: # Sanitise package names - # - convert to lower case + # - convert to lower case if the package is on PyPI. 
Leave alone on CRAN to prevent issues with case-sensitivity # - convert special characters to '-' # - remove any blank entries, which act as a wildcard that would allow any package special_characters = re.compile(r"[^0-9a-zA-Z]+") for package_name in allowlist_file.readlines(): - package_name = special_characters.sub("-", package_name.lower().strip()) + if is_cran: + package_name = special_characters.sub("-", package_name.strip()) + else: + package_name = special_characters.sub("-", package_name.lower().strip()) if package_name: allowlist.append(package_name) return allowlist @@ -725,7 +729,7 @@ def recreate_privileges(tier, nexus_api, pypi_allowlist=[], nexus_api, name=f"cran-{package}", description=f"allow access to {package} on CRAN", - expression=f'format == "r" and path=^"/src/contrib/{package}"', + expression=f'format == "r" and path=^"/src/contrib/{package}_"', repo_type=_NEXUS_REPOSITORIES["cran_proxy"]["repo_type"], repo=_NEXUS_REPOSITORIES["cran_proxy"]["name"] ) diff --git a/deployment/safe_haven_management_environment/desired_state_configuration/dc1Artifacts/CreateUsers.ps1 b/deployment/safe_haven_management_environment/desired_state_configuration/dc1Artifacts/CreateUsers.ps1 index fff905ab1e..a99dc67335 100644 --- a/deployment/safe_haven_management_environment/desired_state_configuration/dc1Artifacts/CreateUsers.ps1 +++ b/deployment/safe_haven_management_environment/desired_state_configuration/dc1Artifacts/CreateUsers.ps1 @@ -26,7 +26,7 @@ Import-Csv $userFilePath | ForEach-Object { Path = "$userOuPath" Enabled = $True AccountPassword = (ConvertTo-SecureString $Password -AsPlainText -Force) - PasswordNeverExpires = $False + PasswordNeverExpires = $True Mobile = $_.Mobile Email = $_.SecondaryEmail Country = "GB" diff --git a/deployment/safe_haven_management_environment/network_rules/shm-firewall-rules.json b/deployment/safe_haven_management_environment/network_rules/shm-firewall-rules.json index d3d0b897b3..9d5e1e4f04 100644 --- a/deployment/safe_haven_management_environment/network_rules/shm-firewall-rules.json +++ b/deployment/safe_haven_management_environment/network_rules/shm-firewall-rules.json @@ -190,565 +190,17 @@ ], "fqdnTags": [], "targetFqdns": [ - "g10-prod-ch3-003-sb.servicebus.windows.net", - "g10-prod-ch3-004-sb.servicebus.windows.net", - "g10-prod-ch3-005-sb.servicebus.windows.net", - "g10-prod-ch3-006-sb.servicebus.windows.net", - "g10-prod-ch3-007-sb.servicebus.windows.net", - "g10-prod-sn3-003-sb.servicebus.windows.net", - "g10-prod-sn3-004-sb.servicebus.windows.net", - "g10-prod-sn3-005-sb.servicebus.windows.net", - "g10-prod-sn3-006-sb.servicebus.windows.net", - "g10-prod-sn3-007-sb.servicebus.windows.net", - "g10-prod-sn3-009-sb.servicebus.windows.net", - "g10-prod-sn3-010-sb.servicebus.windows.net", - "g10-prod-sn3-011-sb.servicebus.windows.net", - "g10-prod-sn3-012-sb.servicebus.windows.net", - "g11-prod-ch3-003-sb.servicebus.windows.net", - "g11-prod-ch3-004-sb.servicebus.windows.net", - "g11-prod-ch3-005-sb.servicebus.windows.net", - "g11-prod-ch3-006-sb.servicebus.windows.net", - "g11-prod-ch3-007-sb.servicebus.windows.net", - "g11-prod-sn3-003-sb.servicebus.windows.net", - "g11-prod-sn3-004-sb.servicebus.windows.net", - "g11-prod-sn3-005-sb.servicebus.windows.net", - "g11-prod-sn3-006-sb.servicebus.windows.net", - "g11-prod-sn3-007-sb.servicebus.windows.net", - "g11-prod-sn3-009-sb.servicebus.windows.net", - "g11-prod-sn3-010-sb.servicebus.windows.net", - "g11-prod-sn3-011-sb.servicebus.windows.net", - "g11-prod-sn3-012-sb.servicebus.windows.net", - 
"g12-prod-ch3-003-sb.servicebus.windows.net", - "g12-prod-ch3-004-sb.servicebus.windows.net", - "g12-prod-ch3-005-sb.servicebus.windows.net", - "g12-prod-ch3-006-sb.servicebus.windows.net", - "g12-prod-ch3-007-sb.servicebus.windows.net", - "g12-prod-sn3-003-sb.servicebus.windows.net", - "g12-prod-sn3-004-sb.servicebus.windows.net", - "g12-prod-sn3-005-sb.servicebus.windows.net", - "g12-prod-sn3-006-sb.servicebus.windows.net", - "g12-prod-sn3-007-sb.servicebus.windows.net", - "g12-prod-sn3-009-sb.servicebus.windows.net", - "g12-prod-sn3-010-sb.servicebus.windows.net", - "g12-prod-sn3-011-sb.servicebus.windows.net", - "g12-prod-sn3-012-sb.servicebus.windows.net", - "g13-prod-ch3-003-sb.servicebus.windows.net", - "g13-prod-ch3-004-sb.servicebus.windows.net", - "g13-prod-ch3-005-sb.servicebus.windows.net", - "g13-prod-ch3-006-sb.servicebus.windows.net", - "g13-prod-ch3-007-sb.servicebus.windows.net", - "g13-prod-sn3-003-sb.servicebus.windows.net", - "g13-prod-sn3-004-sb.servicebus.windows.net", - "g13-prod-sn3-005-sb.servicebus.windows.net", - "g13-prod-sn3-006-sb.servicebus.windows.net", - "g13-prod-sn3-007-sb.servicebus.windows.net", - "g13-prod-sn3-009-sb.servicebus.windows.net", - "g13-prod-sn3-010-sb.servicebus.windows.net", - "g13-prod-sn3-011-sb.servicebus.windows.net", - "g13-prod-sn3-012-sb.servicebus.windows.net", - "g14-prod-ch3-003-sb.servicebus.windows.net", - "g14-prod-ch3-004-sb.servicebus.windows.net", - "g14-prod-ch3-005-sb.servicebus.windows.net", - "g14-prod-ch3-006-sb.servicebus.windows.net", - "g14-prod-ch3-007-sb.servicebus.windows.net", - "g14-prod-sn3-003-sb.servicebus.windows.net", - "g14-prod-sn3-004-sb.servicebus.windows.net", - "g14-prod-sn3-005-sb.servicebus.windows.net", - "g14-prod-sn3-006-sb.servicebus.windows.net", - "g14-prod-sn3-007-sb.servicebus.windows.net", - "g14-prod-sn3-009-sb.servicebus.windows.net", - "g14-prod-sn3-010-sb.servicebus.windows.net", - "g14-prod-sn3-011-sb.servicebus.windows.net", - "g14-prod-sn3-012-sb.servicebus.windows.net", - "g15-prod-ch3-003-sb.servicebus.windows.net", - "g15-prod-ch3-004-sb.servicebus.windows.net", - "g15-prod-ch3-005-sb.servicebus.windows.net", - "g15-prod-ch3-006-sb.servicebus.windows.net", - "g15-prod-ch3-007-sb.servicebus.windows.net", - "g15-prod-sn3-003-sb.servicebus.windows.net", - "g15-prod-sn3-004-sb.servicebus.windows.net", - "g15-prod-sn3-005-sb.servicebus.windows.net", - "g15-prod-sn3-006-sb.servicebus.windows.net", - "g15-prod-sn3-007-sb.servicebus.windows.net", - "g15-prod-sn3-009-sb.servicebus.windows.net", - "g15-prod-sn3-010-sb.servicebus.windows.net", - "g15-prod-sn3-011-sb.servicebus.windows.net", - "g15-prod-sn3-012-sb.servicebus.windows.net", - "g16-prod-ch3-003-sb.servicebus.windows.net", - "g16-prod-ch3-004-sb.servicebus.windows.net", - "g16-prod-ch3-006-sb.servicebus.windows.net", - "g16-prod-ch3-007-sb.servicebus.windows.net", - "g16-prod-sn3-003-sb.servicebus.windows.net", - "g16-prod-sn3-004-sb.servicebus.windows.net", - "g16-prod-sn3-005-sb.servicebus.windows.net", - "g16-prod-sn3-006-sb.servicebus.windows.net", - "g16-prod-sn3-007-sb.servicebus.windows.net", - "g16-prod-sn3-010-sb.servicebus.windows.net", - "g16-prod-sn3-011-sb.servicebus.windows.net", - "g17-prod-ch3-003-sb.servicebus.windows.net", - "g17-prod-ch3-004-sb.servicebus.windows.net", - "g17-prod-ch3-006-sb.servicebus.windows.net", - "g17-prod-ch3-007-sb.servicebus.windows.net", - "g17-prod-sn3-003-sb.servicebus.windows.net", - "g17-prod-sn3-004-sb.servicebus.windows.net", - "g17-prod-sn3-005-sb.servicebus.windows.net", - 
"g17-prod-sn3-006-sb.servicebus.windows.net", - "g17-prod-sn3-007-sb.servicebus.windows.net", - "g17-prod-sn3-010-sb.servicebus.windows.net", - "g17-prod-sn3-011-sb.servicebus.windows.net", - "g18-prod-ch3-003-sb.servicebus.windows.net", - "g18-prod-ch3-004-sb.servicebus.windows.net", - "g18-prod-ch3-006-sb.servicebus.windows.net", - "g18-prod-ch3-007-sb.servicebus.windows.net", - "g18-prod-sn3-003-sb.servicebus.windows.net", - "g18-prod-sn3-004-sb.servicebus.windows.net", - "g18-prod-sn3-005-sb.servicebus.windows.net", - "g18-prod-sn3-006-sb.servicebus.windows.net", - "g18-prod-sn3-007-sb.servicebus.windows.net", - "g18-prod-sn3-010-sb.servicebus.windows.net", - "g18-prod-sn3-011-sb.servicebus.windows.net", - "g19-prod-ch3-003-sb.servicebus.windows.net", - "g19-prod-ch3-004-sb.servicebus.windows.net", - "g19-prod-ch3-006-sb.servicebus.windows.net", - "g19-prod-ch3-007-sb.servicebus.windows.net", - "g19-prod-sn3-003-sb.servicebus.windows.net", - "g19-prod-sn3-004-sb.servicebus.windows.net", - "g19-prod-sn3-005-sb.servicebus.windows.net", - "g19-prod-sn3-006-sb.servicebus.windows.net", - "g19-prod-sn3-007-sb.servicebus.windows.net", - "g19-prod-sn3-010-sb.servicebus.windows.net", - "g19-prod-sn3-011-sb.servicebus.windows.net", - "g20-prod-ch3-003-sb.servicebus.windows.net", - "g20-prod-ch3-004-sb.servicebus.windows.net", - "g20-prod-ch3-006-sb.servicebus.windows.net", - "g20-prod-ch3-007-sb.servicebus.windows.net", - "g20-prod-sn3-003-sb.servicebus.windows.net", - "g20-prod-sn3-004-sb.servicebus.windows.net", - "g20-prod-sn3-005-sb.servicebus.windows.net", - "g20-prod-sn3-006-sb.servicebus.windows.net", - "g20-prod-sn3-007-sb.servicebus.windows.net", - "g20-prod-sn3-010-sb.servicebus.windows.net", - "g20-prod-sn3-011-sb.servicebus.windows.net", - "g21-prod-ch3-003-sb.servicebus.windows.net", - "g21-prod-ch3-004-sb.servicebus.windows.net", - "g21-prod-ch3-006-sb.servicebus.windows.net", - "g21-prod-ch3-007-sb.servicebus.windows.net", - "g21-prod-sn3-003-sb.servicebus.windows.net", - "g21-prod-sn3-004-sb.servicebus.windows.net", - "g21-prod-sn3-005-sb.servicebus.windows.net", - "g21-prod-sn3-006-sb.servicebus.windows.net", - "g21-prod-sn3-007-sb.servicebus.windows.net", - "g21-prod-sn3-010-sb.servicebus.windows.net", - "g21-prod-sn3-011-sb.servicebus.windows.net", - "g22-prod-ch3-003-sb.servicebus.windows.net", - "g22-prod-ch3-004-sb.servicebus.windows.net", - "g22-prod-ch3-006-sb.servicebus.windows.net", - "g22-prod-ch3-007-sb.servicebus.windows.net", - "g22-prod-sn3-003-sb.servicebus.windows.net", - "g22-prod-sn3-004-sb.servicebus.windows.net", - "g22-prod-sn3-005-sb.servicebus.windows.net", - "g22-prod-sn3-006-sb.servicebus.windows.net", - "g22-prod-sn3-007-sb.servicebus.windows.net", - "g22-prod-sn3-010-sb.servicebus.windows.net", - "g22-prod-sn3-011-sb.servicebus.windows.net", - "g23-prod-ch3-003-sb.servicebus.windows.net", - "g23-prod-ch3-004-sb.servicebus.windows.net", - "g23-prod-ch3-006-sb.servicebus.windows.net", - "g23-prod-ch3-007-sb.servicebus.windows.net", - "g23-prod-sn3-003-sb.servicebus.windows.net", - "g23-prod-sn3-004-sb.servicebus.windows.net", - "g23-prod-sn3-005-sb.servicebus.windows.net", - "g23-prod-sn3-006-sb.servicebus.windows.net", - "g23-prod-sn3-007-sb.servicebus.windows.net", - "g23-prod-sn3-010-sb.servicebus.windows.net", - "g23-prod-sn3-011-sb.servicebus.windows.net", - "g24-prod-ch3-003-sb.servicebus.windows.net", - "g24-prod-ch3-004-sb.servicebus.windows.net", - "g24-prod-ch3-006-sb.servicebus.windows.net", - "g24-prod-ch3-007-sb.servicebus.windows.net", - 
"g24-prod-sn3-003-sb.servicebus.windows.net", - "g24-prod-sn3-004-sb.servicebus.windows.net", - "g24-prod-sn3-005-sb.servicebus.windows.net", - "g24-prod-sn3-006-sb.servicebus.windows.net", - "g24-prod-sn3-007-sb.servicebus.windows.net", - "g24-prod-sn3-010-sb.servicebus.windows.net", - "g24-prod-sn3-011-sb.servicebus.windows.net", - "g25-prod-ch3-003-sb.servicebus.windows.net", - "g25-prod-ch3-004-sb.servicebus.windows.net", - "g25-prod-ch3-006-sb.servicebus.windows.net", - "g25-prod-ch3-007-sb.servicebus.windows.net", - "g25-prod-sn3-003-sb.servicebus.windows.net", - "g25-prod-sn3-004-sb.servicebus.windows.net", - "g25-prod-sn3-005-sb.servicebus.windows.net", - "g25-prod-sn3-006-sb.servicebus.windows.net", - "g25-prod-sn3-007-sb.servicebus.windows.net", - "g25-prod-sn3-010-sb.servicebus.windows.net", - "g25-prod-sn3-011-sb.servicebus.windows.net", - "g26-prod-ch3-003-sb.servicebus.windows.net", - "g26-prod-ch3-004-sb.servicebus.windows.net", - "g26-prod-ch3-006-sb.servicebus.windows.net", - "g26-prod-ch3-007-sb.servicebus.windows.net", - "g26-prod-sn3-003-sb.servicebus.windows.net", - "g26-prod-sn3-004-sb.servicebus.windows.net", - "g26-prod-sn3-005-sb.servicebus.windows.net", - "g26-prod-sn3-006-sb.servicebus.windows.net", - "g26-prod-sn3-007-sb.servicebus.windows.net", - "g26-prod-sn3-010-sb.servicebus.windows.net", - "g26-prod-sn3-011-sb.servicebus.windows.net", - "g27-prod-ch3-003-sb.servicebus.windows.net", - "g27-prod-ch3-004-sb.servicebus.windows.net", - "g27-prod-ch3-006-sb.servicebus.windows.net", - "g27-prod-ch3-007-sb.servicebus.windows.net", - "g27-prod-sn3-003-sb.servicebus.windows.net", - "g27-prod-sn3-004-sb.servicebus.windows.net", - "g27-prod-sn3-005-sb.servicebus.windows.net", - "g27-prod-sn3-006-sb.servicebus.windows.net", - "g27-prod-sn3-007-sb.servicebus.windows.net", - "g27-prod-sn3-010-sb.servicebus.windows.net", - "g27-prod-sn3-011-sb.servicebus.windows.net", - "g28-prod-ch3-003-sb.servicebus.windows.net", - "g28-prod-ch3-004-sb.servicebus.windows.net", - "g28-prod-ch3-006-sb.servicebus.windows.net", - "g28-prod-ch3-007-sb.servicebus.windows.net", - "g28-prod-sn3-003-sb.servicebus.windows.net", - "g28-prod-sn3-004-sb.servicebus.windows.net", - "g28-prod-sn3-005-sb.servicebus.windows.net", - "g28-prod-sn3-006-sb.servicebus.windows.net", - "g28-prod-sn3-007-sb.servicebus.windows.net", - "g28-prod-sn3-010-sb.servicebus.windows.net", - "g28-prod-sn3-011-sb.servicebus.windows.net", - "g29-prod-ch3-003-sb.servicebus.windows.net", - "g29-prod-ch3-004-sb.servicebus.windows.net", - "g29-prod-ch3-006-sb.servicebus.windows.net", - "g29-prod-ch3-007-sb.servicebus.windows.net", - "g29-prod-sn3-003-sb.servicebus.windows.net", - "g29-prod-sn3-004-sb.servicebus.windows.net", - "g29-prod-sn3-005-sb.servicebus.windows.net", - "g29-prod-sn3-006-sb.servicebus.windows.net", - "g29-prod-sn3-007-sb.servicebus.windows.net", - "g29-prod-sn3-010-sb.servicebus.windows.net", - "g29-prod-sn3-011-sb.servicebus.windows.net", - "g30-prod-ch3-003-sb.servicebus.windows.net", - "g30-prod-ch3-004-sb.servicebus.windows.net", - "g30-prod-ch3-006-sb.servicebus.windows.net", - "g30-prod-ch3-007-sb.servicebus.windows.net", - "g30-prod-sn3-003-sb.servicebus.windows.net", - "g30-prod-sn3-004-sb.servicebus.windows.net", - "g30-prod-sn3-005-sb.servicebus.windows.net", - "g30-prod-sn3-006-sb.servicebus.windows.net", - "g30-prod-sn3-007-sb.servicebus.windows.net", - "g30-prod-sn3-010-sb.servicebus.windows.net", - "g30-prod-sn3-011-sb.servicebus.windows.net", - "g31-prod-ch3-003-sb.servicebus.windows.net", - 
"g31-prod-ch3-004-sb.servicebus.windows.net", - "g31-prod-ch3-006-sb.servicebus.windows.net", - "g31-prod-ch3-007-sb.servicebus.windows.net", - "g31-prod-sn3-003-sb.servicebus.windows.net", - "g31-prod-sn3-004-sb.servicebus.windows.net", - "g31-prod-sn3-005-sb.servicebus.windows.net", - "g31-prod-sn3-006-sb.servicebus.windows.net", - "g31-prod-sn3-007-sb.servicebus.windows.net", - "g31-prod-sn3-010-sb.servicebus.windows.net", - "g31-prod-sn3-011-sb.servicebus.windows.net", - "g32-prod-ch3-003-sb.servicebus.windows.net", - "g32-prod-ch3-004-sb.servicebus.windows.net", - "g32-prod-ch3-006-sb.servicebus.windows.net", - "g32-prod-ch3-007-sb.servicebus.windows.net", - "g32-prod-sn3-003-sb.servicebus.windows.net", - "g32-prod-sn3-004-sb.servicebus.windows.net", - "g32-prod-sn3-005-sb.servicebus.windows.net", - "g32-prod-sn3-006-sb.servicebus.windows.net", - "g32-prod-sn3-007-sb.servicebus.windows.net", - "g32-prod-sn3-010-sb.servicebus.windows.net", - "g32-prod-sn3-011-sb.servicebus.windows.net", - "g33-prod-ch3-003-sb.servicebus.windows.net", - "g33-prod-ch3-004-sb.servicebus.windows.net", - "g33-prod-ch3-006-sb.servicebus.windows.net", - "g33-prod-ch3-007-sb.servicebus.windows.net", - "g33-prod-sn3-003-sb.servicebus.windows.net", - "g33-prod-sn3-004-sb.servicebus.windows.net", - "g33-prod-sn3-005-sb.servicebus.windows.net", - "g33-prod-sn3-006-sb.servicebus.windows.net", - "g33-prod-sn3-007-sb.servicebus.windows.net", - "g33-prod-sn3-010-sb.servicebus.windows.net", - "g33-prod-sn3-011-sb.servicebus.windows.net", - "g34-prod-ch3-003-sb.servicebus.windows.net", - "g34-prod-ch3-004-sb.servicebus.windows.net", - "g34-prod-ch3-006-sb.servicebus.windows.net", - "g34-prod-ch3-007-sb.servicebus.windows.net", - "g34-prod-sn3-003-sb.servicebus.windows.net", - "g34-prod-sn3-004-sb.servicebus.windows.net", - "g34-prod-sn3-005-sb.servicebus.windows.net", - "g34-prod-sn3-006-sb.servicebus.windows.net", - "g34-prod-sn3-007-sb.servicebus.windows.net", - "g34-prod-sn3-010-sb.servicebus.windows.net", - "g34-prod-sn3-011-sb.servicebus.windows.net", - "g35-prod-ch3-003-sb.servicebus.windows.net", - "g35-prod-ch3-004-sb.servicebus.windows.net", - "g35-prod-ch3-006-sb.servicebus.windows.net", - "g35-prod-ch3-007-sb.servicebus.windows.net", - "g35-prod-sn3-003-sb.servicebus.windows.net", - "g35-prod-sn3-004-sb.servicebus.windows.net", - "g35-prod-sn3-005-sb.servicebus.windows.net", - "g35-prod-sn3-006-sb.servicebus.windows.net", - "g35-prod-sn3-007-sb.servicebus.windows.net", - "g35-prod-sn3-010-sb.servicebus.windows.net", - "g35-prod-sn3-011-sb.servicebus.windows.net", - "g36-prod-ch3-003-sb.servicebus.windows.net", - "g36-prod-ch3-004-sb.servicebus.windows.net", - "g36-prod-ch3-006-sb.servicebus.windows.net", - "g36-prod-ch3-007-sb.servicebus.windows.net", - "g36-prod-sn3-003-sb.servicebus.windows.net", - "g36-prod-sn3-004-sb.servicebus.windows.net", - "g36-prod-sn3-005-sb.servicebus.windows.net", - "g36-prod-sn3-006-sb.servicebus.windows.net", - "g36-prod-sn3-007-sb.servicebus.windows.net", - "g36-prod-sn3-010-sb.servicebus.windows.net", - "g36-prod-sn3-011-sb.servicebus.windows.net", - "g37-prod-ch3-003-sb.servicebus.windows.net", - "g37-prod-ch3-004-sb.servicebus.windows.net", - "g37-prod-ch3-006-sb.servicebus.windows.net", - "g37-prod-ch3-007-sb.servicebus.windows.net", - "g37-prod-sn3-003-sb.servicebus.windows.net", - "g37-prod-sn3-004-sb.servicebus.windows.net", - "g37-prod-sn3-005-sb.servicebus.windows.net", - "g37-prod-sn3-006-sb.servicebus.windows.net", - "g37-prod-sn3-007-sb.servicebus.windows.net", - 
"g37-prod-sn3-010-sb.servicebus.windows.net", - "g37-prod-sn3-011-sb.servicebus.windows.net", - "g38-prod-ch3-003-sb.servicebus.windows.net", - "g38-prod-ch3-004-sb.servicebus.windows.net", - "g38-prod-ch3-006-sb.servicebus.windows.net", - "g38-prod-ch3-007-sb.servicebus.windows.net", - "g38-prod-sn3-003-sb.servicebus.windows.net", - "g38-prod-sn3-004-sb.servicebus.windows.net", - "g38-prod-sn3-005-sb.servicebus.windows.net", - "g38-prod-sn3-006-sb.servicebus.windows.net", - "g38-prod-sn3-007-sb.servicebus.windows.net", - "g38-prod-sn3-010-sb.servicebus.windows.net", - "g38-prod-sn3-011-sb.servicebus.windows.net", - "g39-prod-ch3-003-sb.servicebus.windows.net", - "g39-prod-ch3-004-sb.servicebus.windows.net", - "g39-prod-ch3-006-sb.servicebus.windows.net", - "g39-prod-ch3-007-sb.servicebus.windows.net", - "g39-prod-sn3-003-sb.servicebus.windows.net", - "g39-prod-sn3-004-sb.servicebus.windows.net", - "g39-prod-sn3-005-sb.servicebus.windows.net", - "g39-prod-sn3-006-sb.servicebus.windows.net", - "g39-prod-sn3-007-sb.servicebus.windows.net", - "g39-prod-sn3-010-sb.servicebus.windows.net", - "g39-prod-sn3-011-sb.servicebus.windows.net", - "g40-prod-ch3-003-sb.servicebus.windows.net", - "g40-prod-ch3-007-sb.servicebus.windows.net", - "g40-prod-sn3-003-sb.servicebus.windows.net", - "g40-prod-sn3-004-sb.servicebus.windows.net", - "g40-prod-sn3-005-sb.servicebus.windows.net", - "g40-prod-sn3-006-sb.servicebus.windows.net", - "g40-prod-sn3-007-sb.servicebus.windows.net", - "g40-prod-sn3-010-sb.servicebus.windows.net", - "g41-prod-ch3-003-sb.servicebus.windows.net", - "g41-prod-ch3-007-sb.servicebus.windows.net", - "g41-prod-sn3-003-sb.servicebus.windows.net", - "g41-prod-sn3-004-sb.servicebus.windows.net", - "g41-prod-sn3-005-sb.servicebus.windows.net", - "g41-prod-sn3-006-sb.servicebus.windows.net", - "g41-prod-sn3-007-sb.servicebus.windows.net", - "g41-prod-sn3-010-sb.servicebus.windows.net", - "g42-prod-ch3-003-sb.servicebus.windows.net", - "g42-prod-ch3-007-sb.servicebus.windows.net", - "g42-prod-sn3-003-sb.servicebus.windows.net", - "g42-prod-sn3-004-sb.servicebus.windows.net", - "g42-prod-sn3-005-sb.servicebus.windows.net", - "g42-prod-sn3-006-sb.servicebus.windows.net", - "g42-prod-sn3-007-sb.servicebus.windows.net", - "g42-prod-sn3-010-sb.servicebus.windows.net", - "g43-prod-ch3-003-sb.servicebus.windows.net", - "g43-prod-ch3-007-sb.servicebus.windows.net", - "g43-prod-sn3-003-sb.servicebus.windows.net", - "g43-prod-sn3-004-sb.servicebus.windows.net", - "g43-prod-sn3-005-sb.servicebus.windows.net", - "g43-prod-sn3-006-sb.servicebus.windows.net", - "g43-prod-sn3-007-sb.servicebus.windows.net", - "g43-prod-sn3-010-sb.servicebus.windows.net", - "g44-prod-ch3-003-sb.servicebus.windows.net", - "g44-prod-ch3-007-sb.servicebus.windows.net", - "g44-prod-sn3-003-sb.servicebus.windows.net", - "g44-prod-sn3-004-sb.servicebus.windows.net", - "g44-prod-sn3-005-sb.servicebus.windows.net", - "g44-prod-sn3-006-sb.servicebus.windows.net", - "g44-prod-sn3-007-sb.servicebus.windows.net", - "g44-prod-sn3-010-sb.servicebus.windows.net", - "g45-prod-ch3-003-sb.servicebus.windows.net", - "g45-prod-ch3-007-sb.servicebus.windows.net", - "g45-prod-sn3-003-sb.servicebus.windows.net", - "g45-prod-sn3-004-sb.servicebus.windows.net", - "g45-prod-sn3-005-sb.servicebus.windows.net", - "g45-prod-sn3-006-sb.servicebus.windows.net", - "g45-prod-sn3-007-sb.servicebus.windows.net", - "g45-prod-sn3-010-sb.servicebus.windows.net", - "g46-prod-ch3-003-sb.servicebus.windows.net", - "g46-prod-ch3-007-sb.servicebus.windows.net", - 
"g46-prod-sn3-003-sb.servicebus.windows.net", - "g46-prod-sn3-004-sb.servicebus.windows.net", - "g46-prod-sn3-005-sb.servicebus.windows.net", - "g46-prod-sn3-006-sb.servicebus.windows.net", - "g46-prod-sn3-007-sb.servicebus.windows.net", - "g46-prod-sn3-010-sb.servicebus.windows.net", - "g47-prod-ch3-003-sb.servicebus.windows.net", - "g47-prod-ch3-007-sb.servicebus.windows.net", - "g47-prod-sn3-003-sb.servicebus.windows.net", - "g47-prod-sn3-004-sb.servicebus.windows.net", - "g47-prod-sn3-005-sb.servicebus.windows.net", - "g47-prod-sn3-006-sb.servicebus.windows.net", - "g47-prod-sn3-007-sb.servicebus.windows.net", - "g47-prod-sn3-010-sb.servicebus.windows.net", - "g48-prod-ch3-003-sb.servicebus.windows.net", - "g48-prod-ch3-007-sb.servicebus.windows.net", - "g48-prod-sn3-003-sb.servicebus.windows.net", - "g48-prod-sn3-004-sb.servicebus.windows.net", - "g48-prod-sn3-005-sb.servicebus.windows.net", - "g48-prod-sn3-006-sb.servicebus.windows.net", - "g48-prod-sn3-010-sb.servicebus.windows.net", - "g49-prod-ch3-003-sb.servicebus.windows.net", - "g49-prod-ch3-007-sb.servicebus.windows.net", - "g49-prod-sn3-003-sb.servicebus.windows.net", - "g49-prod-sn3-004-sb.servicebus.windows.net", - "g49-prod-sn3-005-sb.servicebus.windows.net", - "g49-prod-sn3-006-sb.servicebus.windows.net", - "g49-prod-sn3-010-sb.servicebus.windows.net", - "g50-prod-ch3-003-sb.servicebus.windows.net", - "g50-prod-ch3-007-sb.servicebus.windows.net", - "g50-prod-sn3-003-sb.servicebus.windows.net", - "g50-prod-sn3-004-sb.servicebus.windows.net", - "g50-prod-sn3-005-sb.servicebus.windows.net", - "g50-prod-sn3-006-sb.servicebus.windows.net", - "g50-prod-sn3-010-sb.servicebus.windows.net", - "g51-prod-ch3-003-sb.servicebus.windows.net", - "g51-prod-ch3-007-sb.servicebus.windows.net", - "g51-prod-sn3-003-sb.servicebus.windows.net", - "g51-prod-sn3-004-sb.servicebus.windows.net", - "g51-prod-sn3-005-sb.servicebus.windows.net", - "g51-prod-sn3-006-sb.servicebus.windows.net", - "g51-prod-sn3-010-sb.servicebus.windows.net", - "g52-prod-ch3-003-sb.servicebus.windows.net", - "g52-prod-ch3-007-sb.servicebus.windows.net", - "g52-prod-sn3-003-sb.servicebus.windows.net", - "g52-prod-sn3-004-sb.servicebus.windows.net", - "g52-prod-sn3-005-sb.servicebus.windows.net", - "g52-prod-sn3-006-sb.servicebus.windows.net", - "g52-prod-sn3-010-sb.servicebus.windows.net", - "g53-prod-ch3-003-sb.servicebus.windows.net", - "g53-prod-ch3-007-sb.servicebus.windows.net", - "g53-prod-sn3-003-sb.servicebus.windows.net", - "g53-prod-sn3-004-sb.servicebus.windows.net", - "g53-prod-sn3-005-sb.servicebus.windows.net", - "g53-prod-sn3-006-sb.servicebus.windows.net", - "g53-prod-sn3-010-sb.servicebus.windows.net", - "g54-prod-ch3-003-sb.servicebus.windows.net", - "g54-prod-ch3-007-sb.servicebus.windows.net", - "g54-prod-sn3-003-sb.servicebus.windows.net", - "g54-prod-sn3-004-sb.servicebus.windows.net", - "g54-prod-sn3-005-sb.servicebus.windows.net", - "g54-prod-sn3-006-sb.servicebus.windows.net", - "g54-prod-sn3-010-sb.servicebus.windows.net", - "g55-prod-ch3-003-sb.servicebus.windows.net", - "g55-prod-ch3-007-sb.servicebus.windows.net", - "g55-prod-sn3-003-sb.servicebus.windows.net", - "g55-prod-sn3-004-sb.servicebus.windows.net", - "g55-prod-sn3-005-sb.servicebus.windows.net", - "g55-prod-sn3-006-sb.servicebus.windows.net", - "g55-prod-sn3-010-sb.servicebus.windows.net", - "g56-prod-ch3-003-sb.servicebus.windows.net", - "g56-prod-ch3-007-sb.servicebus.windows.net", - "g56-prod-sn3-003-sb.servicebus.windows.net", - "g56-prod-sn3-004-sb.servicebus.windows.net", - 
"g56-prod-sn3-005-sb.servicebus.windows.net", - "g56-prod-sn3-006-sb.servicebus.windows.net", - "g56-prod-sn3-010-sb.servicebus.windows.net", - "g57-prod-ch3-003-sb.servicebus.windows.net", - "g57-prod-ch3-007-sb.servicebus.windows.net", - "g57-prod-sn3-003-sb.servicebus.windows.net", - "g57-prod-sn3-004-sb.servicebus.windows.net", - "g57-prod-sn3-005-sb.servicebus.windows.net", - "g57-prod-sn3-006-sb.servicebus.windows.net", - "g57-prod-sn3-010-sb.servicebus.windows.net", - "g58-prod-ch3-003-sb.servicebus.windows.net", - "g58-prod-ch3-007-sb.servicebus.windows.net", - "g58-prod-sn3-003-sb.servicebus.windows.net", - "g58-prod-sn3-004-sb.servicebus.windows.net", - "g58-prod-sn3-005-sb.servicebus.windows.net", - "g58-prod-sn3-006-sb.servicebus.windows.net", - "g58-prod-sn3-010-sb.servicebus.windows.net", - "g59-prod-ch3-003-sb.servicebus.windows.net", - "g59-prod-ch3-007-sb.servicebus.windows.net", - "g59-prod-sn3-003-sb.servicebus.windows.net", - "g59-prod-sn3-004-sb.servicebus.windows.net", - "g59-prod-sn3-005-sb.servicebus.windows.net", - "g59-prod-sn3-006-sb.servicebus.windows.net", - "g59-prod-sn3-010-sb.servicebus.windows.net", - "g60-prod-ch3-003-sb.servicebus.windows.net", - "g60-prod-ch3-007-sb.servicebus.windows.net", - "g60-prod-sn3-003-sb.servicebus.windows.net", - "g60-prod-sn3-004-sb.servicebus.windows.net", - "g60-prod-sn3-005-sb.servicebus.windows.net", - "g60-prod-sn3-006-sb.servicebus.windows.net", - "g60-prod-sn3-010-sb.servicebus.windows.net", - "g61-prod-ch3-003-sb.servicebus.windows.net", - "g61-prod-ch3-007-sb.servicebus.windows.net", - "g61-prod-sn3-003-sb.servicebus.windows.net", - "g61-prod-sn3-004-sb.servicebus.windows.net", - "g61-prod-sn3-005-sb.servicebus.windows.net", - "g61-prod-sn3-006-sb.servicebus.windows.net", - "g61-prod-sn3-010-sb.servicebus.windows.net", - "g62-prod-ch3-003-sb.servicebus.windows.net", - "g62-prod-ch3-007-sb.servicebus.windows.net", - "g62-prod-sn3-003-sb.servicebus.windows.net", - "g62-prod-sn3-004-sb.servicebus.windows.net", - "g62-prod-sn3-005-sb.servicebus.windows.net", - "g62-prod-sn3-006-sb.servicebus.windows.net", - "g62-prod-sn3-010-sb.servicebus.windows.net", - "g63-prod-ch3-003-sb.servicebus.windows.net", - "g63-prod-ch3-007-sb.servicebus.windows.net", - "g63-prod-sn3-003-sb.servicebus.windows.net", - "g63-prod-sn3-004-sb.servicebus.windows.net", - "g63-prod-sn3-005-sb.servicebus.windows.net", - "g63-prod-sn3-006-sb.servicebus.windows.net", - "g63-prod-sn3-010-sb.servicebus.windows.net", - "g64-prod-sn3-010-sb.servicebus.windows.net", - "g65-prod-sn3-010-sb.servicebus.windows.net", - "g66-prod-sn3-010-sb.servicebus.windows.net", - "g67-prod-sn3-010-sb.servicebus.windows.net", - "g68-prod-sn3-010-sb.servicebus.windows.net", - "g69-prod-sn3-010-sb.servicebus.windows.net", - "g70-prod-sn3-010-sb.servicebus.windows.net", - "g71-prod-sn3-010-sb.servicebus.windows.net", - "g72-prod-sn3-010-sb.servicebus.windows.net", - "g73-prod-sn3-010-sb.servicebus.windows.net", - "g74-prod-sn3-010-sb.servicebus.windows.net", - "g75-prod-sn3-010-sb.servicebus.windows.net", - "g76-prod-sn3-010-sb.servicebus.windows.net", - "g77-prod-sn3-010-sb.servicebus.windows.net", - "g78-prod-sn3-010-sb.servicebus.windows.net", - "g79-prod-sn3-010-sb.servicebus.windows.net", - "g80-prod-sn3-010-sb.servicebus.windows.net", - "g81-prod-sn3-010-sb.servicebus.windows.net", - "g82-prod-sn3-010-sb.servicebus.windows.net", - "g83-prod-sn3-010-sb.servicebus.windows.net", - "g84-prod-sn3-010-sb.servicebus.windows.net", - "g85-prod-sn3-010-sb.servicebus.windows.net", - 
"g86-prod-sn3-010-sb.servicebus.windows.net", - "g87-prod-sn3-010-sb.servicebus.windows.net", - "g88-prod-sn3-010-sb.servicebus.windows.net", - "g89-prod-sn3-010-sb.servicebus.windows.net", - "g90-prod-sn3-010-sb.servicebus.windows.net", - "g91-prod-sn3-010-sb.servicebus.windows.net", - "g92-prod-sn3-010-sb.servicebus.windows.net", - "g93-prod-sn3-010-sb.servicebus.windows.net", - "g94-prod-sn3-010-sb.servicebus.windows.net", - "g95-prod-sn3-010-sb.servicebus.windows.net", + "*-sb.servicebus.windows.net", "passwordreset.microsoftonline.com", + "ssprdedicatedsbprodeus2-1.servicebus.windows.net", + "ssprdedicatedsbprodfra-1.servicebus.windows.net", + "ssprdedicatedsbprodncu-2.servicebus.windows.net", "ssprdedicatedsbprodncu.servicebus.windows.net", - "ssprdedicatedsbprodscu.servicebus.windows.net" + "ssprdedicatedsbprodneu.servicebus.windows.net", + "ssprdedicatedsbprodscu-2.servicebus.windows.net", + "ssprdedicatedsbprodscu.servicebus.windows.net", + "ssprdedicatedsbprodsea-1.servicebus.windows.net", + "ssprdedicatedsbprodweu.servicebus.windows.net" ], "sourceAddresses": [ "{{network.vnet.subnets.identity.cidr}}" diff --git a/deployment/safe_haven_management_environment/setup/Deploy_SHM.ps1 b/deployment/safe_haven_management_environment/setup/Deploy_SHM.ps1 index 505e6dc0a8..bd79417d1c 100755 --- a/deployment/safe_haven_management_environment/setup/Deploy_SHM.ps1 +++ b/deployment/safe_haven_management_environment/setup/Deploy_SHM.ps1 @@ -1,6 +1,8 @@ param( [Parameter(Mandatory = $true, HelpMessage = "Enter SHM ID (e.g. use 'testa' for Turing Development Safe Haven A)")] - [string]$shmId + [string]$shmId, + [Parameter(Mandatory = $false, HelpMessage = "Use device authentication for connecting to Azure and Microsoft Graph")] + [switch]$UseDeviceAuthentication ) Import-Module Az.Accounts -ErrorAction Stop @@ -13,7 +15,11 @@ Import-Module $PSScriptRoot/../../common/Logging -Force -ErrorAction Stop # ---------------- if (Get-AzContext) { Disconnect-AzAccount | Out-Null } # force a refresh of the Azure token before starting Add-LogMessage -Level Info "Attempting to authenticate with Azure. Please sign in with an account with admin rights over the subscriptions you plan to use." -Connect-AzAccount -ErrorAction Stop | Out-Null +if ($UseDeviceAuthentication) { + Connect-AzAccount -UseDeviceAuthentication -ErrorAction Stop | Out-Null +} else { + Connect-AzAccount -ErrorAction Stop | Out-Null +} if (Get-AzContext) { Add-LogMessage -Level Success "Authenticated with Azure as $((Get-AzContext).Account.Id)" } else { @@ -25,7 +31,11 @@ if (Get-AzContext) { # -------------------------- if (Get-MgContext) { Disconnect-MgGraph | Out-Null } # force a refresh of the Microsoft Graph token before starting Add-LogMessage -Level Info "Attempting to authenticate with Microsoft Graph. Please sign in with an account with admin rights over the Azure Active Directory you plan to use." 
-Connect-MgGraph -TenantId $config.azureAdTenantId -Scopes "User.ReadWrite.All", "UserAuthenticationMethod.ReadWrite.All", "Directory.AccessAsUser.All", "RoleManagement.ReadWrite.Directory" -ErrorAction Stop -ContextScope Process | Out-Null +if ($UseDeviceAuthentication) { + Connect-MgGraph -TenantId $config.azureAdTenantId -Scopes "User.ReadWrite.All", "UserAuthenticationMethod.ReadWrite.All", "Directory.AccessAsUser.All", "RoleManagement.ReadWrite.Directory" -ErrorAction Stop -ContextScope Process -UseDeviceAuthentication +} else { + Connect-MgGraph -TenantId $config.azureAdTenantId -Scopes "User.ReadWrite.All", "UserAuthenticationMethod.ReadWrite.All", "Directory.AccessAsUser.All", "RoleManagement.ReadWrite.Directory" -ErrorAction Stop -ContextScope Process | Out-Null +} if (Get-MgContext) { Add-LogMessage -Level Success "Authenticated with Microsoft Graph as $((Get-MgContext).Account)" } else { diff --git a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-1804.mustache.yaml b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-1804.mustache.yaml index 3fe9203050..8f450ead3d 100644 --- a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-1804.mustache.yaml +++ b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-1804.mustache.yaml @@ -114,15 +114,10 @@ write_files: content: | {{packages-python.yaml}} - - path: "/opt/build/pyenv/pyproject_template.toml" - permissions: "0400" - content: | - {{pyenv_pyproject_template.toml}} - - path: "/opt/build/rstudio.debinfo" permissions: "0400" content: | - {{deb-rstudio-bionic.version}} + {{deb-rstudio-focal.version}} - path: "/opt/monitoring/analyse_build.py" permissions: "0755" @@ -380,7 +375,8 @@ runcmd: # Remove netcat (potential security issue) [Note this will remove the 'ubuntu-minimal' metapackage but does not remove any other real packages] # Remove xscreensaver (unnecessary) - apt-get remove -y netcat-openbsd xscreensaver - # Remove any unused auto-installed packages + # Fix any broken packages then clean and remove any unused packages + - apt-get --fix-broken install -y - apt-get autoclean -y - apt-get autoremove -y --purge - apt-get clean diff --git a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2004.mustache.yaml b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2004.mustache.yaml index d746ce58b1..575de168d0 100644 --- a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2004.mustache.yaml +++ b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2004.mustache.yaml @@ -114,15 +114,10 @@ write_files: content: | {{packages-python.yaml}} - - path: "/opt/build/pyenv/pyproject_template.toml" - permissions: "0400" - content: | - {{pyenv_pyproject_template.toml}} - - path: "/opt/build/rstudio.debinfo" permissions: "0400" content: | - {{deb-rstudio-bionic.version}} + {{deb-rstudio-focal.version}} - path: "/opt/monitoring/analyse_build.py" permissions: "0755" @@ -381,6 +376,10 @@ runcmd: # Remove netcat (potential security issue) [Note this will remove the 'ubuntu-minimal' metapackage but does not remove any other real packages] # Remove xscreensaver (unnecessary) - apt-get remove -y netcat-openbsd xscreensaver + # Fix any broken packages then clean and remove any unused packages + - apt-get --fix-broken install -y + - apt-get autoclean -y + - apt-get autoremove -y --purge - apt-get clean # Remove temporary files - tmpreaper 10m /tmp/ /var/tmp/ # 
remove temporary files that have not been accessed in 10 minutes diff --git a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2204.mustache.yaml b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2204.mustache.yaml index dcc987b3ca..059dece327 100644 --- a/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2204.mustache.yaml +++ b/deployment/secure_research_desktop/cloud_init/cloud-init-buildimage-ubuntu-2204.mustache.yaml @@ -118,11 +118,6 @@ write_files: content: | {{packages-python.yaml}} - - path: "/opt/build/pyenv/pyproject_template.toml" - permissions: "0400" - content: | - {{pyenv_pyproject_template.toml}} - - path: "/opt/build/rbase.debinfo" permissions: "0400" content: | @@ -387,7 +382,8 @@ runcmd: # Remove netcat (potential security issue) [Note this will remove the 'ubuntu-minimal' metapackage but does not remove any other real packages] # Remove xscreensaver (unnecessary) - apt-get remove -y netcat-openbsd xscreensaver - # Remove any unused auto-installed packages + # Fix any broken packages then clean and remove any unused packages + - apt-get --fix-broken install -y - apt-get autoclean -y - apt-get autoremove -y --purge - apt-get clean diff --git a/deployment/secure_research_desktop/cloud_init/resources/analyse_build.py b/deployment/secure_research_desktop/cloud_init/resources/analyse_build.py index 2b0b846c55..1bd867932a 100644 --- a/deployment/secure_research_desktop/cloud_init/resources/analyse_build.py +++ b/deployment/secure_research_desktop/cloud_init/resources/analyse_build.py @@ -146,12 +146,12 @@ def main(): with suppress(FileNotFoundError): for fname in glob.glob("/opt/monitoring/python-*-safety-check.json"): with open(fname, "r") as f_safety_check: - packages = json.load(f_safety_check) - if packages: + output = json.load(f_safety_check) + if output["affected_packages"]: python_version = fname.split("-")[1] log(timestamp, "WARNING", f"Safety check found problems with Python {python_version}") - for package in packages: - log(timestamp, "WARNING", f"... {package[0]} [{package[2]}] is affected by issue {package[4]} (for versions {package[1]})") + for package in output["vulnerabilities"]: + log(timestamp, "WARNING", f"... {package['package_name']} [{package['analyzed_version']}] is affected by issue {package['vulnerability_id']} (for versions {package['all_vulnerable_specs']})") if __name__ == "__main__": diff --git a/deployment/secure_research_desktop/cloud_init/resources/pyenv_install.sh b/deployment/secure_research_desktop/cloud_init/resources/pyenv_install.sh index 555c46c14f..56898d3915 100644 --- a/deployment/secure_research_desktop/cloud_init/resources/pyenv_install.sh +++ b/deployment/secure_research_desktop/cloud_init/resources/pyenv_install.sh @@ -9,7 +9,6 @@ if [ $# -ne 1 ]; then fi PYTHON_VERSION=$1 PYENV_ROOT="$(pyenv root)" -PYPROJECT_TOML="/opt/build/python-${PYTHON_VERSION}-pyproject.toml" MONITORING_LOG="/opt/monitoring/python-${PYTHON_VERSION}-package-versions.log" REQUIREMENTS_TXT="/opt/build/python-${PYTHON_VERSION}-requirements.txt" REQUESTED_PACKAGE_LIST="/opt/build/packages/packages-python-${PYTHON_VERSION}.list" @@ -27,24 +26,19 @@ echo "Installed $(${EXE_PATH}/python --version)" # Install and upgrade installation prerequisites # ---------------------------------------------- echo "Installing and upgrading installation prerequisites for Python ${PYTHON_VERSION}..." 
-${EXE_PATH}/pip install --upgrade pip poetry +${EXE_PATH}/pip install --upgrade pip pip-tools setuptools -# Solve dependencies and install using poetry -# ------------------------------------------- -echo "Installing packages with poetry..." -${EXE_PATH}/poetry config virtualenvs.create false -${EXE_PATH}/poetry config virtualenvs.in-project true -rm poetry.lock pyproject.toml 2> /dev/null -sed -e "s/PYTHON_VERSION/$PYTHON_VERSION/" /opt/build/pyenv/pyproject_template.toml > $PYPROJECT_TOML -ln -s $PYPROJECT_TOML pyproject.toml -${EXE_PATH}/poetry add $(tr '\n' ' ' < $REQUIREMENTS_TXT) || exit 3 +# Solve dependencies and write package versions to monitoring log +# --------------------------------------------------------------- +echo "Determining package versions with pip-compile..." +${EXE_PATH}/pip-compile -o "$MONITORING_LOG" "$REQUIREMENTS_TXT" -# Write package versions to monitoring log -# ---------------------------------------- -${EXE_PATH}/poetry show > $MONITORING_LOG -${EXE_PATH}/poetry show --tree >> $MONITORING_LOG +# Install pinned packages using pip +# --------------------------------- +echo "Installing packages with pip..." +${EXE_PATH}/pip install -r "$MONITORING_LOG" # Run any post-install commands diff --git a/deployment/secure_research_desktop/cloud_init/resources/pyenv_pyproject_template.toml b/deployment/secure_research_desktop/cloud_init/resources/pyenv_pyproject_template.toml deleted file mode 100644 index 3f0998952a..0000000000 --- a/deployment/secure_research_desktop/cloud_init/resources/pyenv_pyproject_template.toml +++ /dev/null @@ -1,12 +0,0 @@ -[tool.poetry] -name = "Python PYTHON_VERSION" -version = "1.0.0" -description = "Python PYTHON_VERSION" -authors = ["ROOT "] - -[tool.poetry.dependencies] -python = "PYTHON_VERSION" - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" diff --git a/deployment/secure_research_desktop/packages/deb-azuredatastudio.version b/deployment/secure_research_desktop/packages/deb-azuredatastudio.version index 6d6c00f6ed..19844aca82 100644 --- a/deployment/secure_research_desktop/packages/deb-azuredatastudio.version +++ b/deployment/secure_research_desktop/packages/deb-azuredatastudio.version @@ -1,4 +1,4 @@ -hash: 53321d293b9afd130eba59546e95a65ed3167019708455168f892df4cb07fa2a -version: 1.40.2 +hash: f413c7e1315516dc37c2a7d0c31cece0663556266dcb3a81aa9c402620a5c236 +version: 1.44.1 debfile: azuredatastudio-linux-|VERSION|.deb -remote: https://sqlopsbuilds.azureedge.net/stable/661384637db384fe5d4e6224069adbe708580b16/|DEBFILE| +remote: https://sqlopsbuilds.azureedge.net/stable/8f53a316fa00a98264f1ab119641cd540b5af25c/|DEBFILE| diff --git a/deployment/secure_research_desktop/packages/deb-rstudio-bionic.version b/deployment/secure_research_desktop/packages/deb-rstudio-bionic.version deleted file mode 100644 index a70cb17fe1..0000000000 --- a/deployment/secure_research_desktop/packages/deb-rstudio-bionic.version +++ /dev/null @@ -1,4 +0,0 @@ -hash: 23cae58f8ed1a7d7b96b266287a2fde67871b112339bbb95a203c6e672920083 -version: 2022.12.0-353 -debfile: rstudio-|VERSION|-amd64.deb -remote: https://download1.rstudio.org/electron/bionic/amd64/|DEBFILE| diff --git a/deployment/secure_research_desktop/packages/deb-rstudio-focal.version b/deployment/secure_research_desktop/packages/deb-rstudio-focal.version new file mode 100644 index 0000000000..fec4214082 --- /dev/null +++ b/deployment/secure_research_desktop/packages/deb-rstudio-focal.version @@ -0,0 +1,4 @@ +hash: 
49e24a6956f9f12ffeded493f571cd39f3e6c89411fc60d3bb228661793320da +version: 2023.06.1-524 +debfile: rstudio-|VERSION|-amd64.deb +remote: https://download1.rstudio.org/electron/focal/amd64/|DEBFILE| diff --git a/deployment/secure_research_desktop/packages/deb-rstudio-jammy.version b/deployment/secure_research_desktop/packages/deb-rstudio-jammy.version index 70e6f4ffe8..6c96a9ac17 100644 --- a/deployment/secure_research_desktop/packages/deb-rstudio-jammy.version +++ b/deployment/secure_research_desktop/packages/deb-rstudio-jammy.version @@ -1,4 +1,4 @@ -hash: 8bc3f84dd3ad701e43bc4fac0a5c24066c8e08a9345821cceb012514be242221 -version: 2022.12.0-353 +hash: c030ec8338f1c76b3ae27997ec4411a0af43b2367dedb3d48e95c319b5425698 +version: 2023.06.1-524 debfile: rstudio-|VERSION|-amd64.deb remote: https://download1.rstudio.org/electron/jammy/amd64/|DEBFILE| diff --git a/deployment/secure_research_desktop/packages/packages-python.yaml b/deployment/secure_research_desktop/packages/packages-python.yaml index 184a7f6fad..d520b3d60e 100644 --- a/deployment/secure_research_desktop/packages/packages-python.yaml +++ b/deployment/secure_research_desktop/packages/packages-python.yaml @@ -6,9 +6,9 @@ # [Optional] arguments to each package # : versions: - - "3.8" - "3.9" - "3.10" + - "3.11" packages: arviz: beautifulsoup4: @@ -25,21 +25,22 @@ packages: html5lib: ipykernel: keras: + "3.11": ["uninstallable"] # requires tensorflow lightgbm: - llvmlite: - "all": [">0.35.0"] # specify llvmlite to increase chance of getting a pre-compiled version lxml: matplotlib: nltk: + numba: + "3.11": ["uninstallable"] # does not yet support 3.11 numpy: - "all": [">1.0.0"] # allow flexibility in numpy version + "all": [">1.0.0"] # increase solver flexibility pandas: pandasql: pathos: pg8000: Pillow: + pip-tools: plotly: - poetry: # also used by installation scripts prophet: psycopg2: pydot: @@ -55,8 +56,8 @@ packages: regex: requests: safety: # also used by installation scripts - "all": ["<2.1.1"] # 2.1.1 requires a version of 'packaging' that is incompatible with poetry scikit-image: + "all": [">0.10.0"] # increase solver flexibility scikit-learn: scipy: seaborn: @@ -65,16 +66,18 @@ packages: Sphinx: SQLAlchemy: statsmodels: - "all": ["<0.13.2"] # 0.13.2 requires a version of 'packaging' that is incompatible with poetry + "all": [">0.10.0"] # increase solver flexibility sympy: tables: - tensorboard: tensorflow: + "3.11": ["uninstallable"] # does not yet support 3.11 thinc: torch: torchvision: + "3.11": ["uninstallable"] # does not yet support 3.11 tsfresh: - "3.10": ["uninstallable"] + "3.11": ["uninstallable"] # requires numba wordcloud: + "3.11": ["uninstallable"] # does not yet support 3.11 xgboost: xlrd: diff --git a/deployment/secure_research_desktop/packages/packages-r-cran.list b/deployment/secure_research_desktop/packages/packages-r-cran.list index 8a7b1337c1..7cf7e374a6 100644 --- a/deployment/secure_research_desktop/packages/packages-r-cran.list +++ b/deployment/secure_research_desktop/packages/packages-r-cran.list @@ -1,3 +1,4 @@ +arrow BiocManager caret csv @@ -35,7 +36,6 @@ text2vec tidytext tidyverse tidyxl -tmap tsbox validate vroom diff --git a/deployment/secure_research_desktop/setup/Provision_Compute_VM.ps1 b/deployment/secure_research_desktop/setup/Provision_Compute_VM.ps1 index 61d4ee1db0..4f4315011a 100644 --- a/deployment/secure_research_desktop/setup/Provision_Compute_VM.ps1 +++ b/deployment/secure_research_desktop/setup/Provision_Compute_VM.ps1 @@ -81,7 +81,7 @@ $subnet = Deploy-Subnet -Name 
$config.srdImage.build.subnet.name -VirtualNetwork Add-LogMessage -Level Info "Ensure that build NSG '$($config.srdImage.build.nsg.name)' exists..." $buildNsg = Deploy-NetworkSecurityGroup -Name $config.srdImage.build.nsg.name -ResourceGroupName $config.srdImage.network.rg -Location $config.srdImage.location # Get list of IP addresses which are allowed to connect to the VM candidates -$existingRule = Get-AzNetworkSecurityRuleConfig -NetworkSecurityGroup $buildNsg | Where-Object { $_.Name -eq "AllowBuildAdminSshInbound" } +$existingRule = Get-AzNetworkSecurityRuleConfig -NetworkSecurityGroup $buildNsg | Where-Object { $_.Name -eq "AllowAdminApprovedSshInbound" } $allowedIpAddresses = @($config.srdImage.build.nsg.allowedIpAddresses) $allowedIpAddresses += $existingRule ? @($existingRule.SourceAddressPrefix) : @() $config["buildAdminIpAddresses"] = $allowedIpAddresses | Where-Object { $_ } | Sort-Object | Get-Unique diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-cocalc.mustache.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-cocalc.mustache.yaml index 43da1b27da..3b43fd408c 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-cocalc.mustache.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-cocalc.mustache.yaml @@ -161,7 +161,7 @@ disk_setup: overwrite: true fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: - [/dev/disk/azure/scsi1/lun1-part1, /data, ext4, "defaults,nofail"] diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-codimd.mustache.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-codimd.mustache.yaml index f2afe406c1..890c39d5e3 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-codimd.mustache.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-codimd.mustache.yaml @@ -132,7 +132,7 @@ disk_setup: overwrite: true fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: - [/dev/disk/azure/scsi1/lun1-part1, /data, ext4, "defaults,nofail"] diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.mustache.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.mustache.yaml index 8eefcfcbf6..b887420e2b 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.mustache.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-gitlab.mustache.yaml @@ -122,7 +122,7 @@ disk_setup: overwrite: true fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: - [/dev/disk/azure/scsi1/lun1-part1, /data, ext4, "defaults,nofail"] diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-postgres.mustache.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-postgres.mustache.yaml index 16bcb94fa9..2a6c8f707b 100644 --- a/deployment/secure_research_environment/cloud_init/cloud-init-postgres.mustache.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-postgres.mustache.yaml @@ -25,7 +25,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 - partition: 1 + partition: auto filesystem: ext4 mounts: diff --git a/deployment/secure_research_environment/cloud_init/cloud-init-srd.mustache.yaml b/deployment/secure_research_environment/cloud_init/cloud-init-srd.mustache.yaml index 12065238e6..3635a7a342 100644 --- 
a/deployment/secure_research_environment/cloud_init/cloud-init-srd.mustache.yaml +++ b/deployment/secure_research_environment/cloud_init/cloud-init-srd.mustache.yaml @@ -12,7 +12,7 @@ disk_setup: fs_setup: - device: /dev/disk/azure/scsi1/lun1 filesystem: ext4 - partition: 1 + partition: auto # Note that we do not include the blobfuse mounts here as these are controlled by systemd mounts: diff --git a/deployment/secure_research_environment/remote/create_databases/scripts/Lockdown_Sql_Server.ps1 b/deployment/secure_research_environment/remote/create_databases/scripts/Lockdown_Sql_Server.ps1 index a47ddba25b..101c9f2f7b 100644 --- a/deployment/secure_research_environment/remote/create_databases/scripts/Lockdown_Sql_Server.ps1 +++ b/deployment/secure_research_environment/remote/create_databases/scripts/Lockdown_Sql_Server.ps1 @@ -107,7 +107,7 @@ if ($operationFailed -Or (-Not $loginExists)) { # Create a DB user for each login group Write-Output "Ensuring that an SQL user exists for '$domainGroup' on: '$serverName'..." $sqlCommand = "IF NOT EXISTS(SELECT * FROM sys.database_principals WHERE name = '$domainGroup') CREATE USER [$domainGroup] FOR LOGIN [$domainGroup];" - Invoke-SqlCmd -ServerInstance $serverInstance -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query "$sqlCommand" -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? -And -Not $sqlErrorMessage) { Write-Output " [o] Ensured that '$domainGroup' user exists on: '$serverName'" Start-Sleep -s 10 # allow time for the database action to complete @@ -124,7 +124,7 @@ if ($operationFailed -Or (-Not $loginExists)) { foreach ($groupSchemaTuple in @(($DataAdminGroup, "data"), ($ResearchUsersGroup, "dbopublic"))) { $domainGroup, $schemaName = $groupSchemaTuple $sqlCommand = "IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = N'$schemaName') EXEC('CREATE SCHEMA $schemaName AUTHORIZATION [$domainGroup]');" - Invoke-SqlCmd -ServerInstance $serverInstance -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? -And -Not $sqlErrorMessage) { Write-Output " [o] Successfully ensured that '$schemaName' schema exists on: '$serverName'" Start-Sleep -s 10 # allow time for the database action to complete @@ -154,7 +154,7 @@ if ($operationFailed -Or (-Not $loginExists)) { Write-Output " [x] Role $role not recognised!" continue } - Invoke-SqlCmd -ServerInstance $serverInstance -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? 
-And -Not $sqlErrorMessage) { Write-Output " [o] Successfully gave '$domainGroup' $role permissions on: '$serverName'" Start-Sleep -s 10 # allow time for the database action to complete @@ -171,7 +171,7 @@ if ($operationFailed -Or (-Not $loginExists)) { # ------------------------------------ Write-Output "Running T-SQL lockdown script on: '$serverName'..." $sqlCommand = [System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($ServerLockdownCommandB64)) - Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? -And -Not $sqlErrorMessage) { Write-Output " [o] Successfully ran T-SQL lockdown script on: '$serverName'" } else { @@ -187,7 +187,7 @@ if ($operationFailed -Or (-Not $loginExists)) { $windowsAdmin = "${serverName}\${VmAdminUsername}" Write-Output "Removing database access from $windowsAdmin on: '$serverName'..." $sqlCommand = "DROP USER IF EXISTS [$windowsAdmin]; IF EXISTS(SELECT * FROM master.dbo.syslogins WHERE loginname = '$windowsAdmin') DROP LOGIN [$windowsAdmin]" - Invoke-SqlCmd -ServerInstance $serverInstance -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? -And -Not $sqlErrorMessage) { Write-Output " [o] Successfully removed database access for $windowsAdmin on: '$serverName'" Start-Sleep -s 10 # allow time for the database action to complete @@ -203,7 +203,7 @@ if ($operationFailed -Or (-Not $loginExists)) { # --------------------------------------------------------------------------------- Write-Output "Revoking sysadmin role from $DbAdminUsername on: '$serverName'..." $sqlCommand = "ALTER SERVER ROLE sysadmin DROP MEMBER $DbAdminUsername;" - Invoke-SqlCmd -ServerInstance $serverInstance -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true + Invoke-SqlCmd -ServerInstance $serverName -Credential $sqlAdminCredentials -QueryTimeout $connectionTimeoutInSeconds -Query $sqlCommand -TrustServerCertificate -ErrorAction SilentlyContinue -ErrorVariable sqlErrorMessage -OutputSqlErrors $true if ($? 
-And -Not $sqlErrorMessage) { Write-Output " [o] Successfully revoked sysadmin role on: '$serverName'" Start-Sleep -s 10 # allow time for the database action to complete diff --git a/deployment/secure_research_environment/setup/Configure_External_DNS_Queries.ps1 b/deployment/secure_research_environment/setup/Configure_External_DNS_Queries.ps1 index ba04a8a0ac..51558b00b5 100644 --- a/deployment/secure_research_environment/setup/Configure_External_DNS_Queries.ps1 +++ b/deployment/secure_research_environment/setup/Configure_External_DNS_Queries.ps1 @@ -26,10 +26,11 @@ $null = Set-AzContext -SubscriptionId $config.sre.subscriptionName -ErrorAction # -------------------------------------- $firewallRules = Get-JsonFromMustacheTemplate -TemplatePath (Join-Path $PSScriptRoot ".." ".." "safe_haven_management_environment" "network_rules" "shm-firewall-rules.json") -Parameters $config.shm -AsHashtable $allowedFqdns = @($firewallRules.applicationRuleCollections | ForEach-Object { $_.properties.rules.targetFqdns }) + - @(Get-PrivateDnsZones -ResourceGroupName $config.shm.network.vnet.rg -SubscriptionName $config.shm.subscriptionName | ForEach-Object { $_.Name }) + @(Get-PrivateDnsZones -ResourceGroupName $config.shm.network.vnet.rg -SubscriptionName $config.shm.subscriptionName | ForEach-Object { $_.Name }) + + @("docker.io") # List all unique FQDNs $allowedFqdns = $allowedFqdns | - Where-Object { $_ -notlike "g*.servicebus.windows.net" } | # Remove AzureADConnect password reset endpoints + Where-Object { $_ -notlike "*-sb.servicebus.windows.net" } | # Remove AzureADConnect password reset endpoints Where-Object { $_ -notlike "pksproddatastore*.blob.core.windows.net" } | # Remove AzureAD operations endpoints Sort-Object -Unique Add-LogMessage -Level Info "Restricted networks will be allowed to run DNS lookup on the following $($allowedFqdns.Count) FQDNs:" @@ -81,7 +82,8 @@ Add-LogMessage -Level Info "Looking for SRD with IP address '$vmIpAddress'..." if (-not $vmIpAddress) { Add-LogMessage -Level Fatal "No SRD found with IP address '$vmIpAddress'. Cannot run test to confirm external DNS resolution." } else { - $vmName = @(Get-AzNetworkInterface | Where-Object { $_.IpConfigurations.PrivateIpAddress -eq $vmIpAddress } | ForEach-Object { $_.VirtualMachine.Id.Split("/")[-1] })[0] + # Match on IP address within the appropriate SRE resource group + $vmName = @(Get-AzNetworkInterface -ResourceGroupName $config.sre.srd.rg | Where-Object { $_.IpConfigurations.PrivateIpAddress -eq $vmIpAddress } | ForEach-Object { $_.VirtualMachine.Id.Split("/")[-1] })[0] Add-LogMessage -Level Info "Testing external DNS resolution on VM '$vmName'..." $params = @{ SHM_DOMAIN_FQDN = $config.shm.domain.fqdn diff --git a/deployment/secure_research_environment/setup/Setup_SRE_Databases.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_Databases.ps1 index 0b7fbb2361..c7e8586193 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_Databases.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_Databases.ps1 @@ -58,7 +58,6 @@ foreach ($databaseCfg in $config.sre.databases.instances) { } } else { Add-LogMessage -Level Warning "Database VM '$($databaseCfg.vmName)' already exists. Use the '-Redeploy' option if you want to remove the existing database and its data and deploy a new one."
- continue } } diff --git a/deployment/secure_research_environment/setup/Setup_SRE_Key_Vault_And_Users.ps1 b/deployment/secure_research_environment/setup/Setup_SRE_Key_Vault_And_Users.ps1 index a1be60f18a..d35671c265 100644 --- a/deployment/secure_research_environment/setup/Setup_SRE_Key_Vault_And_Users.ps1 +++ b/deployment/secure_research_environment/setup/Setup_SRE_Key_Vault_And_Users.ps1 @@ -68,11 +68,13 @@ try { # :: Databases try { foreach ($keyName in $config.sre.databases.Keys) { - if ($config.sre.databases[$keyName] -isnot [System.Collections.IDictionary]) { continue } - $dbAdminUsername = ($keyName -eq "dbpostgresql") ? "postgres" : "sre$($config.sre.id)dbadmin".ToLower() # The postgres admin username is hardcoded as 'postgres' but we save it to the keyvault to ensure a consistent record structure - $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.databases[$keyName].adminPasswordSecretName -DefaultLength 20 -AsPlaintext - $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.databases[$keyName].dbAdminUsernameSecretName $dbAdminUsername -AsPlaintext - $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $config.sre.databases[$keyName].dbAdminPasswordSecretName -DefaultLength 20 -AsPlaintext + foreach ($dbInstance in $config.sre.databases[$keyName]) { + if ($dbInstance -isnot [System.Collections.IDictionary]) { continue } + $dbAdminUsername = ($keyName -eq "dbpostgresql") ? "postgres" : "sre$($config.sre.id)dbadmin".ToLower() # The postgres admin username is hardcoded as 'postgres' but we save it to the keyvault to ensure a consistent record structure + $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $dbInstance.adminPasswordSecretName -DefaultLength 20 -AsPlaintext + $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $dbInstance.dbAdminUsernameSecretName $dbAdminUsername -AsPlaintext + $null = Resolve-KeyVaultSecret -VaultName $config.sre.keyVault.name -SecretName $dbInstance.dbAdminPasswordSecretName -DefaultLength 20 -AsPlaintext + } } Add-LogMessage -Level Success "Ensured that SRE database secrets exist" } catch { diff --git a/docs/Makefile b/docs/Makefile index d3b53ad27d..a2cf542beb 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,38 +5,16 @@ # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -CONFIGDIR = build -TARGETDIR = _output +SOURCEDIR = source +BUILDDIR = build -# Determine which PDFs to build -# If you want to add a PDF make the following changes -# - Add SOURCE and TARGET variables -# - Add TARGET to PDFTARGETS rule -# - Add a 'TARGET: SOURCE' rule -# - Edit conf.py if you want them to be downloadable -PDFTARGETDIR = $(TARGETDIR)/pdf - -# Put help first so that "make" without argument is like "make help". +# Put it first so that "make" without argument is like "make help". help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(TARGETDIR)" $(SPHINXOPTS) $(O) + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile -# 'all' will make HTML and PDFs -all: html pdf - -# Use rinoh (via Sphinx) to build PDFs from Markdown -pdf: - make rinoh - rm $(PDFTARGETDIR)/*.rtc - rm $(PDFTARGETDIR)/*.stylelog - -# Ensure that clean also removes the contents of $(PDFTARGETDIR) -clean: - rm -rf $(PDFTARGETDIR)/* - @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(TARGETDIR)" - -# Use Sphinx for other options. 
$(O) is meant as a shortcut for $(SPHINXOPTS). -.DEFAULT: Makefile - @$(SPHINXBUILD) -b $@ -c "$(CONFIGDIR)" "$(SOURCEDIR)" "$(TARGETDIR)" $(SPHINXOPTS) $(O) +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000000..488a2ab2b8 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,57 @@ +# Documentation + +The documentation is built from Markdown files using [Sphinx](https://www.sphinx-doc.org/) and [MyST parser](https://myst-parser.readthedocs.io/). + +## Building the Documentation + +Create a virtual environment + +```console +python3 -m venv ./venv +source ./venv/bin/activate +``` + +Install the python dependencies (specified in [`requirements.txt`](./requirements.txt)) + +```console +pip install -r requirements.txt +``` + +Use the [`Makefile`](./Makefile) to build the document site + +```console +make html +``` + +The generated documents will be placed under `build/html/`. +To view the documents, open `build/html/index.html` in your browser. +For example + +```console +firefox build/html/index.html +``` + +## Reproducible Builds + +To improve the reproducibility of builds at each commit, [`requirements.txt`](./requirements.txt) contains a complete list of dependencies and specific versions. + +The project's _direct_ dependencies are listed in [`requirements.in`](./requirements.in). +The full list is then generated using [`pip-compile`](https://pip-tools.readthedocs.io/en/latest/#requirements-from-requirements-in) + +```console +pip-compile requirements.in +``` + +### Updating Requirements + +All requirements can be updated with + +```console +pip-compile --upgrade requirements.in +``` + +Your virtual environment can be updated with + +```console +pip-sync +``` diff --git a/docs/build/_static/toggle.js b/docs/build/_static/toggle.js deleted file mode 100644 index c5b6e4fcba..0000000000 --- a/docs/build/_static/toggle.js +++ /dev/null @@ -1,7 +0,0 @@ -// Toggle the 'shift-up' class when rst-versions objects are clicked -$(document).on("click", "[data-toggle='rst-versions']", function () { - $("[data-toggle='rst-versions']").toggleClass("shift-up"); -}); -$(document).on("click", "[data-toggle='rst-downloads']", function () { - $("[data-toggle='rst-downloads']").toggleClass("shift-up"); -}); diff --git a/docs/build/_templates/sidebar-section-navigation.html b/docs/build/_templates/sidebar-section-navigation.html deleted file mode 100644 index 4b9cee0c3b..0000000000 --- a/docs/build/_templates/sidebar-section-navigation.html +++ /dev/null @@ -1,5 +0,0 @@ - \ No newline at end of file diff --git a/docs/build/_templates/sidebar-versions.html b/docs/build/_templates/sidebar-versions.html deleted file mode 100644 index db2a496460..0000000000 --- a/docs/build/_templates/sidebar-versions.html +++ /dev/null @@ -1,58 +0,0 @@ -{% if READTHEDOCS or display_lower_left %} -
- - - Currently reading: {{ current_version }} - - -
- {% if languages|length >= 1 %} -
-
{{ _('Languages') }}
- {% for slug, url in languages %} - {% if slug == current_language %} {% endif %} -
{{ slug }}
- {% if slug == current_language %}
{% endif %} - {% endfor %} -
- {% endif %} - {% if versions|length >= 1 %} -
-
{{ _('Versions') }}
- {% for slug, url in versions %} - {% if slug == current_version %} {% endif %} -
{{ slug }}
- {% if slug == current_version %}
{% endif %} - {% endfor %} -
- {% endif %} - {% if READTHEDOCS %} -
-
{{ _('On Read the Docs') }}
-
- {{ _('Project Home') }} -
-
- {{ _('Builds') }} -
-
- {% endif %} -
-
- {% if downloads|length >= 1 %} -
- - - Download PDF resources - - -
-
- {% for type, url in downloads %} -
{{ type }}
- {% endfor %} -
-
-
- {% endif %} -{% endif %} \ No newline at end of file diff --git a/docs/build/_templates/sphinx-version.html b/docs/build/_templates/sphinx-version.html deleted file mode 100644 index c03f4af166..0000000000 --- a/docs/build/_templates/sphinx-version.html +++ /dev/null @@ -1,5 +0,0 @@ -

- {% trans sphinx_version=sphinx_version|e %}Created using Sphinx {{ sphinx_version }}.{% endtrans %} - {% trans %}Hosted by GitHub Pages.{% endtrans %} -
-

diff --git a/docs/build/build_all_supported_versions.py b/docs/build/build_all_supported_versions.py deleted file mode 100755 index dd2e565db2..0000000000 --- a/docs/build/build_all_supported_versions.py +++ /dev/null @@ -1,173 +0,0 @@ -#! /usr/bin/env python3 -import argparse -import emoji -import git -import os -import pathlib -import shutil -import subprocess -import sys -import tempfile - -# Set git repository details -development_branch = "develop" -earliest_supported_release = "v4.0.0" - -# --- Parse arguments --- -parser = argparse.ArgumentParser( - prog="python build_docs_all.py", - description="Build documentation for all supported versions", -) -parser.add_argument( - "-o", "--output-dir", help="Directory to store built documentation", required=True -) -parser.add_argument( - "-s", - "--skip-pdfs", - action="store_true", - help="Skip building PDFs (use only for faster testing)", -) -args = parser.parse_args() -skip_pdfs = args.skip_pdfs - -# Create output directory -combined_output_dir = pathlib.Path(args.output_dir).resolve() -if combined_output_dir.exists(): - shutil.rmtree(combined_output_dir) -combined_output_dir.mkdir(parents=True, exist_ok=True) - -# Necessary directories -temp_dir = tempfile.TemporaryDirectory() -build_dir = pathlib.Path(__file__).parent.resolve() -docs_dir = build_dir.parent -build_output_dir = docs_dir / "_output" -config_backup_dir = pathlib.Path(temp_dir.name) / "build_config" - -# Get git repository details -repo = git.Repo(search_parent_directories=True) -repo_name = repo.remotes.origin.url.split(".git")[0].split("/")[-1] - -# Load all release since earliest_supported_release -releases = sorted((t.name for t in repo.tags), reverse=True) -supported_versions = ( - releases[: releases.index(earliest_supported_release) + 1] - + [development_branch] -) -default_version = supported_versions[0] # Latest stable release -current_version = ( - [tag.name for tag in repo.tags if tag.commit == repo.head.commit] - + [branch.name for branch in repo.branches if branch.commit == repo.head.commit] - + [repo.head.commit] -)[0] # Tag or branch name or commit ID if no name is available - -# --- Ensure local repo is clean -- -if repo.is_dirty(untracked_files=True): - print( - "Repo is not clean. Run 'git status' and ensure repo is clean before rerunning this script." - ) - exit(1) - -# --- Backup documentastion build configuration --- -# Backup Sphinx docs build configuration from current branch to ensure -# consistent style and navigation elements for docs across all versions -# NOTE: copytree() requires the destination directory does not exist -# This is why we target a subfolder of the TemporaryDirectory we create -# earlier as the config backup directory -print(f"Backing up build config to: '{config_backup_dir}'") -shutil.copytree(build_dir, config_backup_dir) - -# --- Build docs for all supported versions --- -print(f"Building docs for all supported versions: {supported_versions}") -print(f"Default version: {default_version}") - - -# Flag to bypass Jekyll processing since this is a static html site -open(combined_output_dir / ".nojekyll", "w+").close() - -# Build docs for each branch -for version in supported_versions: - print(f"{emoji.emojize(':hourglass:', language='alias')} Generating docs for version '{version}'...") - - try: - # Checkout repo at this version - repo.git.checkout(version) - - # Restore Sphinx docs build configuration from backup for consistent style, - # clearing any existing build configuration directory content first. 
- if os.path.exists(build_dir): - shutil.rmtree(build_dir) - shutil.copytree(config_backup_dir, build_dir) - - # Use the first of these files that exists as the index: - # - index.md - # - README.md - # - DSG-user-documentation.md - # - An empty index.md - target = docs_dir / "index.md" - if target.is_file(): - # Use existing index file. Nothing to do. - pass - elif (source := docs_dir / "README.md").is_file(): - # Use docs README - shutil.move(source, target) - elif (source := docs_dir / "DSG-user-documentation.md").is_file(): - # Use docs DSG user documentation - shutil.move(source, target) - else: - # Use empty index file - shutil.copy(build_dir / "meta" / "index.empty.md", target) - - # Clean the output directory - subprocess.run( - ["make", "clean"], - cwd=docs_dir, - check=True, - ) - # Build docs for this version - build_commands = ["make", "html"] - if not skip_pdfs: - build_commands.append("pdf") - subprocess.run( - build_commands, - cwd=docs_dir, - check=True, - ) - # Store docs in the output directory - shutil.copytree(build_output_dir, combined_output_dir / version) - shutil.rmtree(build_output_dir) - - except subprocess.CalledProcessError: - print(f"Error encountered during build for version '{version}'") - raise - else: - print(f"{emoji.emojize(':sparkles:', language='alias')} Successfully built docs for version '{version}'") - finally: - # Revert any changes made to current branch - print(f"Reverting changes made to '{version}'") - repo.git.reset("--hard", "HEAD") - repo.git.clean("-fd") - -# Write top-level index file to redirect to default version of docs -with open(os.path.join(docs_dir, "build", "meta", "index.html"), "r") as file: - filedata = file.read() -filedata = filedata.replace("{{latest_stable}}", default_version) -with open(os.path.join(combined_output_dir, "index.html"), "w+") as file: - file.write(filedata) - -# -- Restore original branch and copy docs to specified output directory -- -print(f"Documentation builds complete for all versions: {supported_versions}") -# Checkout original branch -print(f"Restoring original '{current_version}' branch.") -repo.git.checkout(current_version) -temp_dir.cleanup() - -# Check that all versions have built -n_failures = 0 -for version in supported_versions: - if (combined_output_dir / version / "index.html").is_file(): - print(f"{emoji.emojize(':white_check_mark:', language='alias')} {version} documentation built successfully") - else: - print(f"{emoji.emojize(':x:', language='alias')} {version} documentation failed to build!") - n_failures += 1 - if n_failures: - sys.exit(1) diff --git a/docs/build/emoji_support.rts b/docs/build/emoji_support.rts deleted file mode 100644 index 739d5d729f..0000000000 --- a/docs/build/emoji_support.rts +++ /dev/null @@ -1,7 +0,0 @@ -[STYLESHEET] -name=Emoji Support -description=Small tweaks made to the Sphinx style sheet -base=sphinx - -[VARIABLES] -fallback_typeface=Symbola diff --git a/docs/build/emoji_support.rtt b/docs/build/emoji_support.rtt deleted file mode 100644 index 3c3e005513..0000000000 --- a/docs/build/emoji_support.rtt +++ /dev/null @@ -1,5 +0,0 @@ -[TEMPLATE_CONFIGURATION] -name = Emoji Support -template = article - -stylesheet = emoji_support.rts diff --git a/docs/build/meta/index.empty.md b/docs/build/meta/index.empty.md deleted file mode 100644 index 989cf73460..0000000000 --- a/docs/build/meta/index.empty.md +++ /dev/null @@ -1,3 +0,0 @@ -# Safe Haven Documentation - -This release does not have any documentation diff --git a/docs/build/meta/index.html 
b/docs/build/meta/index.html deleted file mode 100644 index e8db9ab780..0000000000 --- a/docs/build/meta/index.html +++ /dev/null @@ -1,10 +0,0 @@ - - - - Data Safe Haven Documentation - - - -

Please wait while you're redirected to our documentation.

- - \ No newline at end of file diff --git a/docs/build/requirements.txt b/docs/build/requirements.txt deleted file mode 100644 index a6f3275d8b..0000000000 --- a/docs/build/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -emoji==2.2.0 -GitPython==3.1.30 -Jinja2==3.1.2 -myst-parser==0.18.1 -Pygments==2.14.0 -pydata-sphinx-theme==0.12.0 -rinohtype==0.5.4 -rinoh-typeface-symbola==0.1.1 -Sphinx==5.3.0 diff --git a/docs/build/run_act_build.sh b/docs/build/run_act_build.sh deleted file mode 100755 index 3546d96d63..0000000000 --- a/docs/build/run_act_build.sh +++ /dev/null @@ -1,68 +0,0 @@ -#! /bin/sh - -# Document usage for this script -usage() { - echo "usage: $(basename "$0") -d directory [-h] [-p port]" - echo " -d directory [required] specify directory where output should be stored" - echo " -h display help" - echo " -p port specify port to run webserver on [default: 8080]" - exit 1 -} - -# Read command line arguments, overriding defaults where necessary -target_directory="" -port="8080" -while getopts "d:hp:" option; do - case $option in - d) - target_directory=$OPTARG - ;; - h) - usage - ;; - p) - port=$OPTARG - ;; - \?) - echo "Invalid option: -$OPTARG" >&2 - ;; - esac -done - -# Check that target and output directories exist -target_directory="$(realpath "$target_directory")" -if [ -z "$target_directory" ]; then usage; fi -output_directory="${target_directory:?}/data-safe-haven" -mkdir -p "${output_directory}" - -# Check that output directory is empty -if [ "$(ls -A "$output_directory")" ]; then - while true; do - echo "$output_directory is not empty. Delete its contents? [y/n] " - read -r response - case $response in - [Yy]*) - rm -rf "${output_directory}" - break - ;; - [Nn]*) exit 0 ;; - *) echo "Please answer yes or no." ;; - esac - done -fi - -# Build docs with act -echo "Building docs" -act -j build_docs -C "$(git rev-parse --show-toplevel)" 2>/dev/null - -# Move the docs to a local directory -echo "Moving docs to target directory" -CONTAINER_ID=$(docker container ls -a | grep build-docs | cut -d ' ' -f 1) -echo "Starting container $(docker container start "$CONTAINER_ID")..." -DOCS_DIR=$(dirname "$(docker exec -it "$CONTAINER_ID" /bin/bash -c "find /tmp -type d -name develop")") -docker cp "${CONTAINER_ID}:${DOCS_DIR}/." "${output_directory}" -echo "Stopping container $(docker container stop "$CONTAINER_ID")" - -# Start a Python webserver in local directory -echo "Starting webserver at http://localhost:${port}" -python -m http.server --directory "${target_directory}" "$port" diff --git a/docs/processes/data_ingress.md b/docs/processes/data_ingress.md deleted file mode 100644 index 6136fff9e7..0000000000 --- a/docs/processes/data_ingress.md +++ /dev/null @@ -1,25 +0,0 @@ -(process_data_ingress)= - -# Data ingress process - -## Introduction - -The Data Safe Haven has various technical controls to ensure data security. -However, the processes and contractual agreements that the **Dataset Provider** agrees to are equally important. - -## Bringing data into the environment - -```{attention} -Before starting any data ingress, make sure that you have gone through the {ref}`data classification process `. -``` - -Talk to your {ref}`role_system_manager` to discuss possible methods of bringing data into the environments. -It may be convenient to use [Azure Storage Explorer](https://azure.microsoft.com/en-us/products/storage/storage-explorer/). 
-In this case you will not need log-in credentials, as your {ref}`role_system_manager` can provide a short-lived secure access token which will let you upload data. - -```{tip} -You may want to keep the following considerations in mind when transferring data in order to reduce the chance of a data breach -- use of short-lived access tokens limits the time within which an attacker can operate -- letting your {ref}`role_system_manager` know a fixed IP address you will be connecting from (eg. a corporate VPN) limits the places an attacker can operate from -- communicating with your {ref}`role_system_manager` through a secure out-of-band channel (eg. encrypted email) reduces the chances that an attacker can intercept or alter your messages in transit -``` diff --git a/docs/requirements.in b/docs/requirements.in new file mode 100644 index 0000000000..a6230b9742 --- /dev/null +++ b/docs/requirements.in @@ -0,0 +1,4 @@ +emoji +myst-parser +pydata-sphinx-theme +Sphinx diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..0c62fb5ada --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,87 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile requirements.in +# +accessible-pygments==0.0.4 + # via pydata-sphinx-theme +alabaster==0.7.13 + # via sphinx +babel==2.12.1 + # via + # pydata-sphinx-theme + # sphinx +beautifulsoup4==4.12.2 + # via pydata-sphinx-theme +certifi==2023.7.22 + # via requests +charset-normalizer==3.1.0 + # via requests +docutils==0.19 + # via + # myst-parser + # pydata-sphinx-theme + # sphinx +emoji==2.2.0 + # via -r requirements.in +idna==3.4 + # via requests +imagesize==1.4.1 + # via sphinx +jinja2==3.1.2 + # via + # myst-parser + # sphinx +markdown-it-py==2.2.0 + # via + # mdit-py-plugins + # myst-parser +markupsafe==2.1.2 + # via jinja2 +mdit-py-plugins==0.3.5 + # via myst-parser +mdurl==0.1.2 + # via markdown-it-py +myst-parser==1.0.0 + # via -r requirements.in +packaging==23.1 + # via + # pydata-sphinx-theme + # sphinx +pydata-sphinx-theme==0.13.3 + # via -r requirements.in +pygments==2.15.1 + # via + # accessible-pygments + # pydata-sphinx-theme + # sphinx +pyyaml==6.0 + # via myst-parser +requests==2.31.0 + # via sphinx +snowballstemmer==2.2.0 + # via sphinx +soupsieve==2.4.1 + # via beautifulsoup4 +sphinx==6.2.1 + # via + # -r requirements.in + # myst-parser + # pydata-sphinx-theme +sphinxcontrib-applehelp==1.0.4 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.1 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +typing-extensions==4.6.1 + # via pydata-sphinx-theme +urllib3==2.0.2 + # via requests diff --git a/docs/roles/researcher/snippets/01_introduction.partial.md b/docs/roles/researcher/snippets/01_introduction.partial.md deleted file mode 100644 index 14b2c4162b..0000000000 --- a/docs/roles/researcher/snippets/01_introduction.partial.md +++ /dev/null @@ -1,43 +0,0 @@ -## {{beginner}} Introduction - -{{tada}} Welcome to the Turing Data Safe Haven! {{tada}} - -Secure research environments (SREs) for analysis of sensitive datasets are essential to give data providers confidence that their datasets will be kept secure over the course of a project. -The Data Safe Haven is a prescription for how to set up one or more SREs and give users access to them. 
-The Data Safe Haven SRE design is aimed at allowing groups of researchers to work together on projects that involve sensitive or confidential datasets at scale. -Our goal is to ensure that you are able to implement the most cutting edge data science techniques while maintaining all ethical and legal responsibilities of information governance and access. - -The data you are working on will have been classified into one of five sensitivity tiers, ranging from open data at Tier 0, to highly sensitive and high risk data at Tier 4. -The tiers are defined by the most sensitive data in your project, and may be increased if the combination of data is deemed to be require additional levels of security. -You can read more about this process in our policy paper: _Arenas et al, 2019_, [`arXiv:1908.08737`](https://arxiv.org/abs/1908.08737). - -The level of sensitivity of your data determines whether you have access to the internet within the SRE and whether you are allowed to copy and paste between the secure research environment and other windows on your computer. -This means you may be limited in which data science tools you are allowed to install. -You will find that many software packages are already available, and the administrator of the SRE will ingress - bring into the environment - as many additional resources as possible. - -```{important} -Please read this user guide carefully and remember to refer back to it when you have questions. -In many cases the answer is already here, but if you think this resource could be clearer, please let us know so we can improve the documentation for future users. -``` - -### Definitions - -The following definitions might be useful during the rest of this guide - -Secure Research Environment (SRE) -: the environment that you will be using to access the sensitive data. - -Data Safe Haven -: the overall project that details how to create and manage one or more SREs. - -(user_guide_username_domain)= -Username domain -: the domain (for example `projects.turingsafehaven.ac.uk`) which your user account will belong to. Multiple SREs can share the same domain for managing users in common. - -(user_guide_sre_id)= -SRE ID -: each SRE has a unique short ID, for example `sandbox` which your {ref}`System Manager ` will use to distinguish different SREs in the same Data Safe Haven. - -(user_guide_sre_url)= -SRE URL -: each SRE has a unique URL (for example `sandbox.projects.turingsafehaven.ac.uk`) which is used to access the data. 
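The pinned `docs/requirements.txt` introduced earlier in this changeset is autogenerated from `docs/requirements.in` by `pip-compile` (from the `pip-tools` package), as its header notes. A minimal sketch of how the pins would be regenerated and the documentation built under the new `docs/source` layout; the output directory here is illustrative and not mandated by this changeset:

```console
# Regenerate the pinned requirements from the unpinned inputs
pip install pip-tools
cd docs
pip-compile requirements.in        # rewrites requirements.txt with exact pinned versions

# Install the pinned documentation dependencies and build the HTML docs
pip install -r requirements.txt
sphinx-build -b html source _build/html
```

Running `pip-compile --upgrade requirements.in` instead refreshes every pin to the latest allowed version, which is how the versions recorded in `requirements.txt` can be kept current.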
diff --git a/docs/roles/system_manager/administrator_guide/srd_login_failure.png b/docs/roles/system_manager/administrator_guide/srd_login_failure.png deleted file mode 100644 index 1c00b6152d..0000000000 Binary files a/docs/roles/system_manager/administrator_guide/srd_login_failure.png and /dev/null differ diff --git a/docs/roles/system_manager/administrator_guide/srd_login_prompt.png b/docs/roles/system_manager/administrator_guide/srd_login_prompt.png deleted file mode 100644 index 225916453c..0000000000 Binary files a/docs/roles/system_manager/administrator_guide/srd_login_prompt.png and /dev/null differ diff --git a/docs/roles/system_manager/manage_deployments.md b/docs/roles/system_manager/manage_deployments.md deleted file mode 100644 index a6ca4f866b..0000000000 --- a/docs/roles/system_manager/manage_deployments.md +++ /dev/null @@ -1,88 +0,0 @@ -(administrator_manage_deployments)= - -# Managing Data Safe Haven deployments - -```{important} -This document assumes that you already have access to a {ref}`Safe Haven Management (SHM) environment ` and one or more {ref}`Secure Research Environments (SREs) ` that are linked to it. -``` - -## {{fire}} Remove a single SRE - -In order to tear down an SRE, use the following procedure: - -On your **deployment machine**. - -- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team -- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository -- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code -- NB. If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into. -- Run the following script: - - ```powershell - ./SRE_Teardown.ps1 -shmId -sreId - ``` - -- If you provide the optional `-dryRun` parameter then the names of all affected resources will be printed, but nothing will be deleted - -## {{end}} Remove a complete Safe Haven - -### {{collision}} Tear down any attached SREs - -On your **deployment machine**. - -- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team -- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository -- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code - - ```{attention} - If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into. - ``` - -- For each SRE attached to the SHM, do the following: - - Tear down the SRE by running: - - ```powershell - ./SRE_Teardown.ps1 -sreId - ``` - - where the SRE ID is the one specified in the relevant config file - - ```{note} - If you provide the optional `-dryRun` parameter then the names of all affected resources will be printed, but nothing will be deleted - ``` - -### {{unlock}} Disconnect from the Azure Active Directory - -Connect to the **SHM Domain Controller (DC1)** via Remote Desktop Client over the SHM VPN connection - -- Log in as a **domain** user (ie. 
`@`) using the username and password obtained from the Azure portal -- If you see a warning dialog that the certificate cannot be verified as root, accept this and continue -- Open Powershell as an administrator - - Navigate to `C:\Installation` - - Run `.\Disconnect_AD.ps1` - - You will need to provide login credentials (including MFA if set up) for `@` - -```{attention} -Full disconnection of the Azure Active Directory can take up to 72 hours but is typically less. -If you are planning to install a new SHM connected to the same Azure Active Directory you may find the `AzureADConnect` installation step requires you to wait for the previous disconnection to complete. -``` - -### {{bomb}} Tear down the SHM - -On your **deployment machine**. - -- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team -- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository -- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code - - ```{attention} - If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into. - ``` - -- Tear down the SHM by running: - - ```powershell - ./SHM_Teardown.ps1 -shmId - ``` - - where `` is the {ref}`management environment ID ` specified in the configuration file. diff --git a/docs/build/_static/favicon.ico b/docs/source/_static/favicon.ico similarity index 100% rename from docs/build/_static/favicon.ico rename to docs/source/_static/favicon.ico diff --git a/docs/build/_static/logo_turing_dark.png b/docs/source/_static/logo_turing_dark.png similarity index 100% rename from docs/build/_static/logo_turing_dark.png rename to docs/source/_static/logo_turing_dark.png diff --git a/docs/build/_static/logo_turing_light.png b/docs/source/_static/logo_turing_light.png similarity index 100% rename from docs/build/_static/logo_turing_light.png rename to docs/source/_static/logo_turing_light.png diff --git a/docs/build/_static/overrides.css b/docs/source/_static/overrides.css similarity index 100% rename from docs/build/_static/overrides.css rename to docs/source/_static/overrides.css diff --git a/docs/static/scriberia_diagram.jpg b/docs/source/_static/scriberia_diagram.jpg similarity index 100% rename from docs/static/scriberia_diagram.jpg rename to docs/source/_static/scriberia_diagram.jpg diff --git a/docs/build/conf.py b/docs/source/conf.py similarity index 50% rename from docs/build/conf.py rename to docs/source/conf.py index 879ed237cf..97d0811fe4 100644 --- a/docs/build/conf.py +++ b/docs/source/conf.py @@ -5,45 +5,18 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html import datetime import emoji -import git -import os # -- Project information ----------------------------------------------------- project = "Data Safe Haven" -copyright = f"CC-BY-4.0 {datetime.date.today().year}, The Alan Turing Institute." 
+copyright = f"CC-BY-4.0 {datetime.date.today().year}, The Alan Turing Institute" author = "The Alan Turing Institute" development_branch = "develop" -earliest_supported_release = "v4.0.0" - - -# -- Git repository details -repo = git.Repo(search_parent_directories=True) -repo_name = repo.remotes.origin.url.split(".git")[0].split("/")[-1] -releases = sorted((t.name for t in repo.tags), reverse=True) -supported_versions = ( - releases[: releases.index(earliest_supported_release) + 1] - + [development_branch] -) -default_version = supported_versions[0] # Latest stable release -current_version = ( - [tag.name for tag in repo.tags if tag.commit == repo.head.commit] - + [branch.name for branch in repo.branches if branch.commit == repo.head.commit] - + [str(repo.head.commit)] -)[0] # Tag or branch name or commit ID if no name is available -current_commit_hash = repo.head.commit.hexsha -current_commit_date = repo.head.commit.authored_datetime.strftime(r"%d %b %Y") -del repo # all unpickleable objects must be deleted # -- Customisation ----------------------------------------------------------- -# Extracted repository variables -print(f"Supported versions: {supported_versions}") -print(f"Default version: {default_version}") -print(f"Current version: {current_version}") - # Construct list of emoji substitutions emoji_codes = set( [ @@ -59,25 +32,7 @@ # Set sidebar variables if "html_context" not in globals(): html_context = dict() -html_context["display_lower_left"] = True -html_context["default_version"] = default_version -html_context["current_version"] = current_version -html_context["versions"] = [ - (v, f"/{repo_name}/{v}/index.html") for v in supported_versions -] -# Downloadable PDFs -pdf_version_string = f"Version: {current_version} ({current_commit_hash})" -print(f"PDF version string: {pdf_version_string}") -html_context["downloads"] = [ - ( - "User guide (Apache Guacamole)", - f"/{repo_name}/{current_version}/pdf/data_safe_haven_user_guide_guacamole.pdf", - ), - ( - "User guide (Microsoft RDS)", - f"/{repo_name}/{current_version}/pdf/data_safe_haven_user_guide_msrds.pdf", - ), -] + # Add 'Edit on GitHub' link html_context["github_user"] = "alan-turing-institute" html_context["github_repo"] = "data-safe-haven" @@ -92,24 +47,16 @@ # ones. extensions = [ "myst_parser", - "rinoh.frontend.sphinx", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] + # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [ - "build", - "_output", - "Thumbs.db", - ".DS_Store", - "**/*.partial.md", -] - - +exclude_patterns = ["**/*.partial.md"] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. 
See the documentation for @@ -127,23 +74,13 @@ } ], "logo": { - "image_light": "logo_turing_light.png", - "image_dark": "logo_turing_dark.png", + "image_light": "_static/logo_turing_light.png", + "image_dark": "_static/logo_turing_dark.png", }, - "page_sidebar_items": ["edit-this-page", "sourcelink"], + "secondary_sidebar_items": ["page-toc", "edit-this-page", "sourcelink"], "use_edit_page_button": True, } -# Set the left-hand sidebars -html_sidebars = { - "**": [ - "search-field.html", - "sidebar-section-navigation.html", - ] -} -if not os.getenv("DISABLE_VERSION_LINKS"): - html_sidebars["**"] += ["sidebar-versions.html"] - # Location of favicon html_favicon = "_static/favicon.ico" @@ -171,26 +108,5 @@ for emoji_code in emoji_codes } -# -- Options for Rinoh ------------------------------------------------------- - -# List of documents to convert to PDF -rinoh_documents = [ - dict( - doc="roles/researcher/user_guide_guacamole", - target="pdf/data_safe_haven_user_guide_guacamole", - title="Data Safe Haven User Guide\nApache Guacamole", - subtitle=pdf_version_string, - date=current_commit_date, - author=author, - template="emoji_support.rtt", - ), - dict( - doc="roles/researcher/user_guide_msrds", - target="pdf/data_safe_haven_user_guide_msrds", - title="Data Safe Haven User Guide\nMicrosoft Remote Desktop", - subtitle=pdf_version_string, - date=current_commit_date, - author=author, - template="emoji_support.rtt", - ), -] +# Allow MyST to generate anchors for section titles +myst_heading_anchors = 4 diff --git a/docs/contributing/devops_gap.png b/docs/source/contributing/devops_gap.png similarity index 100% rename from docs/contributing/devops_gap.png rename to docs/source/contributing/devops_gap.png diff --git a/docs/contributing/example-conversation-in-issue.png b/docs/source/contributing/example-conversation-in-issue.png similarity index 100% rename from docs/contributing/example-conversation-in-issue.png rename to docs/source/contributing/example-conversation-in-issue.png diff --git a/docs/contributing/gitflow.svg b/docs/source/contributing/gitflow.svg similarity index 100% rename from docs/contributing/gitflow.svg rename to docs/source/contributing/gitflow.svg diff --git a/docs/deployment/build_srd_image.md b/docs/source/deployment/build_srd_image.md similarity index 99% rename from docs/deployment/build_srd_image.md rename to docs/source/deployment/build_srd_image.md index 55e4e769e5..9482988160 100644 --- a/docs/deployment/build_srd_image.md +++ b/docs/source/deployment/build_srd_image.md @@ -108,7 +108,7 @@ PS> ./Provision_Compute_VM.ps1 -shmId ```{note} - Although the `./Provision_Compute_VM.ps1` script will finish running in a few minutes, the build itself will take several hours. -- We recommend **monitoring** the build by accessing the machine using `ssh` (the ssh info should be printed at the end of the Provision_Compute_VM.ps1 script) and either reading through the full build log at `/var/log/cloud-init-output.log` or running the summary script using `/opt/verification/analyse_build.py`. +- We recommend **monitoring** the build by accessing the machine using `ssh` (the ssh info should be printed at the end of the Provision_Compute_VM.ps1 script) and either reading through the full build log at `/var/log/cloud-init-output.log` or running the summary script using `/opt/monitoring/analyse_build.py`. 
- **NB.** You will need to connect from an approved administrator IP address - **NB.** the VM will automatically shutdown at the end of the cloud-init process - if you want to analyse the build after this point, you will need to turn it back on in the `Azure` portal. ``` diff --git a/docs/deployment/deploy_shm.md b/docs/source/deployment/deploy_shm.md similarity index 99% rename from docs/deployment/deploy_shm.md rename to docs/source/deployment/deploy_shm.md index 4825fbcb0c..94f652b1f1 100644 --- a/docs/deployment/deploy_shm.md +++ b/docs/source/deployment/deploy_shm.md @@ -23,7 +23,7 @@ Alternatively, you may run multiple SHMs concurrently, for example you may have ``` - `PowerShell` - - Install [PowerShell v7.0 or above](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell) + - We recommend [installing](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell) the [latest stable release](https://learn.microsoft.com/en-us/powershell/scripting/install/powershell-support-lifecycle?view=powershell-7.3) of Powershell. We have most recently tested deployment using version `7.3.2`. - `Powershell` cross-platform modules ````{tip} @@ -384,7 +384,7 @@ Make sure you have activated your account and **successfully logged in** with th ## 6. {{computer}} Deploy SHM -![Powershell: a few hours](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=a%20few%20hours) at {{file_folder}} `./deployment/secure_research_environment/setup` +![Powershell: a few hours](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=a%20few%20hours) at {{file_folder}} `./deployment/safe_haven_management_environment/setup` ```powershell PS> ./Deploy_SHM.ps1 -shmId diff --git a/docs/deployment/deploy_shm/AAD.png b/docs/source/deployment/deploy_shm/AAD.png similarity index 100% rename from docs/deployment/deploy_shm/AAD.png rename to docs/source/deployment/deploy_shm/AAD.png diff --git a/docs/deployment/deploy_shm/aad_authentication_methods.png b/docs/source/deployment/deploy_shm/aad_authentication_methods.png similarity index 100% rename from docs/deployment/deploy_shm/aad_authentication_methods.png rename to docs/source/deployment/deploy_shm/aad_authentication_methods.png diff --git a/docs/deployment/deploy_shm/aad_create_admin.png b/docs/source/deployment/deploy_shm/aad_create_admin.png similarity index 100% rename from docs/deployment/deploy_shm/aad_create_admin.png rename to docs/source/deployment/deploy_shm/aad_create_admin.png diff --git a/docs/deployment/deploy_shm/aad_creation.png b/docs/source/deployment/deploy_shm/aad_creation.png similarity index 100% rename from docs/deployment/deploy_shm/aad_creation.png rename to docs/source/deployment/deploy_shm/aad_creation.png diff --git a/docs/deployment/deploy_shm/aad_global_admin.png b/docs/source/deployment/deploy_shm/aad_global_admin.png similarity index 100% rename from docs/deployment/deploy_shm/aad_global_admin.png rename to docs/source/deployment/deploy_shm/aad_global_admin.png diff --git a/docs/deployment/deploy_shm/aad_mfa_settings.png b/docs/source/deployment/deploy_shm/aad_mfa_settings.png similarity index 100% rename from docs/deployment/deploy_shm/aad_mfa_settings.png rename to docs/source/deployment/deploy_shm/aad_mfa_settings.png diff --git a/docs/deployment/deploy_shm/aad_sspr.png b/docs/source/deployment/deploy_shm/aad_sspr.png similarity index 100% rename from docs/deployment/deploy_shm/aad_sspr.png rename to 
docs/source/deployment/deploy_shm/aad_sspr.png diff --git a/docs/deployment/deploy_shm/aad_tenant_id.png b/docs/source/deployment/deploy_shm/aad_tenant_id.png similarity index 100% rename from docs/deployment/deploy_shm/aad_tenant_id.png rename to docs/source/deployment/deploy_shm/aad_tenant_id.png diff --git a/docs/deployment/deploy_shm/catalina_authentication.png b/docs/source/deployment/deploy_shm/catalina_authentication.png similarity index 100% rename from docs/deployment/deploy_shm/catalina_authentication.png rename to docs/source/deployment/deploy_shm/catalina_authentication.png diff --git a/docs/deployment/deploy_shm/certificate_details.png b/docs/source/deployment/deploy_shm/certificate_details.png similarity index 100% rename from docs/deployment/deploy_shm/certificate_details.png rename to docs/source/deployment/deploy_shm/certificate_details.png diff --git a/docs/deployment/deploy_shm/dc_resource_groups.png b/docs/source/deployment/deploy_shm/dc_resource_groups.png similarity index 100% rename from docs/deployment/deploy_shm/dc_resource_groups.png rename to docs/source/deployment/deploy_shm/dc_resource_groups.png diff --git a/docs/deployment/deploy_shm/enable_password_writeback.png b/docs/source/deployment/deploy_shm/enable_password_writeback.png similarity index 100% rename from docs/deployment/deploy_shm/enable_password_writeback.png rename to docs/source/deployment/deploy_shm/enable_password_writeback.png diff --git a/docs/deployment/deploy_shm/nps_accounting.png b/docs/source/deployment/deploy_shm/nps_accounting.png similarity index 100% rename from docs/deployment/deploy_shm/nps_accounting.png rename to docs/source/deployment/deploy_shm/nps_accounting.png diff --git a/docs/deployment/deploy_shm/shm_subdomain_ns.png b/docs/source/deployment/deploy_shm/shm_subdomain_ns.png similarity index 100% rename from docs/deployment/deploy_shm/shm_subdomain_ns.png rename to docs/source/deployment/deploy_shm/shm_subdomain_ns.png diff --git a/docs/deployment/deploy_shm/vnet_resource_groups.png b/docs/source/deployment/deploy_shm/vnet_resource_groups.png similarity index 100% rename from docs/deployment/deploy_shm/vnet_resource_groups.png rename to docs/source/deployment/deploy_shm/vnet_resource_groups.png diff --git a/docs/deployment/deploy_sre.md b/docs/source/deployment/deploy_sre.md similarity index 100% rename from docs/deployment/deploy_sre.md rename to docs/source/deployment/deploy_sre.md diff --git a/docs/deployment/deploy_sre/guacamole_aad_app_registration_idtoken.png b/docs/source/deployment/deploy_sre/guacamole_aad_app_registration_idtoken.png similarity index 100% rename from docs/deployment/deploy_sre/guacamole_aad_app_registration_idtoken.png rename to docs/source/deployment/deploy_sre/guacamole_aad_app_registration_idtoken.png diff --git a/docs/deployment/deploy_sre/guacamole_aad_idtoken_failure.png b/docs/source/deployment/deploy_sre/guacamole_aad_idtoken_failure.png similarity index 100% rename from docs/deployment/deploy_sre/guacamole_aad_idtoken_failure.png rename to docs/source/deployment/deploy_sre/guacamole_aad_idtoken_failure.png diff --git a/docs/deployment/deploy_sre/guacamole_desktop.png b/docs/source/deployment/deploy_sre/guacamole_desktop.png similarity index 100% rename from docs/deployment/deploy_sre/guacamole_desktop.png rename to docs/source/deployment/deploy_sre/guacamole_desktop.png diff --git a/docs/deployment/deploy_sre/msrds_desktop.png b/docs/source/deployment/deploy_sre/msrds_desktop.png similarity index 100% rename from 
docs/deployment/deploy_sre/msrds_desktop.png rename to docs/source/deployment/deploy_sre/msrds_desktop.png diff --git a/docs/deployment/deploy_sre/sre_subdomain_ns.png b/docs/source/deployment/deploy_sre/sre_subdomain_ns.png similarity index 100% rename from docs/deployment/deploy_sre/sre_subdomain_ns.png rename to docs/source/deployment/deploy_sre/sre_subdomain_ns.png diff --git a/docs/deployment/deploy_sre_apache_guacamole.md b/docs/source/deployment/deploy_sre_apache_guacamole.md similarity index 83% rename from docs/deployment/deploy_sre_apache_guacamole.md rename to docs/source/deployment/deploy_sre_apache_guacamole.md index c273fa5e6b..94403084fc 100644 --- a/docs/deployment/deploy_sre_apache_guacamole.md +++ b/docs/source/deployment/deploy_sre_apache_guacamole.md @@ -32,7 +32,8 @@ PS> ./Deploy_SRE.ps1 -shmId -sreId -VMs - where `` is the {ref}`management environment ID ` for this SHM - where `` is the {ref}`secure research environment ID ` for this SRE -- where `` is a list of [Azure VM sizes](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes) that you want to create. For example `'Standard_D2s_v3', 'default', 'Standard_NC6s_v3'`. If you are unsure of the appropriate VM sizes, run the script with a single `'default'` (resizing can be done after deployment via the Azure portal). +- where `` is a list of [Azure VM sizes](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes) that you want to create. For example `'Standard_D2s_v3', 'default', 'Standard_NC6s_v3'`. If you are unsure of the appropriate VM sizes, run the script with a single `'default'`. +- VMs can be resized after deployment. See how to do so in the {ref}`System Manager instructions `. You will be prompted for credentials for: @@ -104,6 +105,42 @@ PS> ./Setup_SRE_Guacamole_Servers.ps1 -shmId -sreId +
+ +Update SSL certificate + +![Powershell: five minutes](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=five%20minutes) at {{file_folder}} `./deployment/secure_research_environment/setup` + +```powershell +PS> ./Update_SRE_SSL_Certificate.ps1 -shmId -sreId +``` + +- where `` is the {ref}`management environment ID ` for this SHM +- where `` is the {ref}`secure research environment ID ` for this SRE +- where `` is an email address that will receive notifications when certificates are close to expiry + +```{tip} +`./Update_SRE_RDS_SSL_Certificate.ps1` should be run again whenever you want to update the certificate for this SRE. +``` + +```{caution} +`Let's Encrypt` will only issue **5 certificates per week** for a particular host (e.g. `rdg-sre-sandbox.project.turingsafehaven.ac.uk`). +To reduce the number of calls to `Let's Encrypt`, the signed certificates are stored in the Key Vault for easy redeployment. +For production environments this should usually not be an issue. +``` + +````{important} +If you find yourself frequently redeploying a test environment and hit the `Let's Encrypt` certificate limit, you can use: + +```powershell +> ./Update_SRE_RDS_SSL_Certificate.ps1 -dryRun $true +``` + +to use the `Let's Encrypt` staging server, which will issue certificates more frequently. +These certificates will **not** be trusted by your browser, and so should not be used in production. +```` + +
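If you want to check which certificate an SRE is currently serving, and when it expires, standard `openssl` tooling can be used. A minimal sketch, using the example hostname from the caution above (substitute your own SRE's remote desktop FQDN):

```console
# Example hostname taken from the caution above
HOST="rdg-sre-sandbox.project.turingsafehaven.ac.uk"
# Fetch the served certificate and print its issuer and expiry date
openssl s_client -connect "${HOST}:443" -servername "${HOST}" </dev/null 2>/dev/null \
    | openssl x509 -noout -issuer -enddate
```

The issuer line also shows whether the certificate was signed by the production or staging `Let's Encrypt` CA, which is a quick way to confirm that a staging (`-dryRun`) certificate has not been left in place.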
+
Deploy web applications (CoCalc, CodiMD and GitLab) diff --git a/docs/deployment/deploy_sre_microsoft_rds.md b/docs/source/deployment/deploy_sre_microsoft_rds.md similarity index 97% rename from docs/deployment/deploy_sre_microsoft_rds.md rename to docs/source/deployment/deploy_sre_microsoft_rds.md index 6087246d74..3ea0cd14e2 100644 --- a/docs/deployment/deploy_sre_microsoft_rds.md +++ b/docs/source/deployment/deploy_sre_microsoft_rds.md @@ -2,6 +2,10 @@ # Deploy an SRE with Microsoft RDS +```{warning} +Support for Microsoft Remote Desktop is deprecated. Deployment scripts and related documentation will be removed in version `4.2.0` of the Data Safe Haven. +``` + These instructions will walk you through deploying a Secure Research Environment (SRE) that uses an existing Safe Haven Management (SHM) environment. ```{important} @@ -207,6 +211,10 @@ These certificates will **not** be trusted by your browser, and so should not be To complete the account setup, follow the instructions for password and MFA setup present in the {ref}`user guide `. +```{warning} +At present, only phone call identification works correctly with MS RDS. Do not attempt to use the Authenticator app. If you have both the Authenticator and phone call set up as authentication methods, select phone call as the default when intending to use the MS RDS interface. +``` + ### {{nut_and_bolt}} Test the Microsoft RDS remote desktop - Launch a local web browser on your **deployment machine** and go to `https://.` and log in with the user name and password you set up for the non-privileged user account. diff --git a/docs/deployment/index.md b/docs/source/deployment/index.md similarity index 100% rename from docs/deployment/index.md rename to docs/source/deployment/index.md diff --git a/docs/deployment/security_checklist.md b/docs/source/deployment/security_checklist.md similarity index 99% rename from docs/deployment/security_checklist.md rename to docs/source/deployment/security_checklist.md index f8dcc553c6..07e9339f2e 100644 --- a/docs/deployment/security_checklist.md +++ b/docs/source/deployment/security_checklist.md @@ -687,7 +687,7 @@ To test all the above, you will need to act both as the {ref}`role_system_manage ``` ```{attention} -{{white_check_mark}} **Verify that:** software uploaded to the by a non-admin can be read by administrators +{{white_check_mark}} **Verify that:** software uploaded by a non-admin can be read by administrators ``` ```{attention} diff --git a/docs/deployment/security_checklist/aad_additional_security_verification.png b/docs/source/deployment/security_checklist/aad_additional_security_verification.png similarity index 100% rename from docs/deployment/security_checklist/aad_additional_security_verification.png rename to docs/source/deployment/security_checklist/aad_additional_security_verification.png diff --git a/docs/deployment/security_checklist/aad_mfa_approve_signin_request.png b/docs/source/deployment/security_checklist/aad_mfa_approve_signin_request.png similarity index 100% rename from docs/deployment/security_checklist/aad_mfa_approve_signin_request.png rename to docs/source/deployment/security_checklist/aad_mfa_approve_signin_request.png diff --git a/docs/deployment/security_checklist/guacamole_srd_desktop.png b/docs/source/deployment/security_checklist/guacamole_srd_desktop.png similarity index 100% rename from docs/deployment/security_checklist/guacamole_srd_desktop.png rename to docs/source/deployment/security_checklist/guacamole_srd_desktop.png diff --git 
a/docs/deployment/security_checklist/login_no_mfa_guacamole.png b/docs/source/deployment/security_checklist/login_no_mfa_guacamole.png similarity index 100% rename from docs/deployment/security_checklist/login_no_mfa_guacamole.png rename to docs/source/deployment/security_checklist/login_no_mfa_guacamole.png diff --git a/docs/deployment/security_checklist/login_no_mfa_msrds.png b/docs/source/deployment/security_checklist/login_no_mfa_msrds.png similarity index 100% rename from docs/deployment/security_checklist/login_no_mfa_msrds.png rename to docs/source/deployment/security_checklist/login_no_mfa_msrds.png diff --git a/docs/deployment/security_checklist/msrds_dashboard_with_apps.png b/docs/source/deployment/security_checklist/msrds_dashboard_with_apps.png similarity index 100% rename from docs/deployment/security_checklist/msrds_dashboard_with_apps.png rename to docs/source/deployment/security_checklist/msrds_dashboard_with_apps.png diff --git a/docs/deployment/security_checklist/msrds_failed_to_connect.png b/docs/source/deployment/security_checklist/msrds_failed_to_connect.png similarity index 100% rename from docs/deployment/security_checklist/msrds_failed_to_connect.png rename to docs/source/deployment/security_checklist/msrds_failed_to_connect.png diff --git a/docs/deployment/security_checklist/msrds_srd_desktop.png b/docs/source/deployment/security_checklist/msrds_srd_desktop.png similarity index 100% rename from docs/deployment/security_checklist/msrds_srd_desktop.png rename to docs/source/deployment/security_checklist/msrds_srd_desktop.png diff --git a/docs/deployment/security_checklist/nsg_inbound_access.png b/docs/source/deployment/security_checklist/nsg_inbound_access.png similarity index 100% rename from docs/deployment/security_checklist/nsg_inbound_access.png rename to docs/source/deployment/security_checklist/nsg_inbound_access.png diff --git a/docs/deployment/security_checklist/nsg_outbound_access.png b/docs/source/deployment/security_checklist/nsg_outbound_access.png similarity index 100% rename from docs/deployment/security_checklist/nsg_outbound_access.png rename to docs/source/deployment/security_checklist/nsg_outbound_access.png diff --git a/docs/deployment/security_checklist/shmdc_website_deny.png b/docs/source/deployment/security_checklist/shmdc_website_deny.png similarity index 100% rename from docs/deployment/security_checklist/shmdc_website_deny.png rename to docs/source/deployment/security_checklist/shmdc_website_deny.png diff --git a/docs/deployment/security_checklist/shmdc_windows_update.png b/docs/source/deployment/security_checklist/shmdc_windows_update.png similarity index 100% rename from docs/deployment/security_checklist/shmdc_windows_update.png rename to docs/source/deployment/security_checklist/shmdc_windows_update.png diff --git a/docs/deployment/security_checklist/srd_installed_software.png b/docs/source/deployment/security_checklist/srd_installed_software.png similarity index 100% rename from docs/deployment/security_checklist/srd_installed_software.png rename to docs/source/deployment/security_checklist/srd_installed_software.png diff --git a/docs/deployment/security_checklist/srd_no_curl.png b/docs/source/deployment/security_checklist/srd_no_curl.png similarity index 100% rename from docs/deployment/security_checklist/srd_no_curl.png rename to docs/source/deployment/security_checklist/srd_no_curl.png diff --git a/docs/deployment/security_checklist/srd_no_internet.png b/docs/source/deployment/security_checklist/srd_no_internet.png similarity index 
100% rename from docs/deployment/security_checklist/srd_no_internet.png rename to docs/source/deployment/security_checklist/srd_no_internet.png diff --git a/docs/deployment/security_checklist/srd_no_nslookup.png b/docs/source/deployment/security_checklist/srd_no_nslookup.png similarity index 100% rename from docs/deployment/security_checklist/srd_no_nslookup.png rename to docs/source/deployment/security_checklist/srd_no_nslookup.png diff --git a/docs/deployment/security_checklist/srd_no_ssh_by_fqdn.png b/docs/source/deployment/security_checklist/srd_no_ssh_by_fqdn.png similarity index 100% rename from docs/deployment/security_checklist/srd_no_ssh_by_fqdn.png rename to docs/source/deployment/security_checklist/srd_no_ssh_by_fqdn.png diff --git a/docs/deployment/security_checklist/srd_no_ssh_by_ip.png b/docs/source/deployment/security_checklist/srd_no_ssh_by_ip.png similarity index 100% rename from docs/deployment/security_checklist/srd_no_ssh_by_ip.png rename to docs/source/deployment/security_checklist/srd_no_ssh_by_ip.png diff --git a/docs/deployment/security_checklist/srd_pypi_tier2_allowed.png b/docs/source/deployment/security_checklist/srd_pypi_tier2_allowed.png similarity index 100% rename from docs/deployment/security_checklist/srd_pypi_tier2_allowed.png rename to docs/source/deployment/security_checklist/srd_pypi_tier2_allowed.png diff --git a/docs/deployment/security_checklist/srd_pypi_tier2_denied.png b/docs/source/deployment/security_checklist/srd_pypi_tier2_denied.png similarity index 100% rename from docs/deployment/security_checklist/srd_pypi_tier2_denied.png rename to docs/source/deployment/security_checklist/srd_pypi_tier2_denied.png diff --git a/docs/deployment/security_checklist/srd_pypi_tier3_allowed.png b/docs/source/deployment/security_checklist/srd_pypi_tier3_allowed.png similarity index 100% rename from docs/deployment/security_checklist/srd_pypi_tier3_allowed.png rename to docs/source/deployment/security_checklist/srd_pypi_tier3_allowed.png diff --git a/docs/deployment/security_checklist/srd_pypi_tier3_denied.png b/docs/source/deployment/security_checklist/srd_pypi_tier3_denied.png similarity index 100% rename from docs/deployment/security_checklist/srd_pypi_tier3_denied.png rename to docs/source/deployment/security_checklist/srd_pypi_tier3_denied.png diff --git a/docs/deployment/security_checklist/ssh_connection_fail.png b/docs/source/deployment/security_checklist/ssh_connection_fail.png similarity index 100% rename from docs/deployment/security_checklist/ssh_connection_fail.png rename to docs/source/deployment/security_checklist/ssh_connection_fail.png diff --git a/docs/deployment/snippets/00_symbols.partial.md b/docs/source/deployment/snippets/00_symbols.partial.md similarity index 100% rename from docs/deployment/snippets/00_symbols.partial.md rename to docs/source/deployment/snippets/00_symbols.partial.md diff --git a/docs/deployment/snippets/01_prerequisites.partial.md b/docs/source/deployment/snippets/01_prerequisites.partial.md similarity index 88% rename from docs/deployment/snippets/01_prerequisites.partial.md rename to docs/source/deployment/snippets/01_prerequisites.partial.md index 254b8ef79b..3f3cf684c7 100644 --- a/docs/deployment/snippets/01_prerequisites.partial.md +++ b/docs/source/deployment/snippets/01_prerequisites.partial.md @@ -14,7 +14,7 @@ ### {{beginner}} Software - `PowerShell` with support for Azure - - Install [PowerShell v7.0 or above](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell) + - We recommend 
[installing](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell) the [latest stable release](https://learn.microsoft.com/en-us/powershell/scripting/install/powershell-support-lifecycle?view=powershell-7.3) of Powershell. We have most recently tested deployment using version `7.3.2`. - Install the [Azure PowerShell Module](https://docs.microsoft.com/en-us/powershell/azure/install-az-ps) using `Install-Module -Name Az -RequiredVersion 5.0.0 -Repository PSGallery` - `Microsoft Remote Desktop` - On macOS this can be installed from the [Apple store](https://www.apple.com/app-store/) diff --git a/docs/deployment/snippets/02_configuration.partial.md b/docs/source/deployment/snippets/02_configuration.partial.md similarity index 100% rename from docs/deployment/snippets/02_configuration.partial.md rename to docs/source/deployment/snippets/02_configuration.partial.md diff --git a/docs/deployment/snippets/03_01_remove_data.partial.md b/docs/source/deployment/snippets/03_01_remove_data.partial.md similarity index 100% rename from docs/deployment/snippets/03_01_remove_data.partial.md rename to docs/source/deployment/snippets/03_01_remove_data.partial.md diff --git a/docs/deployment/snippets/03_02_register_sre.partial.md b/docs/source/deployment/snippets/03_02_register_sre.partial.md similarity index 100% rename from docs/deployment/snippets/03_02_register_sre.partial.md rename to docs/source/deployment/snippets/03_02_register_sre.partial.md diff --git a/docs/deployment/snippets/04_01_sre_dns.partial.md b/docs/source/deployment/snippets/04_01_sre_dns.partial.md similarity index 100% rename from docs/deployment/snippets/04_01_sre_dns.partial.md rename to docs/source/deployment/snippets/04_01_sre_dns.partial.md diff --git a/docs/deployment/snippets/04_02_manual_dns.partial.md b/docs/source/deployment/snippets/04_02_manual_dns.partial.md similarity index 100% rename from docs/deployment/snippets/04_02_manual_dns.partial.md rename to docs/source/deployment/snippets/04_02_manual_dns.partial.md diff --git a/docs/deployment/snippets/04_03_deploy_vnet.partial.md b/docs/source/deployment/snippets/04_03_deploy_vnet.partial.md similarity index 100% rename from docs/deployment/snippets/04_03_deploy_vnet.partial.md rename to docs/source/deployment/snippets/04_03_deploy_vnet.partial.md diff --git a/docs/deployment/snippets/05_storage_accounts.partial.md b/docs/source/deployment/snippets/05_storage_accounts.partial.md similarity index 100% rename from docs/deployment/snippets/05_storage_accounts.partial.md rename to docs/source/deployment/snippets/05_storage_accounts.partial.md diff --git a/docs/deployment/snippets/06_01_create_user_account.partial.md b/docs/source/deployment/snippets/06_01_create_user_account.partial.md similarity index 100% rename from docs/deployment/snippets/06_01_create_user_account.partial.md rename to docs/source/deployment/snippets/06_01_create_user_account.partial.md diff --git a/docs/deployment/snippets/07_deploy_webapps.partial.md b/docs/source/deployment/snippets/07_deploy_webapps.partial.md similarity index 100% rename from docs/deployment/snippets/07_deploy_webapps.partial.md rename to docs/source/deployment/snippets/07_deploy_webapps.partial.md diff --git a/docs/deployment/snippets/08_databases.partial.md b/docs/source/deployment/snippets/08_databases.partial.md similarity index 100% rename from docs/deployment/snippets/08_databases.partial.md rename to docs/source/deployment/snippets/08_databases.partial.md diff --git 
a/docs/deployment/snippets/09_single_srd.partial.md b/docs/source/deployment/snippets/09_single_srd.partial.md similarity index 100% rename from docs/deployment/snippets/09_single_srd.partial.md rename to docs/source/deployment/snippets/09_single_srd.partial.md diff --git a/docs/deployment/snippets/10_network_lockdown.partial.md b/docs/source/deployment/snippets/10_network_lockdown.partial.md similarity index 100% rename from docs/deployment/snippets/10_network_lockdown.partial.md rename to docs/source/deployment/snippets/10_network_lockdown.partial.md diff --git a/docs/deployment/snippets/11_configure_firewall.partial.md b/docs/source/deployment/snippets/11_configure_firewall.partial.md similarity index 100% rename from docs/deployment/snippets/11_configure_firewall.partial.md rename to docs/source/deployment/snippets/11_configure_firewall.partial.md diff --git a/docs/deployment/snippets/12_configure_monitoring.partial.md b/docs/source/deployment/snippets/12_configure_monitoring.partial.md similarity index 100% rename from docs/deployment/snippets/12_configure_monitoring.partial.md rename to docs/source/deployment/snippets/12_configure_monitoring.partial.md diff --git a/docs/deployment/snippets/13_enable_backup.partial.md b/docs/source/deployment/snippets/13_enable_backup.partial.md similarity index 100% rename from docs/deployment/snippets/13_enable_backup.partial.md rename to docs/source/deployment/snippets/13_enable_backup.partial.md diff --git a/docs/deployment/snippets/14_run_smoke_tests.partial.md b/docs/source/deployment/snippets/14_run_smoke_tests.partial.md similarity index 89% rename from docs/deployment/snippets/14_run_smoke_tests.partial.md rename to docs/source/deployment/snippets/14_run_smoke_tests.partial.md index a44f332d24..cd864a4c97 100644 --- a/docs/deployment/snippets/14_run_smoke_tests.partial.md +++ b/docs/source/deployment/snippets/14_run_smoke_tests.partial.md @@ -10,4 +10,4 @@ They are automatically uploaded to the SRD during the deployment step. - if any of the tests fail, check the `README.md` in this folder for help in diagnosing the issues - Copy `tests/test_jupyter.ipynb` to your home directory - activate each of the available Python versions in turn - - run `jupyter notebook` in each case and check that you can run the notebook and that all versions and paths match throughout. See {ref}`Available Python Versions `. + - run `jupyter notebook` in each case and check that you can run the notebook and that all versions and paths match throughout. See [Available Python and R versions](/roles/researcher/user_guide_guacamole.md#available-python-and-r-versions) diff --git a/docs/deployment/snippets/user_csv_format.partial.md b/docs/source/deployment/snippets/user_csv_format.partial.md similarity index 88% rename from docs/deployment/snippets/user_csv_format.partial.md rename to docs/source/deployment/snippets/user_csv_format.partial.md index 93a4e563f9..42940bf20d 100644 --- a/docs/deployment/snippets/user_csv_format.partial.md +++ b/docs/source/deployment/snippets/user_csv_format.partial.md @@ -44,7 +44,7 @@ This is **not** uploaded to their Data Safe Haven user account but is needed when sending account activation messages. ``` - - `GroupName`: [Optional] The name of the Active Directory security group(s) that the users should be added to (eg. `SG SANDBOX Research Users` ). + - `GroupName`: The name of the {ref}`security_groups` that the users should be added to. Users will need to be added to the relevant security group before they can access an SRE. 
You can also change this manually later on (see {ref}`adding_users_manually`). ```{tip} If the user needs to be added to multiple groups, separate them with a pipe-character ( `|` ). diff --git a/docs/design/architecture/architecture_mirrors.png b/docs/source/design/architecture/architecture_mirrors.png similarity index 100% rename from docs/design/architecture/architecture_mirrors.png rename to docs/source/design/architecture/architecture_mirrors.png diff --git a/docs/design/architecture/index.md b/docs/source/design/architecture/index.md similarity index 100% rename from docs/design/architecture/index.md rename to docs/source/design/architecture/index.md diff --git a/docs/design/architecture/safe_haven_architecture.png b/docs/source/design/architecture/safe_haven_architecture.png similarity index 100% rename from docs/design/architecture/safe_haven_architecture.png rename to docs/source/design/architecture/safe_haven_architecture.png diff --git a/docs/design/architecture/shm_architecture.png b/docs/source/design/architecture/shm_architecture.png similarity index 100% rename from docs/design/architecture/shm_architecture.png rename to docs/source/design/architecture/shm_architecture.png diff --git a/docs/design/architecture/shm_details.md b/docs/source/design/architecture/shm_details.md similarity index 100% rename from docs/design/architecture/shm_details.md rename to docs/source/design/architecture/shm_details.md diff --git a/docs/design/architecture/sre_architecture.png b/docs/source/design/architecture/sre_architecture.png similarity index 100% rename from docs/design/architecture/sre_architecture.png rename to docs/source/design/architecture/sre_architecture.png diff --git a/docs/design/architecture/sre_details.md b/docs/source/design/architecture/sre_details.md similarity index 100% rename from docs/design/architecture/sre_details.md rename to docs/source/design/architecture/sre_details.md diff --git a/docs/design/index.md b/docs/source/design/index.md similarity index 100% rename from docs/design/index.md rename to docs/source/design/index.md diff --git a/docs/design/security/index.md b/docs/source/design/security/index.md similarity index 100% rename from docs/design/security/index.md rename to docs/source/design/security/index.md diff --git a/docs/design/security/objectives.md b/docs/source/design/security/objectives.md similarity index 100% rename from docs/design/security/objectives.md rename to docs/source/design/security/objectives.md diff --git a/docs/design/security/reference_configuration.md b/docs/source/design/security/reference_configuration.md similarity index 100% rename from docs/design/security/reference_configuration.md rename to docs/source/design/security/reference_configuration.md diff --git a/docs/design/security/sample_security_controls.png b/docs/source/design/security/sample_security_controls.png similarity index 100% rename from docs/design/security/sample_security_controls.png rename to docs/source/design/security/sample_security_controls.png diff --git a/docs/design/security/technical_controls.md b/docs/source/design/security/technical_controls.md similarity index 100% rename from docs/design/security/technical_controls.md rename to docs/source/design/security/technical_controls.md diff --git a/docs/index.md b/docs/source/index.md similarity index 98% rename from docs/index.md rename to docs/source/index.md index 19caa8d65c..b048a58177 100644 --- a/docs/index.md +++ b/docs/source/index.md @@ -11,7 +11,7 @@ processes/index.md roles/index.md ``` -```{image} 
static/scriberia_diagram.jpg +```{image} _static/scriberia_diagram.jpg :alt: Data Safe Haven cartoon by Scriberia for The Alan Turing Institute :align: center ``` diff --git a/docs/overview/index.md b/docs/source/overview/index.md similarity index 100% rename from docs/overview/index.md rename to docs/source/overview/index.md diff --git a/docs/overview/sensitivity_tiers.md b/docs/source/overview/sensitivity_tiers.md similarity index 100% rename from docs/overview/sensitivity_tiers.md rename to docs/source/overview/sensitivity_tiers.md diff --git a/docs/overview/what_is_dsh.md b/docs/source/overview/what_is_dsh.md similarity index 100% rename from docs/overview/what_is_dsh.md rename to docs/source/overview/what_is_dsh.md diff --git a/docs/overview/why_use_dsh.md b/docs/source/overview/why_use_dsh.md similarity index 100% rename from docs/overview/why_use_dsh.md rename to docs/source/overview/why_use_dsh.md diff --git a/docs/processes/data_access_controls.md b/docs/source/processes/data_access_controls.md similarity index 100% rename from docs/processes/data_access_controls.md rename to docs/source/processes/data_access_controls.md diff --git a/docs/processes/data_classification.md b/docs/source/processes/data_classification.md similarity index 100% rename from docs/processes/data_classification.md rename to docs/source/processes/data_classification.md diff --git a/docs/processes/data_egress.md b/docs/source/processes/data_egress.md similarity index 96% rename from docs/processes/data_egress.md rename to docs/source/processes/data_egress.md index 7e007672d2..c4a8f4b102 100644 --- a/docs/processes/data_egress.md +++ b/docs/source/processes/data_egress.md @@ -5,6 +5,8 @@ The outputs of the work being done in a Data Safe Haven are also stored in the SRE. There are technical and policy controls that must be satisfied before any data can be brought out of the Data Safe Haven. +(process_data_egress_classification)= + ## Classification The first stage of egressing outputs is to classify them. @@ -20,6 +22,8 @@ Each time you want to bring code or data out of the environment, you'll have to Once the outputs are classified, the classification team should let the {ref}`role_system_manager` know who will be performing the egress and how they want this to be done. +(process_data_egress_removal)= + ## Bringing data out of the environment Talk to your {ref}`role_system_manager` to discuss possible methods of bringing data out of the environments. diff --git a/docs/processes/data_handling.md b/docs/source/processes/data_handling.md similarity index 100% rename from docs/processes/data_handling.md rename to docs/source/processes/data_handling.md diff --git a/docs/source/processes/data_ingress.md b/docs/source/processes/data_ingress.md new file mode 100644 index 0000000000..171453b05b --- /dev/null +++ b/docs/source/processes/data_ingress.md @@ -0,0 +1,98 @@ +(process_data_ingress)= + +# Data ingress process + +## Introduction + +The Data Safe Haven has various technical controls to ensure data security. +However, the processes and contractual agreements that the **Dataset Provider** agrees to are equally important. + +## Preparing data + +This section has some recommendations for preparing input data for the Data Safe Haven. + +### Avoid archives + +The input data is presented to researchers on a read-only filesystem. +This means that researchers will be unable to extract inputs in-place. +Instead, they would have to extract to a read-write space within the environment. 
+This would unnecessarily duplicate the data and lead to a greater risk of loss of integrity, as the extracted copies can be modified (intentionally or accidentally). + +### Avoid name clashes + +In the recommended upload process there is no protection against overwriting files. +It is therefore important to avoid re-using pathnames, as later uploads will replace existing files with the same pathname. + +To help avoid name clashes, if you are uploading multiple data sets, you should use unique names for each data set. +For example, if the data sets are single files, use unique file names. +If data sets consist of multiple files, collect them in uniquely named directories. + +If there are multiple data providers uploading data for a single work package, each provider should use a uniquely named directory, or prepend their files with a unique name. + +### Describe the data + +Explaining the structure and format of the data will help researchers be most effective. +It is a good idea to upload a plain text file explaining the directory structure, file format, data columns, meaning of special terms, _etc._. +This file will be easy for researchers to read using tools inside the environment and they will be able to find it alongside the data. + +### Data integrity + +You will want to ensure that researchers have the correct data and that they can verify this. +We recommend using [checksums](https://www.redhat.com/sysadmin/hashing-checksums) to do this. + +A checksum is a short string computed in a one-way process from some data. +A small change in the data (even a single bit) will result in a different checksum. +We can therefore use checksums to verify that data has not been changed. +In the safe haven this is useful for verifying that the data inside the environment is complete and correct. +It proves the data has not been modified or corrupted during transfer. + +We recommend the `md5sum` and `sha256sum` tools, which implement the MD5 and SHA-256 hashing algorithms respectively. +They are standard tools available on many operating systems and are included in the Data Safe Haven. +`md5sum` is fast and sufficient for integrity checks. +`sha256sum` is slower but more secure; it offers better protection against malicious modification. + +You can generate a checksum file, which can be used to verify the integrity of files. +If you upload this file, then researchers will be able to independently verify data integrity within the environment. + +Here are instructions to generate a checksum file using `md5sum` for a data set stored in a directory called `data`. + +```console +find ./data/ -type f,l -exec md5sum {} + > hashes.txt +``` + +`find` searches the `data` directory for files and symbolic links (`-type f,l`). +`find` also runs the checksum command `md5sum` on all matching files (`-exec md5sum {} +`). +Finally, the checksums are written to a file called `hashes.txt` (`> hashes.txt`). + +The data can then be checked by comparing it to the checksums. + +```console +md5sum -c hashes.txt +``` + +If a file has changed, the command will return a non-zero exit code (an error). +The failing files will be listed as `: FAILED` in the output. +Those files can be easily identified using `grep`: + +```console +md5sum -c hashes.txt | grep FAILED +``` + +To use the SHA-256 algorithm, replace `md5sum` with `sha256sum` in the above commands. + +## Bringing data into the environment + +```{attention} +Before starting any data ingress, make sure that you have gone through the {ref}`data classification process `.
+``` + +Talk to your {ref}`role_system_manager` to discuss possible methods of bringing data into the environments. +It may be convenient to use [Azure Storage Explorer](https://azure.microsoft.com/en-us/products/storage/storage-explorer/). +In this case you will not need log-in credentials, as your {ref}`role_system_manager` can provide a short-lived secure access token which will let you upload data. + +```{tip} +You may want to keep the following considerations in mind when transferring data in order to reduce the chance of a data breach +- use of short-lived access tokens limits the time within which an attacker can operate +- letting your {ref}`role_system_manager` know a fixed IP address you will be connecting from (eg. a corporate VPN) limits the places an attacker can operate from +- communicating with your {ref}`role_system_manager` through a secure out-of-band channel (eg. encrypted email) reduces the chances that an attacker can intercept or alter your messages in transit +``` diff --git a/docs/processes/data_transfer_protocol.md b/docs/source/processes/data_transfer_protocol.md similarity index 100% rename from docs/processes/data_transfer_protocol.md rename to docs/source/processes/data_transfer_protocol.md diff --git a/docs/processes/index.md b/docs/source/processes/index.md similarity index 85% rename from docs/processes/index.md rename to docs/source/processes/index.md index ce375164fa..617361298d 100644 --- a/docs/processes/index.md +++ b/docs/source/processes/index.md @@ -10,6 +10,7 @@ data_ingress.md data_egress.md data_access_controls.md data_transfer_protocol.md +software_package_approval.md ``` [Data handling](data_handling.md) @@ -29,3 +30,6 @@ data_transfer_protocol.md [Data transfer protocol](data_transfer_protocol.md) : A description of the protocol used for data transfer at the Turing + +[Software approval policy](software_package_approval.md) +: The checks needed before a software package can be approved for use in Tier 3 environments diff --git a/docs/source/processes/software_package_approval.md b/docs/source/processes/software_package_approval.md new file mode 100644 index 0000000000..cf14f4ea6b --- /dev/null +++ b/docs/source/processes/software_package_approval.md @@ -0,0 +1,78 @@ +# Software approval policy + +To maximise the usability of the secure research environments, we pre-install certain software packages that are deemed broadly useful to a cross section of researchers, thus making them available to all users. + +Other software packages which are only of interest to a subset of users can be made available for download from remote package repositories. +Currently, we support PyPI (Python) and CRAN (R) as remote repositories. + +For higher {ref}`sensitivity tiers ` ({ref}`policy_tier_3` and above), only a subset of packages are made available in this way. +This subset of packages constitutes an "allowlist" of packages that have been vetted to mitigate the risk of introducing malicious or unsound software into the secure environment. + +```{warning} +The Data Safe Haven team manages a default allowlist, but individual deployments may be using their own versions. +Check with your {ref}`role_system_manager` what is being used in your case +``` + +This page sets out the policy for adding software packages to the default allowlist and/or software to the pre-installed software list. +It also describes the procedure that users of the secure environment should follow to request new additions to the allowlist. 
+ +## Background + +Given the safeguards afforded by the safe haven environment, and the separation of responsibilities between its constituent resources, the level of risk associated with the introduction of third-party software packages is considered low. +Moreover, access to the environment is carefully controlled and there is a presumption of trust in the individual researchers to whom access is granted. + +Nevertheless, the introduction of any software into the safe haven must be considered against the potential risks of: + +- approved users having access to data to which they shouldn't (e.g. from data mixing) +- unapproved users having access to data (e.g. from a data breach) +- poisoning of data and/or outputs +- resource misuse (allocation of computational resources for unintended or wasteful purposes). + +Such risks may originate unwittingly, from a user who wants to "just get the job done", or from a user, network team member or administrator acting maliciously. + +Specific risks which this policy aims to mitigate include: + +- package name squatting (allowlisting a similarly-named package instead of the intended one) +- privilege escalation attacks (enabling a user to gain elevated access permissions) +- unauthorised data ingress (in particular, it is possible to upload arbitrary data to PyPI without review) + +(package_inclusion_policy)= + +## Policy + +- For each supported repository, three package lists will be maintained: + - a core allowlist of broadly useful packages that should be pre-installed in each environment + - an extra allowlist of packages that may be useful for specific projects + - an expanded list to be made available from the package repositories consisting of the core and extra packages plus their dependencies +- Users may request to add packages to these allowlists via the {ref}`package request procedure `. + - In the interests of improving researcher productivity, the aim will be to accommodate such requests, provided there are no outweighing security concerns associated with the package or its dependencies. +- Requests will be reviewed by the project team using the information provided by the user when making the request +- If approved, a requested package will be added to either the core or extra allowlist (as appropriate) + +(package_inclusion_criteria)= + +### Criteria for inclusion in core + +Only software that is considered broadly useful to a cross-section of researchers should be included in core. + +To meet this condition, a package should: + +- implement at least one generic (i.e. not domain-specific) statistical algorithm or method, or +- provide support for a cross-cutting analysis technique (e.g. geospatial data analysis, NLP), or +- facilitate data science or software development best practices (e.g. for robustness, correctness, reproducibility), or +- enhance the presentational features of the programming language (e.g. for producing plots, notebooks, articles, websites), or +- enhance the usability of the programming language or development environment (e.g. RStudio, PyCharm) + +(package_request_procedure)= + +## Package request/review procedure + +- A user requests a package by opening a `Software package request` issue on the Data Safe Haven GitHub repository, including responses to the following questions: + - Is this package the most widely supported for the intended purpose? + - What will you be able to do with this package that you can't currently do? What alternatives are there?
+ - What risks to data integrity/security might arise from including this package or its dependencies? +- A member of the project team reviews the request according to the terms of the {ref}`package_inclusion_policy`. +- The reviewer adds their decision (accept/reject) to the issue and notifies the user who made the request. + - If the decision is to reject, the reviewer must include an explanation. Any subsequent request for the same package should address the specific concern raised. + - If the decision is to accept, a pull request should be made that will add the package to the appropriate list. +- Once the pull request is approved, system administrators of any running deployment can decide whether to update to the new allowlist definitions. diff --git a/docs/roles/data_provider_representative/azcopy_warning.png b/docs/source/roles/data_provider_representative/azcopy_warning.png similarity index 100% rename from docs/roles/data_provider_representative/azcopy_warning.png rename to docs/source/roles/data_provider_representative/azcopy_warning.png diff --git a/docs/roles/data_provider_representative/azure_storage_explorer_connect.png b/docs/source/roles/data_provider_representative/azure_storage_explorer_connect.png similarity index 100% rename from docs/roles/data_provider_representative/azure_storage_explorer_connect.png rename to docs/source/roles/data_provider_representative/azure_storage_explorer_connect.png diff --git a/docs/roles/data_provider_representative/azure_storage_explorer_container.png b/docs/source/roles/data_provider_representative/azure_storage_explorer_container.png similarity index 100% rename from docs/roles/data_provider_representative/azure_storage_explorer_container.png rename to docs/source/roles/data_provider_representative/azure_storage_explorer_container.png diff --git a/docs/roles/data_provider_representative/azure_storage_explorer_error.png b/docs/source/roles/data_provider_representative/azure_storage_explorer_error.png similarity index 100% rename from docs/roles/data_provider_representative/azure_storage_explorer_error.png rename to docs/source/roles/data_provider_representative/azure_storage_explorer_error.png diff --git a/docs/roles/data_provider_representative/index.md b/docs/source/roles/data_provider_representative/index.md similarity index 100% rename from docs/roles/data_provider_representative/index.md rename to docs/source/roles/data_provider_representative/index.md diff --git a/docs/roles/index.md b/docs/source/roles/index.md similarity index 100% rename from docs/roles/index.md rename to docs/source/roles/index.md diff --git a/docs/roles/investigator/data_egress.md b/docs/source/roles/investigator/data_egress.md similarity index 95% rename from docs/roles/investigator/data_egress.md rename to docs/source/roles/investigator/data_egress.md index 59f3a7f27b..60f7cf3f4d 100644 --- a/docs/roles/investigator/data_egress.md +++ b/docs/source/roles/investigator/data_egress.md @@ -6,7 +6,7 @@ Once you have finished working with the data for your project, you'll have to eg ## Classification The first stage of egressing outputs is to classify them. -This follows the {ref}`same workflow ` as for {ref}`data ingress `. +This follows the {ref}`same workflow ` as for {ref}`data ingress `. ```{hint} Get the same people who ran the ingress classification process to do this - {ref}`Data Provider Representive `, {ref}`role_investigator` and {ref}`role_referee` (optional). 
diff --git a/docs/roles/investigator/data_ingress.md b/docs/source/roles/investigator/data_ingress.md similarity index 96% rename from docs/roles/investigator/data_ingress.md rename to docs/source/roles/investigator/data_ingress.md index d6786c3aa6..bff92040a4 100644 --- a/docs/roles/investigator/data_ingress.md +++ b/docs/source/roles/investigator/data_ingress.md @@ -1,4 +1,4 @@ -(role_investigator_egress)= +(role_investigator_ingress)= # Data ingress process diff --git a/docs/roles/investigator/index.md b/docs/source/roles/investigator/index.md similarity index 100% rename from docs/roles/investigator/index.md rename to docs/source/roles/investigator/index.md diff --git a/docs/roles/programme_manager/index.md b/docs/source/roles/programme_manager/index.md similarity index 100% rename from docs/roles/programme_manager/index.md rename to docs/source/roles/programme_manager/index.md diff --git a/docs/roles/project_manager/data_egress.md b/docs/source/roles/project_manager/data_egress.md similarity index 100% rename from docs/roles/project_manager/data_egress.md rename to docs/source/roles/project_manager/data_egress.md diff --git a/docs/roles/project_manager/data_ingress.md b/docs/source/roles/project_manager/data_ingress.md similarity index 100% rename from docs/roles/project_manager/data_ingress.md rename to docs/source/roles/project_manager/data_ingress.md diff --git a/docs/roles/project_manager/index.md b/docs/source/roles/project_manager/index.md similarity index 100% rename from docs/roles/project_manager/index.md rename to docs/source/roles/project_manager/index.md diff --git a/docs/roles/project_manager/project_lifecycle.md b/docs/source/roles/project_manager/project_lifecycle.md similarity index 100% rename from docs/roles/project_manager/project_lifecycle.md rename to docs/source/roles/project_manager/project_lifecycle.md diff --git a/docs/roles/referee/index.md b/docs/source/roles/referee/index.md similarity index 100% rename from docs/roles/referee/index.md rename to docs/source/roles/referee/index.md diff --git a/docs/roles/researcher/available_software.md b/docs/source/roles/researcher/available_software.md similarity index 100% rename from docs/roles/researcher/available_software.md rename to docs/source/roles/researcher/available_software.md diff --git a/docs/roles/researcher/index.md b/docs/source/roles/researcher/index.md similarity index 100% rename from docs/roles/researcher/index.md rename to docs/source/roles/researcher/index.md diff --git a/docs/roles/researcher/snippets/02_account_setup.partial.md b/docs/source/roles/researcher/snippets/02_account_setup.partial.md similarity index 100% rename from docs/roles/researcher/snippets/02_account_setup.partial.md rename to docs/source/roles/researcher/snippets/02_account_setup.partial.md diff --git a/docs/roles/researcher/snippets/03_01_prerequisites.partial.md b/docs/source/roles/researcher/snippets/03_01_prerequisites.partial.md similarity index 100% rename from docs/roles/researcher/snippets/03_01_prerequisites.partial.md rename to docs/source/roles/researcher/snippets/03_01_prerequisites.partial.md diff --git a/docs/roles/researcher/snippets/03_02_srd_login.partial.md b/docs/source/roles/researcher/snippets/03_02_srd_login.partial.md similarity index 94% rename from docs/roles/researcher/snippets/03_02_srd_login.partial.md rename to docs/source/roles/researcher/snippets/03_02_srd_login.partial.md index de6812a39f..d44628f777 100644 --- a/docs/roles/researcher/snippets/03_02_srd_login.partial.md +++ 
b/docs/source/roles/researcher/snippets/03_02_srd_login.partial.md @@ -20,7 +20,7 @@ ```{caution} We recommend _not_ including special characters in your password as the keyboard layout expected by the login screen may be different from the one you're using. - - if you want to reset your password, follow the steps defined in the [**Set your password**](#set-a-password) section above. + - if you want to reset your password, follow the steps defined in the [Password and MFA](#password-and-mfa) section above. - if you want to continue with special characters in your password, please test that they are being entered correctly by typing them in the username field. ``` diff --git a/docs/roles/researcher/snippets/04_using_srd.partial.md b/docs/source/roles/researcher/snippets/04_using_srd.partial.md similarity index 99% rename from docs/roles/researcher/snippets/04_using_srd.partial.md rename to docs/source/roles/researcher/snippets/04_using_srd.partial.md index 7b8fefcfc0..92c0ed1cfc 100644 --- a/docs/roles/researcher/snippets/04_using_srd.partial.md +++ b/docs/source/roles/researcher/snippets/04_using_srd.partial.md @@ -78,8 +78,6 @@ For example: :align: center ``` -(available_python_and_r_versions)= - ### {{snake}} Available Python and R versions Typing `R` at the command line will give you the system version of `R` with many custom packages pre-installed. diff --git a/docs/roles/researcher/snippets/05_share_files.partial.md b/docs/source/roles/researcher/snippets/05_share_files.partial.md similarity index 100% rename from docs/roles/researcher/snippets/05_share_files.partial.md rename to docs/source/roles/researcher/snippets/05_share_files.partial.md diff --git a/docs/roles/researcher/snippets/06_cocalc.partial.md b/docs/source/roles/researcher/snippets/06_cocalc.partial.md similarity index 91% rename from docs/roles/researcher/snippets/06_cocalc.partial.md rename to docs/source/roles/researcher/snippets/06_cocalc.partial.md index 7940aa7f2b..79b9af5b2b 100644 --- a/docs/roles/researcher/snippets/06_cocalc.partial.md +++ b/docs/source/roles/researcher/snippets/06_cocalc.partial.md @@ -1,5 +1,9 @@ ## {{couple}} Collaborate on code using CoCalc +```{warning} +Support for `CoCalc` is deprecated. Deployment scripts and related documentation will be removed in version `4.2.0` of the Data Safe Haven. +``` + `CoCalc` is a collaborative calculation and data science environment. It lets you work with others on projects, using `Jupyter`, `LaTeX`, `Octave`, `Python` or `R` in collaborative notebooks. 
diff --git a/docs/roles/researcher/snippets/07_gitlab.partial.md b/docs/source/roles/researcher/snippets/07_gitlab.partial.md similarity index 100% rename from docs/roles/researcher/snippets/07_gitlab.partial.md rename to docs/source/roles/researcher/snippets/07_gitlab.partial.md diff --git a/docs/roles/researcher/snippets/08_codimd.partial.md b/docs/source/roles/researcher/snippets/08_codimd.partial.md similarity index 100% rename from docs/roles/researcher/snippets/08_codimd.partial.md rename to docs/source/roles/researcher/snippets/08_codimd.partial.md diff --git a/docs/roles/researcher/snippets/10_databases.partial.md b/docs/source/roles/researcher/snippets/10_databases.partial.md similarity index 100% rename from docs/roles/researcher/snippets/10_databases.partial.md rename to docs/source/roles/researcher/snippets/10_databases.partial.md diff --git a/docs/roles/researcher/snippets/11_report_bugs.partial.md b/docs/source/roles/researcher/snippets/11_report_bugs.partial.md similarity index 100% rename from docs/roles/researcher/snippets/11_report_bugs.partial.md rename to docs/source/roles/researcher/snippets/11_report_bugs.partial.md diff --git a/docs/roles/researcher/snippets/12_end_matter.partial.md b/docs/source/roles/researcher/snippets/12_end_matter.partial.md similarity index 100% rename from docs/roles/researcher/snippets/12_end_matter.partial.md rename to docs/source/roles/researcher/snippets/12_end_matter.partial.md diff --git a/docs/roles/researcher/snippets/13_MFA.partial.md b/docs/source/roles/researcher/snippets/13_MFA.partial.md similarity index 95% rename from docs/roles/researcher/snippets/13_MFA.partial.md rename to docs/source/roles/researcher/snippets/13_MFA.partial.md index bc4812d709..2af52825b3 100644 --- a/docs/roles/researcher/snippets/13_MFA.partial.md +++ b/docs/source/roles/researcher/snippets/13_MFA.partial.md @@ -147,6 +147,10 @@ This is known as multi-factor authentication (MFA). #### {{iphone}} Authenticator app registration +```{warning} +If the SRE you are using will use the Microsoft Remote Desktop interface, do not attempt to use the Authenticator app. At present, only phone call identification works correctly with MS RDS. If you have both the Authenticator and phone call set up as methods, select phone call as the default when intending to use the MS RDS interface. 
+``` + - If you want to use the Microsoft Authenticator app for MFA (which will work if you have wifi but no phone signal) then click on `+ Add sign-in method` and select `Authenticator app` ```{image} user_guide/account_setup_mfa_add_authenticator_app.png diff --git a/docs/roles/researcher/snippets/software_database.partial.md b/docs/source/roles/researcher/snippets/software_database.partial.md similarity index 100% rename from docs/roles/researcher/snippets/software_database.partial.md rename to docs/source/roles/researcher/snippets/software_database.partial.md diff --git a/docs/roles/researcher/snippets/software_editors.partial.md b/docs/source/roles/researcher/snippets/software_editors.partial.md similarity index 100% rename from docs/roles/researcher/snippets/software_editors.partial.md rename to docs/source/roles/researcher/snippets/software_editors.partial.md diff --git a/docs/roles/researcher/snippets/software_languages.partial.md b/docs/source/roles/researcher/snippets/software_languages.partial.md similarity index 100% rename from docs/roles/researcher/snippets/software_languages.partial.md rename to docs/source/roles/researcher/snippets/software_languages.partial.md diff --git a/docs/roles/researcher/snippets/software_other.partial.md b/docs/source/roles/researcher/snippets/software_other.partial.md similarity index 100% rename from docs/roles/researcher/snippets/software_other.partial.md rename to docs/source/roles/researcher/snippets/software_other.partial.md diff --git a/docs/roles/researcher/snippets/software_presentation.partial.md b/docs/source/roles/researcher/snippets/software_presentation.partial.md similarity index 100% rename from docs/roles/researcher/snippets/software_presentation.partial.md rename to docs/source/roles/researcher/snippets/software_presentation.partial.md diff --git a/docs/roles/researcher/user_guide.md b/docs/source/roles/researcher/user_guide.md similarity index 100% rename from docs/roles/researcher/user_guide.md rename to docs/source/roles/researcher/user_guide.md diff --git a/docs/roles/researcher/user_guide/access_desktop_applications.png b/docs/source/roles/researcher/user_guide/access_desktop_applications.png similarity index 100% rename from docs/roles/researcher/user_guide/access_desktop_applications.png rename to docs/source/roles/researcher/user_guide/access_desktop_applications.png diff --git a/docs/roles/researcher/user_guide/account_setup_captcha.png b/docs/source/roles/researcher/user_guide/account_setup_captcha.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_captcha.png rename to docs/source/roles/researcher/user_guide/account_setup_captcha.png diff --git a/docs/roles/researcher/user_guide/account_setup_forgotten_password.png b/docs/source/roles/researcher/user_guide/account_setup_forgotten_password.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_forgotten_password.png rename to docs/source/roles/researcher/user_guide/account_setup_forgotten_password.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_add_authenticator_app.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_add_authenticator_app.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_add_authenticator_app.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_add_authenticator_app.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_additional_security_verification.png 
b/docs/source/roles/researcher/user_guide/account_setup_mfa_additional_security_verification.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_additional_security_verification.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_additional_security_verification.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_allow_notifications.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_allow_notifications.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_allow_notifications.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_allow_notifications.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_app_qrcode.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_app_qrcode.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_app_qrcode.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_app_qrcode.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_authenticator_app_approved.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_authenticator_app_approved.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_authenticator_app_approved.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_authenticator_app_approved.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_authenticator_app_test.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_authenticator_app_test.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_authenticator_app_test.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_authenticator_app_test.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_dashboard_phone_only.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_dashboard_phone_only.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_dashboard_phone_only.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_dashboard_phone_only.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_dashboard_two_methods.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_dashboard_two_methods.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_dashboard_two_methods.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_dashboard_two_methods.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_download_authenticator_app.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_download_authenticator_app.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_download_authenticator_app.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_download_authenticator_app.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_registered_phone.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_registered_phone.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_registered_phone.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_registered_phone.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_verified_phone.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_verified_phone.png similarity index 100% rename from 
docs/roles/researcher/user_guide/account_setup_mfa_verified_phone.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_verified_phone.png diff --git a/docs/roles/researcher/user_guide/account_setup_mfa_verifying_phone.png b/docs/source/roles/researcher/user_guide/account_setup_mfa_verifying_phone.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_mfa_verifying_phone.png rename to docs/source/roles/researcher/user_guide/account_setup_mfa_verifying_phone.png diff --git a/docs/roles/researcher/user_guide/account_setup_more_information_required.png b/docs/source/roles/researcher/user_guide/account_setup_more_information_required.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_more_information_required.png rename to docs/source/roles/researcher/user_guide/account_setup_more_information_required.png diff --git a/docs/roles/researcher/user_guide/account_setup_new_password.png b/docs/source/roles/researcher/user_guide/account_setup_new_password.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_new_password.png rename to docs/source/roles/researcher/user_guide/account_setup_new_password.png diff --git a/docs/roles/researcher/user_guide/account_setup_new_password_sign_in.png b/docs/source/roles/researcher/user_guide/account_setup_new_password_sign_in.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_new_password_sign_in.png rename to docs/source/roles/researcher/user_guide/account_setup_new_password_sign_in.png diff --git a/docs/roles/researcher/user_guide/account_setup_verify_phone.png b/docs/source/roles/researcher/user_guide/account_setup_verify_phone.png similarity index 100% rename from docs/roles/researcher/user_guide/account_setup_verify_phone.png rename to docs/source/roles/researcher/user_guide/account_setup_verify_phone.png diff --git a/docs/roles/researcher/user_guide/cocalc_account_creation.png b/docs/source/roles/researcher/user_guide/cocalc_account_creation.png similarity index 100% rename from docs/roles/researcher/user_guide/cocalc_account_creation.png rename to docs/source/roles/researcher/user_guide/cocalc_account_creation.png diff --git a/docs/roles/researcher/user_guide/cocalc_homepage.png b/docs/source/roles/researcher/user_guide/cocalc_homepage.png similarity index 100% rename from docs/roles/researcher/user_guide/cocalc_homepage.png rename to docs/source/roles/researcher/user_guide/cocalc_homepage.png diff --git a/docs/roles/researcher/user_guide/cocalc_security_warning.png b/docs/source/roles/researcher/user_guide/cocalc_security_warning.png similarity index 100% rename from docs/roles/researcher/user_guide/cocalc_security_warning.png rename to docs/source/roles/researcher/user_guide/cocalc_security_warning.png diff --git a/docs/roles/researcher/user_guide/codimd_access_options.png b/docs/source/roles/researcher/user_guide/codimd_access_options.png similarity index 100% rename from docs/roles/researcher/user_guide/codimd_access_options.png rename to docs/source/roles/researcher/user_guide/codimd_access_options.png diff --git a/docs/roles/researcher/user_guide/codimd_logon.png b/docs/source/roles/researcher/user_guide/codimd_logon.png similarity index 100% rename from docs/roles/researcher/user_guide/codimd_logon.png rename to docs/source/roles/researcher/user_guide/codimd_logon.png diff --git a/docs/roles/researcher/user_guide/codimd_publishing.png b/docs/source/roles/researcher/user_guide/codimd_publishing.png 
similarity index 100% rename from docs/roles/researcher/user_guide/codimd_publishing.png rename to docs/source/roles/researcher/user_guide/codimd_publishing.png diff --git a/docs/roles/researcher/user_guide/db_azure_data_studio.png b/docs/source/roles/researcher/user_guide/db_azure_data_studio.png similarity index 100% rename from docs/roles/researcher/user_guide/db_azure_data_studio.png rename to docs/source/roles/researcher/user_guide/db_azure_data_studio.png diff --git a/docs/roles/researcher/user_guide/db_dbeaver_mssql.png b/docs/source/roles/researcher/user_guide/db_dbeaver_mssql.png similarity index 100% rename from docs/roles/researcher/user_guide/db_dbeaver_mssql.png rename to docs/source/roles/researcher/user_guide/db_dbeaver_mssql.png diff --git a/docs/roles/researcher/user_guide/db_dbeaver_postgres_connection.png b/docs/source/roles/researcher/user_guide/db_dbeaver_postgres_connection.png similarity index 100% rename from docs/roles/researcher/user_guide/db_dbeaver_postgres_connection.png rename to docs/source/roles/researcher/user_guide/db_dbeaver_postgres_connection.png diff --git a/docs/roles/researcher/user_guide/db_dbeaver_postgres_ignore.png b/docs/source/roles/researcher/user_guide/db_dbeaver_postgres_ignore.png similarity index 100% rename from docs/roles/researcher/user_guide/db_dbeaver_postgres_ignore.png rename to docs/source/roles/researcher/user_guide/db_dbeaver_postgres_ignore.png diff --git a/docs/roles/researcher/user_guide/gitlab_clone_url.png b/docs/source/roles/researcher/user_guide/gitlab_clone_url.png similarity index 100% rename from docs/roles/researcher/user_guide/gitlab_clone_url.png rename to docs/source/roles/researcher/user_guide/gitlab_clone_url.png diff --git a/docs/roles/researcher/user_guide/gitlab_merge_request_details.png b/docs/source/roles/researcher/user_guide/gitlab_merge_request_details.png similarity index 100% rename from docs/roles/researcher/user_guide/gitlab_merge_request_details.png rename to docs/source/roles/researcher/user_guide/gitlab_merge_request_details.png diff --git a/docs/roles/researcher/user_guide/gitlab_new_merge_request.png b/docs/source/roles/researcher/user_guide/gitlab_new_merge_request.png similarity index 100% rename from docs/roles/researcher/user_guide/gitlab_new_merge_request.png rename to docs/source/roles/researcher/user_guide/gitlab_new_merge_request.png diff --git a/docs/roles/researcher/user_guide/gitlab_screenshot_login.png b/docs/source/roles/researcher/user_guide/gitlab_screenshot_login.png similarity index 100% rename from docs/roles/researcher/user_guide/gitlab_screenshot_login.png rename to docs/source/roles/researcher/user_guide/gitlab_screenshot_login.png diff --git a/docs/roles/researcher/user_guide/guacamole_dashboard.png b/docs/source/roles/researcher/user_guide/guacamole_dashboard.png similarity index 100% rename from docs/roles/researcher/user_guide/guacamole_dashboard.png rename to docs/source/roles/researcher/user_guide/guacamole_dashboard.png diff --git a/docs/roles/researcher/user_guide/guacamole_mfa.png b/docs/source/roles/researcher/user_guide/guacamole_mfa.png similarity index 100% rename from docs/roles/researcher/user_guide/guacamole_mfa.png rename to docs/source/roles/researcher/user_guide/guacamole_mfa.png diff --git a/docs/roles/researcher/user_guide/logon_environment_guacamole.png b/docs/source/roles/researcher/user_guide/logon_environment_guacamole.png similarity index 100% rename from docs/roles/researcher/user_guide/logon_environment_guacamole.png rename to 
docs/source/roles/researcher/user_guide/logon_environment_guacamole.png diff --git a/docs/roles/researcher/user_guide/logon_environment_msrds.png b/docs/source/roles/researcher/user_guide/logon_environment_msrds.png similarity index 100% rename from docs/roles/researcher/user_guide/logon_environment_msrds.png rename to docs/source/roles/researcher/user_guide/logon_environment_msrds.png diff --git a/docs/roles/researcher/user_guide/msrds_dashboard.png b/docs/source/roles/researcher/user_guide/msrds_dashboard.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_dashboard.png rename to docs/source/roles/researcher/user_guide/msrds_dashboard.png diff --git a/docs/roles/researcher/user_guide/msrds_no_work_resources.png b/docs/source/roles/researcher/user_guide/msrds_no_work_resources.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_no_work_resources.png rename to docs/source/roles/researcher/user_guide/msrds_no_work_resources.png diff --git a/docs/roles/researcher/user_guide/msrds_srd_connection.png b/docs/source/roles/researcher/user_guide/msrds_srd_connection.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_srd_connection.png rename to docs/source/roles/researcher/user_guide/msrds_srd_connection.png diff --git a/docs/roles/researcher/user_guide/msrds_srd_connection_failure.png b/docs/source/roles/researcher/user_guide/msrds_srd_connection_failure.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_srd_connection_failure.png rename to docs/source/roles/researcher/user_guide/msrds_srd_connection_failure.png diff --git a/docs/roles/researcher/user_guide/msrds_srd_rdc_screen.png b/docs/source/roles/researcher/user_guide/msrds_srd_rdc_screen.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_srd_rdc_screen.png rename to docs/source/roles/researcher/user_guide/msrds_srd_rdc_screen.png diff --git a/docs/roles/researcher/user_guide/msrds_srd_security_fingerprint.png b/docs/source/roles/researcher/user_guide/msrds_srd_security_fingerprint.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_srd_security_fingerprint.png rename to docs/source/roles/researcher/user_guide/msrds_srd_security_fingerprint.png diff --git a/docs/roles/researcher/user_guide/msrds_unexpected_certificate_error.png b/docs/source/roles/researcher/user_guide/msrds_unexpected_certificate_error.png similarity index 100% rename from docs/roles/researcher/user_guide/msrds_unexpected_certificate_error.png rename to docs/source/roles/researcher/user_guide/msrds_unexpected_certificate_error.png diff --git a/docs/roles/researcher/user_guide/srd_login_failure.png b/docs/source/roles/researcher/user_guide/srd_login_failure.png similarity index 100% rename from docs/roles/researcher/user_guide/srd_login_failure.png rename to docs/source/roles/researcher/user_guide/srd_login_failure.png diff --git a/docs/roles/researcher/user_guide/srd_login_screen.png b/docs/source/roles/researcher/user_guide/srd_login_screen.png similarity index 100% rename from docs/roles/researcher/user_guide/srd_login_screen.png rename to docs/source/roles/researcher/user_guide/srd_login_screen.png diff --git a/docs/roles/researcher/user_guide/srd_xfce_initial.png b/docs/source/roles/researcher/user_guide/srd_xfce_initial.png similarity index 100% rename from docs/roles/researcher/user_guide/srd_xfce_initial.png rename to docs/source/roles/researcher/user_guide/srd_xfce_initial.png diff --git 
a/docs/roles/researcher/user_guide_guacamole.md b/docs/source/roles/researcher/user_guide_guacamole.md similarity index 63% rename from docs/roles/researcher/user_guide_guacamole.md rename to docs/source/roles/researcher/user_guide_guacamole.md index ec6ff16af9..1dd58f5608 100644 --- a/docs/roles/researcher/user_guide_guacamole.md +++ b/docs/source/roles/researcher/user_guide_guacamole.md @@ -2,10 +2,50 @@ # User Guide: Apache Guacamole -```{include} snippets/01_introduction.partial.md -:relative-images: +## {{beginner}} Introduction + +{{tada}} Welcome to the Turing Data Safe Haven! {{tada}} + +Secure research environments (SREs) for analysis of sensitive datasets are essential to give data providers confidence that their datasets will be kept secure over the course of a project. +The Data Safe Haven is a prescription for how to set up one or more SREs and give users access to them. +The Data Safe Haven SRE design is aimed at allowing groups of researchers to work together on projects that involve sensitive or confidential datasets at scale. +Our goal is to ensure that you are able to implement the most cutting-edge data science techniques while maintaining all ethical and legal responsibilities of information governance and access. + +The data you are working on will have been classified into one of five sensitivity tiers, ranging from open data at Tier 0, to highly sensitive and high-risk data at Tier 4. +The tiers are defined by the most sensitive data in your project, and may be increased if the combination of data is deemed to require additional levels of security. +You can read more about this process in our policy paper: _Arenas et al, 2019_, [`arXiv:1908.08737`](https://arxiv.org/abs/1908.08737). + +The level of sensitivity of your data determines whether you have access to the internet within the SRE and whether you are allowed to copy and paste between the secure research environment and other windows on your computer. +This means you may be limited in which data science tools you are allowed to install. +You will find that many software packages are already available, and the administrator of the SRE will ingress - bring into the environment - as many additional resources as possible. + +```{important} +Please read this user guide carefully and remember to refer back to it when you have questions. +In many cases the answer is already here, but if you think this resource could be clearer, please let us know so we can improve the documentation for future users. ``` +### Definitions + +The following definitions might be useful during the rest of this guide: + +Secure Research Environment (SRE) +: the environment that you will be using to access the sensitive data. + +Data Safe Haven +: the overall project that details how to create and manage one or more SREs. + +(user_guide_username_domain)= +Username domain +: the domain (for example `projects.turingsafehaven.ac.uk`) which your user account will belong to. Multiple SREs can share the same domain for managing users in common. + +(user_guide_sre_id)= +SRE ID +: each SRE has a unique short ID, for example `sandbox`, which your {ref}`System Manager ` will use to distinguish different SREs in the same Data Safe Haven. + +(user_guide_sre_url)= +SRE URL +: each SRE has a unique URL (for example `sandbox.projects.turingsafehaven.ac.uk`) which is used to access the data.
+ (roles_researcher_user_guide_setup_mfa)= ```{include} snippets/02_account_setup.partial.md diff --git a/docs/roles/researcher/user_guide_msrds.md b/docs/source/roles/researcher/user_guide_msrds.md similarity index 62% rename from docs/roles/researcher/user_guide_msrds.md rename to docs/source/roles/researcher/user_guide_msrds.md index 7a4752d63f..263bf56650 100644 --- a/docs/roles/researcher/user_guide_msrds.md +++ b/docs/source/roles/researcher/user_guide_msrds.md @@ -2,10 +2,54 @@ # User Guide: Microsoft Remote Desktop -```{include} snippets/01_introduction.partial.md -:relative-images: +```{warning} +Support for Microsoft Remote Desktop is deprecated. Deployment scripts and related documentation will be removed in version `4.2.0` of the Data Safe Haven. +``` + +## {{beginner}} Introduction + +{{tada}} Welcome to the Turing Data Safe Haven! {{tada}} + +Secure research environments (SREs) for analysis of sensitive datasets are essential to give data providers confidence that their datasets will be kept secure over the course of a project. +The Data Safe Haven is a prescription for how to set up one or more SREs and give users access to them. +The Data Safe Haven SRE design is aimed at allowing groups of researchers to work together on projects that involve sensitive or confidential datasets at scale. +Our goal is to ensure that you are able to implement the most cutting-edge data science techniques while maintaining all ethical and legal responsibilities of information governance and access. + +The data you are working on will have been classified into one of five sensitivity tiers, ranging from open data at Tier 0, to highly sensitive and high-risk data at Tier 4. +The tiers are defined by the most sensitive data in your project, and may be increased if the combination of data is deemed to require additional levels of security. +You can read more about this process in our policy paper: _Arenas et al, 2019_, [`arXiv:1908.08737`](https://arxiv.org/abs/1908.08737). + +The level of sensitivity of your data determines whether you have access to the internet within the SRE and whether you are allowed to copy and paste between the secure research environment and other windows on your computer. +This means you may be limited in which data science tools you are allowed to install. +You will find that many software packages are already available, and the administrator of the SRE will ingress - bring into the environment - as many additional resources as possible. + +```{important} +Please read this user guide carefully and remember to refer back to it when you have questions. +In many cases the answer is already here, but if you think this resource could be clearer, please let us know so we can improve the documentation for future users. ``` +### Definitions + +The following definitions might be useful during the rest of this guide: + +Secure Research Environment (SRE) +: the environment that you will be using to access the sensitive data. + +Data Safe Haven +: the overall project that details how to create and manage one or more SREs. + +(user_guide_username_domain_2)= +Username domain +: the domain (for example `projects.turingsafehaven.ac.uk`) which your user account will belong to. Multiple SREs can share the same domain for managing users in common. + +(user_guide_sre_id_2)= +SRE ID +: each SRE has a unique short ID, for example `sandbox`, which your {ref}`System Manager ` will use to distinguish different SREs in the same Data Safe Haven.
+ +(user_guide_sre_url_2)= +SRE URL +: each SRE has a unique URL (for example `sandbox.projects.turingsafehaven.ac.uk`) which is used to access the data. + ```{include} snippets/02_account_setup.partial.md :relative-images: ``` @@ -28,7 +72,7 @@ - Open a **private/incognito** browser session, so that you don't pick up any existing Microsoft logins -- Go to the {ref}`SRE URL ` given by your {ref}`System Manager `. +- Go to the {ref}`SRE URL ` given by your {ref}`System Manager `. ```{note} Our example user, Ada Lovelace, participating in the `sandbox` project at a Turing Data Study Group, would navigate to `https://sandbox.projects.turingsafehaven.ac.uk`. diff --git a/docs/roles/system_manager/administrator_guide/backup_instances_blobs.png b/docs/source/roles/system_manager/administrator_guide/backup_instances_blobs.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_instances_blobs.png rename to docs/source/roles/system_manager/administrator_guide/backup_instances_blobs.png diff --git a/docs/roles/system_manager/administrator_guide/backup_instances_disks.png b/docs/source/roles/system_manager/administrator_guide/backup_instances_disks.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_instances_disks.png rename to docs/source/roles/system_manager/administrator_guide/backup_instances_disks.png diff --git a/docs/roles/system_manager/administrator_guide/backup_progress_disk_1.png b/docs/source/roles/system_manager/administrator_guide/backup_progress_disk_1.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_progress_disk_1.png rename to docs/source/roles/system_manager/administrator_guide/backup_progress_disk_1.png diff --git a/docs/roles/system_manager/administrator_guide/backup_progress_disk_2.png b/docs/source/roles/system_manager/administrator_guide/backup_progress_disk_2.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_progress_disk_2.png rename to docs/source/roles/system_manager/administrator_guide/backup_progress_disk_2.png diff --git a/docs/roles/system_manager/administrator_guide/backup_progress_disk_3.png b/docs/source/roles/system_manager/administrator_guide/backup_progress_disk_3.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_progress_disk_3.png rename to docs/source/roles/system_manager/administrator_guide/backup_progress_disk_3.png diff --git a/docs/roles/system_manager/administrator_guide/backup_restore_disk.png b/docs/source/roles/system_manager/administrator_guide/backup_restore_disk.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_restore_disk.png rename to docs/source/roles/system_manager/administrator_guide/backup_restore_disk.png diff --git a/docs/roles/system_manager/administrator_guide/backup_select_containers_validate_blobs.png b/docs/source/roles/system_manager/administrator_guide/backup_select_containers_validate_blobs.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_select_containers_validate_blobs.png rename to docs/source/roles/system_manager/administrator_guide/backup_select_containers_validate_blobs.png diff --git a/docs/roles/system_manager/administrator_guide/backup_select_restore_time_blobs.png b/docs/source/roles/system_manager/administrator_guide/backup_select_restore_time_blobs.png similarity index 100% rename from 
docs/roles/system_manager/administrator_guide/backup_select_restore_time_blobs.png rename to docs/source/roles/system_manager/administrator_guide/backup_select_restore_time_blobs.png diff --git a/docs/roles/system_manager/administrator_guide/backup_select_snapshot_validate_disks.png b/docs/source/roles/system_manager/administrator_guide/backup_select_snapshot_validate_disks.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_select_snapshot_validate_disks.png rename to docs/source/roles/system_manager/administrator_guide/backup_select_snapshot_validate_disks.png diff --git a/docs/roles/system_manager/administrator_guide/backup_swap_disk_after.png b/docs/source/roles/system_manager/administrator_guide/backup_swap_disk_after.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_swap_disk_after.png rename to docs/source/roles/system_manager/administrator_guide/backup_swap_disk_after.png diff --git a/docs/roles/system_manager/administrator_guide/backup_swap_disk_before.png b/docs/source/roles/system_manager/administrator_guide/backup_swap_disk_before.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/backup_swap_disk_before.png rename to docs/source/roles/system_manager/administrator_guide/backup_swap_disk_before.png diff --git a/docs/roles/system_manager/administrator_guide/connect_azure_storage.png b/docs/source/roles/system_manager/administrator_guide/connect_azure_storage.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/connect_azure_storage.png rename to docs/source/roles/system_manager/administrator_guide/connect_azure_storage.png diff --git a/docs/roles/system_manager/administrator_guide/internal_mirror_packages.png b/docs/source/roles/system_manager/administrator_guide/internal_mirror_packages.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/internal_mirror_packages.png rename to docs/source/roles/system_manager/administrator_guide/internal_mirror_packages.png diff --git a/docs/roles/system_manager/administrator_guide/login_certificate_expiry.png b/docs/source/roles/system_manager/administrator_guide/login_certificate_expiry.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/login_certificate_expiry.png rename to docs/source/roles/system_manager/administrator_guide/login_certificate_expiry.png diff --git a/docs/roles/system_manager/administrator_guide/login_password_login.png b/docs/source/roles/system_manager/administrator_guide/login_password_login.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/login_password_login.png rename to docs/source/roles/system_manager/administrator_guide/login_password_login.png diff --git a/docs/source/roles/system_manager/administrator_guide/no_recent_connections.png b/docs/source/roles/system_manager/administrator_guide/no_recent_connections.png new file mode 100644 index 0000000000..5b333e69c9 Binary files /dev/null and b/docs/source/roles/system_manager/administrator_guide/no_recent_connections.png differ diff --git a/docs/source/roles/system_manager/administrator_guide/password_reset_failure.png b/docs/source/roles/system_manager/administrator_guide/password_reset_failure.png new file mode 100644 index 0000000000..4598d138cd Binary files /dev/null and b/docs/source/roles/system_manager/administrator_guide/password_reset_failure.png differ diff --git 
a/docs/roles/system_manager/administrator_guide/read_only_sas_token.png b/docs/source/roles/system_manager/administrator_guide/read_only_sas_token.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/read_only_sas_token.png rename to docs/source/roles/system_manager/administrator_guide/read_only_sas_token.png diff --git a/docs/source/roles/system_manager/administrator_guide/srd_login_failure.png b/docs/source/roles/system_manager/administrator_guide/srd_login_failure.png new file mode 100644 index 0000000000..b8b65973d9 Binary files /dev/null and b/docs/source/roles/system_manager/administrator_guide/srd_login_failure.png differ diff --git a/docs/roles/system_manager/administrator_guide/srd_login_opening_port.png b/docs/source/roles/system_manager/administrator_guide/srd_login_opening_port.png similarity index 100% rename from docs/roles/system_manager/administrator_guide/srd_login_opening_port.png rename to docs/source/roles/system_manager/administrator_guide/srd_login_opening_port.png diff --git a/docs/source/roles/system_manager/administrator_guide/srd_login_prompt.png b/docs/source/roles/system_manager/administrator_guide/srd_login_prompt.png new file mode 100644 index 0000000000..dda78cb4c7 Binary files /dev/null and b/docs/source/roles/system_manager/administrator_guide/srd_login_prompt.png differ diff --git a/docs/roles/system_manager/index.md b/docs/source/roles/system_manager/index.md similarity index 84% rename from docs/roles/system_manager/index.md rename to docs/source/roles/system_manager/index.md index 45056e34db..8bf48dcd4b 100644 --- a/docs/roles/system_manager/index.md +++ b/docs/source/roles/system_manager/index.md @@ -31,5 +31,5 @@ Typically these might be members of an institutional IT team. [Managing web applications](manage_webapps.md) : How to access virtual machines hosting web applications -[Removing deployed components](manage_deployments.md) -: Removing an SRE or a complete, deployment Data Safe Haven as an administrator. +[Managing Data Safe Haven deployments](manage_deployments.md) +: Managing, altering and removing an SRE or a complete deployment as a Data Safe Haven administrator. 
diff --git a/docs/roles/system_manager/manage_costs.md b/docs/source/roles/system_manager/manage_costs.md similarity index 100% rename from docs/roles/system_manager/manage_costs.md rename to docs/source/roles/system_manager/manage_costs.md diff --git a/docs/roles/system_manager/manage_data.md b/docs/source/roles/system_manager/manage_data.md similarity index 87% rename from docs/roles/system_manager/manage_data.md rename to docs/source/roles/system_manager/manage_data.md index 0bf6a56b63..9ebd6013d3 100644 --- a/docs/roles/system_manager/manage_data.md +++ b/docs/source/roles/system_manager/manage_data.md @@ -23,7 +23,7 @@ The following steps show how to generate a temporary write-only upload token tha - Click `Networking` under `Settings` and paste the data provider's IP address as one of those allowed under the `Firewall` header, then hit the save icon in the top left - From the `Overview` tab, click the link to `Containers` (in the middle of the page) - Click `ingress` -- Click `Shared access signature` under `Settings` and do the following: +- Click `Shared access tokens` under `Settings` and do the following: - Under `Permissions`, check these boxes: - `Write` - `List` @@ -52,16 +52,29 @@ Software ingress must go through the same approval process as is the case for da ## Data egress +```{important} +Any data egress must be signed off by the {ref}`role_data_provider_representative`, {ref}`role_investigator` and {ref}`role_referee` (if applicable). +``` + +```{important} +Classification of output must be completed **before** an egress link is created. + +The classification process is explained {ref}`here `. +``` + +The {ref}`role_system_manager` creates a time-limited and IP restricted link to remove data from the environment, after the outputs have been classified and approved for release. 
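+
+As an illustrative sketch only (this is not one of the Data Safe Haven deployment scripts), a similar read-only, time-limited SAS token for the `egress` container could in principle be generated with the Azure CLI; the storage account name and expiry time below are placeholders:
+
+```console
+# Hypothetical example: generate a user-delegation SAS token for the 'egress' container.
+# The 'rl' permissions correspond to the 'Read' and 'List' boxes in the portal steps below.
+az storage container generate-sas \
+    --account-name <storage account name> \
+    --name egress \
+    --permissions rl \
+    --expiry 2024-01-31T17:00Z \
+    --auth-mode login --as-user \
+    --https-only \
+    --output tsv
+```
+
+The resulting token would still need to be appended to the container URL (`https://<storage account name>.blob.core.windows.net/egress?<token>`) before sharing it with the person performing the download.
+The documented portal-based steps are as follows: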
+ - In the Azure portal select `Subscriptions` then navigate to the subscription containing the relevant SHM - Search for the resource group: `RG_SHM__PERSISTENT_DATA`, then click through to the storage account called: `data` (where `` is a random string) -- Click `Networking` under `Settings` to check the list of pre-approved IP addresses allowed under the `Firewall` header and check your own IP address to ensure you are connecting from one of these +- Click `Networking` under `Settings` to check the list of pre-approved IP addresses allowed under the `Firewall` header + - Ensure that the IP address of the person to receive the outputs is listed and enter it if not - Click `Containers` under `Data storage` - Click `egress` -- Click `Shared access signature` under `Settings` and do the following: +- Click `Shared access tokens` under `Settings` and do the following: - Under `Permissions`, check these boxes: - `Read` - `List` - - Set a time window in the `Start and expiry date/time` that gives you enough time to extract the data + - Set a time window in the `Start and expiry date/time` that gives enough time for the person who will perform the secure egress download to do so - Leave everything else as default click `Generate SAS token and URL` ```{image} administrator_guide/read_only_sas_token.png @@ -70,28 +83,7 @@ Software ingress must go through the same approval process as is the case for da ``` - Leave this portal window open and move to the next step -- Open `Azure Storage Explorer` ([download](https://azure.microsoft.com/en-us/products/storage/storage-explorer/) it if you don't have it) -- Click the socket image on the left hand side - - ```{image} ../data_provider_representative/azure_storage_explorer_connect.png - :alt: Azure Storage Explorer connection - :align: center - ``` - -- On `Select Resource`, choose `Blob container` -- On `Select Connection Method`, choose `Shared access signature URL (SAS)` and hit `Next` - - ```{image} administrator_guide/connect_azure_storage.png - :alt: Connect with SAS token - :align: center - ``` - -- On `Enter Connection Info`: - - Set the `Display name` to "egress" (or choose an informative name) - - Copy the `Blob SAS URL` from your Azure portal session into the `Blob container SAS URL` box and hit `Next` -- On the `Summary` page, hit `Connect` -- On the left hand side, the connection should show up under `Local & Attached > Storage Accounts > (Attached Containers) > Blob Containers > ingress (SAS)` -- You should now be able to securely download the data from the Safe Haven's output volume by highlighting the relevant file(s) and hitting the `Download` button +- The appropriate person should now be able to download data by following {ref}`these instructions ` ### The output volume @@ -103,7 +95,7 @@ For more info on shared SRE storage volumes, consult the {ref}`Safe Haven User G ### {{card_file_box}} Restoring blobs -Blob containers in backed up storage accounts are protected by [operational backup](https://learn.microsoft.com/en-us/azure/backup/blob-backup-overview#how-operational-backup-works). +Blob containers in backed up storage accounts are protected by [operational backup](https://learn.microsoft.com/en-us/azure/backup/blob-backup-overview#how-the-operational-backup-works). It is possible to restore the state of the blobs to an earlier point in time, up to twelve weeks in the past. 
The blob containers covered by the protection for each SRE are the diff --git a/docs/source/roles/system_manager/manage_deployments.md b/docs/source/roles/system_manager/manage_deployments.md new file mode 100644 index 0000000000..7b01adb3dc --- /dev/null +++ b/docs/source/roles/system_manager/manage_deployments.md @@ -0,0 +1,191 @@ +(administrator_manage_deployments)= + +# Managing Data Safe Haven deployments + +```{important} +This document assumes that you already have access to a {ref}`Safe Haven Management (SHM) environment ` and one or more {ref}`Secure Research Environments (SREs) ` that are linked to it. +``` + +```{include} ../../deployment/snippets/00_symbols.partial.md +:relative-images: +``` + +(renew_ssl)= + +## {{alarm_clock}} Renewing SRE Domain Certificates + +The remote desktop frontend of an SRE will need to periodically have its SSL certificate renewed so that it can be accessed via HTTPS. +After each 90 day period that the SRE is live, re-run the script to update the certificate. + +![Powershell: five minutes](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=ten%20minutes) at {{file_folder}} `./deployment/secure_research_environment/setup` + +```powershell +PS> ./Update_SRE_SSL_Certificate.ps1 -shmId -sreId +``` + +- where `` is the {ref}`management environment ID ` for this SHM +- where `` is the {ref}`secure research environment ID ` for this SRE + +(resize_vm)= + +## {{arrow_upper_right}} Resize the Virtual Machine (VM) of a Secure Research Desktop (SRD) + +Sometimes during a project that uses a deployed SRE, researchers may find the available compute inadequate for their purposes and wish to increase the size of the SRD's VM. The **simplest way to resize a VM is via the Azure Portal**, but it can also be done via script. + +To resize via the Azure Portal: + +- Log into the Azure portal and locate the VM inside the Resource Group called `RG_SHM__SRE__COMPUTE` +- [Follow these instructions](https://learn.microsoft.com/en-us/azure/virtual-machines/resize-vm?tabs=portal) in the Azure portal + +
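For reference, the same in-place resize can be scripted with generic Az PowerShell commands. This is a sketch under assumed names (the resource group, VM name and target size are hypothetical) and is an alternative to, not a replacement for, the Data Safe Haven helper script described next.

```powershell
# Sketch: in-place resize of an SRD VM (hypothetical resource group, VM name and size).
$resourceGroup = "RG_SHM_BLUE_SRE_SANDBOX_COMPUTE"
$vmName = "SRE-SANDBOX-160-SRD"
$vm = Get-AzVM -ResourceGroupName $resourceGroup -Name $vmName
$vm.HardwareProfile.VmSize = "Standard_NC6s_v3"        # target size; the VM restarts when resized
Update-AzVM -ResourceGroupName $resourceGroup -VM $vm
# (Get-AzVMSize -Location "uksouth").Name lists the sizes available in a region.
```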
+ +To resize via script: + + +- Log into the Azure portal and locate the VM inside the Resource Group called `RG_SHM__SRE__COMPUTE` +- Make a note of the last octet of the IP address + +![Powershell: ten minutes](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=ten%20minutes) at {{file_folder}} `./deployment/secure_research_environment/setup` + +```powershell +PS> ./Add_Single_SRD.ps1 -shmId -sreId -ipLastOctet [-vmSize ] -Upgrade -Force +``` + +- where `` is the {ref}`management environment ID ` for this SHM +- where `` is the {ref}`secure research environment ID ` for this SRE +- where `` is the last octet of the IP address (check what this is in the Azure Portal) +- where `` is the new [Azure VM size](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes) +- where `` is required to ensure the old VM is replaced +- where `` ensures that `` works even when the VM is built with the same image + +
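As a concrete illustration, upgrading an existing SRD to a GPU-capable size might look like the following; the SHM ID, SRE ID, IP octet and VM size shown here are hypothetical example values, not defaults from this repository.

```powershell
PS> ./Add_Single_SRD.ps1 -shmId "blue" -sreId "sandbox" -ipLastOctet 160 -vmSize "Standard_NC6s_v3" -Upgrade -Force
```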
+ +```{tip} +If the new `VM size` you want isn't shown as available in the Azure Portal, there are several steps you can take. + +Firstly, try **stopping the VM** and checking again whether the size you want is available, as this can reveal additional options that aren't shown whilst the VM is running. For example, when resizing to an N-series VM in Azure (see {ref}`using_gpus`), we've found that NVIDIA options such as the NVv3-series are not always shown as available. + +Next, you can try to **request an increase** in the vCPU quota for the VM family of the desired VM: +- Navigate to the Azure Portal and on the subscription page, click `Usage + quotas` under `Settings` +- Choose the family appropriate to the VM that you want to resize to, and select a region appropriate for the SRE +- Click the pen icon and set the `New Limit` to at least the number of vCPUs required by the VM that you want, then click submit +- After the request is accepted, resize the VM as above +- In some cases, the quota increase may require a request to be submitted to Microsoft +``` + +(add_new_srd)= + +## {{heavy_plus_sign}} Add a new SRD + +The `-VmSizes` parameter provided when deploying the SRE (with the `Deploy_SRE.ps1` script) determines how many SRDs are created and how large each one will be. + +To deploy a new SRD into the SRE environment, follow the instructions below: + +![Powershell: ten minutes](https://img.shields.io/static/v1?style=for-the-badge&logo=powershell&label=local&color=blue&message=ten%20minutes) at {{file_folder}} `./deployment/secure_research_environment/setup` + +```powershell +PS> ./Add_Single_SRD.ps1 -shmId -sreId -ipLastOctet [-vmSize ] +``` + +- where `` is the {ref}`management environment ID ` for this SHM +- where `` is the {ref}`secure research environment ID ` for this SRE +- where `` is the last octet of the IP address (this must be different from that of any other SRD VM) + +(using_gpus)= + +## {{minidisc}} Using GPUs in SRDs + +When you {ref}`resize_vm` or {ref}`add_new_srd` featuring a GPU (N-series in Azure), you'll need to ensure it has an NVIDIA GPU (as opposed to AMD or other). +See the [Azure docs](https://learn.microsoft.com/en-us/azure/virtual-machines/sizes-gpu) for more information. +This is because only NVIDIA GPUs support the drivers and CUDA libraries installed on the SRD image. + +To test that a GPU-enabled VM is working as expected, log into the SRE and type `nvidia-smi` into the terminal. + +## {{crown}} Performing operations that require superuser privileges + +If you need to perform any operations in the SRE that require root access, you will need to log into the `compute` VM via the Serial Console in the Azure Portal. + +```{include} snippets/01_console.partial.md +:relative-images: +``` + +## {{fire}} Remove a single SRE + +In order to tear down an SRE, use the following procedure: + +On your **deployment machine**. + +- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team +- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository +- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code +- NB. If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into.
+- Run the following script: + + ```powershell + ./SRE_Teardown.ps1 -shmId -sreId + ``` + +- If you provide the optional `-dryRun` parameter then the names of all affected resources will be printed, but nothing will be deleted + +## {{end}} Remove a complete Safe Haven + +### {{collision}} Tear down any attached SREs + +On your **deployment machine**. + +- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team +- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository +- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code + + ```{attention} + If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into. + ``` + +- For each SRE attached to the SHM, do the following: + - Tear down the SRE by running: + + ```powershell + ./SRE_Teardown.ps1 -sreId + ``` + + where the SRE ID is the one specified in the relevant config file + + ```{note} + If you provide the optional `-dryRun` parameter then the names of all affected resources will be printed, but nothing will be deleted + ``` + +### {{unlock}} Disconnect from the Azure Active Directory + +Connect to the **SHM Domain Controller (DC1)** via Remote Desktop Client over the SHM VPN connection + +- Log in as a **domain** user (ie. `@`) using the username and password obtained from the Azure portal +- If you see a warning dialog that the certificate cannot be verified as root, accept this and continue +- Open Powershell as an administrator + - Navigate to `C:\Installation` + - Run `.\Disconnect_AD.ps1` + - You will need to provide login credentials (including MFA if set up) for `@` + +```{attention} +Full disconnection of the Azure Active Directory can take up to 72 hours but is typically less. +If you are planning to install a new SHM connected to the same Azure Active Directory you may find the `AzureADConnect` installation step requires you to wait for the previous disconnection to complete. +``` + +### {{bomb}} Tear down the SHM + +On your **deployment machine**. + +- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team +- Open a `Powershell` terminal and navigate to the `deployment/administration` directory within the Data Safe Haven repository +- Ensure you are logged into `Azure` within `Powershell` using the command: `Connect-AzAccount`. This command will give you a URL and a short alphanumeric code. You will need to visit that URL in a web browser and enter the code + + ```{attention} + If your account is a guest in additional Azure tenants, you may need to add the `-Tenant ` flag, where `` is the ID of the Azure tenant you want to deploy into. + ``` + +- Tear down the SHM by running: + + ```powershell + ./SHM_Teardown.ps1 -shmId + ``` + + where `` is the {ref}`management environment ID ` specified in the configuration file. 
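Taken together, a complete decommissioning session might look like the sketch below. The tenant, SHM and SRE IDs are hypothetical placeholders, and the initial `-dryRun` pass only previews the resources that would be deleted.

```powershell
PS> Connect-AzAccount -Tenant "00000000-0000-0000-0000-000000000000"  # -Tenant only needed if you are a guest in other tenants
PS> cd ./deployment/administration
PS> ./SRE_Teardown.ps1 -shmId "blue" -sreId "sandbox" -dryRun  # preview which resources would be removed
PS> ./SRE_Teardown.ps1 -shmId "blue" -sreId "sandbox"          # repeat for every SRE attached to the SHM
PS> ./SHM_Teardown.ps1 -shmId "blue"                           # only after all SREs are removed and the AAD is disconnected
```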
diff --git a/docs/roles/system_manager/manage_users.md b/docs/source/roles/system_manager/manage_users.md similarity index 62% rename from docs/roles/system_manager/manage_users.md rename to docs/source/roles/system_manager/manage_users.md index 5fb4e89d91..f0fe89cdf9 100644 --- a/docs/roles/system_manager/manage_users.md +++ b/docs/source/roles/system_manager/manage_users.md @@ -6,11 +6,24 @@ This document assumes that you already have access to a {ref}`Safe Haven Management (SHM) environment ` and one or more {ref}`Secure Research Environments (SREs) ` that are linked to it. ``` +(create_new_users)= + ## {{beginner}} Create new users Users should be created on the main domain controller (DC1) in the SHM and synchronised to Azure Active Directory. A helper script for doing this is already uploaded to the domain controller - you will need to prepare a `CSV` file in the appropriate format for it. +(security_groups)= + +### {{lock}} SRE Security Groups + +Each user should be assigned to one or more Active Directory "security groups", which give them access to a given SRE with appropriate privileges. The security groups are named as follows: + +- `SG Research Users`: Default for most researchers. No special permissions. +- `SG Data Administrators`: Researchers who can create/modify/delete database table schemas. Given to a smaller number of researchers. Not granting this access to most users prevents them from creating/deleting arbitrary schemas, which is important because some SREs have their input data in database form. + +(generate_user_csv)= + ## {{scroll}} Generate user details CSV file ### {{car}} Using data classification app @@ -64,6 +77,66 @@ Once you're certain that you're adding a new user, make sure that the following - Be particularly careful never to use them to log in to any user-accessible VMs (such as the SRDs) ``` +(adding_users_manually)= + +### {{woman}} {{man}} Modifying user SRE access + +Users may have been added to one or more {ref}`security_groups` by setting the `GroupName` field in the `user_details_template.csv` (see {ref}`generate_user_csv`). Security Group assignments can also be modified manually as follows: + +- Log into the **SHM primary domain controller** (`DC1-SHM-`) VM using the login credentials {ref}`stored in Azure Key Vault ` +- In Server Manager click `Tools > Active Directory Users and Computers` +- Click on `Safe Haven Security Groups` +- Find the group that the user needs to be added to (see {ref}`security_groups`) +- Right click on the group and click `Properties` +- Click the `Members` tab +- To add a user click `Add...` + - Enter a part of the user's name and click `Check Names` + - Select the correct user and click `OK`, then click `OK` again until the window closes +- To remove a user click on the username of the person and then `Remove` + - Click `Yes` if you're sure this user should no longer have access to this SRE, then click `OK` again until the window closes +- Open a `Powershell` command window with elevated privileges +- Run `C:\Installation\Run_ADSync.ps1` + +### {{iphone}} Edit user details + +The `DC1` is the source of truth for user details. If these details need to be changed, they should be changed on the `DC1` and then synchronised to Azure AD.
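For administrators who prefer the command line, the same pattern of editing on the DC1 and then synchronising applies; the sketch below assumes a hypothetical username and phone number and should be run in an elevated PowerShell session on the DC1. The equivalent GUI walkthrough follows.

```powershell
# Sketch: update a user's mobile number on the DC1, then push the change to Azure AD.
# The username and number are hypothetical.
Set-ADUser -Identity "jbloggs" -MobilePhone "+44 7700 900123"
C:\Installation\Run_ADSync.ps1
```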
+ +- Log into the **SHM primary domain controller** (`DC1-SHM-`) VM using the login credentials {ref}`stored in Azure Key Vault ` +- In Server Manager click `Tools > Active Directory Users and Computers` +- Click on `Safe Haven Research Users` +- Find the person, right click on them and select `Properties` +- To edit a **phone number**, select the `Telephones` tab and edit the `Mobile` number + - Click `OK` to save the new number + - Open a `Powershell` command window with elevated privileges + - Run `C:\Installation\Run_ADSync.ps1` +- To edit a user's **email** or their **username** (or first name or last name) you'll need to delete the user entirely and recreate them, meaning they'll have to set up their accounts (including MFA) again + - Find the person, right click on them and click `Delete` + - Click `OK` + - Open a `Powershell` command window with elevated privileges + - Run `C:\Installation\Run_ADSync.ps1` + - Create a new csv (or edit an existing) one with the correct user details (see {ref}`create_new_users`) + - Run `C:\Installation\CreateUsers.ps1 ` + - Run `C:\Installation\Run_ADSync.ps1` +- You can check the changes you made were successful by logging into the Azure Portal as the AAD admin + - Open `Azure Active Directory` + - Click on `Users` under `Manage` and search for the user + - Click on the user and then `Edit properties` and confirm your changes propagated to Azure AD + +(deleting_users)= + +### {{x}} Deleting users + +- Log into the **SHM primary domain controller** (`DC1-SHM-`) VM using the login credentials {ref}`stored in Azure Key Vault ` +- In Server Manager click `Tools > Active Directory Users and Computers` +- Click on `Safe Haven Research Users` +- Find the person, right click on them and click `Delete` +- Open a `Powershell` command window with elevated privileges +- Run `C:\Installation\Run_ADSync.ps1` +- You can check the user is deleted by logging into the Azure Portal as the AAD admin + - Open `Azure Active Directory` + - Click on `Users` under `Manage` and search for the user + - Confirm the user is no longer present + ## {{calling}} Assign MFA licences ### {{hand}} Manually add licence to each user @@ -127,10 +200,7 @@ If the certificate for the SRE domain has expired, users will not be able to log ```{tip} **Solution**: Replace the SSL certificate with a new one -- Ensure you have the same version of the Data Safe Haven repository as was used by your deployment team -- Open a `Powershell` terminal and navigate to the `deployment/secure_research_environment/setup` directory within the Data Safe Haven repository -- Ensure you are logged into the `Azure` within `Powershell` using the command: `Connect-AzAccount` -- Run `./Update_SRE_RDS_Ssl_Certificate.ps1 -sreId `, where the SRE ID is the one specified in the config +- {ref}`renew_ssl` ``` ### {{red_circle}} Unable to log into remote desktop gateway @@ -162,7 +232,22 @@ Users are stuck at the `Opening remote port` message and never receive the MFA p - Check that the user has set up MFA (at [https://aka.ms/mfasetup](https://aka.ms/mfasetup) ) and is using the phone-call or app authentication method ``` -### {{interrobang}} xrdp login failure on the SRD +### {{see_no_evil}} Unable to see SRD or SSH connection options + +After logging in with Microsoft, users can't see the option to log into the SRE via the SRD or SSH options. 
+ +```{image} administrator_guide/no_recent_connections.png +:alt: Unable to see SRD or SSH connection options +:align: center +``` + +```{tip} +**Solution**: Ensure the user is added to the correct Security Group for the SRE + +- See {ref}`adding_users_manually` +``` + +### {{broken_heart}} Xorg login failure on the SRD If users can get to the login screen: @@ -204,6 +289,29 @@ there are a couple of possible causes. - File mounting configuration ``` +### {{nut_and_bolt}} Password reset failure + +When creating an account or resetting a password, the users get the following screen: + +```{image} administrator_guide/password_reset_failure.png +:alt: Password reset failure +:align: center +``` + +```{error} +**Problem**: the password could not be reset + +**Solution**: remove and re-add the password reset configuration on the DC1 + +- Log into the **SHM primary domain controller** (`DC1-SHM-`) VM using the login credentials {ref}`stored in Azure Key Vault ` +- Open a `Powershell` command window with elevated privileges +- Run `$aadConnector = Get-ADSyncConnector | ? {$_.Name -match "onmicrosoft.com - AAD"}` +- Run `Remove-ADSyncAADPasswordResetConfiguration -Connector $aadConnector.Name` +- Run `Set-ADSyncAADPasswordResetConfiguration -Connector $aadConnector.Name -Enable $true` +- Check the configuration is reset by running `Get-ADSyncAADPasswordResetConfiguration -Connector $aadConnector.Name` +- Ask the user to reset their password again +``` + ### {{cloud}} Unable to install from package mirrors If it is not possible to install packages from the package mirrors then this may be for one of the following reasons: diff --git a/docs/roles/system_manager/manage_webapps.md b/docs/source/roles/system_manager/manage_webapps.md similarity index 67% rename from docs/roles/system_manager/manage_webapps.md rename to docs/source/roles/system_manager/manage_webapps.md index 35483066d9..1869a82c24 100644 --- a/docs/roles/system_manager/manage_webapps.md +++ b/docs/source/roles/system_manager/manage_webapps.md @@ -39,18 +39,6 @@ If the reason for failure is less clear, accessing the command line interface di ## Accessing the VM console -Console access to the web app VMs can be achieved through the `Azure` portal. All VMs share the same ``, but each has its own ``, which will need to be retrieved from the `SRE` key vault before accessing the console. - -- From the `Azure` portal, navigate to the Resource Group `RG_SHM__SRE__SECRETS` -- Click on the `SRE` keyvault `kv-_SRE_` -- From the menu on the left, select `Secrets` from the `Objects` section. -- All web app VMs share the same ``, found in the `sre--vm-admin-username` secret. -- Each web app has its own ``, found in the `sre--vm-admin-password-` secret. - -Once you have the `` and ``, you will be able to log in to the VM console as follows: - -- From the `Azure` portal, navigate to the web app resource group `RG_SHM__SRE__WEBAPPS`. -- Click on the relevant VM (e.g. `COCALC-SRE-`) -- From the menu on the left, scroll down to the `Help` section and select `Serial console` -- After a short time, you will be shown the console for the VM. You may need to press a key to be shown the login prompt. -- Log in with the details you retrieved earlier to be given root access to the VM. 
+```{include} snippets/01_console.partial.md +:relative-images: +``` diff --git a/docs/roles/system_manager/migrate_an_shm.md b/docs/source/roles/system_manager/migrate_an_shm.md similarity index 100% rename from docs/roles/system_manager/migrate_an_shm.md rename to docs/source/roles/system_manager/migrate_an_shm.md diff --git a/docs/roles/system_manager/migrate_shm/aad_connection_failure.png b/docs/source/roles/system_manager/migrate_shm/aad_connection_failure.png similarity index 100% rename from docs/roles/system_manager/migrate_shm/aad_connection_failure.png rename to docs/source/roles/system_manager/migrate_shm/aad_connection_failure.png diff --git a/docs/source/roles/system_manager/snippets/01_console.partial.md b/docs/source/roles/system_manager/snippets/01_console.partial.md new file mode 100644 index 0000000000..50e5a1ac0d --- /dev/null +++ b/docs/source/roles/system_manager/snippets/01_console.partial.md @@ -0,0 +1,17 @@ +Console access to the SRE VMs, including those for each web app and the `compute` VM, can be achieved through the `Azure` portal. All VMs share the same ``, but each has its own ``, which will need to be retrieved from the `SRE` key vault before accessing the console. + +- From the `Azure` portal, navigate to the Resource Group `RG_SHM__SRE__SECRETS` +- Click on the `SRE` keyvault `kv-_SRE_` +- From the menu on the left, select `Secrets` from the `Objects` section. +- All VMs share the same ``, found in the `sre--vm-admin-username` secret. +- Each VM has its own ``, found in the `sre--vm-admin-password-` secret. + +Once you have the `` and ``, you will be able to log in to the VM console as follows: + +- From the `Azure` portal, navigate to the correct resource group: + - `RG_SHM__SRE__WEBAPPS` for the web applications + - `RG_SHM__SRE__COMPUTE` for the compute VM +- Click on the relevant VM +- From the menu on the left, scroll down to the `Help` section and select `Serial console` +- After a short time, you will be shown the console for the VM. You may need to press a key to be shown the login prompt. +- Log in with the details you retrieved earlier to be given root access to the VM. 
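Where the portal is inconvenient, the same secrets can be read with Az PowerShell; the key vault and secret names below are hypothetical examples that follow the naming pattern described above.

```powershell
# Sketch: read the VM console credentials from the SRE key vault (hypothetical names).
$vaultName = "kv-blue-sre-sandbox"
Get-AzKeyVaultSecret -VaultName $vaultName -Name "sre-sandbox-vm-admin-username" -AsPlainText
Get-AzKeyVaultSecret -VaultName $vaultName -Name "sre-sandbox-vm-admin-password-cocalc" -AsPlainText
```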
diff --git a/environment_configs/package_lists/allowlist-core-r-cran-tier3.list b/environment_configs/package_lists/allowlist-core-r-cran-tier3.list index 23977ee481..7fc527f500 100644 --- a/environment_configs/package_lists/allowlist-core-r-cran-tier3.list +++ b/environment_configs/package_lists/allowlist-core-r-cran-tier3.list @@ -1,3 +1,4 @@ +arrow BiocManager car caret diff --git a/environment_configs/package_lists/allowlist-extra-python-pypi-tier3.list b/environment_configs/package_lists/allowlist-extra-python-pypi-tier3.list new file mode 100644 index 0000000000..e69de29bb2 diff --git a/environment_configs/package_lists/allowlist-extra-r-cran-tier3.list b/environment_configs/package_lists/allowlist-extra-r-cran-tier3.list new file mode 100644 index 0000000000..e69de29bb2 diff --git a/environment_configs/package_lists/allowlist-full-python-pypi-tier3.list b/environment_configs/package_lists/allowlist-full-python-pypi-tier3.list index dde7e094eb..0afbdc7b3e 100644 --- a/environment_configs/package_lists/allowlist-full-python-pypi-tier3.list +++ b/environment_configs/package_lists/allowlist-full-python-pypi-tier3.list @@ -1,21 +1,25 @@ absl-py -aenum aero-calc aesara affine aiobotocore aiocontextvars +aiofiles aiohttp aioitertools aiosignal +aiosqlite alabaster altair +annotated-types annoy +ansi2html ansimarkup anyio apispec appdirs appnope +arabic-reshaper argcomplete argon2-cffi argon2-cffi-bindings @@ -28,7 +32,9 @@ astropy asttokens astunparse async_generator +async-lru async-timeout +asyncio asynctest atomicwrites attrs @@ -36,10 +42,12 @@ autograd autograd-gamma Automat awkward +awkward-cpp awkward0 Babel backcall backpack +backports.cached-property backports.entry-points-selectable backports.functools-lru-cache backports.lzma @@ -52,11 +60,15 @@ backports.zoneinfo bcdoc beautifulsoup4 better-exceptions-fork +biscuits black bleach +blinker blis blosc +blosc2 bokeh +boltons boto3 botocore Bottleneck @@ -64,6 +76,7 @@ bpemb branca Brotli bson +build bulwark CacheControl cached-property @@ -85,8 +98,12 @@ click-plugins cligj clikit cloudpickle +cmake +cmdstanpy colorama +comm commonmark +confection configparser confuse conllu @@ -95,37 +112,52 @@ constantly contextily contextlib2 contextvars +contoml +contourpy +convertdate +cookies crashtest crcmod cryptography +cssselect2 cycler cymem Cython +dacite dash dash-core-components dash-html-components dash-table dask dataclasses +datasets ddt debugpy decorator defusedxml +deprecat Deprecated deprecation descartes dill distlib distributed +dnspython +docopt docutils dparse dragonmapper dtw +dulwich +ecos eli5 entrypoints enum34 +ephem etuples +exceptiongroup executing +fastjsonschema fastprogress filelock Fiona @@ -137,15 +169,20 @@ Flask-Bootstrap Flask-Compress flatbuffers folium +fonttools formulaic +freetype-py frozenlist fsspec +fst-pso ftfy funcsigs functools32 +funcy future future-fstrings futures +FuzzyTM gast GDAL gdown @@ -171,22 +208,33 @@ google-resumable-media googleapis-common-protos GPy GPyOpt +graphlib-backport graphviz grpcio gym +h11 +h2 +h5netcdf h5py HeapDict +hijri-converter +holidays +hotelling +hpack html5lib htmlmin +httpcore httplib2 httpstan huggingface-hub +hyperframe hyperlink hyperopt idna idna_ssl imagecodecs imagecodecs-lite +ImageHash imageio imagesize importlib @@ -194,6 +242,7 @@ importlib-metadata importlib-resources incremental iniconfig +installer interface-meta ipaddress ipykernel @@ -203,6 +252,9 @@ ipywidgets isort itsdangerous Janome +jaraco.classes +jarowinkler +jax jedi jeepney Jinja2 @@ -211,11 
+263,19 @@ joblib js-regex json5 jsonschema +jsonschema-specifications jupyter jupyter-client jupyter-core +jupyter-events +jupyter-lsp jupyter-packaging jupyter-server +jupyter-server-fileid +jupyter-server-terminals +jupyter-server-ydoc +jupyter-telemetry +jupyter-ydoc jupyterlab jupyterlab-launcher jupyterlab-pygments @@ -228,21 +288,27 @@ Keras-Preprocessing keyring kiwisolver konoha +korean-lunar-calendar langcodes langdetect +lapsolver +lazy_loader lazy-object-proxy libclang lifelines lightgbm +lit llvmlite locket lockfile logical-unification loguru +LunarCalendar lxml lz4 mapclassify Markdown +markdown-it-py markdown2 MarkupSafe marshmallow @@ -250,12 +316,15 @@ matplotlib matplotlib-inline matrixprofile mccabe +mdit-py-plugins +mdurl memory-profiler mercantile miniKanren missingno mistune mock +modutil monotonic more-itertools mpctools @@ -277,24 +346,42 @@ nbclassic nbclient nbconvert nbformat +ndindex nest-asyncio netCDF4 networkx nltk nose notebook +notebook-shim notifiers numba numexpr numpy numpydoc +nvidia-cublas-cu11 +nvidia-cuda-cupti-cu11 +nvidia-cuda-nvrtc-cu11 +nvidia-cuda-runtime-cu11 +nvidia-cudnn-cu11 +nvidia-cufft-cu11 +nvidia-curand-cu11 +nvidia-cusolver-cu11 +nvidia-cusparse-cu11 +nvidia-nccl-cu11 +nvidia-nvtx-cu11 oauth2client oauthlib odo olefile +opencv-python opt-einsum ordereddict +orderedmultidict +oscrypto +osqp overrides +packagebuilder packaging pandas pandas-datareader @@ -325,21 +412,26 @@ pickleshare Pillow Pint pip +pip-shims pip-tools pipenv pkgconfig pkginfo +pkgutil_resolve_name plac platformdirs +plette plotly pluggy plumbum poetry poetry-core +poetry-plugin-export pooch pox ppft pprintpp +pptree preshed prettytable prometheus-client @@ -351,31 +443,44 @@ psycopg2 ptyprocess pure-eval py +py-cpuinfo py4j +pyarrow pyasn1 pyasn1-modules +pybind11 +pycairo pycodestyle pycosat pycparser pycurl pydantic +pydantic-core pydot pyerfa pyflakes +pyFUME Pygments pygrib PyHamcrest +pyHanko +pyhanko-certvalidator pyLDAvis pylev pylint pymc3 +PyMeeus pymongo pyodbc PyOpenGL pyparsing +pypdf PyPDF2 +PyPDF3 pypiwin32 +pypng pyproj +pyproject_hooks pyrsistent pysha3 pyshp @@ -383,11 +488,16 @@ pysimdjson pystan pytest pytest-pylint +python-barcode +python-bidi python-dateutil python-geohash python-gitlab +python-json-logger python-louvain +python-pkcs11 pytoml +pytorch_revgrad pytorch-pretrained-bert pytorch-transformers pytz @@ -398,20 +508,32 @@ pywin32-ctypes pywinpty PyYAML pyzmq +qdldl +qrcode +rapidfuzz rasterio +referencing regex reportlab repoze.lru repro-catalogue +requestes requests requests-file requests-ftp requests-oauthlib requests-toolbelt requests-unixsocket +requirements-parser +requirementslib +responses retrying +rfc3339-validator +rfc3986-validator rfc3987 rich +rlPyCairo +rpds-py rpy2 rsa Rtree @@ -421,6 +543,7 @@ ruamel.yaml s3fs s3transfer sacremoses +safetensors safety scandir scikit-image @@ -437,8 +560,10 @@ semver Send2Trash sentencepiece setuptools -Shapely +setuptools-git +shapely shellingham +simpful simplegeneric simplejson singledispatch @@ -458,6 +583,7 @@ spacy spacy-langdetect spacy-legacy spacy-loggers +sparsesvd Sphinx sphinxcontrib-applehelp sphinxcontrib-devhelp @@ -475,6 +601,7 @@ statsmodels stumpy subprocess32 suod +svglib sympy syntok tables @@ -493,6 +620,7 @@ tensorflow-gpu-estimator tensorflow-io-gcs-filesystem tensorflow-tensorboard termcolor +termcolor-whl terminado testpath tf-estimator-nightly @@ -503,6 +631,7 @@ threadpoolctl tifffile tika tiny-tokenizer +tinycss2 tokenize-rt tokenizers toml @@ -514,12 +643,20 @@ 
torchvision tornado tqdm traitlets +transformer-smaller-training-vocab transformers +triton +trove-classifiers tsfresh Twisted twisted-iocpsupport typed-ast +typeguard typer +types-docutils +types-PyYAML +types-setuptools +types-toml typing typing-extensions typing-utils @@ -527,15 +664,18 @@ tzdata tzlocal ujson unicodecsv +unification unittest2 uproot uproot-methods +uritools urllib3 uvloop vincent virtualenv virtualenv-clone visions +vistir wasabi wcwidth webargs @@ -551,12 +691,19 @@ woops wordcloud wrapt xarray +xarray-einstats +xattr xgboost xhtml2pdf xlrd XlsxWriter +xxhash xyzservices +y-py yarl +yaspin +ydata-profiling +ypy-websocket zict zipfile36 zipp diff --git a/environment_configs/package_lists/allowlist-full-r-cran-tier3.list b/environment_configs/package_lists/allowlist-full-r-cran-tier3.list index c5286b35f4..7e629c72b2 100644 --- a/environment_configs/package_lists/allowlist-full-r-cran-tier3.list +++ b/environment_configs/package_lists/allowlist-full-r-cran-tier3.list @@ -2,7 +2,7 @@ abind acepack actuar anytime -argon2 +arrow askpass assertive assertive.base @@ -63,6 +63,7 @@ classInt cli clipr clisymbols +clock cluster coda codetools @@ -70,6 +71,7 @@ colorspace colourpicker colourvalues commonmark +conflicted conquer covr cpp11 @@ -89,10 +91,12 @@ DBI dbplyr ddalpha debugme +decor deldir DEoptimR desc devtools +diagram DiagrammeR DiagrammeRsvg dials @@ -101,13 +105,18 @@ dichromat diffobj digest dimRed +distributional +distro doMC doParallel dotCall64 +downlit downloader dplyr DRR DT +dtplyr +duckdb dygraphs e1071 ellipsis @@ -129,20 +138,22 @@ fields filehash float fontawesome +fontBitstreamVera +fontLiberation +fontquiver forcats foreach foreign formatR Formula fs -fts furrr futile.logger futile.options future future.apply +gargle gbRd -gdalUtils gdata gdtools generics @@ -150,10 +161,12 @@ geojson geojsonio geojsonlint geojsonsf +geometries geometry geosphere gert getPass +gfonts ggforce ggmap ggplot2 @@ -170,10 +183,11 @@ glue gmodels gmp gnm +googledrive googlePolylines +googlesheets4 googleVis gower -gpclib GPfit gplots gridBase @@ -200,7 +214,10 @@ htmlwidgets httpcode httpuv httr +httr2 hunspell +ICS +ids igraph infer influenceR @@ -270,6 +287,7 @@ markdown MASS mathjaxr Matrix +MatrixExtra MatrixModels matrixStats mcmc @@ -281,6 +299,8 @@ mgcv mime miniUI minqa +mitools +mlapi mlbench MLmetrics mlr3 @@ -290,6 +310,7 @@ mnormt modeldata ModelMetrics modelr +MSSQL multcomp munsell MVA @@ -309,9 +330,9 @@ odbc openssl OpenStreetMap openxlsx -osmar packcircles packrat +palmerpenguins paradox parallelly parsnip @@ -319,12 +340,14 @@ patchwork pbdMPI pbdZMQ pbkrtest +pcaPP permute phosphoricons pillar pixiedust pkgbuild pkgconfig +pkgdown pkgload pkgmaker PKI @@ -336,6 +359,7 @@ png pointblank polyclip polycor +posterior praise prettyunits pROC @@ -345,6 +369,7 @@ prodlim profileModel profvis progress +progressr promises prophet proto @@ -366,6 +391,7 @@ R.methodsS3 R.oo R.utils R6 +ragg randomForest ranger RApiDatetime @@ -380,8 +406,11 @@ Rcpp RcppEigen RcppParallel RcppRoll +RcppTOML RCurl Rdpack +reactable +reactR readODS readr readxl @@ -390,7 +419,6 @@ registry relimp rematch rematch2 -remoter remotes repr reprex @@ -403,6 +431,7 @@ rgeos rgexf rgl RgoogleMaps +RhpcBLASctl rio rJava rjson @@ -417,14 +446,17 @@ RMySQL RNetCDF rngtools robustbase +robustX ROCR roxygen2 rpart RPostgres RPostgreSQL rprojroot +rrcov rsample rsconnect +rsparse RSpectra RSQLite rstan @@ -450,6 +482,7 @@ sfheaders sfsmisc shape shiny +shinybusy shinyjs shinystan shinythemes @@ 
-478,15 +511,18 @@ stopwords stringi stringr styler +survey survival svglite svgPanZoom swagger sys systemfonts +tensorA terra testthat text2vec +textshaping TH.data threejs tibble @@ -499,12 +535,14 @@ tidytext tidyverse tidyxl tikzDevice +timechange timeDate timeSeries tinytex tis tmap tmaptools +tmvnsim tokenizers triebeard tsbox @@ -517,6 +555,7 @@ tzdb udunits2 units univariateML +urlchecker urltools usethis utf8 @@ -543,6 +582,7 @@ wk wordcloud workflows workflowsets +writexl xfun xgboost XLConnect diff --git a/environment_configs/sre_bluet1guac_core_config.json b/environment_configs/sre_bluet1guac_core_config.json index ca4440d3e3..365ae14a8c 100644 --- a/environment_configs/sre_bluet1guac_core_config.json +++ b/environment_configs/sre_bluet1guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_bluet2guac_core_config.json b/environment_configs/sre_bluet2guac_core_config.json index 5608c6f732..15d96d2cda 100644 --- a/environment_configs/sre_bluet2guac_core_config.json +++ b/environment_configs/sre_bluet2guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_bluet2msrds_core_config.json b/environment_configs/sre_bluet2msrds_core_config.json index 96f7ee82f9..f5174dc929 100644 --- a/environment_configs/sre_bluet2msrds_core_config.json +++ b/environment_configs/sre_bluet2msrds_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "MicrosoftRDS", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_bluet3guac_core_config.json b/environment_configs/sre_bluet3guac_core_config.json index 7a772e6545..eff5d1e24c 100644 --- a/environment_configs/sre_bluet3guac_core_config.json +++ b/environment_configs/sre_bluet3guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.240"], diff --git a/environment_configs/sre_bluet3msrds_core_config.json b/environment_configs/sre_bluet3msrds_core_config.json index 12f75d668b..56381578db 100644 --- a/environment_configs/sre_bluet3msrds_core_config.json +++ b/environment_configs/sre_bluet3msrds_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "MicrosoftRDS", "dataAdminIpAddresses": ["193.60.220.240"], diff --git a/environment_configs/sre_greent1guac_core_config.json b/environment_configs/sre_greent1guac_core_config.json index 21939b6dc8..10cad425c3 100644 --- a/environment_configs/sre_greent1guac_core_config.json +++ b/environment_configs/sre_greent1guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": 
"ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_greent2guac_core_config.json b/environment_configs/sre_greent2guac_core_config.json index 9a42779859..091a74e605 100644 --- a/environment_configs/sre_greent2guac_core_config.json +++ b/environment_configs/sre_greent2guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_greent2msrds_core_config.json b/environment_configs/sre_greent2msrds_core_config.json index be21cf6422..b5ae22cd47 100644 --- a/environment_configs/sre_greent2msrds_core_config.json +++ b/environment_configs/sre_greent2msrds_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "MicrosoftRDS", "dataAdminIpAddresses": ["193.60.220.253"], diff --git a/environment_configs/sre_greent3guac_core_config.json b/environment_configs/sre_greent3guac_core_config.json index 8cb8371adf..a3c0c4c57e 100644 --- a/environment_configs/sre_greent3guac_core_config.json +++ b/environment_configs/sre_greent3guac_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "ApacheGuacamole", "dataAdminIpAddresses": ["193.60.220.240"], diff --git a/environment_configs/sre_greent3msrds_core_config.json b/environment_configs/sre_greent3msrds_core_config.json index 688b02b044..118e24da48 100644 --- a/environment_configs/sre_greent3msrds_core_config.json +++ b/environment_configs/sre_greent3msrds_core_config.json @@ -8,7 +8,7 @@ "outboundInternetAccess": "default", "computeVmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "remoteDesktopProvider": "MicrosoftRDS", "dataAdminIpAddresses": ["193.60.220.240"], diff --git a/tests/resources/shm_blue_full_config.json b/tests/resources/shm_blue_full_config.json index ddc78b8f59..7172383965 100644 --- a/tests/resources/shm_blue_full_config.json +++ b/tests/resources/shm_blue_full_config.json @@ -150,6 +150,13 @@ "217.196.149.55", "91.189.91.38", "91.189.91.39", + "91.189.91.48", + "91.189.91.49", + "91.189.91.81", + "91.189.91.82", + "91.189.91.83", + "185.125.190.17", + "185.125.190.18", "185.125.190.36", "185.125.190.39", "103.21.244.0/22", diff --git a/tests/resources/shm_green_full_config.json b/tests/resources/shm_green_full_config.json index d47cd3edb8..65ba0efadf 100644 --- a/tests/resources/shm_green_full_config.json +++ b/tests/resources/shm_green_full_config.json @@ -150,6 +150,13 @@ "217.196.149.55", "91.189.91.38", "91.189.91.39", + "91.189.91.48", + "91.189.91.49", + "91.189.91.81", + "91.189.91.82", + "91.189.91.83", + "185.125.190.17", + "185.125.190.18", "185.125.190.36", "185.125.190.39", "103.21.244.0/22", diff --git a/tests/resources/sre_bluet1guac_full_config.json b/tests/resources/sre_bluet1guac_full_config.json index 43782149fc..61e843d6b0 100644 --- a/tests/resources/sre_bluet1guac_full_config.json +++ b/tests/resources/sre_bluet1guac_full_config.json @@ -151,6 +151,13 @@ "217.196.149.55", "91.189.91.38", "91.189.91.39", + "91.189.91.48", + "91.189.91.49", + "91.189.91.81", + "91.189.91.82", + "91.189.91.83", + 
"185.125.190.17", + "185.125.190.18", "185.125.190.36", "185.125.190.39", "103.21.244.0/22", @@ -1111,15 +1118,8 @@ "instances": [ { "adminPasswordSecretName": "sre-t1guac-vm-admin-password-mssql", - "dbAdminUsernameSecretName": "sre-t1guac-db-admin-username-mssql", "dbAdminPasswordSecretName": "sre-t1guac-db-admin-password-mssql", - "vmName": "MSSQL-T1GUAC", - "type": "MSSQL", - "ip": "10.151.3.4", - "port": "1433", - "sku": "sqldev-gen2", - "subnet": "databases", - "vmSize": "Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t1guac-db-admin-username-mssql", "disks": { "data": { "sizeGb": "1024", @@ -1130,19 +1130,19 @@ "type": "Standard_LRS" } }, - "enableSSIS": true + "enableSSIS": true, + "ip": "10.151.3.4", + "port": "1433", + "sku": "sqldev-gen2", + "subnet": "databases", + "type": "MSSQL", + "vmName": "MSSQL-T1GUAC", + "vmSize": "Standard_DS2_v2" }, { "adminPasswordSecretName": "sre-t1guac-vm-admin-password-postgresql", - "dbAdminUsernameSecretName": "sre-t1guac-db-admin-username-postgresql", "dbAdminPasswordSecretName": "sre-t1guac-db-admin-password-postgresql", - "vmName": "PSTGRS-T1GUAC", - "type": "PostgreSQL", - "ip": "10.151.3.5", - "port": "5432", - "sku": "Ubuntu-latest", - "subnet": "databases", - "vmSize": "Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t1guac-db-admin-username-postgresql", "disks": { "data": { "sizeGb": "1024", @@ -1152,7 +1152,14 @@ "sizeGb": "128", "type": "Standard_LRS" } - } + }, + "ip": "10.151.3.5", + "port": "5432", + "sku": "Ubuntu-latest", + "subnet": "databases", + "type": "PostgreSQL", + "vmName": "PSTGRS-T1GUAC", + "vmSize": "Standard_DS2_v2" } ], "rg": "RG_SHM_BLUE_SRE_T1GUAC_DATABASES" @@ -1302,7 +1309,7 @@ "rg": "RG_SHM_BLUE_SRE_T1GUAC_COMPUTE", "vmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "vmSizeDefault": "Standard_D2s_v3" }, diff --git a/tests/resources/sre_bluet3msrds_full_config.json b/tests/resources/sre_bluet3msrds_full_config.json index b357d2e3ef..44c1673fb3 100644 --- a/tests/resources/sre_bluet3msrds_full_config.json +++ b/tests/resources/sre_bluet3msrds_full_config.json @@ -151,6 +151,13 @@ "217.196.149.55", "91.189.91.38", "91.189.91.39", + "91.189.91.48", + "91.189.91.49", + "91.189.91.81", + "91.189.91.82", + "91.189.91.83", + "185.125.190.17", + "185.125.190.18", "185.125.190.36", "185.125.190.39", "103.21.244.0/22", @@ -1111,15 +1118,8 @@ "instances": [ { "adminPasswordSecretName": "sre-t3msrds-vm-admin-password-mssql", - "dbAdminUsernameSecretName": "sre-t3msrds-db-admin-username-mssql", "dbAdminPasswordSecretName": "sre-t3msrds-db-admin-password-mssql", - "vmName": "MSSQL-T3MSRDS", - "type": "MSSQL", - "ip": "10.163.3.4", - "port": "1433", - "sku": "sqldev-gen2", - "subnet": "databases", - "vmSize": "Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t3msrds-db-admin-username-mssql", "disks": { "data": { "sizeGb": "1024", @@ -1130,19 +1130,19 @@ "type": "Standard_LRS" } }, - "enableSSIS": true + "enableSSIS": true, + "ip": "10.163.3.4", + "port": "1433", + "sku": "sqldev-gen2", + "subnet": "databases", + "type": "MSSQL", + "vmName": "MSSQL-T3MSRDS", + "vmSize": "Standard_DS2_v2" }, { "adminPasswordSecretName": "sre-t3msrds-vm-admin-password-postgresql", - "dbAdminUsernameSecretName": "sre-t3msrds-db-admin-username-postgresql", "dbAdminPasswordSecretName": "sre-t3msrds-db-admin-password-postgresql", - "vmName": "PSTGRS-T3MSRDS", - "type": "PostgreSQL", - "ip": "10.163.3.5", - "port": "5432", - "sku": "Ubuntu-latest", - "subnet": "databases", - "vmSize": 
"Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t3msrds-db-admin-username-postgresql", "disks": { "data": { "sizeGb": "1024", @@ -1152,7 +1152,14 @@ "sizeGb": "128", "type": "Standard_LRS" } - } + }, + "ip": "10.163.3.5", + "port": "5432", + "sku": "Ubuntu-latest", + "subnet": "databases", + "type": "PostgreSQL", + "vmName": "PSTGRS-T3MSRDS", + "vmSize": "Standard_DS2_v2" } ], "rg": "RG_SHM_BLUE_SRE_T3MSRDS_DATABASES" @@ -1324,7 +1331,7 @@ "rg": "RG_SHM_BLUE_SRE_T3MSRDS_COMPUTE", "vmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "vmSizeDefault": "Standard_D2s_v3" }, diff --git a/tests/resources/sre_greent2guac_full_config.json b/tests/resources/sre_greent2guac_full_config.json index 33a2796de9..73ca82eae5 100644 --- a/tests/resources/sre_greent2guac_full_config.json +++ b/tests/resources/sre_greent2guac_full_config.json @@ -151,6 +151,13 @@ "217.196.149.55", "91.189.91.38", "91.189.91.39", + "91.189.91.48", + "91.189.91.49", + "91.189.91.81", + "91.189.91.82", + "91.189.91.83", + "185.125.190.17", + "185.125.190.18", "185.125.190.36", "185.125.190.39", "103.21.244.0/22", @@ -1164,15 +1171,8 @@ "instances": [ { "adminPasswordSecretName": "sre-t2guac-vm-admin-password-mssql", - "dbAdminUsernameSecretName": "sre-t2guac-db-admin-username-mssql", "dbAdminPasswordSecretName": "sre-t2guac-db-admin-password-mssql", - "vmName": "MSSQL-T2GUAC", - "type": "MSSQL", - "ip": "10.152.3.4", - "port": "1433", - "sku": "sqldev-gen2", - "subnet": "databases", - "vmSize": "Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t2guac-db-admin-username-mssql", "disks": { "data": { "sizeGb": "1024", @@ -1183,19 +1183,19 @@ "type": "Standard_LRS" } }, - "enableSSIS": true + "enableSSIS": true, + "ip": "10.152.3.4", + "port": "1433", + "sku": "sqldev-gen2", + "subnet": "databases", + "type": "MSSQL", + "vmName": "MSSQL-T2GUAC", + "vmSize": "Standard_DS2_v2" }, { "adminPasswordSecretName": "sre-t2guac-vm-admin-password-postgresql", - "dbAdminUsernameSecretName": "sre-t2guac-db-admin-username-postgresql", "dbAdminPasswordSecretName": "sre-t2guac-db-admin-password-postgresql", - "vmName": "PSTGRS-T2GUAC", - "type": "PostgreSQL", - "ip": "10.152.3.5", - "port": "5432", - "sku": "Ubuntu-latest", - "subnet": "databases", - "vmSize": "Standard_DS2_v2", + "dbAdminUsernameSecretName": "sre-t2guac-db-admin-username-postgresql", "disks": { "data": { "sizeGb": "1024", @@ -1205,7 +1205,14 @@ "sizeGb": "128", "type": "Standard_LRS" } - } + }, + "ip": "10.152.3.5", + "port": "5432", + "sku": "Ubuntu-latest", + "subnet": "databases", + "type": "PostgreSQL", + "vmName": "PSTGRS-T2GUAC", + "vmSize": "Standard_DS2_v2" } ], "rg": "RG_SHM_GREEN_SRE_T2GUAC_DATABASES" @@ -1355,7 +1362,7 @@ "rg": "RG_SHM_GREEN_SRE_T2GUAC_COMPUTE", "vmImage": { "type": "Ubuntu", - "version": "20.04.2022081900" + "version": "20.04.2023082900" }, "vmSizeDefault": "Standard_D2s_v3" }, diff --git a/tests/srd_smoke_tests/test_packages_installed_python.py b/tests/srd_smoke_tests/test_packages_installed_python.py index a01a2c44c1..d91d3238f7 100644 --- a/tests/srd_smoke_tests/test_packages_installed_python.py +++ b/tests/srd_smoke_tests/test_packages_installed_python.py @@ -4,6 +4,7 @@ import subprocess import sys import warnings + import pkg_resources versions = { @@ -22,7 +23,10 @@ ] # For these packages we check for an executable as they are not importable -NON_IMPORTABLE_PACKAGES = {"repro-catalogue": "catalogue"} +NON_IMPORTABLE_PACKAGES = { + "pip-tools": "pip-compile", + "repro-catalogue": 
"catalogue", +} # Some packages are imported using a different name than they `pip install` with IMPORTABLE_NAMES = { diff --git a/tests/srd_smoke_tests/test_repository_R.mustache.sh b/tests/srd_smoke_tests/test_repository_R.mustache.sh index 2d9d749d7b..49636d6276 100644 --- a/tests/srd_smoke_tests/test_repository_R.mustache.sh +++ b/tests/srd_smoke_tests/test_repository_R.mustache.sh @@ -3,7 +3,7 @@ # - *not* pre-installed # - on the tier-3 list (so we can test all tiers) # - alphabetically early and late (so we can test the progress of the mirror synchronisation) -packages=("argon2" "zeallot") +packages=("askpass" "zeallot") uninstallable_packages=("aws.s3") # Create a temporary library directory