diff --git a/.codespellrc b/.codespellrc
index 13c05a80..433ea51a 100644
--- a/.codespellrc
+++ b/.codespellrc
@@ -1,5 +1,5 @@
[codespell]
-skip = .git,*.pdf,*.svg,pnpm-lock.yaml,yarn.lock
+skip = .git,package-lock.json,node_modules
# some modules, parts of regexes, and variable names to ignore, some
# misspellings in fixtures/external responses we do not own
-ignore-words-list = caf,bu,nwo,nd,kernal,crate,unparseable,couldn,defintions
+ignore-words-list = NWO,kernal
diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 608e342b..071b4ee5 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"dotnet-ef": {
- "version": "8.0.7",
+ "version": "8.0.8",
"commands": [
"dotnet-ef"
]
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index f79572bf..4703c74f 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -25,8 +25,9 @@ Steps to reproduce the behavior:
**Expected behavior**
A clear and concise description of what you expected to happen.
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
+**Logs and screenshots**
+If applicable, include relevant logs or screenshots to help explain your problem.
+See [extension troubleshooting](https://github.com/tinglesoftware/dependabot-azure-devops/blob/main/docs/extension.md#troubleshooting-issues) for more on how to collect additional diagnostic logs.
**Extension (please complete the following information):**
- Host: [e.g. Azure DevOps, Azure DevOps Server]
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 1e614505..c774caee 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -5,46 +5,42 @@
version: 2
updates:
- - package-ecosystem: "github-actions" # See documentation for possible values
- directory: "/" # Location of package manifests
+ - package-ecosystem: 'github-actions'
+ directory: '/'
schedule:
- interval: "weekly"
- time: "02:00"
- open-pull-requests-limit: 10
+ interval: 'weekly'
+ time: '02:00'
- - package-ecosystem: "devcontainers" # See documentation for possible values
- directory: "/" # Location of package manifests
+ - package-ecosystem: 'devcontainers'
+ directory: '/'
schedule:
- interval: "weekly"
- time: "02:00"
- open-pull-requests-limit: 10
+ interval: 'weekly'
+ time: '02:00'
- - package-ecosystem: "bundler" # See documentation for possible values
- directory: "/updater" # Location of package manifests
+ - package-ecosystem: 'bundler'
+ directory: '/updater'
schedule:
- interval: "weekly"
- time: "02:00"
- open-pull-requests-limit: 10
+ interval: 'weekly'
+ time: '02:00'
groups:
opentelemetry:
- patterns: ["opentelemetry-*"]
+ patterns: ['opentelemetry-*']
rubocop:
- patterns: ["*rubocop*"]
+ patterns: ['*rubocop*']
sentry:
- patterns: ["sentry-*"]
+ patterns: ['sentry-*']
- - package-ecosystem: "docker" # See documentation for possible values
- directory: "/updater" # Location of package manifests
+ - package-ecosystem: 'docker'
+ directories: ['**/*']
schedule:
- interval: "weekly"
- time: "02:00"
- open-pull-requests-limit: 10
+ interval: 'weekly'
+ time: '02:00'
- - package-ecosystem: "nuget" # See documentation for possible values
- directory: "/" # Location of package manifests
+ - package-ecosystem: 'nuget'
+ directory: '/'
schedule:
- interval: "weekly"
- time: "02:00"
+ interval: 'weekly'
+ time: '02:00'
open-pull-requests-limit: 10
groups:
azure:
@@ -53,32 +49,35 @@ updates:
- 'Microsoft.Azure.*'
- 'Microsoft.Extensions.Configuration.AzureAppConfiguration'
event-bus:
- patterns: ["Tingle.EventBus*"]
+ patterns: ['Tingle.EventBus*']
microsoft:
- patterns: ["Microsoft*"]
+ patterns:
+ - 'Microsoft.AspNetCore.*'
+ - 'Microsoft.EntityFrameworkCore.*'
+ - 'Microsoft.Extensions.*'
+ - 'dotnet-ef'
exclude-patterns:
- 'Microsoft.Azure.*'
- 'Microsoft.Extensions.Configuration.AzureAppConfiguration'
- - "Microsoft.VisualStudio.Azure.Containers.Tools.Targets"
+ - 'Microsoft.VisualStudio.Azure.Containers.Tools.Targets'
system:
- patterns: ["System*"]
+ patterns: ['System*']
tingle:
patterns:
- - "Tingle.AspNetCore*"
- - "Tingle.Extensions*"
+ - 'Tingle.AspNetCore*'
+ - 'Tingle.Extensions*'
xunit:
- patterns: ["Xunit*"]
+ patterns: ['Xunit*']
- - package-ecosystem: "npm" # See documentation for possible values
- directory: "/extension" # Location of package manifests
+ - package-ecosystem: 'npm'
+ directories: ['/', '/extension']
schedule:
- interval: "weekly"
- time: "02:00"
- open-pull-requests-limit: 10
+ interval: 'weekly'
+ time: '02:00'
groups:
jest:
- patterns: ["*jest*"]
+ patterns: ['*jest*']
js-yaml:
- patterns: ["*js-yaml*"]
+ patterns: ['*js-yaml*']
js-ts-types:
- patterns: ["@types/*"]
+ patterns: ['@types/*']
diff --git a/.github/workflows/cleanup.yml b/.github/workflows/cleanup.yml
index 49e810c4..8a8864f9 100644
--- a/.github/workflows/cleanup.yml
+++ b/.github/workflows/cleanup.yml
@@ -3,7 +3,7 @@ name: Cleanup
on:
workflow_dispatch:
schedule:
- - cron: '0 0 */14 * *' # every 14 days
+ - cron: '0 0 */14 * *' # every 14 days
jobs:
cleanup-updater:
@@ -12,44 +12,44 @@ jobs:
fail-fast: true
matrix:
suite:
- - { ecosystem: bundler }
- - { ecosystem: cargo }
- - { ecosystem: composer }
- - { ecosystem: docker }
- - { ecosystem: elm }
- - { ecosystem: gitsubmodule }
- - { ecosystem: github-actions }
- - { ecosystem: gomod }
- - { ecosystem: gradle }
- - { ecosystem: mix }
- - { ecosystem: maven }
- - { ecosystem: npm }
- - { ecosystem: nuget }
- - { ecosystem: pub }
- - { ecosystem: pip }
- - { ecosystem: swift }
- - { ecosystem: devcontainers }
- - { ecosystem: terraform }
+ - { ecosystem: bundler }
+ - { ecosystem: cargo }
+ - { ecosystem: composer }
+ - { ecosystem: docker }
+ - { ecosystem: elm }
+ - { ecosystem: gitsubmodule }
+ - { ecosystem: github-actions }
+ - { ecosystem: gomod }
+ - { ecosystem: gradle }
+ - { ecosystem: mix }
+ - { ecosystem: maven }
+ - { ecosystem: npm }
+ - { ecosystem: nuget }
+ - { ecosystem: pub }
+ - { ecosystem: pip }
+ - { ecosystem: swift }
+ - { ecosystem: devcontainers }
+ - { ecosystem: terraform }
steps:
- - name: Delete old dependabot-updater-${{ matrix.suite.ecosystem }} images
- uses: actions/delete-package-versions@v5
- with:
- package-name: 'dependabot-updater-${{ matrix.suite.ecosystem }}'
- package-type: 'container'
- min-versions-to-keep: 20
- # exclude major and minor versions as per SemVer e.g. 1.20
- ignore-versions: '^([0-9]\\d*)\\.([0-9]\\d*)$'
+ - name: Delete old dependabot-updater-${{ matrix.suite.ecosystem }} images
+ uses: actions/delete-package-versions@v5
+ with:
+ package-name: 'dependabot-updater-${{ matrix.suite.ecosystem }}'
+ package-type: 'container'
+ min-versions-to-keep: 20
+ # exclude major and minor versions as per SemVer e.g. 1.20
+ ignore-versions: '^([0-9]\\d*)\\.([0-9]\\d*)$'
cleanup-server:
runs-on: ubuntu-latest
steps:
- - name: Delete old dependabot-server images
- uses: actions/delete-package-versions@v5
- with:
- package-name: 'dependabot-server'
- package-type: 'container'
- min-versions-to-keep: 20
- # exclude major and minor versions as per SemVer e.g. 1.20
- ignore-versions: '^([0-9]\\d*)\\.([0-9]\\d*)$'
+ - name: Delete old dependabot-server images
+ uses: actions/delete-package-versions@v5
+ with:
+ package-name: 'dependabot-server'
+ package-type: 'container'
+ min-versions-to-keep: 20
+ # exclude major and minor versions as per SemVer e.g. 1.20
+ ignore-versions: '^([0-9]\\d*)\\.([0-9]\\d*)$'
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
index df18e8f6..b80fef70 100644
--- a/.github/workflows/codespell.yml
+++ b/.github/workflows/codespell.yml
@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - name: Checkout
- uses: actions/checkout@v4
- - name: Codespell
- uses: codespell-project/actions-codespell@v2
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Codespell
+ uses: codespell-project/actions-codespell@v2
diff --git a/.github/workflows/extension.yml b/.github/workflows/extension.yml
index f0b90498..325ca71d 100644
--- a/.github/workflows/extension.yml
+++ b/.github/workflows/extension.yml
@@ -3,23 +3,23 @@ name: Extension
on:
push:
branches:
- - main
+ - main
tags:
- - '*'
+ - '*'
paths:
- - "extension/**"
- - ".github/workflows/extension.yml"
- - "!extension/README.md"
- - "!docs/**"
+ - 'extension/**'
+ - '.github/workflows/extension.yml'
+ - '!extension/README.md'
+ - '!docs/**'
pull_request:
branches:
- # Only trigger for PRs against `main` branch.
- - main
+ # Only trigger for PRs against `main` branch.
+ - main
paths:
- - "extension/**"
- - ".github/workflows/extension.yml"
- - "!extension/README.md"
- - "!docs/**"
+ - 'extension/**'
+ - '.github/workflows/extension.yml'
+ - '!extension/README.md'
+ - '!docs/**'
jobs:
Build:
@@ -30,128 +30,134 @@ jobs:
cancel-in-progress: true
steps:
- - name: Checkout
- uses: actions/checkout@v4
- with:
- fetch-depth: 0 # Required for GitVersion
-
- - name: Install GitVersion
- uses: gittools/actions/gitversion/setup@v3.0.0
- with:
- versionSpec: '6.x'
-
- - name: Determine Version
- id: gitversion
- uses: gittools/actions/gitversion/execute@v3.0.0
- with:
- useConfigFile: true
-
- - name: Use Node.js 18.x
- uses: actions/setup-node@v4
- with:
- node-version: '18.x'
-
- - name: Install
- run: npm install
- working-directory: '${{ github.workspace }}/extension'
-
- - name: Test
- run: npm run test
- working-directory: '${{ github.workspace }}/extension'
-
- - name: Build
- run: npm run build:prod
- working-directory: '${{ github.workspace }}/extension'
-
- - name: Move node_modules
- run: npm run mv:prod
- working-directory: '${{ github.workspace }}/extension'
-
- - name: Install tfx-cli
- run: npm install -g tfx-cli@0.12.0
-
- - name: Replace tokens
- uses: cschleiden/replace-tokens@v1
- with:
- files: '["${{ github.workspace }}/extension/overrides*.json"]'
- env:
- MAJOR_MINOR_PATCH: ${{ steps.gitversion.outputs.majorMinorPatch }}
- BUILD_NUMBER: ${{ github.run_number }}
-
- - name: Update values in extension/task/task.json
- run: |
- echo "`jq '.version.Major=${{ steps.gitversion.outputs.major }}' extension/task/task.json`" > extension/task/task.json
- echo "`jq '.version.Minor=${{ steps.gitversion.outputs.minor }}' extension/task/task.json`" > extension/task/task.json
- echo "`jq '.version.Patch=${{ github.run_number }}' extension/task/task.json`" > extension/task/task.json
-
- - name: Create Extension (dev)
- run: >
- tfx extension create
- --root extension
- --manifest-globs vss-extension.json
- --output-path ${{ github.workspace }}/drop/dev
- --json5
- --overrides-file ${{ github.workspace }}/extension/overrides.dev.json
-
- - name: Create Extension (prod)
- run: >
- tfx extension create
- --root extension
- --manifest-globs vss-extension.json
- --output-path ${{ github.workspace }}/drop/prod
- --json5
- --overrides-file ${{ github.workspace }}/extension/overrides.prod.json
-
- - name: Upload Artifact (drop)
- uses: actions/upload-artifact@v4
- with:
- path: ${{ github.workspace }}/drop/*
- name: drop
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Required for GitVersion
+
+ - name: Install GitVersion
+ uses: gittools/actions/gitversion/setup@v3.0.0
+ with:
+ versionSpec: '6.x'
+
+ - name: Determine Version
+ id: gitversion
+ uses: gittools/actions/gitversion/execute@v3.0.0
+ with:
+ useConfigFile: true
+
+ - name: Use Node.js 18.x
+ uses: actions/setup-node@v4
+ with:
+ node-version: '18.x'
+ cache: npm
+
+ - name: Install (root)
+ run: npm install
+
+ - name: Format check (root)
+ run: npm run format:check
+
+ - name: Install
+ run: npm install
+ working-directory: '${{ github.workspace }}/extension'
+
+ - name: Test
+ run: npm run test
+ working-directory: '${{ github.workspace }}/extension'
+
+ - name: Build
+ run: npm run build
+ working-directory: '${{ github.workspace }}/extension'
+
+ - name: Install tfx-cli
+ run: npm install -g tfx-cli@0.12.0
+
+ - name: Update version numbers in vss-extension.json overrides files
+ uses: cschleiden/replace-tokens@v1
+ with:
+ files: '["${{ github.workspace }}/extension/overrides*.json"]'
+ env:
+ MAJOR_MINOR_PATCH: ${{ steps.gitversion.outputs.majorMinorPatch }}
+ BUILD_NUMBER: ${{ github.run_number }}
+
+ - name: Update minor and patch versions in task.json
+ run: |
+ # Major versions are left out here because they are fixed in the task.json files.
+ # The minor and patch versions are updated to make sure we keep recent changes deployed.
+ echo "`jq '.version.Minor=34' extension/tasks/dependabotV1/task.json`" > extension/tasks/dependabotV1/task.json
+ echo "`jq '.version.Patch=${{ github.run_number }}' extension/tasks/dependabotV1/task.json`" > extension/tasks/dependabotV1/task.json
+ echo "`jq '.version.Minor=${{ steps.gitversion.outputs.minor }}' extension/tasks/dependabotV2/task.json`" > extension/tasks/dependabotV2/task.json
+ echo "`jq '.version.Patch=${{ github.run_number }}' extension/tasks/dependabotV2/task.json`" > extension/tasks/dependabotV2/task.json
+
+ - name: Create Extension (dev)
+ run: >
+ tfx extension create
+ --root extension
+ --manifest-globs vss-extension.json
+ --output-path ${{ github.workspace }}/drop/dev
+ --json5
+ --overrides-file ${{ github.workspace }}/extension/overrides.dev.json
+
+ - name: Create Extension (prod)
+ run: >
+ tfx extension create
+ --root extension
+ --manifest-globs vss-extension.json
+ --output-path ${{ github.workspace }}/drop/prod
+ --json5
+ --overrides-file ${{ github.workspace }}/extension/overrides.prod.json
+
+ - name: Upload Artifact (drop)
+ uses: actions/upload-artifact@v4
+ with:
+ path: ${{ github.workspace }}/drop/*
+ name: drop
Publish:
runs-on: ubuntu-latest
- needs: [ Build ]
+ needs: [Build]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false # wait for previous runs to complete
steps:
- - name: Download Artifact
- uses: actions/download-artifact@v4
- with:
- name: drop
-
- - name: Upload Release
- if: startsWith(github.ref, 'refs/tags/')
- uses: ncipollo/release-action@v1
- with:
- artifacts: '${{ github.workspace }}/prod/*.vsix,${{ github.workspace }}/dev/*.vsix'
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: true
- allowUpdates: true
-
- - name: Use Node.js 18.x
- uses: actions/setup-node@v4
- with:
- node-version: '18.x'
-
- - name: Install tfx-cli
- run: npm install -g tfx-cli@0.12.0
-
- - name: Publish to marketplace (dev)
- if: github.ref == 'refs/heads/main'
- run: >
- tfx extension publish
- --vsix ${{ github.workspace }}/dev/*.vsix
- --auth-type pat
- --token ${{ secrets.AZURE_DEVOPS_EXTENSION_TOKEN }}
- --share-with tingle
-
- - name: Publish to marketplace (prod)
- if: startsWith(github.ref, 'refs/tags/')
- run: >
- tfx extension publish
- --vsix ${{ github.workspace }}/prod/*.vsix
- --auth-type pat
- --token ${{ secrets.AZURE_DEVOPS_EXTENSION_TOKEN }}
+ - name: Download Artifact
+ uses: actions/download-artifact@v4
+ with:
+ name: drop
+
+ - name: Upload Release
+ if: startsWith(github.ref, 'refs/tags/')
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: '${{ github.workspace }}/prod/*.vsix,${{ github.workspace }}/dev/*.vsix'
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: true
+ allowUpdates: true
+
+ - name: Use Node.js 18.x
+ uses: actions/setup-node@v4
+ with:
+ node-version: '18.x'
+
+ - name: Install tfx-cli
+ run: npm install -g tfx-cli@0.12.0
+
+ - name: Publish to marketplace (dev)
+ if: github.ref == 'refs/heads/main'
+ run: >
+ tfx extension publish
+ --vsix ${{ github.workspace }}/dev/*.vsix
+ --auth-type pat
+ --token ${{ secrets.AZURE_DEVOPS_EXTENSION_TOKEN }}
+ --share-with tingle
+
+ - name: Publish to marketplace (prod)
+ if: startsWith(github.ref, 'refs/tags/')
+ run: >
+ tfx extension publish
+ --vsix ${{ github.workspace }}/prod/*.vsix
+ --auth-type pat
+ --token ${{ secrets.AZURE_DEVOPS_EXTENSION_TOKEN }}
diff --git a/.github/workflows/server.yml b/.github/workflows/server.yml
index 41c518f1..d5539ecd 100644
--- a/.github/workflows/server.yml
+++ b/.github/workflows/server.yml
@@ -4,18 +4,18 @@ on:
workflow_dispatch:
push:
branches:
- - main
+ - main
tags:
- - '*'
+ - '*'
# No path filters ensures we always have a docker image matching the latest commit on main
pull_request:
branches:
- # Only trigger for PRs against `main` branch.
- - main
+ # Only trigger for PRs against `main` branch.
+ - main
paths:
- - "server/**"
- - ".github/workflows/server.yml"
- - "!docs/**"
+ - 'server/**'
+ - '.github/workflows/server.yml'
+ - '!docs/**'
env:
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
@@ -34,130 +34,132 @@ jobs:
cancel-in-progress: true
steps:
- - name: Checkout
- uses: actions/checkout@v4
- with:
- fetch-depth: 0 # Required for GitVersion
-
- - name: Install GitVersion
- uses: gittools/actions/gitversion/setup@v3.0.0
- with:
- versionSpec: '6.x'
-
- - name: Determine Version
- uses: gittools/actions/gitversion/execute@v3.0.0
- id: gitversion
- with:
- useConfigFile: true
-
- - name: Setup .NET SDK
- uses: actions/setup-dotnet@v4
- with:
- dotnet-version: '8.x'
-
- - name: Test
- run: dotnet test -c Release --collect "Code coverage"
-
- - name: Publish
- run: |
- dotnet publish \
- ${{ github.workspace }}/server/Tingle.Dependabot/Tingle.Dependabot.csproj \
- -c Release \
- -o ${{ github.workspace }}/drop/Tingle.Dependabot
-
- - name: Replace tokens
- uses: cschleiden/replace-tokens@v1
- with:
- files: '["${{ github.workspace }}/server/main.bicep"]'
- env:
- IMAGE_TAG: ${{ steps.gitversion.outputs.fullSemVer }}
-
- - name: Build bicep file
- uses: azure/cli@v2
- with:
- inlineScript: |
- cp ${{ github.workspace }}/server/main.bicep ${{ github.workspace }}/drop/main.bicep && \
- az bicep build --file server/main.bicep --outfile ${{ github.workspace }}/drop/main.json
-
- - name: Upload Artifact (drop)
- uses: actions/upload-artifact@v4
- with:
- path: ${{ github.workspace }}/drop/*
- name: drop
-
- - name: Create deploy folder
- run: |
- mkdir -p deploy
- cp ${{ github.workspace }}/server/main.bicep ${{ github.workspace }}/deploy/main.bicep
- cp ${{ github.workspace }}/server/main.parameters.json ${{ github.workspace }}/deploy/main.parameters.json
-
- - name: Replace tokens in deploy folder
- uses: cschleiden/replace-tokens@v1
- with:
- files: '["${{ github.workspace }}/deploy/main.parameters.json"]'
- env:
- DOCKER_IMAGE_TAG: ${{ steps.gitversion.outputs.shortSha }}
- DEPENDABOT_PROJECT_TOKEN: ${{ secrets.DEPENDABOT_PROJECT_TOKEN }}
- DEPENDABOT_GITHUB_TOKEN: ${{ secrets.DEPENDABOT_GITHUB_TOKEN }}
-
- - name: Upload Artifact (deploy)
- uses: actions/upload-artifact@v4
- with:
- path: deploy
- name: deploy
- retention-days: 1
-
- - name: Pull Docker base image & warm Docker cache
- run: docker pull "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
-
- - name: Build image
- run: |
- docker build \
- -f server/Tingle.Dependabot/Dockerfile.CI \
- --label com.github.image.run.id=${{ github.run_id }} \
- --label com.github.image.run.number=${{ github.run_number }} \
- --label com.github.image.job.id=${{ github.job }} \
- --label com.github.image.source.sha=${{ github.sha }} \
- --label com.github.image.source.branch=${{ github.ref }} \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major}}.${{ steps.gitversion.outputs.minor }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}" \
- --cache-from ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest \
- --build-arg BUILDKIT_INLINE_CACHE=1 \
- ${{ github.workspace }}/drop/Tingle.Dependabot
-
- - name: Log into registry
- if: ${{ (github.ref == 'refs/heads/main') || (!startsWith(github.ref, 'refs/pull')) || startsWith(github.ref, 'refs/tags') }}
- run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
-
- - name: Push image (latest, ShortSha)
- if: ${{ (github.ref == 'refs/heads/main') || startsWith(github.ref, 'refs/tags') }}
- run: |
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}"
-
- - name: Push image (NuGetVersionV2)
- if: ${{ !startsWith(github.ref, 'refs/pull') }}
- run: docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}"
-
- - name: Push image (major, minor)
- if: startsWith(github.ref, 'refs/tags')
- run: |
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}.${{ steps.gitversion.outputs.minor }}"
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}"
-
- - name: Upload Release
- if: startsWith(github.ref, 'refs/tags/')
- uses: ncipollo/release-action@v1
- with:
- artifacts: >
- ${{ github.workspace }}/drop/main.bicep,
- ${{ github.workspace }}/drop/main.json
- token: ${{ secrets.GITHUB_TOKEN }}
- draft: true
- allowUpdates: true
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Required for GitVersion
+
+ - name: Install GitVersion
+ uses: gittools/actions/gitversion/setup@v3.0.0
+ with:
+ versionSpec: '6.x'
+
+ - name: Determine Version
+ uses: gittools/actions/gitversion/execute@v3.0.0
+ id: gitversion
+ with:
+ useConfigFile: true
+
+ - name: Setup .NET SDK
+ uses: actions/setup-dotnet@v4
+ with:
+ dotnet-version: '8.x'
+
+ - name: Test
+ run: dotnet test -c Release --collect "Code coverage"
+
+ - name: Publish
+ run: |
+ dotnet publish \
+ ${{ github.workspace }}/server/Tingle.Dependabot/Tingle.Dependabot.csproj \
+ -c Release \
+ -o ${{ github.workspace }}/drop/Tingle.Dependabot
+
+ - name: Replace tokens
+ uses: cschleiden/replace-tokens@v1
+ with:
+ files: '["${{ github.workspace }}/server/main.bicep"]'
+ env:
+ IMAGE_TAG: ${{ steps.gitversion.outputs.fullSemVer }}
+
+ - name: Build bicep file
+ uses: azure/cli@v2
+ with:
+ inlineScript: |
+ tdnf install -y icu
+ tdnf install -y jq
+ cp ${{ github.workspace }}/server/main.bicep ${{ github.workspace }}/drop/main.bicep && \
+ az bicep build --file server/main.bicep --outfile ${{ github.workspace }}/drop/main.json
+
+ - name: Upload Artifact (drop)
+ uses: actions/upload-artifact@v4
+ with:
+ path: ${{ github.workspace }}/drop/*
+ name: drop
+
+ - name: Create deploy folder
+ run: |
+ mkdir -p deploy
+ cp ${{ github.workspace }}/server/main.bicep ${{ github.workspace }}/deploy/main.bicep
+ cp ${{ github.workspace }}/server/main.parameters.json ${{ github.workspace }}/deploy/main.parameters.json
+
+ - name: Replace tokens in deploy folder
+ uses: cschleiden/replace-tokens@v1
+ with:
+ files: '["${{ github.workspace }}/deploy/main.parameters.json"]'
+ env:
+ DOCKER_IMAGE_TAG: ${{ steps.gitversion.outputs.shortSha }}
+ DEPENDABOT_PROJECT_TOKEN: ${{ secrets.DEPENDABOT_PROJECT_TOKEN }}
+ DEPENDABOT_GITHUB_TOKEN: ${{ secrets.DEPENDABOT_GITHUB_TOKEN }}
+
+ - name: Upload Artifact (deploy)
+ uses: actions/upload-artifact@v4
+ with:
+ path: deploy
+ name: deploy
+ retention-days: 1
+
+ - name: Pull Docker base image & warm Docker cache
+ run: docker pull "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
+
+ - name: Build image
+ run: |
+ docker build \
+ -f server/Tingle.Dependabot/Dockerfile.CI \
+ --label com.github.image.run.id=${{ github.run_id }} \
+ --label com.github.image.run.number=${{ github.run_number }} \
+ --label com.github.image.job.id=${{ github.job }} \
+ --label com.github.image.source.sha=${{ github.sha }} \
+ --label com.github.image.source.branch=${{ github.ref }} \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major}}.${{ steps.gitversion.outputs.minor }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}" \
+ --cache-from ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest \
+ --build-arg BUILDKIT_INLINE_CACHE=1 \
+ ${{ github.workspace }}/drop/Tingle.Dependabot
+
+ - name: Log into registry
+ if: ${{ (github.ref == 'refs/heads/main') || (!startsWith(github.ref, 'refs/pull')) || startsWith(github.ref, 'refs/tags') }}
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
+
+ - name: Push image (latest, ShortSha)
+ if: ${{ (github.ref == 'refs/heads/main') || startsWith(github.ref, 'refs/tags') }}
+ run: |
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}"
+
+ - name: Push image (NuGetVersionV2)
+ if: ${{ !startsWith(github.ref, 'refs/pull') }}
+ run: docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}"
+
+ - name: Push image (major, minor)
+ if: startsWith(github.ref, 'refs/tags')
+ run: |
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}.${{ steps.gitversion.outputs.minor }}"
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}"
+
+ - name: Upload Release
+ if: startsWith(github.ref, 'refs/tags/')
+ uses: ncipollo/release-action@v1
+ with:
+ artifacts: >
+ ${{ github.workspace }}/drop/main.bicep,
+ ${{ github.workspace }}/drop/main.json
+ token: ${{ secrets.GITHUB_TOKEN }}
+ draft: true
+ allowUpdates: true
# Deploy:
# runs-on: ubuntu-latest
diff --git a/.github/workflows/updater.yml b/.github/workflows/updater.yml
index f55d255f..978790b4 100644
--- a/.github/workflows/updater.yml
+++ b/.github/workflows/updater.yml
@@ -4,21 +4,21 @@ on:
workflow_dispatch:
push:
branches:
- - main
+ - main
tags:
- - '*'
+ - '*'
# No path filters ensures we always have a docker image matching the latest commit on main
pull_request:
branches:
- # Only trigger for PRs against `main` branch.
- - main
+ # Only trigger for PRs against `main` branch.
+ - main
paths:
- - "updater/**"
- - '.rubocop*.yml'
- - '.ruby-version'
- - 'Rakefile'
- - ".github/workflows/updater.yml"
- - "!docs/**"
+ - 'updater/**'
+ - '.rubocop*.yml'
+ - '.ruby-version'
+ - 'Rakefile'
+ - '.github/workflows/updater.yml'
+ - '!docs/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -31,24 +31,24 @@ jobs:
fail-fast: true
matrix:
suite:
- - { ecosystem: bundler }
- - { ecosystem: cargo }
- - { ecosystem: composer }
- - { ecosystem: docker }
- - { ecosystem: elm }
- - { ecosystem: gitsubmodule }
- - { ecosystem: github-actions }
- - { ecosystem: gomod }
- - { ecosystem: gradle }
- - { ecosystem: mix }
- - { ecosystem: maven }
- - { ecosystem: npm }
- - { ecosystem: nuget }
- - { ecosystem: pub }
- - { ecosystem: pip }
- - { ecosystem: swift }
- - { ecosystem: devcontainers }
- - { ecosystem: terraform }
+ - { ecosystem: bundler }
+ - { ecosystem: cargo }
+ - { ecosystem: composer }
+ - { ecosystem: docker }
+ - { ecosystem: elm }
+ - { ecosystem: gitsubmodule }
+ - { ecosystem: github-actions }
+ - { ecosystem: gomod }
+ - { ecosystem: gradle }
+ - { ecosystem: mix }
+ - { ecosystem: maven }
+ - { ecosystem: npm }
+ - { ecosystem: nuget }
+ - { ecosystem: pub }
+ - { ecosystem: pip }
+ - { ecosystem: swift }
+ - { ecosystem: devcontainers }
+ - { ecosystem: terraform }
env:
IMAGE_NAME: 'dependabot-updater-${{ matrix.suite.ecosystem }}'
@@ -57,85 +57,90 @@ jobs:
BUNDLE_GEMFILE: ${{ github.workspace }}/updater/Gemfile
steps:
- - name: Checkout
- uses: actions/checkout@v4
- with:
- fetch-depth: 0 # Required for GitVersion
-
- - name: Install GitVersion
- uses: gittools/actions/gitversion/setup@v3.0.0
- with:
- versionSpec: '6.x'
-
- - name: Determine Version
- uses: gittools/actions/gitversion/execute@v3.0.0
- id: gitversion
- with:
- useConfigFile: true
-
- - name: Setup Ruby
- uses: ruby/setup-ruby@v1
- with:
- bundler-cache: true
-
- - name: bundle exec rubocop
- run: bundle exec rubocop
- working-directory: updater
-
- - name: bundle exec rspec spec
- run: bundle exec rspec spec
- working-directory: updater
-
- - name: Pull Docker base image & warm Docker cache
- run: docker pull "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
- # remove this after at least one release tagged 'latest'
- continue-on-error: true
-
- - name: Get dependabot-updater image tag version
- id: docker-base-version
- run: |
- tag_name=$(grep -oP "(?<=gem \"dependabot-omnibus\", \"~>).*(?=\")" updater/Gemfile)
- tag_sha=$(curl --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' --url "https://api.github.com/repos/dependabot/dependabot-core/tags" | jq -r "[.[]|select(.name==\"v$tag_name\")][0].commit.sha")
- echo "Using dependabot-updater image tag '$tag_sha' (v$tag_name)"
- echo "version=$tag_sha" >> $GITHUB_OUTPUT
-
- - name: Build image
- run: |
- docker build \
- -f updater/Dockerfile \
- --build-arg BUILDKIT_INLINE_CACHE=1 \
- --build-arg ECOSYSTEM=${{ matrix.suite.ecosystem }} \
- --build-arg BASE_VERSION=${{ steps.docker-base-version.outputs.version }} \
- --build-arg DEPENDABOT_UPDATER_VERSION=${{ steps.gitversion.outputs.fullSemVer }} \
- --label com.github.image.run.id=${{ github.run_id }} \
- --label com.github.image.run.number=${{ github.run_number }} \
- --label com.github.image.job.id=${{ github.job }} \
- --label com.github.image.source.sha=${{ github.sha }} \
- --label com.github.image.source.branch=${{ github.ref }} \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major}}.${{ steps.gitversion.outputs.minor }}" \
- -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}" \
- --cache-from ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest \
- .
-
- - name: Log into registry
- if: ${{ (github.ref == 'refs/heads/main') || (!startsWith(github.ref, 'refs/pull')) || startsWith(github.ref, 'refs/tags') }}
- run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
-
- - name: Push image (latest, ShortSha)
- if: ${{ (github.ref == 'refs/heads/main') || startsWith(github.ref, 'refs/tags') }}
- run: |
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}"
-
- - name: Push image (NuGetVersionV2)
- if: ${{ !startsWith(github.ref, 'refs/pull') }}
- run: docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.fullSemVer }}"
-
- - name: Push image (major, minor)
- if: startsWith(github.ref, 'refs/tags')
- run: |
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}.${{ steps.gitversion.outputs.minor }}"
- docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}"
+ - name: Checkout
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Required for GitVersion
+
+ - name: Install GitVersion
+ uses: gittools/actions/gitversion/setup@v3.0.0
+ with:
+ versionSpec: '6.x'
+
+ - name: Determine Version
+ uses: gittools/actions/gitversion/execute@v3.0.0
+ id: gitversion
+ with:
+ useConfigFile: true
+
+ - name: Setup Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ bundler-cache: true
+
+ - name: bundle exec rubocop
+ run: bundle exec rubocop
+ working-directory: updater
+
+ - name: bundle exec rspec spec
+ run: bundle exec rspec spec
+ working-directory: updater
+
+ - name: Pull Docker base image & warm Docker cache
+ run: docker pull "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
+ # remove this after at least one release tagged 'latest'
+ continue-on-error: true
+
+ - name: Get dependabot-updater image tag version
+ id: docker-base-version
+ run: |
+ tag_name=$(grep -oP "(?<=gem \"dependabot-omnibus\", \"~>).*(?=\")" updater/Gemfile)
+ tag_sha=$(curl --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' --url "https://api.github.com/repos/dependabot/dependabot-core/tags" | jq -r "[.[]|select(.name==\"v$tag_name\")][0].commit.sha")
+ echo "Using dependabot-updater image tag '$tag_sha' (v$tag_name)"
+ echo "version=$tag_sha" >> $GITHUB_OUTPUT
+
+ - name: Remove + from fullSemVer
+ id: remove_plus
+ run: |
+ cleanedFullSemVer=$(echo "${{ steps.gitversion.outputs.fullSemVer }}" | tr -d '+')
+ echo "cleanedFullSemVer=$cleanedFullSemVer" >> $GITHUB_OUTPUT
+
+ - name: Build image
+ run: |
+ docker build \
+ -f updater/Dockerfile \
+ --build-arg BUILDKIT_INLINE_CACHE=1 \
+ --build-arg ECOSYSTEM=${{ matrix.suite.ecosystem }} \
+ --build-arg BASE_VERSION=${{ steps.docker-base-version.outputs.version }} \
+ --build-arg DEPENDABOT_UPDATER_VERSION=${{ steps.gitversion.outputs.fullSemVer }} \
+ --label com.github.image.run.id=${{ github.run_id }} \
+ --label com.github.image.run.number=${{ github.run_number }} \
+ --label com.github.image.job.id=${{ github.job }} \
+ --label com.github.image.source.sha=${{ github.sha }} \
+ --label com.github.image.source.branch=${{ github.ref }} \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.remove_plus.outputs.cleanedFullSemVer }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major}}.${{ steps.gitversion.outputs.minor }}" \
+ -t "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}" \
+ --cache-from ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest \
+ .
+
+ - name: Log into registry
+ if: ${{ (github.ref == 'refs/heads/main') || (!startsWith(github.ref, 'refs/pull')) || startsWith(github.ref, 'refs/tags') }}
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin
+
+ - name: Push image (latest, ShortSha)
+ if: ${{ (github.ref == 'refs/heads/main') || startsWith(github.ref, 'refs/tags') }}
+ run: |
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:latest"
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.shortSha }}"
+
+ - name: Push image (NuGetVersionV2)
+ if: ${{ !startsWith(github.ref, 'refs/pull') }}
+ run: docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.remove_plus.outputs.cleanedFullSemVer }}"
+
+ - name: Push image (major, minor)
+ if: startsWith(github.ref, 'refs/tags')
+ run: |
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}.${{ steps.gitversion.outputs.minor }}"
+ docker push "ghcr.io/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}:${{ steps.gitversion.outputs.major }}"
diff --git a/.gitignore b/.gitignore
index 978776d0..85aeeebc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,10 @@
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
.vs/
/.bundle/
/.env
@@ -20,3 +27,4 @@ local/**.sh
.DS_Store
*git.store
.idea
+node_modules/
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 00000000..ad54fcc0
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,35 @@
+# dependencies
+node_modules
+package-lock.json
+
+# outputs
+bin
+obj
+dist
+out
+.turbo
+.vercel
+.next
+.markdownlayer
+.react-email
+
+.idea
+
+# special files
+.config/dotnet-tools.json
+.github/ISSUE_TEMPLATE/**
+.vscode/settings.json
+README.md
+CONTRIBUTING.md
+docs/**/*.md
+.rubocop*.yml
+advisories-example.json
+updater/spec/fixtures/**
+server/main.json
+
+updater/.bundle/**
+updater/tmp/**
+updater/job/**
+
+# JSON array formatting by Visual Studio is different from prettier
+server/Tingle.Dependabot/appsettings.json
diff --git a/.prettierrc.yml b/.prettierrc.yml
new file mode 100644
index 00000000..47cd5c79
--- /dev/null
+++ b/.prettierrc.yml
@@ -0,0 +1,7 @@
+# Prettier configuration
+singleQuote: true
+endOfLine: 'auto'
+printWidth: 120
+quoteProps: consistent
+plugins:
+ - 'prettier-plugin-organize-imports'
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 75b8bdd4..8c4928b1 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -3,6 +3,7 @@
"azuredevops",
"devcontainers",
"fabrikam",
- "Kubernetes"
+ "Kubernetes",
+ "tinglesoftware"
]
}
\ No newline at end of file
diff --git a/CONTRIBUTING.MD b/CONTRIBUTING.MD
new file mode 100644
index 00000000..fc09cc62
--- /dev/null
+++ b/CONTRIBUTING.MD
@@ -0,0 +1,63 @@
+Thank you for contributing to the Dependabot for Azure DevOps project.
+
+# Table of Contents
+
+- [Contribution workflow](#contribution-workflow)
+- [Development environment](#development-environment)
+- [Submitting pull requests](#submitting-pull-requests)
+ - [Unit tests](#unit-tests)
+ - [Static code analyzers and linters](#static-code-analyzers-and-linters)
+ - [Formatters](#formatters)
+ - [Spelling](#spelling)
+
+# Contribution workflow
+
+1. Fork the project.
+1. Get the [development environment running](#development-environment).
+1. Make your feature addition or bug fix.
+1. Make sure all [required quality checks](#submitting-pull-requests) have passed.
+1. Send a pull request.
+
+# Development environment
+
+Before contributing, you'll need to configure your local development environment. View the corresponding development guide for the component you'd like to contribute to:
+
+- [Azure DevOps Extension](./docs/extension.md#development-guide)
+- [Dependabot Server](./docs/server.md#development-guide)
+- [Dependabot Updater image](./docs/updater.md#development-guide)
+
+# Submitting pull requests
+
+If you plan on submitting a pull request, there are several quality checks that must pass; it is recommended that you run these beforehand. The checks are:
+
+## Unit tests
+
+All existing unit tests must pass.
+View the corresponding unit test instructions for the component you'd like to test:
+
+- [Azure DevOps Extension](./docs/extension.md#running-the-unit-tests)
+- [Dependabot Server](./docs/server.md#running-the-unit-tests)
+- [Dependabot Updater image](./docs/updater.md#running-the-unit-tests)
+
+## Static code analyzers and linters
+
+Some components use static code analyzers and linters.
+View the corresponding instructions for each component:
+
+- [Dependabot Updater image](./docs/updater.md#running-the-code-linter)
+
+## Formatters
+
+```bash
+npm install
+npm run format:check # to check for formatting issues
+npm run format # to automatically fix formatting issues
+```
+
+## Spelling
+
+```bash
+pip install codespell
+codespell # to check for misspellings
+codespell --write-changes # to automatically fix misspellings
+```
diff --git a/README.md b/README.md
index 5a87b695..fc6962b8 100644
--- a/README.md
+++ b/README.md
@@ -8,115 +8,217 @@ This repository contains tools for updating dependencies in Azure DevOps reposit
In this repository you'll find:
-1. Dependabot [updater](./updater) in Ruby. See [docs](./docs/updater.md).
-2. Dockerfile and build/image for running the updater via Docker [here](./updater/Dockerfile).
-3. Dependabot [server](./server/) in .NET/C#. See [docs](./docs/server.md).
-4. Azure DevOps [Extension](https://marketplace.visualstudio.com/items?itemName=tingle-software.dependabot) and [source](./extension). See [docs](./docs/extension.md).
-
-> The hosted version is available to sponsors (most, but not all). It includes hustle free runs where the infrastructure is maintained for you. Much like the GitHub hosted version. Alternatively, you can run and host your own [server](./docs/server.md). Once you sponsor, you can send out an email to an maintainer or wait till they reach out. This is meant to ease the burden until GitHub/Azure/Microsoft can get it working natively (which could also be never) and hopefully for free.
+1. Azure DevOps [Extension](https://marketplace.visualstudio.com/items?itemName=tingle-software.dependabot), [source code](./extension) and [docs](./docs/extension.md).
+1. Dependabot Server, [source code](./server/) and [docs](./docs/server.md).
+1. Dependabot Updater image, [Dockerfile](./updater/Dockerfile), [source code](./updater/) and [docs](./docs/updater.md). **(Deprecated since v2.0)**
+
+> [!IMPORTANT]
+> This project is currently undergoing a major version increment (V1 → V2); see the [migration guide](./docs/migrations/v1-to-v2.md#summary-of-changes-v1--v2) for more details and progress updates.
+
+## Table of Contents
+- [Getting started](#getting-started)
+- [Using a configuration file](#using-a-configuration-file)
+- [Configuring private feeds and registries](#configuring-private-feeds-and-registries)
+- [Configuring security advisories and known vulnerabilities](#configuring-security-advisories-and-known-vulnerabilities)
+- [Configuring experiments](#configuring-experiments)
+- [Unsupported features and configurations](#unsupported-features-and-configurations)
+ * [Extension Task](#extension-task)
+ + [dependabot@V2](#dependabotv2)
+ + [dependabot@V1](#dependabotv1)
+ * [Updater Docker image](#updater-docker-image)
+ * [Server](#server)
+- [Migration Guide](#migration-guide)
+- [Contributing](#contributing)
+ * [Reporting issues and feature requests](#reporting-issues-and-feature-requests)
+ * [Submitting pull requests](#submitting-pull-requests)
+- [Acknowledgements](#acknowledgements)
+
+## Getting started
+
+Unlike the GitHub-hosted version, Dependabot for Azure DevOps must be explicitly set up in your organisation; creating a `dependabot.yml` file alone is **not** enough to enable updates. There are two ways to enable Dependabot:
+
+- [Azure DevOps Extension](https://marketplace.visualstudio.com/items?itemName=tingle-software.dependabot) - Ideal if you want to get Dependabot running with minimal administrative effort. The extension can run directly inside your existing pipeline agents and doesn't require hosting of any additional services. Because the extension runs in pipelines, this option does **not** scale well if you have a large number of projects and repositories.
+
+- [Hosted Server](./docs/server.md) - Ideal if you have a large number of projects and repositories or prefer to run Dependabot as a managed service instead of using pipeline agents. See [why should I use the server?](./docs/server.md#why-should-i-use-the-server) for more info.
+
+> [!NOTE]
+> A hosted version is available to sponsors (most, but not all). It includes hassle-free runs where the infrastructure is maintained for you, much like the GitHub-hosted version. Alternatively, you can run and host your own [self-hosted server](./docs/server.md). Once you sponsor, you can send an email to a maintainer or wait until they reach out. This is meant to ease the burden until GitHub/Azure/Microsoft can get it working natively (which could also be never), hopefully for free.
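+
+A minimal pipeline using the extension task might look like the sketch below. It assumes the extension is installed in your organisation and that the task is referenced as `dependabot@2`; the schedule and pool shown are illustrative, and other task inputs may be needed for your setup (see the [extension docs](./docs/extension.md)).
+
+```yml
+# azure-pipelines.yml (sketch)
+trigger: none # run on a schedule only
+
+schedules:
+  - cron: '0 2 * * 1' # Mondays at 02:00 UTC
+    always: true # run even if there are no code changes
+    branches:
+      include:
+        - main
+
+pool:
+  vmImage: ubuntu-latest
+
+steps:
+  - task: dependabot@2
+```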
## Using a configuration file
-Similar to the GitHub native version where you add a `.azuredevops/dependabot.yml` or `.github/dependabot.yml` file, this repository adds support for the same official [configuration options](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file) via a file located at `.azuredevops/dependabot.yml` or `.github/dependabot.yml`. This support is only available in the Azure DevOps extension and the [managed version](https://managd.dev). However, the extension does not currently support automatically picking up the file, a pipeline is still required. See [docs](./extension/README.md#usage).
+Similar to the GitHub-hosted version, Dependabot is configured using a [dependabot.yml file](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file) located at `.azuredevops/dependabot.yml` or `.github/dependabot.yml` in your repository.
-We are well aware that ignore conditions are not explicitly passed and passed on from the extension/server to the container. It is intentional. The ruby script in the docker container does it automatically. If you are having issues, search for related issues such as https://github.com/tinglesoftware/dependabot-azure-devops/pull/582 before creating a new issue. You can also test against various reproductions such as https://dev.azure.com/tingle/dependabot/_git/repro-582
+Most [official configuration options](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file) are supported since V2. Earlier versions have several limitations; see [unsupported features and configurations](#unsupported-features-and-configurations) for more.
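+
+A minimal `.azuredevops/dependabot.yml` might look like the sketch below (the ecosystem, directory and schedule are illustrative; adjust them to your project):
+
+```yml
+version: 2
+updates:
+  - package-ecosystem: 'nuget'
+    directory: '/'
+    schedule:
+      interval: 'weekly'
+```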
-## Credentials for private registries and feeds
+## Configuring private feeds and registries
-Besides accessing the repository only, sometimes private feeds/registries may need to be accessed.
-For example a private NuGet feed or a company internal docker registry.
+Besides accessing the repository, private feeds/registries may sometimes also need to be accessed, for example a private NuGet feed or a company-internal Docker registry.
-Adding configuration options for private registries is setup in `dependabot.yml`
-according to the dependabot [description](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#configuration-options-for-private-registries).
+Private registries are configured in `dependabot.yml`; refer to the [official documentation](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#configuration-options-for-private-registries).
-Example:
+Examples:
```yml
version: 2
registries:
- my-Extern@Release:
- type: nuget-feed
- url: https://dev.azure.com/organization1/_packaging/my-Extern@Release/nuget/v3/index.json
- token: PAT:${{MY_DEPENDABOT_ADO_PAT}}
+
+ # Azure DevOps private feed, all views
my-analyzers:
type: nuget-feed
url: https://dev.azure.com/organization2/_packaging/my-analyzers/nuget/v3/index.json
- token: PAT:${{MY_OTHER_PAT}}
+ token: PAT:${{ MY_DEPENDABOT_ADO_PAT }}
+
+ # Azure DevOps private feed, "Release" view only
+ my-Extern@Release:
+ type: nuget-feed
+ url: https://dev.azure.com/organization1/_packaging/my-Extern@Release/nuget/v3/index.json
+ token: PAT:${{ MY_DEPENDABOT_ADO_PAT }}
+
+ # Artifactory private feed using PAT
artifactory:
type: nuget-feed
url: https://artifactory.com/api/nuget/v3/myfeed
- token: PAT:${{MY_ARTIFACTORY_PAT}}
+ token: PAT:${{ MY_DEPENDABOT_ARTIFACTORY_PAT }}
+
+ # Other private feed using basic auth (username/password)
telerik:
type: nuget-feed
url: https://nuget.telerik.com/v3/index.json
- username: ${{MY_TELERIK_USERNAME}}
- password: ${{MY_TELERIK_PASSWORD}}
- token: ${{MY_TELERIK_USERNAME}}:${{MY_TELERIK_PASSWORD}}
+ username: ${{ MY_TELERIK_USERNAME }}
+ password: ${{ MY_TELERIK_PASSWORD }}
+ token: ${{ MY_TELERIK_USERNAME }}:${{ MY_TELERIK_PASSWORD }}
+
updates:
...
```
-Note:
-
-1. `${{VARIABLE_NAME}}` notation is used liked described [here](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/managing-encrypted-secrets-for-dependabot)
-BUT the values will be used from Environment Variables in the pipeline/environment. Template variables are not supported for this replacement. Replacement only works for values considered secret in the registries section i.e. `username`, `password`, `token`, and `key`
-
-2. When using an Azure DevOps Artifact feed, only the `token` property is required. The token notation should be `PAT:${{VARIABLE_NAME}}` otherwise the wrong authentication mechanism is used by Dependabot, see [here](https://github.com/tinglesoftware/dependabot-azure-devops/issues/50) for more details.
-When working with Azure DevOps Artifacts, some extra permission steps need to be done:
-
- 1. The PAT should have *Packaging Read* permission.
- 2. The user owning the PAT must be granted permissions to access the feed either directly or via a group. An easy way for this is to give `Contributor` permissions the `[{project_name}]\Contributors` group under the `Feed Settings -> Permissions` page. The page has the url format: `https://dev.azure.com/{organization}/{project}/_packaging?_a=settings&feed={feed-name}&view=permissions`.
+Note when using authentication secrets in configuration files:
+
+> [!IMPORTANT]
+> `${{ VARIABLE_NAME }}` notation is used as described [here](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/managing-encrypted-secrets-for-dependabot),
+> but the values are taken from pipeline environment variables. Template variables are not supported for this replacement. Replacement only works for values considered secret in the registries section, i.e. `username`, `password`, `token`, and `key`.
+
+> [!IMPORTANT]
+> When using an Azure DevOps Artifact feed, the token format must be `PAT:${{ VARIABLE_NAME }}` where `VARIABLE_NAME` is a pipeline/environment variable containing the PAT token. The PAT must:
+> 1. Have `Packaging (Read)` permission.
+> 2. Be issued by a user with permission to access the feed, either directly or via a group. An easy way to do this is to give `Contributor` permissions to the `[{project_name}]\Contributors` group on the `Feed Settings -> Permissions` page. The page has the URL format: `https://dev.azure.com/{organization}/{project}/_packaging?_a=settings&feed={feed-name}&view=permissions`.
+
+> [!NOTE]
+> When using `dependabot@V1` with a private feed/registry secured with basic auth, the `username`, `password`, **and** `token` properties are all required. The token format must be `${{ USERNAME }}:${{ PASSWORD }}`.
+
+> [!NOTE]
+> When using `dependabot@V1` with a repository containing a `nuget.config` file configured with custom package sources, the `key` property is required for each registry. The key must match between `dependabot.yml` and `nuget.config`, otherwise the package source will be duplicated, package source mappings will be ignored, and auth errors will occur during dependency discovery. If your `nuget.config` looks like this:
+> ```xml
+> <?xml version="1.0" encoding="utf-8"?>
+> <configuration>
+>   <packageSources>
+>     <clear />
+>     <add key="nuget.org" value="https://api.nuget.org/v3/index.json" />
+>     <add key="my-organisation1-nuget" value="https://dev.azure.com/my-organization/_packaging/my-nuget-feed/nuget/v3/index.json" />
+>   </packageSources>
+>   <packageSourceMapping>
+>     <packageSource key="nuget.org">
+>       <package pattern="*" />
+>     </packageSource>
+>     <packageSource key="my-organisation1-nuget">
+>       <package pattern="My.Org.*" />
+>     </packageSource>
+>   </packageSourceMapping>
+> </configuration>
+> ```
+>
+> Then your `dependabot.yml` registry should look like this:
+> ```yml
+> version: 2
+> registries:
+> my-org:
+> type: nuget-feed
+> key: my-organisation1-nuget
+> url: https://dev.azure.com/my-organization/_packaging/my-nuget-feed/nuget/v3/index.json
+> token: PAT:${{ MY_DEPENDABOT_ADO_PAT }}
+> ```
+
+## Configuring security advisories and known vulnerabilities
+
+Security-only updates are a mechanism to only create pull requests for dependencies with vulnerabilities by updating them to the earliest available non-vulnerable version. [Security updates are supported in the same way as the GitHub-hosted version](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/configuring-dependabot-security-updates#overriding-the-default-behavior-with-a-configuration-file) provided that a GitHub access token with `public_repo` access is provided in the `gitHubAccessToken` or `gitHubConnection` task inputs.
+
+You can provide extra security advisories, such as those for an internal dependency, in a JSON file via the `securityAdvisoriesFile` task input e.g. `securityAdvisoriesFile: '$(Pipeline.Workspace)/advisories.json'`. An example file is available in [./advisories-example.json](./advisories-example.json).
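+
+For example, the relevant task inputs might be wired up as in the sketch below (the pipeline variable name is illustrative):
+
+```yml
+steps:
+  - task: dependabot@2
+    inputs:
+      gitHubAccessToken: $(GITHUB_ACCESS_TOKEN) # PAT with `public_repo` access
+      securityAdvisoriesFile: '$(Pipeline.Workspace)/advisories.json'
+```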
+
+## Configuring experiments
+Dependabot uses an internal feature flag system called "experiments". Typically, experiments represent new features or changes in logic which are still being internally tested before becoming generally available. In some cases, you may want to opt-in to experiments to work around known issues or to opt-in to preview features ahead of general availability (GA).
+
+Experiments vary depending on the package ecosystem used; they can be enabled using the `experiments` task input with a comma-separated list of key/value pairs representing the experiments e.g. `experiments: 'tidy=true,vendor=true,goprivate=*'`.
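+
+For example, the experiments above might be enabled on the task as in this sketch:
+
+```yml
+steps:
+  - task: dependabot@2
+    inputs:
+      experiments: 'tidy=true,vendor=true,goprivate=*'
+```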
+
+> [!NOTE]
+> Dependabot experiment names are not publicly documented. For convenience, some known experiments are listed below; however, **be aware that this may be out-of-date at the time of reading.**
+
+
+<details>
+<summary>List of known experiments from dependabot-core@0.278.0</summary>
+
+|Package Ecosystem|Experiment Name|Value Type|Description|
+|--|--|--|--|
+| All | dedup_branch_names | true/false | |
+| All | grouped_updates_experimental_rules | true/false | |
+| All | grouped_security_updates_disabled | true/false | |
+| All | record_ecosystem_versions | true/false | |
+| All | record_update_job_unknown_error | true/false | |
+| All | dependency_change_validation | true/false | |
+| All | add_deprecation_warn_to_pr_message | true/false | |
+| All | threaded_metadata | true/false | |
+| Bundler | bundler_v1_unsupported_error | true/false | |
+| Go | tidy | true/false | |
+| Go | vendor | true/false | |
+| Go | goprivate | string | |
+| NPM and Yarn | enable_pnpm_yarn_dynamic_engine | true/false | |
+| NuGet | nuget_native_analysis | true/false | https://github.com/dependabot/dependabot-core/pull/10025 |
+| NuGet | nuget_native_updater | true/false | https://github.com/dependabot/dependabot-core/pull/10521 |
+| NuGet | nuget_dependency_solver | true/false | https://github.com/dependabot/dependabot-core/pull/10343 |
+
+</details>
+
+> [!TIP]
+> To find the latest list of Dependabot experiments, search the `dependabot-core` GitHub repository using queries like ["enabled?(x)"](https://github.com/search?q=repo%3Adependabot%2Fdependabot-core+%2Fenabled%5CW%5C%28.*%5C%29%2F&type=code) and ["options.fetch(x)"](https://github.com/search?q=repo%3Adependabot%2Fdependabot-core+%2Foptions%5C.fetch%5C%28.*%2C%2F&type=code).
+
+## Unsupported features and configurations
+We aim to support all [official configuration options](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file), but there are some limitations for the following components:
+
+### Extension Task
+
+#### `dependabot@V2`
+- [`schedule`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#scheduleinterval) is ignored; use [pipeline scheduled triggers](https://learn.microsoft.com/en-us/azure/devops/pipelines/process/scheduled-triggers?view=azure-devops&tabs=yaml#scheduled-triggers) instead.
+- [Security-only updates](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/configuring-dependabot-security-updates#overriding-the-default-behavior-with-a-configuration-file) (`open-pull-requests-limit: 0`) are not supported. _(coming soon)_
+
+#### `dependabot@V1`
+- [`schedule`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#scheduleinterval) is ignored; use [pipeline scheduled triggers](https://learn.microsoft.com/en-us/azure/devops/pipelines/process/scheduled-triggers?view=azure-devops&tabs=yaml#scheduled-triggers) instead.
+- [`directories`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#directories) are only supported if task input `useUpdateScriptVNext: true` is set.
+- [`groups`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#groups) are only supported if task input `useUpdateScriptVNext: true` is set.
+- [`ignore`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#ignore) may not behave to official specifications unless task input `useUpdateScriptVNext: true` is set. If you are having issues, search for related issues such as [tinglesoftware/dependabot-azure-devops#582](https://github.com/tinglesoftware/dependabot-azure-devops/pull/582) before creating a new issue.
+- Private feed/registry authentication may not work with all package ecosystems. Support is _slightly_ improved when task input `useUpdateScriptVNext: true` is set (see the sketch below), but still not fully supported. See [problems with authentication](https://github.com/tinglesoftware/dependabot-azure-devops/discussions/1317) for more.
+
+### Updater Docker image
+- Private feed/registry authentication may not work with all package ecosystems. See [problems with authentication](https://github.com/tinglesoftware/dependabot-azure-devops/discussions/1317) for more.
+
+### Server
+
+- [`directories`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#directories) are not supported.
+- [`groups`](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#groups) are not supported.
+- Private feed/registry authentication may not work with all package ecosystems. See [problems with authentication](https://github.com/tinglesoftware/dependabot-azure-devops/discussions/1317) for more.
+
+## Migration Guide
+- [Extension Task V1 → V2](./docs/migrations/v1-to-v2.md)
+
+## Contributing
+
+:wave: Want to give us feedback on Dependabot for Azure DevOps, or contribute to it? That's great - thank you so much!
+
+### Reporting issues and feature requests
+
+Please leave all issues, bugs, and feature requests on the [issues page](https://github.com/tinglesoftware/dependabot-azure-devops/issues). We'll respond ASAP!
+Use the [discussions page](https://github.com/tinglesoftware/dependabot-azure-devops/discussions) for all other questions and comments.
+
+### Submitting pull requests
-3. When using a NuGet package server secured with basic auth, the `username`, `password`, and `token` properties are all required. The token notation should be `${{USERNAME}}:${{PASSWORD}}`, see [here](https://github.com/tinglesoftware/dependabot-azure-devops/issues/1232#issuecomment-2247616424) for more details.
-
-
-4. When your project contains a `nuget.config` file with custom package source configuration, the `key` property is required for each nuget-feed registry. The key must match between `dependabot.yml` and `nuget.config` otherwise the package source will be duplicated, package source mappings will be ignored, and auth errors will occur during dependency discovery.
-
- If your `nuget.config` looks like this:
- ```xml
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- ```
-
- Then your `dependabot.yml` registry should look like this:
-
- ```yml
- version: 2
- registries:
- my-org:
- type: nuget-feed
- key: my-organisation1-nuget
- url: https://dev.azure.com/my-organization/_packaging/my-nuget-feed/nuget/v3/index.json
- token: PAT:${{MY_DEPENDABOT_ADO_PAT}}
- ```
-
-
-## Security Advisories, Vulnerabilities, and Updates
-
-Security-only updates ia a mechanism to only create pull requests for dependencies with vulnerabilities by updating them to the earliest available non-vulnerable version. Security updates are supported in the same way as the GitHub-hosted version. In addition, you can provide extra advisories, such as those for an internal dependency, in a JSON file via the `securityAdvisoriesFile` input e.g. `securityAdvisoriesFile: '$(Pipeline.Workspace)/advisories.json'`. A file example is available [here](./advisories-example.json).
-
-A GitHub access token with `public_repo` access is required to perform the GitHub GraphQL for `securityVulnerabilities`.
-
-## Development Guide
-
-If you'd like to contribute to the project or just run it locally, view our development guides for:
-
-- [Azure DevOps extension](./docs/extension.md#development-guide)
-- [Dependabot updater](./docs/updater.md#development-guide)
+Please refer to the [contributing guidelines](./CONTRIBUTING.MD) for more information on how to get started.
## Acknowledgements
@@ -127,7 +229,3 @@ The work in this repository is based on inspired and occasionally guided by some
3. Chris' work: [code](https://github.com/chris5287/dependabot-for-azuredevops)
4. andrcun's work on GitLab: [code](https://gitlab.com/dependabot-gitlab/dependabot)
5. WeWork's work for GitLab: [code](https://github.com/wemake-services/kira-dependencies)
-
-## Issues & Comments
-
-Please leave all comments, bugs, requests, and issues on the Issues page. We'll respond to your request ASAP!
diff --git a/docs/extension.md b/docs/extension.md
index 0f02cac4..27e9876e 100644
--- a/docs/extension.md
+++ b/docs/extension.md
@@ -2,21 +2,35 @@
# Table of Contents
- [Using the extension](#using-the-extension)
+- [Troubleshooting issues](#troubleshooting-issues)
- [Development guide](#development-guide)
- - [Getting the development environment ready](#getting-the-development-environment-ready)
- - [Building the extension](#building-the-extension)
- - [Installing the extension](#installing-the-extension)
- - [Running the unit tests](#running-the-unit-tests)
+ * [Getting the development environment ready](#getting-the-development-environment-ready)
+ * [Building the extension](#building-the-extension)
+ * [Installing the extension](#installing-the-extension)
+ * [Running the task locally](#running-the-task-locally)
+ * [Running the unit tests](#running-the-unit-tests)
+- [Architecture](#architecture)
+ * [Task V2 high-level update process diagram](#task-v2-high-level-update-process-diagram)
+
# Using the extension
-See the extension [README.md](../extension/README.md).
+Refer to the extension [README.md](../extension/README.md).
+
+# Troubleshooting issues
+
+Dependabot will log more diagnostic information when [verbose logs are enabled](https://learn.microsoft.com/en-us/azure/devops/pipelines/troubleshooting/review-logs?view=azure-devops&tabs=windows-agent#configure-verbose-logs); i.e. when the `system.debug` variable is set to `true`.
+
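+For example, one way to enable verbose logs from the pipeline YAML (a minimal sketch; the variable can also be set when queueing a run):
+
+```yaml
+variables:
+  system.debug: 'true' # enables Dependabot diagnostic logging
+
+steps:
+- task: dependabot@2
+```
+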
+When verbose logs are enabled, Dependabot will also generate a [Flame Graph performance metrics report](https://www.brendangregg.com/flamegraphs.html), which can be viewed by [downloading the pipeline logs](https://learn.microsoft.com/en-us/azure/devops/pipelines/troubleshooting/review-logs?view=azure-devops&tabs=windows-agent#view-and-download-logs), then locating the corresponding HTML report file in the `Job` folder. To understand how to read Flame Graph reports, see: https://www.brendangregg.com/flamegraphs.html#summary
+
+> [!WARNING]
+> When sharing pipeline logs, please be aware that the **task log contains potentially sensitive information** such as your DevOps organisation name, project names, repository names, private package feed URLs, the list of used dependency names/versions, and the contents of any dependency files that are updated (e.g. `package.json`, `*.csproj`, etc). The Flame Graph report does **not** contain any sensitive information about your DevOps environment.
# Development guide
## Getting the development environment ready
-First, ensure you have [Node.js](https://docs.docker.com/engine/install/) v18+ installed.
-Next, install project dependencies with npm:
+
+Install [Node.js](https://nodejs.org/en/download) (18+), [Go](https://go.dev/doc/install) (1.22+), and [Docker](https://docs.docker.com/engine/install/) (with Linux containers); Install project dependencies using NPM:
```bash
cd extension
@@ -24,23 +38,93 @@ npm install
```
## Building the extension
+
```bash
cd extension
-npm run build:prod
-npm run mv:prod
+npm run build
```
-To generate the Azure DevOps `.vsix` extension package for testing, you'll first need to [create a publisher account](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#create-a-publisher) on the [Visual Studio Marketplace Publishing Portal](https://marketplace.visualstudio.com/manage/createpublisher?managePageRedirect=true). After this, override your publisher ID below and generate the extension with:
+To generate an Azure DevOps `.vsix` extension package for testing, you'll first need to [create a publisher account](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#create-a-publisher) on the [Visual Studio Marketplace Publishing Portal](https://marketplace.visualstudio.com/manage/createpublisher?managePageRedirect=true). After this, use `npm run package` to build the package, with an override for your publisher ID:
```bash
-npx tfx-cli extension create --overrides-file overrides.local.json --override "{\"publisher\": \"your-publisher-id-here\"}" --json5
+npm run package -- --overrides-file overrides.local.json --rev-version --publisher your-publisher-id-here
```
## Installing the extension
-To test the extension in Azure DevOps, you'll first need to build the extension `.vsix` file (see above). After this, [publish your extension](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#publish-your-extension), then [install your extension](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#install-your-extension).
+
+To test the extension in an Azure DevOps organisation:
+1. [Build the extension `.vsix` package](#building-the-extension)
+1. [Publish the extension to your publisher account](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#publish-your-extension)
+1. [Share the extension with the organisation](https://learn.microsoft.com/en-us/azure/devops/extend/publish/overview?view=azure-devops#share-your-extension).
+
+## Running the task locally
+To run the latest task version:
+```bash
+npm start
+```
+
+To run a specific task version:
+```bash
+npm run start:V1 # runs dependabotV1 task
+npm run start:V2 # runs dependabotV2 task
+```
## Running the unit tests
+
```bash
cd extension
-npm run test
-```
\ No newline at end of file
+npm test
+```
+
+# Architecture
+
+## Task V2 high-level update process diagram
+High-level sequence diagram illustrating how the `dependabotV2` task performs updates using [dependabot-cli](https://github.com/dependabot/cli). For more technical details, see [how dependabot-cli works](https://github.com/dependabot/cli?tab=readme-ov-file#how-it-works).
+
+```mermaid
+ sequenceDiagram
+ participant ext as Dependabot DevOps Extension
+ participant agent as DevOps Pipeline Agent
+ participant devops as DevOps API
+ participant cli as Dependabot CLI
+ participant core as Dependabot Updater
+ participant feed as Package Feed
+
+ ext->>ext: Read and parse `dependabot.yml`
+ ext->>ext: Write `job.yaml`
+ ext->>agent: Download dependabot-cli from github
+ ext->>+cli: Execute `dependabot update -f job.yaml -o update-scenario.yaml`
+ cli->>+core: Run update for `job.yaml` with proxy and dependabot-updater docker containers
+ core->>devops: Fetch source files from repository
+ core->>core: Discover dependencies
+ loop for each dependency
+ core->>feed: Fetch latest version
+ core->>core: Update dependency files
+ end
+ core-->>-cli: Report outputs
+ cli->>cli: Write outputs to `update-scenario.yaml`
+ cli-->>-ext: Update completed
+
+ ext->>ext: Read and parse `update-scenario.yaml`
+ loop for each output
+ alt when output is "create_pull_request"
+ ext->>devops: Create pull request source branch
+ ext->>devops: Push commit to source branch
+ ext->>devops: Create pull request
+ ext->>devops: Set auto-approve
+ ext->>devops: Set auto-complete
+ end
+ alt when output is "update_pull_request"
+ ext->>devops: Push commit to pull request
+ ext->>devops: Update pull request description
+ ext->>devops: Set auto-approve
+ ext->>devops: Set auto-complete
+ end
+ alt when output is "close_pull_request"
+ ext->>devops: Create comment thread on pull request with close reason
+ ext->>devops: Abandon pull request
+ ext->>devops: Delete source branch
+ end
+ end
+
+```
diff --git a/docs/migrations/v1-to-v2.md b/docs/migrations/v1-to-v2.md
new file mode 100644
index 00000000..e28a8e67
--- /dev/null
+++ b/docs/migrations/v1-to-v2.md
@@ -0,0 +1,82 @@
+
+> [!WARNING]
+> **:construction: Work in progress;** `dependabot@V2` is still under development and this document may change without notice up until general availability (GA).
+
+# Table of Contents
+- [Summary of changes V1 → V2](#summary-of-changes-v1-v2)
+- [Breaking changes V1 → V2](#breaking-changes-v1-v2)
+- [Todo before general availability](#todo-before-general-availability)
+
+# Summary of changes V1 → V2
+V2 is a complete re-write of the Dependabot task; It aims to:
+
+- Resolve the [numerous private feed/registry authentication issues](https://github.com/tinglesoftware/dependabot-azure-devops/discussions/1317) that currently exist in V1;
+- More closely align the update logic with the GitHub-hosted Dependabot service;
+
+The task now uses [Dependabot CLI](https://github.com/dependabot/cli) to perform dependency updates, which is the _[currently]_ recommended approach for running Dependabot. See [extension task architecture](../extension.md#architecture) for more details on the technical changes and impact to the update process.
+
+# Breaking changes V1 → V2
+
+> [!WARNING]
+> **It is strongly recommended that you complete (or abandon) all active Dependabot pull requests created in V1 before migrating to V2.** Due to changes in Dependabot dependency metadata, V2 pull requests are not compatible with V1 (and vice versa). Migrating to V2 before completing existing pull requests will lead to duplication of pull requests.
+
+### New pipeline agent requirements; "Go" must be installed
+Dependabot CLI requires [Go](https://go.dev/doc/install) (1.22+) and [Docker](https://docs.docker.com/engine/install/) (with Linux containers).
+If you use [Microsoft-hosted agents](https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml#software), we recommend using the [ubuntu-latest](https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2404-Readme.md) image, which meets all task requirements.
+For self-hosted agents, you will need to install Go 1.22+.
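+
+For example, a minimal sketch for a self-hosted agent using Azure Pipelines' Go tool installer task (`GoTool@0`), assuming Docker is already available on the agent (the Go version shown is illustrative):
+
+```yaml
+steps:
+- task: GoTool@0
+  inputs:
+    version: '1.22.5' # any Go 1.22+ release
+- task: dependabot@2
+```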
+
+### Security-only updates and "fixed vulnerabilities" are not implemented (yet)
+Using configuration `open-pull-requests-limit: 0` will cause a "not implemented" error. This is a [current limitation of V2](../../README.md#unsupported-features-and-configurations). A solution is under development and is expected before general availability.
+See: https://github.com/dependabot/cli/issues/360 for more technical details.
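+
+For reference, a minimal sketch of a configuration that currently triggers this error (the ecosystem shown is illustrative):
+
+```yaml
+# dependabot.yml
+version: 2
+updates:
+  - package-ecosystem: 'nuget'
+    directory: '/'
+    schedule:
+      interval: 'weekly'
+    open-pull-requests-limit: 0 # security-only updates; not yet implemented in V2
+```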
+
+### Task Input `updaterOptions` has been renamed to `experiments`
+Renamed to match Dependabot Core/CLI terminology. The input value remains unchanged. See [configuring experiments](../../README.md#configuring-experiments) for more details.
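+
+For example, an illustrative before/after of the rename in pipeline YAML (the experiment values are placeholders):
+
+```yaml
+# dependabot@V1
+- task: dependabot@1
+  inputs:
+    updaterOptions: 'tidy=true,vendor=true,goprivate=*'
+
+# dependabot@V2
+- task: dependabot@2
+  inputs:
+    experiments: 'tidy=true,vendor=true,goprivate=*'
+```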
+
+### Task Input `failOnException` has been removed
+Due to the design of Dependabot CLI, the update process can no longer be interrupted once the update has started. Because of this, the update will now continue on error and summarise all errors at the end of the update process.
+
+### Task Input `excludeRequirementsToUnlock` has been removed
+This was a customisation/workaround specific to the V1 update script that can no longer be implemented with Dependabot CLI as it is not an official configuration option.
+
+### Task Input `dockerImageTag` has been removed
+This is no longer required as the [custom] [Dependabot Updater image](../updater.md) is no longer used.
+
+### Task Input `extraEnvironmentVariables` has been removed
+Due to the containerised design of Dependabot CLI, environment variables can no longer be passed from the task to the updater process. All Dependabot config must now be set via `dependabot.yml` or as task inputs. See the changes to environment variables below for more details.
+
+### Changes to environment variables
+The following environment variables are now configured using [pipeline system variables](https://learn.microsoft.com/en-us/azure/devops/pipelines/process/variables?view=azure-devops&tabs=yaml%2Cbatch#system-variables):
+| Environment Variable | → | Pipeline Variable |
+|--|--|--|
+|`DEPENDABOT_DEBUG`| → |`System.Debug`|
+
+The following environment variables are now configured using task inputs:
+| Environment Variable | → | Task Input |
+|--|--|--|
+|`DEPENDABOT_AUTHOR_EMAIL`| → |`authorEmail`|
+|`DEPENDABOT_AUTHOR_NAME`| → |`authorName`|
+|`DEPENDABOT_UPDATER_OPTIONS`| → |`experiments`|
+
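+For example, an illustrative sketch of the equivalent V2 configuration (the author email shown is a placeholder):
+
+```yaml
+variables:
+  system.debug: 'true' # replaces DEPENDABOT_DEBUG
+
+steps:
+- task: dependabot@2
+  inputs:
+    authorEmail: 'dependabot@example.com' # replaces DEPENDABOT_AUTHOR_EMAIL
+    authorName: 'dependabot[bot]' # replaces DEPENDABOT_AUTHOR_NAME
+    experiments: 'tidy=true' # replaces DEPENDABOT_UPDATER_OPTIONS
+```
+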
+The following environment variables have been removed entirely; the feature is no longer supported:
+
+| Removed Environment Variable | Reason |
+|--|--|
+|`DEPENDABOT_PR_NAME_PREFIX_STYLE`| Feature is not supported; It is not an official configuration |
+|`DEPENDABOT_COMPATIBILITY_SCORE_BADGE`| Feature is not supported; It is not an official configuration |
+|`DEPENDABOT_MESSAGE_HEADER`| Feature is not supported; It is not an official configuration |
+|`DEPENDABOT_MESSAGE_FOOTER`| Feature is not supported; It is not an official configuration |
+|`DEPENDABOT_SIGNATURE_KEY`| Feature is not supported; It is not an official configuration |
+|`DEPENDABOT_JOB_ID`| Set automatically by extension |
+
+## Todo before general availability
+Before removing the preview flag from V2 `task.json`, we need to:
+ - [x] Open an issue in Dependabot-CLI, enquire how security-advisories are expected to be provided **before** knowing the list of dependencies. (https://github.com/dependabot/cli/issues/360)
+ - [ ] Convert GitHub security advisory client in `vulnerabilities.rb` to TypeScript code
+ - [ ] Implement `security-advisories` config once the answer to the above is known
+ - [x] Review `task.json`, add documentation for new V2 inputs
+ - [x] Update `\docs\extension.md` with V2 docs
+ - [x] Update `\extension\README.MD` with V2 docs
+ - [x] Update `\README.MD` with V2 docs
+ - [ ] Do a general code tidy-up pass (check all "TODO" comments)
+ - [ ] Add unit tests for V2 utils scripts
+ - [ ] Investigate https://zod.dev/
\ No newline at end of file
diff --git a/docs/server.md b/docs/server.md
index f6352872..98d2f268 100644
--- a/docs/server.md
+++ b/docs/server.md
@@ -4,11 +4,14 @@
- [Why should I use the server?](#why-should-i-use-the-server)
- [Composition](#composition)
- [Deployment](#deployment)
- - [Single click deployment](#single-click-deployment)
- - [Deployment Parameters](#deployment-parameters)
- - [Deployment with CLI](#deployment-with-cli)
- - [Service Hooks and Subscriptions](#service-hooks-and-subscriptions)
+ * [Single click deployment](#single-click-deployment)
+ * [Deployment Parameters](#deployment-parameters)
+ * [Deployment with CLI](#deployment-with-cli)
+ * [Service Hooks and Subscriptions](#service-hooks-and-subscriptions)
- [Keeping updated](#keeping-updated)
+- [Development guide](#development-guide)
+ * [Getting the development environment ready](#getting-the-development-environment-ready)
+ * [Running the unit tests](#running-the-unit-tests)
# Why should I use the server?
@@ -33,7 +36,7 @@ The current cost we have internally for this in the `westeurope` region:
- Azure Service Bus namespace: approx $0.05/month
- Azure Container Instances: approx $2/month for 21 repositories
- Azure Container Apps: approx $15/month given about 80% idle time
-- **Total: approx $22/month** (expected to reduce when jobs are added to Azure Container Apps, see https://github.com/microsoft/azure-container-apps/issues/526)
+- **Total: approx $22/month** (expected to reduce when jobs are added to Azure Container Apps, see https://github.com/microsoft/azure-container-apps/issues/526)
# Deployment
@@ -55,14 +58,16 @@ The deployment exposes the following parameters that can be tuned to suit the se
|--|--|--|--|
|location|Location to deploy the resources.|No|<resource-group-location>|
|name|The name of all resources.|No|`dependabot`|
-|projectSetups|A JSON array string representing the projects to be setup on startup. This is useful when running your own setup. Example: `[{\"url\":\"https://dev.azure.com/tingle/dependabot\",\"token\":\"dummy\",\"AutoComplete\":true}]`|
+|projectSetups|A JSON array string representing the projects to be set up on startup. This is useful when running your own setup. Example: `[{\"url\":\"https://dev.azure.com/tingle/dependabot\",\"token\":\"dummy\",\"AutoComplete\":true}]`|Yes|<empty>|
|githubToken|Access token for authenticating requests to GitHub. Required for vulnerability checks and to avoid rate limiting on free requests|No|<empty>|
|imageTag|The image tag to use when pulling the docker containers. A tag also defines the version. You should avoid using `latest`. Example: `1.1.0`|No|<version-downloaded>|
+> [!NOTE]
> The template includes a User Assigned Managed Identity, which is used when performing Azure Resource Manager operations such as deletions. In the deployment it creates the role assignments that it needs. These role assignments are on the resource group that you deploy to.
## Deployment with CLI
+> [!IMPORTANT]
> Ensure the Azure CLI tools are installed and that you are logged in.
For a one time deployment, it is similar to how you deploy other resources on Azure.
@@ -116,3 +121,22 @@ To enable automatic pickup of configuration files, merge conflict resolution and
If you wish to keep your deployment updated, you can create a private repository with this one as a git submodule, configure dependabot to update it then add a new workflow that deploys to your preferred host using a manual trigger (or one of your choice).
You can also choose to watch the repository so as to be notified when a new release is published.
+
+# Development guide
+
+## Getting the development environment ready
+
+Install [.NET 8](https://dotnet.microsoft.com/en-us/download) and [Docker](https://docs.docker.com/engine/install/) (with Linux containers); Install project dependencies using `dotnet` or Visual Studio [Code]:
+
+```bash
+cd server
+dotnet restore Tingle.Dependabot
+dotnet restore Tingle.Dependabot.Tests
+```
+
+## Running the unit tests
+
+```bash
+cd server
+dotnet test Tingle.Dependabot.Tests
+```
diff --git a/docs/updater.md b/docs/updater.md
index 03dc43b4..99ddc37a 100644
--- a/docs/updater.md
+++ b/docs/updater.md
@@ -1,25 +1,27 @@
+> [!WARNING]
+> **Deprecated;** Use of the Dependabot Updater image is no longer recommended since v2.0; The "updater" component is considered internal to Dependabot and is not intended to be run directly by end-users. There are known limitations with this image, see [unsupported features and configurations](../README.md#unsupported-features-and-configurations) for more details.
+
# Table of Contents
- [Running the updater](#running-the-updater)
- - [Environment variables](#environment-variables)
+ * [Environment Variables](#environment-variables)
- [Development guide](#development-guide)
- - [Getting the development environment ready](#getting-the-development-environment-ready)
- - [Building the Docker image](#building-the-docker-image)
- - [Running your code changes](#running-your-code-changes)
- - [Running the code linter](#running-the-code-linter)
- - [Running the unit tests](#running-the-unit-tests)
+ * [Getting the development environment ready](#getting-the-development-environment-ready)
+ * [Building the Docker image](#building-the-docker-image)
+ * [Running your code changes](#running-your-code-changes)
+ * [Running the code linter](#running-the-code-linter)
+ * [Running the unit tests](#running-the-unit-tests)
# Running the updater
-First, you need to pull the docker image locally to your machine:
+[Build](#building-the-docker-image) or pull the docker image:
```bash
docker pull ghcr.io/tinglesoftware/dependabot-updater-<ecosystem>
```
-Next create and run a container from the image. The full list of container options are detailed in [Environment variables](#environment-variables); at minimum the command should be:
-
+Create and run a container based on the image. The full list of container options is detailed in [environment variables](#environment-variables); at minimum the command should be:
```bash
docker run --rm -t \
@@ -39,28 +41,32 @@ docker run --rm -t \
ghcr.io/tinglesoftware/dependabot-updater-<ecosystem> update_script
```
-An example, for Azure DevOps Services:
+
+Example, for Azure DevOps Services
```bash
docker run --rm -t \
- -e GITHUB_ACCESS_TOKEN=ijkl..mnop \
+ -e GITHUB_ACCESS_TOKEN=ijk..mop \
-e DEPENDABOT_PACKAGE_MANAGER=nuget \
-e DEPENDABOT_DIRECTORY=/ \
-e DEPENDABOT_TARGET_BRANCH=main \
-e DEPENDABOT_EXTRA_CREDENTIALS='[{"type":"npm_registry","token":"","registry":"npm.fontawesome.com"}]' \
-e AZURE_HOSTNAME=dev.azure.com \
- -e AZURE_ACCESS_TOKEN=abcd..efgh \
+ -e AZURE_ACCESS_TOKEN=abc..efg \
-e AZURE_ORGANIZATION=tinglesoftware \
-e AZURE_PROJECT=oss \
-e AZURE_REPOSITORY=repro-411 \
ghcr.io/tinglesoftware/dependabot-updater-nuget update_script
```
-An example, for Azure DevOps Server:
+
+
+
+Example, for Azure DevOps Server
```bash
docker run --rm -t \
- -e GITHUB_ACCESS_TOKEN=ijkl..mnop \
+ -e GITHUB_ACCESS_TOKEN=ijk..mno \
-e DEPENDABOT_PACKAGE_MANAGER=nuget \
-e DEPENDABOT_DIRECTORY=/ \
-e DEPENDABOT_TARGET_BRANCH=main \
@@ -69,16 +75,18 @@ docker run --rm -t \
-e AZURE_HOSTNAME=my-devops.com \
-e AZURE_PORT=8080 \
-e AZURE_VIRTUAL_DIRECTORY=tfs \
- -e AZURE_ACCESS_TOKEN=abcd..efgh \
+ -e AZURE_ACCESS_TOKEN=abc..efg \
-e AZURE_ORGANIZATION=tinglesoftware \
-e AZURE_PROJECT=oss \
-e AZURE_REPOSITORY=repro-411 \
ghcr.io/tinglesoftware/dependabot-updater-nuget update_script
```
+
+
## Environment Variables
-To run the script, some environment variables are required.
+The following environment variables are used when running the container.
|Variable Name|Supported Command(s)|Description|
|--|--|--|
@@ -108,7 +116,7 @@ To run the script, some environment variables are required.
|DEPENDABOT_COMPATIBILITY_SCORE_BADGE|vNext|**_Optional_**. Determines if compatibility score badges are shown in the pull request description for single dependency updates (but not group updates). This feature uses public information from GitHub and enabling it does **not** send any private information about your repository to GitHub other than the dependency name and version number(s) required to calculate the compatibility score. Defaults to `false`. See [official docs](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates#about-compatibility-scores) for more.|
|DEPENDABOT_MESSAGE_HEADER|vNext|**_Optional_**. Additional pull request description text shown before the dependency change info.|
|DEPENDABOT_MESSAGE_FOOTER|vNext|**_Optional_**. Additional pull request description text shown after the dependency change info. This text will not be truncated, even when the dependency change info exceeds the PR maximum description length.|
-|DEPENDABOT_REVIEWERS|Update,<br/>vNext|**_Optional_**. The user id or email of the users to review the pull requests, in JSON format. These shall be added as optional approvers. For example: `[\"23d9f23d-981e-4a0c-a975-8e5c665914ec\",\"user@company.com\"]`.
+|DEPENDABOT_REVIEWERS|Update,<br/>vNext|**_Optional_**. The user id or email of the users to review the pull requests, in JSON format. These shall be added as optional approvers. For example: `[\"23d9f23d-981e-4a0c-a975-8e5c665914ec\",\"user@company.com\"]`.|
|DEPENDABOT_ASSIGNEES|Update,<br/>vNext|**_Optional_**. The user ids or emails of the users to be assigned to the pull requests, in JSON format. These shall be added as required approvers. For example: `[\"be9321e2-f404-4ffa-8d6b-44efddb04865\", \"user@company.com\"]`. |
|DEPENDABOT_LABELS|Update,<br/>vNext|**_Optional_**. The custom labels to be used, in JSON format. This can be used to override the default values. For example: `[\"npm dependencies\",\"triage-board\"]`. See [official docs](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/customizing-dependency-updates#setting-custom-labels) for more.|
|DEPENDABOT_MILESTONE|Update,<br/>vNext|**_Optional_**. The identifier of the work item to be linked to the Pull Requests that dependabot creates.|
@@ -124,9 +132,9 @@ To run the script, some environment variables are required.
|AZURE_VIRTUAL_DIRECTORY|Update,<br/>vNext|**_Optional_**. Some Azure DevOps Server installations are hosted in an IIS virtual directory, traditionally named tfs. This variable can be used to define the name of that virtual directory. By default, this is not set.|
|AZURE_ACCESS_USERNAME|Update,<br/>vNext|**_Optional_**. This Variable can be used together with the User Password in the Access Token Variable to use basic Auth when connecting to Azure Dev Ops. By default, this is not set.|
|AZURE_ACCESS_TOKEN|Update,<br/>vNext|**_Required_**. The Personal Access Token in Azure DevOps for accessing the repository and creating pull requests. The required permissions are:<br/>- Code (Full)<br/>- Pull Requests Threads (Read & Write).<br/>See the [documentation](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops&tabs=preview-page#create-a-pat) to know more about creating a Personal Access Token|
-|AZURE_ORGANIZATION|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Organization. This is can be extracted from the URL of the home page. https://dev.azure.com/{organization}/|
-|AZURE_PROJECT|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Project within the above organization. This can be extracted them the URL too. https://dev.azure.com/{organization}/{project}/|
-|AZURE_REPOSITORY|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Repository within the above project to run Dependabot against. This can be extracted from the URL of the repository. https://dev.azure.com/{organization}/{project}/_git/{repository}/|
+|AZURE_ORGANIZATION|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Organization. This can be extracted from the URL of the home page. https://dev.azure.com/{organization}/|
+|AZURE_PROJECT|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Project within the above organization. This can be extracted from the URL too. https://dev.azure.com/{organization}/{project}/|
+|AZURE_REPOSITORY|Update,<br/>vNext|**_Required_**. The name of the Azure DevOps Repository within the above project to run Dependabot against. This can be extracted from the URL of the repository. https://dev.azure.com/{organization}/{project}/_git/{repository}/|
|AZURE_SET_AUTO_COMPLETE|Update,<br/>vNext|**_Optional_**. Determines if the pull requests that dependabot creates should have auto complete set. When set to `true`, pull requests that pass all policies will be merged automatically|
|AZURE_AUTO_COMPLETE_IGNORE_CONFIG_IDS|Update,<br/>vNext|**_Optional_**. List of any policy configuration Id's which auto-complete should not wait for. Only applies to optional policies. Auto-complete always waits for required (blocking) policies.|
|AZURE_AUTO_APPROVE_PR|Update,<br/>vNext|**_Optional_**. Determines if the pull requests that dependabot creates should be automatically completed. When set to `true`, pull requests will be approved automatically.|
@@ -135,10 +143,13 @@ To run the script, some environment variables are required.
# Development guide
## Getting the development environment ready
-First, ensure you have [Docker](https://docs.docker.com/engine/install/) and [Ruby](https://www.ruby-lang.org/en/documentation/installation/) installed.
-On Linux, you'll need the the build essentials and Ruby development packages too; These are typically `build-essentials` and `ruby-dev`.
-Next, install project build tools with bundle:
+Install [Docker](https://docs.docker.com/engine/install/) (with Linux containers) and [Ruby](https://www.ruby-lang.org/en/documentation/installation/) (3.3).
+
+> [!NOTE]
+> If developing in Linux, you'll also need the build essentials and Ruby development packages; These are typically `build-essential` and `ruby-dev`.
+
+Install the project build tools using Bundle:
```bash
cd updater
@@ -146,6 +157,7 @@ bundle install
```
## Building the Docker image
+
Each package ecosystem must be built separately; You only need to build images for the ecosystems that you plan on testing.
```bash
@@ -158,20 +170,26 @@ docker build \
.
```
-In some scenarios, you may want to set `BASE_VERSION` to a specific version instead of "latest".
-See [updater/Dockerfile](../updater/Dockerfile) for a more detailed explanation.
+> [!TIP]
+> In some scenarios, you may want to set `BASE_VERSION` to a specific version instead of "latest".
+> See [updater/Dockerfile](../updater/Dockerfile) for a more detailed explanation.
## Running your code changes
-To test run your code changes, you'll first need to build the updater Docker image (see above), then run the updater Docker image in a container with all the required environment variables (see above).
+
+To test run your code changes, you'll first need to [build the Docker image](#building-the-docker-image), then run the Docker image in a container with all the [required environment variables](#environment-variables).
## Running the code linter
+
```bash
cd updater
bundle exec rubocop
-bundle exec rubocop -a # to automatically fix any correctable offenses
```
+> [!TIP]
+> To automatically fix correctable linting issues, use `bundle exec rubocop -a`
+
## Running the unit tests
+
```bash
cd updater
bundle exec rspec spec
diff --git a/extension/.gitignore b/extension/.gitignore
index 57c2ca3a..9509d65b 100644
--- a/extension/.gitignore
+++ b/extension/.gitignore
@@ -1,4 +1,5 @@
node_modules
.taskkey
-task/**/*.js
-*.vsix
\ No newline at end of file
+**/*.js
+*.vsix
+*flamegraph.html
\ No newline at end of file
diff --git a/extension/README.md b/extension/README.md
index 6c7e9f8e..abf31027 100644
--- a/extension/README.md
+++ b/extension/README.md
@@ -1,6 +1,6 @@
# Dependabot Azure DevOps Extension
-This is the unofficial [dependabot](https://github.com/Dependabot/dependabot-core) extension for [Azure DevOps](https://azure.microsoft.com/en-gb/services/devops/). It will allow you to run Dependabot inside a build pipeline and is accessible [here in the Visual Studio marketplace](https://marketplace.visualstudio.com/items?itemName=tingle-software.dependabot). The extension first has to be installed before you can run it in your pipeline.
+This is the unofficial [dependabot](https://github.com/Dependabot/dependabot-core) extension for [Azure DevOps](https://azure.microsoft.com/en-gb/services/devops/). It will allow you to run Dependabot inside a build pipeline.
## Usage
@@ -9,7 +9,7 @@ Add a configuration file stored at `.azuredevops/dependabot.yml` or `.github/dep
To use in a YAML pipeline:
```yaml
-- task: dependabot@1
+- task: dependabot@2
```
You can schedule the pipeline as is appropriate for your solution.
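+
+For example, a minimal illustrative configuration file (see the [official configuration options](https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file) for the full schema):
+
+```yaml
+# .azuredevops/dependabot.yml (or .github/dependabot.yml)
+version: 2
+updates:
+  - package-ecosystem: 'npm' # illustrative ecosystem
+    directory: '/'
+    schedule:
+      interval: 'weekly'
+```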
@@ -32,23 +32,62 @@ pool:
vmImage: 'ubuntu-latest' # requires macos or ubuntu (windows is not supported)
steps:
-- task: dependabot@1
+- task: dependabot@2
```
-This task makes use of a docker image, which may take time to install. Subsequent dependabot tasks in a job will be faster after initially pulling the image using the first task. An alternative way to run your pipelines faster is by leveraging Docker caching in Azure Pipelines (See [#113](https://github.com/tinglesoftware/dependabot-azure-devops/issues/113#issuecomment-894771611)).
+## Task Requirements
+
+The task uses [dependabot-cli](https://github.com/dependabot/cli), which requires [Go](https://go.dev/doc/install) (1.22+) and [Docker](https://docs.docker.com/engine/install/) (with Linux containers) to be installed on the pipeline agent.
+If you use [Microsoft-hosted agents](https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml#software), we recommend using the [ubuntu-latest](https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2404-Readme.md) image, which meets all task requirements.
+
+Dependabot uses Docker containers, which may take time to install if not already cached. Subsequent dependabot tasks in the same job will be faster after initially pulling the images. An alternative way to run your pipelines faster is by leveraging Docker caching in Azure Pipelines (See [#113](https://github.com/tinglesoftware/dependabot-azure-devops/issues/113#issuecomment-894771611)).
## Task Parameters
+
+dependabot@V2
+
|Input|Description|
|--|--|
+|skipPullRequests|**_Optional_**. Determines whether to skip creation and updating of pull requests. When set to `true` the logic to update the dependencies is executed but the actual Pull Requests are not created/updated. This is useful for debugging. Defaults to `false`.|
+|abandonUnwantedPullRequests|**_Optional_**. Determines whether to abandon unwanted pull requests. Defaults to `false`.|
+|commentPullRequests|**_Optional_**. Determines whether to comment on pull requests with an explanation of the reason for closing. Defaults to `false`.|
+|setAutoComplete|**_Optional_**. Determines if the pull requests that dependabot creates should have auto complete set. When set to `true`, pull requests that pass all policies will be merged automatically. Defaults to `false`.|
+|mergeStrategy|**_Optional_**. The merge strategy to use when auto complete is set. Learn more [here](https://learn.microsoft.com/en-us/rest/api/azure/devops/git/pull-requests/update?view=azure-devops-rest-6.0&tabs=HTTP#gitpullrequestmergestrategy). Defaults to `squash`.|
+|autoCompleteIgnoreConfigIds|**_Optional_**. List of any policy configuration Id's which auto-complete should not wait for. Only applies to optional policies. Auto-complete always waits for required (blocking) policies.|
+|autoApprove|**_Optional_**. Determines if the pull requests that dependabot creates should be automatically approved. When set to `true`, pull requests will be approved automatically. To use a different user for approval, supply the `autoApproveUserToken` input. Defaults to `false`.|
+|autoApproveUserToken|**_Optional_**. A personal access token for the user to automatically approve the created PR.|
+|authorEmail|**_Optional_**. The email address to use for the change commit author. Can be used to associate the committer with an existing account, to provide a profile picture. Defaults to `noreply@github.com`.|
+|authorName|**_Optional_**. The name to use as the git commit author of the pull requests. Defaults to `dependabot[bot]`.|
+|securityAdvisoriesFile|**_Optional_**. The path to a JSON file containing additional security advisories to be included when performing package updates. See: [Configuring security advisories and known vulnerabilities](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-security-advisories-and-known-vulnerabilities).|
+|azureDevOpsServiceConnection|**_Optional_**. A Service Connection to use for accessing Azure DevOps. Supply a value here to avoid using permissions for the Build Service either because you cannot change its permissions or because you prefer that the Pull Requests be done by a different user. When not provided, the current authentication scope is used.<br/>See the [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops) to know more about creating a Service Connection|
+|azureDevOpsAccessToken|**_Optional_**. The Personal Access Token for accessing Azure DevOps. Supply a value here to avoid using permissions for the Build Service either because you cannot change its permissions or because you prefer that the Pull Requests be done by a different user. When not provided, the current authentication scope is used. In either case, be sure the following permissions are granted:<br/>- Code (Full)<br/>- Pull Requests Threads (Read & Write).<br/>See the [documentation](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops&tabs=preview-page#create-a-pat) to know more about creating a Personal Access Token.<br/>Use this in place of `azureDevOpsServiceConnection` such as when it is not possible to create a service connection.|
+|gitHubConnection|**_Optional_**. The GitHub service connection for authenticating requests against GitHub repositories. This is useful to avoid rate limiting errors. The token must include permissions to read public repositories. See the [GitHub docs](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token) for more on Personal Access Tokens and [Azure DevOps docs](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-github) for the GitHub service connection.|
+|gitHubAccessToken|**_Optional_**. The raw GitHub PAT for authenticating requests against GitHub repositories. Use this in place of `gitHubConnection` such as when it is not possible to create a service connection.|
+|storeDependencyList|**_Optional_**. Determines if the last known dependency list information should be stored in the parent DevOps project properties. If enabled, the authenticated user must have the "Project & Team (Write)" permission for the project. Enabling this option improves performance when doing security-only updates. Defaults to `false`.|
+|targetRepositoryName|**_Optional_**. The name of the repository to target for processing. If this value is not supplied then the Build Repository Name is used. Supplying this value allows creation of a single pipeline that runs Dependabot against multiple repositories by running a `dependabot` task for each repository to update.|
+|targetUpdateIds|**_Optional_**. A semicolon (`;`) delimited list of update identifiers to run. Indexes are zero-based and in the order written in the configuration file. When not present, all the updates are run. This is meant to be used in scenarios where you want to run updates at different times from the same configuration file, given you cannot schedule them independently in the pipeline.|
+|experiments|**_Optional_**. Comma separated list of Dependabot experiments; available options depend on the ecosystem. Example: `tidy=true,vendor=true,goprivate=*`. See: [Configuring experiments](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-experiments)|
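+
+For example, an illustrative combination of these inputs in pipeline YAML (the secret variable name and experiment value are placeholders):
+
+```yaml
+steps:
+- task: dependabot@2
+  inputs:
+    mergeStrategy: 'squash'
+    setAutoComplete: true
+    autoApprove: true
+    autoApproveUserToken: '$(DEPENDABOT_APPROVAL_PAT)' # placeholder secret variable
+    experiments: 'nuget_native_analysis=true' # placeholder
+```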
+
+
+
+
+dependabot@V1 (Deprecated)
+
+|Input|Description|
+|--|--|
+|useUpdateScriptvNext|**_Optional_**. Determines if the task should use the new "vNext" update script based on Dependabot Updater (true), or the original update script based on `dry-run.rb` (false). Defaults to `false`. For more information, see: [PR #1186](https://github.com/tinglesoftware/dependabot-azure-devops/pull/1186).|
|failOnException|**_Optional_**. Determines if the execution should fail when an exception occurs. Defaults to `true`.|
-|updaterOptions|**_Optional_**. Comma separated list of updater options; available options depend on the ecosystem. Example: `goprivate=true,kubernetes_updates=true`.|
+|updaterOptions|**_Optional_**. Comma separated list of updater options; available options depend on the ecosystem. Example: `tidy=true,vendor=true,goprivate=*`. See: [Configuring experiments](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-experiments)|
|setAutoComplete|**_Optional_**. Determines if the pull requests that dependabot creates should have auto complete set. When set to `true`, pull requests that pass all policies will be merged automatically. Defaults to `false`.|
|mergeStrategy|**_Optional_**. The merge strategy to use when auto complete is set. Learn more [here](https://learn.microsoft.com/en-us/rest/api/azure/devops/git/pull-requests/update?view=azure-devops-rest-6.0&tabs=HTTP#gitpullrequestmergestrategy). Defaults to `squash`.|
+|autoCompleteIgnoreConfigIds|**_Optional_**. List of any policy configuration Id's which auto-complete should not wait for. Only applies to optional policies. Auto-complete always waits for required (blocking) policies.|
|autoApprove|**_Optional_**. Determines if the pull requests that dependabot creates should be automatically completed. When set to `true`, pull requests will be approved automatically. To use a different user for approval, supply `autoApproveUserToken` input. Defaults to `false`.|
|autoApproveUserToken|**_Optional_**. A personal access token for the user to automatically approve the created PR.|
|skipPullRequests|**_Optional_**. Determines whether to skip creation and updating of pull requests. When set to `true` the logic to update the dependencies is executed but the actual Pull Requests are not created/updated. This is useful for debugging. Defaults to `false`.|
|abandonUnwantedPullRequests|**_Optional_**. Determines whether to abandon unwanted pull requests. Defaults to `false`.|
+|commentPullRequests|**_Optional_**. Determines whether to comment on pull requests with an explanation of the reason for closing. Defaults to `false`.|
+|securityAdvisoriesFile|**_Optional_**. The path to a JSON file containing additional security advisories to be included when performing package updates. See: [Configuring security advisories and known vulnerabilities](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-security-advisories-and-known-vulnerabilities).|
|gitHubConnection|**_Optional_**. The GitHub service connection for authenticating requests against GitHub repositories. This is useful to avoid rate limiting errors. The token must include permissions to read public repositories. See the [GitHub docs](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token) for more on Personal Access Tokens and [Azure DevOps docs](https://docs.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml#sep-github) for the GitHub service connection.|
|gitHubAccessToken|**_Optional_**. The raw GitHub PAT for authenticating requests against GitHub repositories. Use this in place of `gitHubConnection` such as when it is not possible to create a service connection.|
|azureDevOpsServiceConnection|**_Optional_**. A Service Connection to use for accessing Azure DevOps. Supply a value here to avoid using permissions for the Build Service either because you cannot change its permissions or because you prefer that the Pull Requests be done by a different user. When not provided, the current authentication scope is used.<br/>See the [documentation](https://learn.microsoft.com/en-us/azure/devops/pipelines/library/service-endpoints?view=azure-devops) to know more about creating a Service Connection|
@@ -59,32 +98,12 @@ This task makes use of a docker image, which may take time to install. Subsequen
|dockerImageTag|**_Optional_**. The image tag to use when pulling the docker container used by the task. A tag also defines the version. By default, the task decides which tag/version to use. This can be the latest or most stable version. When not provided, the value is inferred from the current task version|
|extraEnvironmentVariables|**_Optional_**. A semicolon (`;`) delimited list of environment variables that are sent to the docker container. See possible use case [here](https://github.com/tinglesoftware/dependabot-azure-devops/issues/138)|
-## Advanced
+
-In some situations, such as when getting the latest bits for testing, you might want to override the docker image tag that is pulled. Even though doing so is discouraged you can declare a global variable, for example:
-
-```yaml
-trigger: none # Disable CI trigger
-
-schedules:
-- cron: '0 2 * * *' # daily at 2am UTC
- always: true # run even when there are no code changes
- branches:
- include:
- - master
- batch: true
- displayName: Daily
-
-# variables declared below can be put in one or more Variable Groups for sharing across pipelines
-variables:
- DEPENDABOT_ALLOW_CONDITIONS: '[{\"dependency-name\":"django*",\"dependency-type\":\"direct\"}]' # packages allowed to be updated
- DEPENDABOT_IGNORE_CONDITIONS: '[{\"dependency-name\":"@types/*"}]' # packages ignored to be updated
-
-pool:
- vmImage: 'ubuntu-latest' # requires macos or ubuntu (windows is not supported)
-
-steps:
-- task: dependabot@1
-```
+## Advanced
-Check the logs for the image that is pulled.
+- [Configuring private feeds and registries](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-private-feeds-and-registries)
+- [Configuring security advisories and known vulnerabilities](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-security-advisories-and-known-vulnerabilities)
+- [Configuring experiments](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-experiments)
+- [Unsupported features and configurations](https://github.com/tinglesoftware/dependabot-azure-devops/#unsupported-features-and-configurations)
+- [Task migration guide for V1 → V2](https://github.com/tinglesoftware/dependabot-azure-devops/blob/main/docs/migrations/v1-to-v2.md)
diff --git a/extension/jest.config.ts b/extension/jest.config.ts
index 166e6cf2..fee31283 100644
--- a/extension/jest.config.ts
+++ b/extension/jest.config.ts
@@ -1,4 +1,4 @@
-import type { Config } from "@jest/types";
+import type { Config } from '@jest/types';
// Sync object
const config: Config.InitialOptions = {
@@ -6,9 +6,8 @@ const config: Config.InitialOptions = {
// transform: {
// "^.+\\.test.tsx?$": "ts-jest",
// },
- testEnvironment: "node",
- preset: "ts-jest",
- rootDir: "./tests",
+ testEnvironment: 'node',
+ preset: 'ts-jest',
};
export default config;
diff --git a/extension/overrides.dev.json b/extension/overrides.dev.json
index bf917f17..503b9796 100644
--- a/extension/overrides.dev.json
+++ b/extension/overrides.dev.json
@@ -1,5 +1,5 @@
{
- "id":"dependabot-dev",
- "version": "#{MAJOR_MINOR_PATCH}#.#{BUILD_NUMBER}#",
- "name": "Dependabot (Dev)"
-}
\ No newline at end of file
+ "id": "dependabot-dev",
+ "version": "#{MAJOR_MINOR_PATCH}#.#{BUILD_NUMBER}#",
+ "name": "Dependabot (Dev)"
+}
diff --git a/extension/overrides.local.json b/extension/overrides.local.json
index efca1c37..f9d0c89a 100644
--- a/extension/overrides.local.json
+++ b/extension/overrides.local.json
@@ -1,5 +1,5 @@
{
- "id":"dependabot-local",
- "version": "0.1.0.6",
- "name": "Dependabot (Local)"
-}
\ No newline at end of file
+ "id": "dependabot-local",
+ "version": "2.0.0.0",
+ "name": "Dependabot (Local)"
+}
diff --git a/extension/overrides.prod.json b/extension/overrides.prod.json
index af2b7d36..1fc839d4 100644
--- a/extension/overrides.prod.json
+++ b/extension/overrides.prod.json
@@ -1,4 +1,4 @@
{
- "version": "#{MAJOR_MINOR_PATCH}#.#{BUILD_NUMBER}#",
- "public": true
-}
\ No newline at end of file
+ "version": "#{MAJOR_MINOR_PATCH}#.#{BUILD_NUMBER}#",
+ "public": true
+}
diff --git a/extension/package-lock.json b/extension/package-lock.json
index 08b996fb..dce6cb2f 100644
--- a/extension/package-lock.json
+++ b/extension/package-lock.json
@@ -1,27 +1,28 @@
{
"name": "dependabot-azure-devops",
- "version": "1.0.0",
+ "version": "2.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "dependabot-azure-devops",
- "version": "1.0.0",
+ "version": "2.0.0",
"license": "MIT",
"dependencies": {
- "axios": "1.7.4",
- "azure-pipelines-task-lib": "4.16.0",
+ "axios": "1.7.7",
+ "azure-devops-node-api": "14.1.0",
+ "azure-pipelines-task-lib": "4.17.2",
"js-yaml": "4.1.0"
},
"devDependencies": {
- "@types/jest": "29.5.12",
+ "@types/jest": "29.5.13",
"@types/js-yaml": "4.0.9",
- "@types/node": "22.4.0",
+ "@types/node": "22.7.4",
"@types/q": "1.5.8",
"jest": "29.7.0",
- "ts-jest": "29.2.4",
+ "ts-jest": "29.2.5",
"ts-node": "10.9.2",
- "typescript": "5.5.4"
+ "typescript": "5.6.2"
}
},
"node_modules/@ampproject/remapping": {
@@ -991,10 +992,11 @@
}
},
"node_modules/@jridgewell/sourcemap-codec": {
- "version": "1.4.15",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
- "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==",
- "dev": true
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
+ "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
+ "dev": true,
+ "license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.25",
@@ -1129,10 +1131,11 @@
}
},
"node_modules/@types/jest": {
- "version": "29.5.12",
- "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz",
- "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==",
+ "version": "29.5.13",
+ "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.13.tgz",
+ "integrity": "sha512-wd+MVEZCHt23V0/L642O5APvspWply/rGY5BcW4SUETo2UzPU3Z26qr8jC2qxpimI2jjx9h7+2cj2FwIr01bXg==",
"dev": true,
+ "license": "MIT",
"dependencies": {
"expect": "^29.0.0",
"pretty-format": "^29.0.0"
@@ -1145,9 +1148,9 @@
"dev": true
},
"node_modules/@types/node": {
- "version": "22.4.0",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-22.4.0.tgz",
- "integrity": "sha512-49AbMDwYUz7EXxKU/r7mXOsxwFr4BYbvB7tWYxVuLdb2ibd30ijjXINSMAHiEEZk5PCRBmW1gUeisn2VMKt3cQ==",
+ "version": "22.7.4",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz",
+ "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -1300,9 +1303,9 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
- "version": "1.7.4",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz",
- "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==",
+ "version": "1.7.7",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz",
+ "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
@@ -1310,10 +1313,23 @@
"proxy-from-env": "^1.1.0"
}
},
+ "node_modules/azure-devops-node-api": {
+ "version": "14.1.0",
+ "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-14.1.0.tgz",
+ "integrity": "sha512-QhpgjH1LQ+vgDJ7oBwcmsZ3+o4ZpjLVilw0D3oJQpYpRzN+L39lk5jZDLJ464hLUgsDzWn/Ksv7zLLMKLfoBzA==",
+ "license": "MIT",
+ "dependencies": {
+ "tunnel": "0.0.6",
+ "typed-rest-client": "2.1.0"
+ },
+ "engines": {
+ "node": ">= 16.0.0"
+ }
+ },
"node_modules/azure-pipelines-task-lib": {
- "version": "4.16.0",
- "resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-4.16.0.tgz",
- "integrity": "sha512-hjyDi5GI1cFmS2o6GzTFPqloeTZBeaTLOjPn/H3CVr0vV/MV+eYoWszVe9kn7XnRSiv22j3p4Rhw/Sy4v1okxA==",
+ "version": "4.17.2",
+ "resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-4.17.2.tgz",
+ "integrity": "sha512-kKG1I2cpHM0kqn/YlnZiA2J59/x4OraEZZ1/Cp6A7XOu0e+E1PfrfldVVOU/tdeW/xOFoexqA4EEV27LfH0YqQ==",
"license": "MIT",
"dependencies": {
"adm-zip": "^0.5.10",
@@ -1526,6 +1542,25 @@
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
"dev": true
},
+ "node_modules/call-bind": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
+ "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "get-intrinsic": "^1.2.4",
+ "set-function-length": "^1.2.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -1760,6 +1795,23 @@
"node": ">=0.10.0"
}
},
+ "node_modules/define-data-property": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+ "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0",
+ "es-errors": "^1.3.0",
+ "gopd": "^1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
@@ -1768,6 +1820,16 @@
"node": ">=0.4.0"
}
},
+ "node_modules/des.js": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz",
+ "integrity": "sha512-r17GxjhUCjSRy8aiJpr8/UadFIzMzJGexI3Nmz4ADi9LYSFx4gTBp80+NaX/YsXWWLhpZ7v/v/ubEc/bCNfKwg==",
+ "license": "MIT",
+ "dependencies": {
+ "inherits": "^2.0.1",
+ "minimalistic-assert": "^1.0.0"
+ }
+ },
"node_modules/detect-newline": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz",
@@ -1844,6 +1906,27 @@
"is-arrayish": "^0.2.1"
}
},
+ "node_modules/es-define-property": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
+ "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
+ "license": "MIT",
+ "dependencies": {
+ "get-intrinsic": "^1.2.4"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
+ "node_modules/es-errors": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+ "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/escalade": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz",
@@ -2073,6 +2156,25 @@
"node": "6.* || 8.* || >= 10.*"
}
},
+ "node_modules/get-intrinsic": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
+ "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "has-proto": "^1.0.1",
+ "has-symbols": "^1.0.3",
+ "hasown": "^2.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/get-package-type": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
@@ -2134,6 +2236,18 @@
"node": ">=4"
}
},
+ "node_modules/gopd": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
+ "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
+ "license": "MIT",
+ "dependencies": {
+ "get-intrinsic": "^1.1.3"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
@@ -2149,6 +2263,42 @@
"node": ">=8"
}
},
+ "node_modules/has-property-descriptors": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+ "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
+ "license": "MIT",
+ "dependencies": {
+ "es-define-property": "^1.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-proto": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
+ "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/has-symbols": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
+ "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
@@ -2977,6 +3127,12 @@
"url": "https://github.com/chalk/supports-color?sponsor=1"
}
},
+ "node_modules/js-md4": {
+ "version": "0.3.2",
+ "resolved": "https://registry.npmjs.org/js-md4/-/js-md4-0.3.2.tgz",
+ "integrity": "sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA==",
+ "license": "MIT"
+ },
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
@@ -3164,6 +3320,12 @@
"node": ">=6"
}
},
+ "node_modules/minimalistic-assert": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz",
+ "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==",
+ "license": "ISC"
+ },
"node_modules/minimatch": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.5.tgz",
@@ -3230,6 +3392,18 @@
"node": ">=8"
}
},
+ "node_modules/object-inspect": {
+ "version": "1.13.2",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
+ "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -3462,6 +3636,21 @@
"teleport": ">=0.2.0"
}
},
+ "node_modules/qs": {
+ "version": "6.13.0",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
+ "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
+ "license": "BSD-3-Clause",
+ "dependencies": {
+ "side-channel": "^1.0.6"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/react-is": {
"version": "18.3.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
@@ -3550,6 +3739,23 @@
"semver": "bin/semver"
}
},
+ "node_modules/set-function-length": {
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
+ "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
+ "license": "MIT",
+ "dependencies": {
+ "define-data-property": "^1.1.4",
+ "es-errors": "^1.3.0",
+ "function-bind": "^1.1.2",
+ "get-intrinsic": "^1.2.4",
+ "gopd": "^1.0.1",
+ "has-property-descriptors": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@@ -3587,6 +3793,24 @@
"node": ">=4"
}
},
+ "node_modules/side-channel": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
+ "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind": "^1.0.7",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.4",
+ "object-inspect": "^1.13.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
@@ -3787,21 +4011,21 @@
}
},
"node_modules/ts-jest": {
- "version": "29.2.4",
- "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.4.tgz",
- "integrity": "sha512-3d6tgDyhCI29HlpwIq87sNuI+3Q6GLTTCeYRHCs7vDz+/3GCMwEtV9jezLyl4ZtnBgx00I7hm8PCP8cTksMGrw==",
+ "version": "29.2.5",
+ "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.5.tgz",
+ "integrity": "sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "bs-logger": "0.x",
+ "bs-logger": "^0.2.6",
"ejs": "^3.1.10",
- "fast-json-stable-stringify": "2.x",
+ "fast-json-stable-stringify": "^2.1.0",
"jest-util": "^29.0.0",
"json5": "^2.2.3",
- "lodash.memoize": "4.x",
- "make-error": "1.x",
- "semver": "^7.5.3",
- "yargs-parser": "^21.0.1"
+ "lodash.memoize": "^4.1.2",
+ "make-error": "^1.3.6",
+ "semver": "^7.6.3",
+ "yargs-parser": "^21.1.1"
},
"bin": {
"ts-jest": "cli.js"
@@ -3836,10 +4060,11 @@
}
},
"node_modules/ts-jest/node_modules/semver": {
- "version": "7.6.2",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
- "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
+ "version": "7.6.3",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz",
+ "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==",
"dev": true,
+ "license": "ISC",
"bin": {
"semver": "bin/semver.js"
},
@@ -3890,6 +4115,15 @@
}
}
},
+ "node_modules/tunnel": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.6.11 <=0.7.0 || >=0.7.3"
+ }
+ },
"node_modules/type-detect": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz",
@@ -3911,10 +4145,26 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/typed-rest-client": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-2.1.0.tgz",
+ "integrity": "sha512-Nel9aPbgSzRxfs1+4GoSB4wexCF+4Axlk7OSGVQCMa+4fWcyxIsN/YNmkp0xTT2iQzMD98h8yFLav/cNaULmRA==",
+ "license": "MIT",
+ "dependencies": {
+ "des.js": "^1.1.0",
+ "js-md4": "^0.3.2",
+ "qs": "^6.10.3",
+ "tunnel": "0.0.6",
+ "underscore": "^1.12.1"
+ },
+ "engines": {
+ "node": ">= 16.0.0"
+ }
+ },
"node_modules/typescript": {
- "version": "5.5.4",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
- "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
+ "version": "5.6.2",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.2.tgz",
+ "integrity": "sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
@@ -3925,6 +4175,12 @@
"node": ">=14.17"
}
},
+ "node_modules/underscore": {
+ "version": "1.13.7",
+ "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz",
+ "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==",
+ "license": "MIT"
+ },
"node_modules/undici-types": {
"version": "6.19.6",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.6.tgz",
diff --git a/extension/package.json b/extension/package.json
index d772eb84..d6af7aad 100644
--- a/extension/package.json
+++ b/extension/package.json
@@ -1,13 +1,16 @@
{
"name": "dependabot-azure-devops",
- "version": "1.0.0",
+ "version": "2.0.0",
"description": "Dependabot Azure DevOps task",
"main": "''",
"scripts": {
- "mv:prod": "cp -r node_modules task/node_modules",
+ "postdependencies": "cp -r node_modules tasks/dependabotV1/node_modules && cp -r node_modules tasks/dependabotV2/node_modules",
"build": "tsc -p .",
- "build:prod": "npm run build",
- "test": "jest"
+ "start": "npm run start:V2",
+ "start:V1": "node tasks/dependabotV1/index.js",
+ "start:V2": "node tasks/dependabotV2/index.js",
+ "test": "jest",
+ "package": "npx tfx-cli extension create --json5"
},
"repository": {
"type": "git",
@@ -25,19 +28,20 @@
},
"homepage": "https://github.com/tinglesoftware/dependabot-azure-devops#readme",
"dependencies": {
- "axios": "1.7.4",
- "azure-pipelines-task-lib": "4.16.0",
+ "axios": "1.7.7",
+ "azure-devops-node-api": "14.1.0",
+ "azure-pipelines-task-lib": "4.17.2",
"js-yaml": "4.1.0"
},
"devDependencies": {
- "@types/jest": "29.5.12",
+ "@types/jest": "29.5.13",
"@types/js-yaml": "4.0.9",
- "@types/node": "22.4.0",
+ "@types/node": "22.7.4",
"@types/q": "1.5.8",
"jest": "29.7.0",
- "ts-jest": "29.2.4",
- "typescript": "5.5.4",
- "ts-node": "10.9.2"
+ "ts-jest": "29.2.5",
+ "ts-node": "10.9.2",
+ "typescript": "5.6.2"
},
"packageManager": "npm@10.8.1"
}
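
For context, a minimal sketch (not part of this patch) of how the newly added `azure-devops-node-api` dependency is typically consumed; `orgUrl` and `token` are placeholder names:

  import * as azdev from 'azure-devops-node-api';

  // Open an authenticated connection to the organization and get the typed Git client.
  async function createGitApi(orgUrl: string, token: string) {
    const authHandler = azdev.getPersonalAccessTokenHandler(token);
    const connection = new azdev.WebApi(orgUrl, authHandler);
    return await connection.getGitApi();
  }
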
diff --git a/extension/task/utils/getAzureDevOpsAccessToken.ts b/extension/task/utils/getAzureDevOpsAccessToken.ts
deleted file mode 100644
index 142f2381..00000000
--- a/extension/task/utils/getAzureDevOpsAccessToken.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import {
- debug,
- getEndpointAuthorizationParameter,
- getInput,
-} from "azure-pipelines-task-lib/task";
-
-/**
- * Prepare the access token for Azure DevOps Repos.
- *
- *
- * If the user has not provided one, we use the one from the SystemVssConnection
- *
- * @returns Azure DevOps Access Token
- */
-export default function getAzureDevOpsAccessToken() {
- let systemAccessToken: string = getInput("azureDevOpsAccessToken");
- if (systemAccessToken) {
- debug("azureDevOpsAccessToken provided, using for authenticating");
- return systemAccessToken;
- }
-
- let serviceConnectionName: string = getInput("azureDevOpsServiceConnection");
- if (serviceConnectionName) {
- debug("TFS connection supplied. A token shall be extracted from it.");
- return getEndpointAuthorizationParameter(serviceConnectionName, "apitoken", false);
- }
-
- debug("No custom token provided. The SystemVssConnection's AccessToken shall be used.");
- return getEndpointAuthorizationParameter(
- "SystemVssConnection",
- "AccessToken",
- false
- );
-}
diff --git a/extension/task/utils/getDockerImageTag.ts b/extension/task/utils/getDockerImageTag.ts
deleted file mode 100644
index 2974ce8e..00000000
--- a/extension/task/utils/getDockerImageTag.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import * as fs from "fs";
-import * as path from "path";
-import * as tl from "azure-pipelines-task-lib/task"
-
-/**
- * Extract the docker image tag from `dockerImageTag` input or the `task.json` file.
- * @returns {string} the version
- */
-export default function getDockerImageTag(): string {
-
- let dockerImageTag: string | undefined = tl.getInput("dockerImageTag");
-
- if (!dockerImageTag) {
- tl.debug("Getting dockerImageTag from task.json file. If you want to override, specify the dockerImageTag input");
-
- // Ensure we have the file. Otherwise throw a well readable error.
- const filePath = path.join(__dirname, "..", "task.json");
- if (!fs.existsSync(filePath)) {
- throw new Error(`task.json could not be found at '${filePath}'`);
- }
-
- // Ensure the file parsed to an object
- let obj: any = JSON.parse(fs.readFileSync(filePath, "utf-8"));
- if (obj === null || typeof obj !== "object") {
- throw new Error("Invalid dependabot config object");
- }
-
- const versionMajor = obj["version"]["Major"];
- const versionMinor = obj["version"]["Minor"];
- if (!!!versionMajor || !!!versionMinor) throw new Error("Major and/or Minor version could not be parsed from the file");
-
- dockerImageTag = `${versionMajor}.${versionMinor}`;
- }
-
- return dockerImageTag;
-}
diff --git a/extension/task/icon.png b/extension/tasks/dependabotV1/icon.png
similarity index 100%
rename from extension/task/icon.png
rename to extension/tasks/dependabotV1/icon.png
diff --git a/extension/task/index.ts b/extension/tasks/dependabotV1/index.ts
similarity index 60%
rename from extension/task/index.ts
rename to extension/tasks/dependabotV1/index.ts
index 7a1e9b3a..774f1db9 100644
--- a/extension/task/index.ts
+++ b/extension/tasks/dependabotV1/index.ts
@@ -1,15 +1,15 @@
-import * as tl from "azure-pipelines-task-lib/task"
-import { ToolRunner } from "azure-pipelines-task-lib/toolrunner"
-import { IDependabotConfig, IDependabotRegistry, IDependabotUpdate } from "./IDependabotConfig";
-import getSharedVariables from "./utils/getSharedVariables";
-import { parseConfigFile } from "./utils/parseConfigFile";
-import { resolveAzureDevOpsIdentities } from "./utils/resolveAzureDevOpsIdentities";
+import * as tl from 'azure-pipelines-task-lib/task';
+import { ToolRunner } from 'azure-pipelines-task-lib/toolrunner';
+import { IDependabotRegistry, IDependabotUpdate } from './utils/IDependabotConfig';
+import getSharedVariables from './utils/getSharedVariables';
+import { parseConfigFile } from './utils/parseConfigFile';
+import { resolveAzureDevOpsIdentities } from './utils/resolveAzureDevOpsIdentities';
async function run() {
try {
// Checking if docker is installed
- tl.debug("Checking for docker install ...");
- tl.which("docker", true);
+ tl.debug('Checking for docker install ...');
+ tl.which('docker', true);
// prepare the shared variables
const variables = getSharedVariables();
@@ -32,58 +32,58 @@ async function run() {
for (const update of updates) {
// Prepare the docker task
// tl.which throws an error if the tool is not found
- let dockerRunner: ToolRunner = tl.tool(tl.which("docker", true));
- dockerRunner.arg(["run"]); // run command
- dockerRunner.arg(["--rm"]); // remove after execution
- dockerRunner.arg(["-i"]); // attach pseudo tty
+ let dockerRunner: ToolRunner = tl.tool(tl.which('docker', true));
+ dockerRunner.arg(['run']); // run command
+ dockerRunner.arg(['--rm']); // remove after execution
+ dockerRunner.arg(['-i']); // attach pseudo tty
// Set the github token, if one is provided
if (variables.githubAccessToken) {
- dockerRunner.arg(["-e", `GITHUB_ACCESS_TOKEN=${variables.githubAccessToken}`]);
+ dockerRunner.arg(['-e', `GITHUB_ACCESS_TOKEN=${variables.githubAccessToken}`]);
}
/*
* Set env variables in the runner for Dependabot
*/
- dockerRunner.arg(["-e", `DEPENDABOT_PACKAGE_MANAGER=${update.packageEcosystem}`]);
- dockerRunner.arg(["-e", `DEPENDABOT_OPEN_PULL_REQUESTS_LIMIT=${update.openPullRequestsLimit}`]); // always has a value
+ dockerRunner.arg(['-e', `DEPENDABOT_PACKAGE_MANAGER=${update.packageEcosystem}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_OPEN_PULL_REQUESTS_LIMIT=${update.openPullRequestsLimit}`]); // always has a value
// Set the directory or directories
if (update.directory) {
- dockerRunner.arg(["-e", `DEPENDABOT_DIRECTORY=${update.directory}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_DIRECTORY=${update.directory}`]);
}
if (update.directories && update.directories.length > 0) {
- dockerRunner.arg(["-e", `DEPENDABOT_DIRECTORIES=${JSON.stringify(update.directories)}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_DIRECTORIES=${JSON.stringify(update.directories)}`]);
}
// Set the target branch
if (update.targetBranch) {
- dockerRunner.arg(["-e", `DEPENDABOT_TARGET_BRANCH=${update.targetBranch}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_TARGET_BRANCH=${update.targetBranch}`]);
}
// Set vendored if true
if (update.vendor === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_VENDOR=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_VENDOR=true']);
}
// Set the versioning strategy
if (update.versioningStrategy) {
- dockerRunner.arg(["-e", `DEPENDABOT_VERSIONING_STRATEGY=${update.versioningStrategy}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_VERSIONING_STRATEGY=${update.versioningStrategy}`]);
}
// Set the milestone, if provided
if (update.milestone) {
- dockerRunner.arg(["-e", `DEPENDABOT_MILESTONE=${update.milestone}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_MILESTONE=${update.milestone}`]);
}
// Set the PR branch separator
if (update.branchNameSeparator) {
- dockerRunner.arg(["-e", `DEPENDABOT_BRANCH_NAME_SEPARATOR=${update.branchNameSeparator}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_BRANCH_NAME_SEPARATOR=${update.branchNameSeparator}`]);
}
// Set exception behaviour if true
- if (update.insecureExternalCodeExecution === "deny") {
- dockerRunner.arg(["-e", 'DEPENDABOT_REJECT_EXTERNAL_CODE=true']);
+ if (update.insecureExternalCodeExecution === 'deny') {
+ dockerRunner.arg(['-e', 'DEPENDABOT_REJECT_EXTERNAL_CODE=true']);
}
// We are well aware that ignore is not passed here. It is intentional.
@@ -95,52 +95,52 @@ async function run() {
// Set the dependencies to allow
let allow = update.allow;
if (allow) {
- dockerRunner.arg(["-e", `DEPENDABOT_ALLOW_CONDITIONS=${allow}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_ALLOW_CONDITIONS=${allow}`]);
}
// Set the dependencies to ignore
let ignore = update.ignore;
if (ignore) {
- dockerRunner.arg(["-e", `DEPENDABOT_IGNORE_CONDITIONS=${ignore}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_IGNORE_CONDITIONS=${ignore}`]);
}
// Set the dependency groups
let groups = update.groups;
if (groups) {
- dockerRunner.arg(["-e", `DEPENDABOT_DEPENDENCY_GROUPS=${groups}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_DEPENDENCY_GROUPS=${groups}`]);
}
// Set the commit message options
let commitMessage = update.commitMessage;
if (commitMessage) {
- dockerRunner.arg(["-e", `DEPENDABOT_COMMIT_MESSAGE_OPTIONS=${commitMessage}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_COMMIT_MESSAGE_OPTIONS=${commitMessage}`]);
}
// Set the requirements that should not be unlocked
if (variables.excludeRequirementsToUnlock) {
- dockerRunner.arg(["-e", `DEPENDABOT_EXCLUDE_REQUIREMENTS_TO_UNLOCK=${variables.excludeRequirementsToUnlock}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_EXCLUDE_REQUIREMENTS_TO_UNLOCK=${variables.excludeRequirementsToUnlock}`]);
}
// Set the custom labels/tags
if (update.labels) {
- dockerRunner.arg(["-e", `DEPENDABOT_LABELS=${update.labels}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_LABELS=${update.labels}`]);
}
// Set the reviewers
if (update.reviewers) {
- const reviewers = await resolveAzureDevOpsIdentities(variables.organizationUrl, update.reviewers)
- dockerRunner.arg(["-e", `DEPENDABOT_REVIEWERS=${JSON.stringify(reviewers.map(identity => identity.id))}`]);
+ const reviewers = await resolveAzureDevOpsIdentities(variables.organizationUrl, update.reviewers);
+ dockerRunner.arg(['-e', `DEPENDABOT_REVIEWERS=${JSON.stringify(reviewers.map((identity) => identity.id))}`]);
}
// Set the assignees
if (update.assignees) {
- const assignees = await resolveAzureDevOpsIdentities(variables.organizationUrl, update.assignees)
- dockerRunner.arg(["-e", `DEPENDABOT_ASSIGNEES=${JSON.stringify(assignees.map(identity => identity.id))}`]);
+ const assignees = await resolveAzureDevOpsIdentities(variables.organizationUrl, update.assignees);
+ dockerRunner.arg(['-e', `DEPENDABOT_ASSIGNEES=${JSON.stringify(assignees.map((identity) => identity.id))}`]);
}
// Set the updater options, if provided
if (variables.updaterOptions) {
- dockerRunner.arg(["-e", `DEPENDABOT_UPDATER_OPTIONS=${variables.updaterOptions}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_UPDATER_OPTIONS=${variables.updaterOptions}`]);
}
// Set the extra credentials
@@ -149,99 +149,102 @@ async function run() {
for (const reg of update.registries) {
selectedRegistries.push(config.registries[reg]);
}
- let extraCredentials = JSON.stringify(selectedRegistries, (k, v) => v === null ? undefined : v);
- dockerRunner.arg(["-e", `DEPENDABOT_EXTRA_CREDENTIALS=${extraCredentials}`]);
+ let extraCredentials = JSON.stringify(selectedRegistries, (k, v) => (v === null ? undefined : v));
+ dockerRunner.arg(['-e', `DEPENDABOT_EXTRA_CREDENTIALS=${extraCredentials}`]);
}
// Set exception behaviour if true
if (variables.failOnException === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_FAIL_ON_EXCEPTION=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_FAIL_ON_EXCEPTION=true']);
}
// Set skip pull requests if true
if (variables.skipPullRequests === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_SKIP_PULL_REQUESTS=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_SKIP_PULL_REQUESTS=true']);
}
// Set skip pull requests if true
if (variables.commentPullRequests === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_COMMENT_PULL_REQUESTS=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_COMMENT_PULL_REQUESTS=true']);
}
// Set abandon Unwanted pull requests if true
if (variables.abandonUnwantedPullRequests === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_CLOSE_PULL_REQUESTS=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_CLOSE_PULL_REQUESTS=true']);
}
// Set the security advisories
if (variables.securityAdvisoriesFile) {
- const containerPath = "/mnt/security_advisories.json"
+ const containerPath = '/mnt/security_advisories.json';
dockerRunner.arg(['--mount', `type=bind,source=${variables.securityAdvisoriesFile},target=${containerPath}`]);
- dockerRunner.arg(["-e", `DEPENDABOT_SECURITY_ADVISORIES_FILE=${containerPath}`]);
+ dockerRunner.arg(['-e', `DEPENDABOT_SECURITY_ADVISORIES_FILE=${containerPath}`]);
}
/*
* Set env variables in the runner for Azure
*/
- dockerRunner.arg(["-e", `AZURE_ORGANIZATION=${variables.organization}`]); // Set the organization
- dockerRunner.arg(["-e", `AZURE_PROJECT=${variables.project}`]); // Set the project
- dockerRunner.arg(["-e", `AZURE_REPOSITORY=${variables.repository}`]);
- dockerRunner.arg(["-e", `AZURE_ACCESS_TOKEN=${variables.systemAccessToken}`]);
- dockerRunner.arg(["-e", `AZURE_MERGE_STRATEGY=${variables.mergeStrategy}`]);
+ dockerRunner.arg(['-e', `AZURE_ORGANIZATION=${variables.organization}`]); // Set the organization
+ dockerRunner.arg(['-e', `AZURE_PROJECT=${variables.project}`]); // Set the project
+ dockerRunner.arg(['-e', `AZURE_REPOSITORY=${variables.repository}`]);
+ dockerRunner.arg(['-e', `AZURE_ACCESS_TOKEN=${variables.systemAccessToken}`]);
+ dockerRunner.arg(['-e', `AZURE_MERGE_STRATEGY=${variables.mergeStrategy}`]);
// Set Username
if (variables.systemAccessUser) {
- dockerRunner.arg(["-e", `AZURE_ACCESS_USERNAME=${variables.systemAccessUser}`]);
+ dockerRunner.arg(['-e', `AZURE_ACCESS_USERNAME=${variables.systemAccessUser}`]);
}
// Set the protocol if not the default value
if (variables.protocol !== 'https') {
- dockerRunner.arg(["-e", `AZURE_PROTOCOL=${variables.protocol}`]);
+ dockerRunner.arg(['-e', `AZURE_PROTOCOL=${variables.protocol}`]);
}
// Set the host name if not the default value
- if (variables.hostname !== "dev.azure.com") {
- dockerRunner.arg(["-e", `AZURE_HOSTNAME=${variables.hostname}`]);
+ if (variables.hostname !== 'dev.azure.com') {
+ dockerRunner.arg(['-e', `AZURE_HOSTNAME=${variables.hostname}`]);
}
// Set auto complete, if set
if (variables.setAutoComplete === true) {
- dockerRunner.arg(["-e", 'AZURE_SET_AUTO_COMPLETE=true']);
+ dockerRunner.arg(['-e', 'AZURE_SET_AUTO_COMPLETE=true']);
// Set the ignore config IDs for auto complete if not the default value
if (variables.autoCompleteIgnoreConfigIds.length > 0) {
- dockerRunner.arg(["-e", `AZURE_AUTO_COMPLETE_IGNORE_CONFIG_IDS=${JSON.stringify(variables.autoCompleteIgnoreConfigIds)}`]);
+ dockerRunner.arg([
+ '-e',
+ `AZURE_AUTO_COMPLETE_IGNORE_CONFIG_IDS=${JSON.stringify(variables.autoCompleteIgnoreConfigIds)}`,
+ ]);
}
}
// Set the port
- if (variables.port && variables.port !== "") {
- dockerRunner.arg(["-e", `AZURE_PORT=${variables.port}`]);
+ if (variables.port && variables.port !== '') {
+ dockerRunner.arg(['-e', `AZURE_PORT=${variables.port}`]);
}
// Set the virtual directory
- if (variables.virtualDirectory !== "") {
- dockerRunner.arg(["-e", `AZURE_VIRTUAL_DIRECTORY=${variables.virtualDirectory}`]);
+ if (variables.virtualDirectory !== '') {
+ dockerRunner.arg(['-e', `AZURE_VIRTUAL_DIRECTORY=${variables.virtualDirectory}`]);
}
// Set auto approve
if (variables.autoApprove === true) {
- dockerRunner.arg(["-e", 'AZURE_AUTO_APPROVE_PR=true']);
+ dockerRunner.arg(['-e', 'AZURE_AUTO_APPROVE_PR=true']);
// Set the token to use for auto approve if provided
if (variables.autoApproveUserToken) {
- dockerRunner.arg(["-e", `AZURE_AUTO_APPROVE_USER_TOKEN=${variables.autoApproveUserToken}`]);
+ dockerRunner.arg(['-e', `AZURE_AUTO_APPROVE_USER_TOKEN=${variables.autoApproveUserToken}`]);
}
}
// Set debug
if (variables.debug === true) {
- dockerRunner.arg(["-e", 'DEPENDABOT_DEBUG=true']);
+ dockerRunner.arg(['-e', 'DEPENDABOT_DEBUG=true']);
}
// Add in extra environment variables
- variables.extraEnvironmentVariables.forEach(extraEnvVar => {
- dockerRunner.arg(["-e", extraEnvVar]);
+ variables.extraEnvironmentVariables.forEach((extraEnvVar) => {
+ dockerRunner.arg(['-e', extraEnvVar]);
});
// Forward the host SSH socket
@@ -262,7 +265,7 @@ async function run() {
await dockerRunner.exec();
}
- tl.debug("Docker container execution completed!");
+ tl.debug('Docker container execution completed!');
} catch (err) {
tl.setResult(tl.TaskResult.Failed, err.message);
}
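
The V1 task above builds one long `docker run` invocation by repeatedly appending conditional `-e KEY=value` pairs to a ToolRunner. A minimal standalone sketch of that pattern; the `setEnv` helper and the sample value are illustrative, not part of the patch:

  import * as tl from 'azure-pipelines-task-lib/task';
  import { ToolRunner } from 'azure-pipelines-task-lib/toolrunner';

  // Append an environment variable to the docker invocation only when a value is provided.
  function setEnv(runner: ToolRunner, name: string, value?: string) {
    if (value) runner.arg(['-e', `${name}=${value}`]);
  }

  const runner: ToolRunner = tl.tool(tl.which('docker', true));
  runner.arg(['run', '--rm', '-i']);
  setEnv(runner, 'DEPENDABOT_PACKAGE_MANAGER', 'nuget'); // illustrative value
  // ... remaining -e pairs, then: await runner.exec();
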
diff --git a/extension/task/task.json b/extension/tasks/dependabotV1/task.json
similarity index 95%
rename from extension/task/task.json
rename to extension/tasks/dependabotV1/task.json
index 211cf035..320028ac 100644
--- a/extension/task/task.json
+++ b/extension/tasks/dependabotV1/task.json
@@ -3,7 +3,7 @@
"id": "d98b873d-cf18-41eb-8ff5-234f14697896",
"name": "dependabot",
"friendlyName": "Dependabot",
- "description": "Automatically update dependencies and vulnerabilities in your code",
+ "description": "Automatically update dependencies and vulnerabilities in your code using [Dependabot Updater](https://github.com/dependabot/dependabot-core/tree/main/updater)",
"helpMarkDown": "For help please visit https://github.com/tinglesoftware/dependabot-azure-devops/issues",
"helpUrl": "https://github.com/tinglesoftware/dependabot-azure-devops/issues",
"releaseNotes": "https://github.com/tinglesoftware/dependabot-azure-devops/releases",
@@ -14,11 +14,13 @@
"demands": [],
"version": {
"Major": 1,
- "Minor": 6,
+ "Minor": 0,
"Patch": 0
},
+ "deprecated": true,
+ "deprecationMessage": "This task version is deprecated and is no longer maintained. Please upgrade to the latest version to continue receiving fixes and features. More details: https://github.com/tinglesoftware/dependabot-azure-devops/discussions/1317.",
"instanceNameFormat": "Dependabot",
- "minimumAgentVersion": "2.105.0",
+ "minimumAgentVersion": "3.232.1",
"groups": [
{
"name": "security_updates",
@@ -255,8 +257,9 @@
],
"dataSourceBindings": [],
"execution": {
- "Node16": {
- "target": "index.js"
+ "Node20_1": {
+ "target": "index.js",
+ "argumentFormat": ""
}
}
}
diff --git a/extension/task/IDependabotConfig.ts b/extension/tasks/dependabotV1/utils/IDependabotConfig.ts
similarity index 84%
rename from extension/task/IDependabotConfig.ts
rename to extension/tasks/dependabotV1/utils/IDependabotConfig.ts
index 3951b9a8..714fe2c9 100644
--- a/extension/task/IDependabotConfig.ts
+++ b/extension/tasks/dependabotV1/utils/IDependabotConfig.ts
@@ -99,48 +99,48 @@ export interface IDependabotUpdate {
export interface IDependabotRegistry {
/** Identifies the type of registry*/
- type: string;
+ 'type': string;
/**
* The URL to use to access the dependencies.
* Dependabot adds or ignores trailing slashes as required.
* The protocol is optional. If not specified, `https://` is assumed.
*/
- url?: string | null | undefined;
- "index-url"?: string | null | undefined; // only for python_index
+ 'url'?: string | null | undefined;
+ 'index-url'?: string | null | undefined; // only for python_index
/**
* The URL of the registry to use to access the dependencies.
* Dependabot adds or ignores trailing slashes as required.
* It should not have the scheme.
*/
- registry?: string | null | undefined;
- /** The hostname for 'terraform_registry' types */
- host?: string | null | undefined;
+ 'registry'?: string | null | undefined;
+ /** The hostname for `terraform_registry` and `composer_repository` types */
+ 'host'?: string | null | undefined;
/** The username to access the registry */
- username?: string | null | undefined;
+ 'username'?: string | null | undefined;
/** A password for the username to access this registry */
- password?: string | null | undefined;
+ 'password'?: string | null | undefined;
/** An access key for this registry */
- key?: string | null | undefined;
+ 'key'?: string | null | undefined;
/** An access token for this registry */
- token?: string | null | undefined;
+ 'token'?: string | null | undefined;
/** Organization for 'hex_organization' types */
- organization?: string | null | undefined;
+ 'organization'?: string | null | undefined;
/** Repository for 'hex_repository' types */
- repo?: string | null | undefined;
+ 'repo'?: string | null | undefined;
/** Repository for 'hex_repository' types */
- "auth-key"?: string | null | undefined;
+ 'auth-key'?: string | null | undefined;
/** Fingerprint of the public key for the Hex repository */
- "public-key-fingerprint"?: string | null | undefined;
+ 'public-key-fingerprint'?: string | null | undefined;
/**
* For registries with type: python-index,
* if the boolean value is `true`, pip resolves dependencies by using the specified URL
* rather than the base URL of the Python Package Index (by default https://pypi.org/simple).
*/
- "replaces-base"?: boolean | null | undefined;
+ 'replaces-base'?: boolean | null | undefined;
}
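
Given the quoted-key style adopted above, a registry literal looks like the following sketch; the values mirror the `dockerhub` entry asserted in the parseConfigFile tests further down and are otherwise placeholders:

  import { IDependabotRegistry } from './IDependabotConfig';

  const dockerhub: IDependabotRegistry = {
    'type': 'docker_registry',
    'registry': 'registry.hub.docker.com',
    'username': 'octocat',
    'password': 'pwd_1234567890',
    'replaces-base': true,
  };
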
diff --git a/extension/tasks/dependabotV1/utils/convertPlaceholder.test.ts b/extension/tasks/dependabotV1/utils/convertPlaceholder.test.ts
new file mode 100644
index 00000000..d16418a6
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/convertPlaceholder.test.ts
@@ -0,0 +1,28 @@
+import { extractPlaceholder } from './convertPlaceholder';
+
+describe('Parse property placeholder', () => {
+ it('Should return key with underscores', () => {
+ var matches: RegExpExecArray[] = extractPlaceholder('PAT:${{MY_DEPENDABOT_ADO_PAT}}');
+ expect(matches[0][1]).toBe('MY_DEPENDABOT_ADO_PAT');
+ });
+
+ it('Should return the key', () => {
+ var matches: RegExpExecArray[] = extractPlaceholder('PAT:${{PAT}}');
+ expect(matches[0][1]).toBe('PAT');
+ });
+
+ it('Without PAT: prefix should return key', () => {
+ var matches: RegExpExecArray[] = extractPlaceholder('${{MY_DEPENDABOT_ADO_PAT}}');
+ expect(matches[0][1]).toBe('MY_DEPENDABOT_ADO_PAT');
+ });
+
+ it('Works when padded with spaces', () => {
+ var matches: RegExpExecArray[] = extractPlaceholder('PAT:${{ MY_SECRET_VAR_NAME }}');
+ expect(matches[0][1]).toBe('MY_SECRET_VAR_NAME');
+ });
+
+ it('With malformed brackets should be null', () => {
+ var matches: RegExpExecArray[] = extractPlaceholder('${MY_DEPENDABOT_ADO_PAT}');
+ expect(matches[0]).toBe(undefined);
+ });
+});
diff --git a/extension/task/utils/convertPlaceholder.ts b/extension/tasks/dependabotV1/utils/convertPlaceholder.ts
similarity index 82%
rename from extension/task/utils/convertPlaceholder.ts
rename to extension/tasks/dependabotV1/utils/convertPlaceholder.ts
index e8b35f0a..aabca849 100644
--- a/extension/task/utils/convertPlaceholder.ts
+++ b/extension/tasks/dependabotV1/utils/convertPlaceholder.ts
@@ -1,4 +1,4 @@
-import { getVariable } from "azure-pipelines-task-lib/task";
+import { getVariable } from 'azure-pipelines-task-lib/task';
function convertPlaceholder(input: string): string {
var matches: RegExpExecArray[] = extractPlaceholder(input);
@@ -13,7 +13,7 @@ function convertPlaceholder(input: string): string {
}
function extractPlaceholder(input: string) {
- const regexp: RegExp = new RegExp("\\${{\\s*([a-zA-Z_]+[a-zA-Z0-9_-]*)\\s*}}", 'g');
+ const regexp: RegExp = new RegExp('\\${{\\s*([a-zA-Z_]+[a-zA-Z0-9_-]*)\\s*}}', 'g');
return matchAll(input, regexp);
}
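
The placeholder pattern above can be checked in isolation with the built-in `String.prototype.matchAll`; this sketch mirrors the cases covered by the new convertPlaceholder.test.ts:

  // Same expression as in extractPlaceholder, written as a regex literal.
  const placeholder = /\${{\s*([a-zA-Z_]+[a-zA-Z0-9_-]*)\s*}}/g;

  for (const match of 'PAT:${{ MY_SECRET_VAR_NAME }}'.matchAll(placeholder)) {
    console.log(match[1]); // MY_SECRET_VAR_NAME
  }
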
diff --git a/extension/tasks/dependabotV1/utils/extractHostname.test.ts b/extension/tasks/dependabotV1/utils/extractHostname.test.ts
new file mode 100644
index 00000000..78044219
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/extractHostname.test.ts
@@ -0,0 +1,24 @@
+import extractHostname from './extractHostname';
+
+describe('Extract hostname', () => {
+ it('Should convert old *.visualstudio.com hostname to dev.azure.com', () => {
+ var url = new URL('https://contoso.visualstudio.com');
+ var hostname = extractHostname(url);
+
+ expect(hostname).toBe('dev.azure.com');
+ });
+
+ it('Should retain the hostname', () => {
+ var url = new URL('https://dev.azure.com/Core/contoso');
+ var hostname = extractHostname(url);
+
+ expect(hostname).toBe('dev.azure.com');
+ });
+
+ it('Should retain localhost hostname', () => {
+ var url = new URL('https://localhost:8080/contoso');
+ var hostname = extractHostname(url);
+
+ expect(hostname).toBe('localhost');
+ });
+});
diff --git a/extension/task/utils/extractHostname.ts b/extension/tasks/dependabotV1/utils/extractHostname.ts
similarity index 100%
rename from extension/task/utils/extractHostname.ts
rename to extension/tasks/dependabotV1/utils/extractHostname.ts
diff --git a/extension/tasks/dependabotV1/utils/extractOrganization.test.ts b/extension/tasks/dependabotV1/utils/extractOrganization.test.ts
new file mode 100644
index 00000000..ebdb8209
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/extractOrganization.test.ts
@@ -0,0 +1,24 @@
+import extractOrganization from './extractOrganization';
+
+describe('Extract organization name', () => {
+ it('Should extract organization for on-premise domain', () => {
+ var url = 'https://server.domain.com/tfs/contoso/';
+ var organization = extractOrganization(url);
+
+ expect(organization).toBe('contoso');
+ });
+
+ it('Should extract organization for azure devops domain', () => {
+ var url = 'https://dev.azure.com/contoso/';
+ var organization = extractOrganization(url);
+
+ expect(organization).toBe('contoso');
+ });
+
+ it('Should extract organization for old style devops url', () => {
+ var url = 'https://contoso.visualstudio.com/';
+ var organization = extractOrganization(url);
+
+ expect(organization).toBe('contoso');
+ });
+});
diff --git a/extension/task/utils/extractOrganization.ts b/extension/tasks/dependabotV1/utils/extractOrganization.ts
similarity index 90%
rename from extension/task/utils/extractOrganization.ts
rename to extension/tasks/dependabotV1/utils/extractOrganization.ts
index ae6745ad..2b43561b 100644
--- a/extension/task/utils/extractOrganization.ts
+++ b/extension/tasks/dependabotV1/utils/extractOrganization.ts
@@ -6,7 +6,7 @@
* @returns organization name
*/
export default function extractOrganization(organizationUrl: string): string {
- let parts = organizationUrl.split("/");
+ let parts = organizationUrl.split('/');
// Check for on-premise style: https://server.domain.com/tfs/x/
if (parts.length === 6) {
@@ -24,7 +24,7 @@ export default function extractOrganization(organizationUrl: string): string {
let part = parts[2];
// Return organization part (x).
- return part.split(".")[0];
+ return part.split('.')[0];
}
throw new Error(`Error parsing organization from organization url: '${organizationUrl}'.`);
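
Only the legacy `*.visualstudio.com` branch is fully visible in the hunk above; the worked splits below show why the part counts distinguish the three URL shapes exercised by the new tests (a sketch, not part of the patch):

  const onPrem = 'https://server.domain.com/tfs/contoso/'.split('/'); // ['https:', '', 'server.domain.com', 'tfs', 'contoso', ''] -> 6 parts
  const hosted = 'https://dev.azure.com/contoso/'.split('/');         // ['https:', '', 'dev.azure.com', 'contoso', '']            -> 5 parts
  const legacy = 'https://contoso.visualstudio.com/'.split('/');      // ['https:', '', 'contoso.visualstudio.com', '']
  const organization = legacy[2].split('.')[0];                       // 'contoso', as returned by the visible branch
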
diff --git a/extension/tasks/dependabotV1/utils/extractVirtualDirectory.test.ts b/extension/tasks/dependabotV1/utils/extractVirtualDirectory.test.ts
new file mode 100644
index 00000000..71ce773d
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/extractVirtualDirectory.test.ts
@@ -0,0 +1,17 @@
+import extractVirtualDirectory from './extractVirtualDirectory';
+
+describe('Extract virtual directory', () => {
+ it('Should extract virtual directory', () => {
+ var url = new URL('https://server.domain.com/contoso/x/');
+ var virtualDirectory = extractVirtualDirectory(url);
+
+ expect(virtualDirectory).toBe('contoso');
+ });
+
+ it('Should return empty for dev.azure.com organization URL', () => {
+ var url = new URL('https://dev.azure.com/contoso/');
+ var virtualDirectory = extractVirtualDirectory(url);
+
+ expect(virtualDirectory).toBe('');
+ });
+});
diff --git a/extension/task/utils/extractVirtualDirectory.ts b/extension/tasks/dependabotV1/utils/extractVirtualDirectory.ts
similarity index 91%
rename from extension/task/utils/extractVirtualDirectory.ts
rename to extension/tasks/dependabotV1/utils/extractVirtualDirectory.ts
index 7e54c0d3..02db1447 100644
--- a/extension/task/utils/extractVirtualDirectory.ts
+++ b/extension/tasks/dependabotV1/utils/extractVirtualDirectory.ts
@@ -11,7 +11,7 @@
export default function extractVirtualDirectory(organizationUrl: URL): string {
// extract the pathname from the url then split
//pathname takes the shape '/tfs/x/'
- let path = organizationUrl.pathname.split("/");
+ let path = organizationUrl.pathname.split('/');
// Virtual Directories are sometimes used in on-premises
// URLs typically are like this: https://server.domain.com/tfs/x/
@@ -19,5 +19,5 @@ export default function extractVirtualDirectory(organizationUrl: URL): string {
if (path.length == 4) {
return path[1];
}
- return "";
+ return '';
}
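
A quick worked example of the pathname split driving the two branches above, matching the new extractVirtualDirectory tests:

  const onPremPath = new URL('https://server.domain.com/contoso/x/').pathname.split('/'); // ['', 'contoso', 'x', ''] -> length 4, path[1] === 'contoso'
  const hostedPath = new URL('https://dev.azure.com/contoso/').pathname.split('/');       // ['', 'contoso', '']      -> length 3, so '' is returned
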
diff --git a/extension/tasks/dependabotV1/utils/getAzureDevOpsAccessToken.ts b/extension/tasks/dependabotV1/utils/getAzureDevOpsAccessToken.ts
new file mode 100644
index 00000000..646747af
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/getAzureDevOpsAccessToken.ts
@@ -0,0 +1,26 @@
+import { debug, getEndpointAuthorizationParameter, getInput } from 'azure-pipelines-task-lib/task';
+
+/**
+ * Prepare the access token for Azure DevOps Repos.
+ *
+ *
+ * If the user has not provided one, we use the one from the SystemVssConnection
+ *
+ * @returns Azure DevOps Access Token
+ */
+export default function getAzureDevOpsAccessToken() {
+ let systemAccessToken: string = getInput('azureDevOpsAccessToken');
+ if (systemAccessToken) {
+ debug('azureDevOpsAccessToken provided, using for authenticating');
+ return systemAccessToken;
+ }
+
+ let serviceConnectionName: string = getInput('azureDevOpsServiceConnection');
+ if (serviceConnectionName) {
+ debug('TFS connection supplied. A token shall be extracted from it.');
+ return getEndpointAuthorizationParameter(serviceConnectionName, 'apitoken', false);
+ }
+
+ debug("No custom token provided. The SystemVssConnection's AccessToken shall be used.");
+ return getEndpointAuthorizationParameter('SystemVssConnection', 'AccessToken', false);
+}
diff --git a/extension/tasks/dependabotV1/utils/getDockerImageTag.ts b/extension/tasks/dependabotV1/utils/getDockerImageTag.ts
new file mode 100644
index 00000000..ebd81969
--- /dev/null
+++ b/extension/tasks/dependabotV1/utils/getDockerImageTag.ts
@@ -0,0 +1,36 @@
+import * as tl from 'azure-pipelines-task-lib/task';
+import * as fs from 'fs';
+import * as path from 'path';
+
+/**
+ * Extract the docker image tag from `dockerImageTag` input or the `task.json` file.
+ * @returns {string} the version
+ */
+export default function getDockerImageTag(): string {
+ let dockerImageTag: string | undefined = tl.getInput('dockerImageTag');
+
+ if (!dockerImageTag) {
+ tl.debug('Getting dockerImageTag from task.json file. If you want to override, specify the dockerImageTag input');
+
+    // Ensure the file exists; otherwise throw a readable error.
+ const filePath = path.join(__dirname, '..', 'task.json');
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`task.json could not be found at '${filePath}'`);
+ }
+
+    // Ensure the file parses to an object
+ let obj: any = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
+ if (obj === null || typeof obj !== 'object') {
+ throw new Error('Invalid dependabot config object');
+ }
+
+ const versionMajor = obj['version']['Major'];
+ const versionMinor = obj['version']['Minor'];
+ if (!!!versionMajor || !!!versionMinor)
+ throw new Error('Major and/or Minor version could not be parsed from the file');
+
+ dockerImageTag = `${versionMajor}.${versionMinor}`;
+ }
+
+ return dockerImageTag;
+}
diff --git a/extension/task/utils/getGithubAccessToken.ts b/extension/tasks/dependabotV1/utils/getGithubAccessToken.ts
similarity index 57%
rename from extension/task/utils/getGithubAccessToken.ts
rename to extension/tasks/dependabotV1/utils/getGithubAccessToken.ts
index 3d886930..ab660444 100644
--- a/extension/task/utils/getGithubAccessToken.ts
+++ b/extension/tasks/dependabotV1/utils/getGithubAccessToken.ts
@@ -1,9 +1,4 @@
-import {
- debug,
- getEndpointAuthorization,
- getInput,
- loc,
-} from "azure-pipelines-task-lib/task";
+import { debug, getEndpointAuthorization, getInput, loc } from 'azure-pipelines-task-lib/task';
/**
* Extract access token from Github endpoint
@@ -16,23 +11,21 @@ function getGithubEndPointToken(githubEndpoint: string): string {
let githubEndpointToken: string = null;
if (!!githubEndpointObject) {
- debug("Endpoint scheme: " + githubEndpointObject.scheme);
+ debug('Endpoint scheme: ' + githubEndpointObject.scheme);
- if (githubEndpointObject.scheme === "PersonalAccessToken") {
+ if (githubEndpointObject.scheme === 'PersonalAccessToken') {
githubEndpointToken = githubEndpointObject.parameters.accessToken;
- } else if (githubEndpointObject.scheme === "OAuth") {
+ } else if (githubEndpointObject.scheme === 'OAuth') {
githubEndpointToken = githubEndpointObject.parameters.AccessToken;
- } else if (githubEndpointObject.scheme === "Token") {
+ } else if (githubEndpointObject.scheme === 'Token') {
githubEndpointToken = githubEndpointObject.parameters.AccessToken;
} else if (githubEndpointObject.scheme) {
- throw new Error(
- loc("InvalidEndpointAuthScheme", githubEndpointObject.scheme)
- );
+ throw new Error(loc('InvalidEndpointAuthScheme', githubEndpointObject.scheme));
}
}
if (!githubEndpointToken) {
- throw new Error(loc("InvalidGitHubEndpoint", githubEndpoint));
+ throw new Error(loc('InvalidGitHubEndpoint', githubEndpoint));
}
return githubEndpointToken;
@@ -44,15 +37,15 @@ function getGithubEndPointToken(githubEndpoint: string): string {
* @returns the Github access token
*/
export default function getGithubAccessToken() {
- let gitHubAccessToken: string = getInput("gitHubAccessToken");
+ let gitHubAccessToken: string = getInput('gitHubAccessToken');
if (gitHubAccessToken) {
- debug("gitHubAccessToken provided, using for authenticating");
+ debug('gitHubAccessToken provided, using for authenticating');
return gitHubAccessToken;
}
- const githubEndpointId = getInput("gitHubConnection");
+ const githubEndpointId = getInput('gitHubConnection');
if (githubEndpointId) {
- debug("GitHub connection supplied. A token shall be extracted from it.");
+ debug('GitHub connection supplied. A token shall be extracted from it.');
gitHubAccessToken = getGithubEndPointToken(githubEndpointId);
}
diff --git a/extension/task/utils/getSharedVariables.ts b/extension/tasks/dependabotV1/utils/getSharedVariables.ts
similarity index 62%
rename from extension/task/utils/getSharedVariables.ts
rename to extension/tasks/dependabotV1/utils/getSharedVariables.ts
index 996c95ed..56383d79 100644
--- a/extension/task/utils/getSharedVariables.ts
+++ b/extension/tasks/dependabotV1/utils/getSharedVariables.ts
@@ -1,10 +1,10 @@
-import * as tl from "azure-pipelines-task-lib/task";
-import extractHostname from "./extractHostname";
-import extractOrganization from "./extractOrganization";
-import extractVirtualDirectory from "./extractVirtualDirectory";
-import getAzureDevOpsAccessToken from "./getAzureDevOpsAccessToken";
-import getDockerImageTag from "./getDockerImageTag";
-import getGithubAccessToken from "./getGithubAccessToken";
+import * as tl from 'azure-pipelines-task-lib/task';
+import extractHostname from './extractHostname';
+import extractOrganization from './extractOrganization';
+import extractVirtualDirectory from './extractVirtualDirectory';
+import getAzureDevOpsAccessToken from './getAzureDevOpsAccessToken';
+import getDockerImageTag from './getDockerImageTag';
+import getGithubAccessToken from './getGithubAccessToken';
export interface ISharedVariables {
/** URL of the organization. This may lack the project name */
@@ -53,7 +53,7 @@ export interface ISharedVariables {
/** Determines if verbose log messages are logged */
debug: boolean;
-
+
/** List of update identifiers to run */
targetUpdateIds: number[];
@@ -85,82 +85,62 @@ export interface ISharedVariables {
* @returns shared variables
*/
export default function getSharedVariables(): ISharedVariables {
- let organizationUrl = tl.getVariable("System.TeamFoundationCollectionUri");
+ let organizationUrl = tl.getVariable('System.TeamFoundationCollectionUri');
//convert url string into a valid JS URL object
let formattedOrganizationUrl = new URL(organizationUrl);
let protocol: string = formattedOrganizationUrl.protocol.slice(0, -1);
let hostname: string = extractHostname(formattedOrganizationUrl);
let port: string = formattedOrganizationUrl.port;
- let virtualDirectory: string = extractVirtualDirectory(
- formattedOrganizationUrl
- );
+ let virtualDirectory: string = extractVirtualDirectory(formattedOrganizationUrl);
let organization: string = extractOrganization(organizationUrl);
- let project: string = encodeURI(tl.getVariable("System.TeamProject")); // encode special characters like spaces
- let repository: string = tl.getInput("targetRepositoryName");
- let repositoryOverridden = typeof repository === "string";
+ let project: string = encodeURI(tl.getVariable('System.TeamProject')); // encode special characters like spaces
+ let repository: string = tl.getInput('targetRepositoryName');
+ let repositoryOverridden = typeof repository === 'string';
if (!repositoryOverridden) {
- tl.debug(
- "No custom repository provided. The Pipeline Repository Name shall be used."
- );
- repository = tl.getVariable("Build.Repository.Name");
+ tl.debug('No custom repository provided. The Pipeline Repository Name shall be used.');
+ repository = tl.getVariable('Build.Repository.Name');
}
repository = encodeURI(repository); // encode special characters like spaces
// Prepare the access credentials
let githubAccessToken: string = getGithubAccessToken();
- let systemAccessUser: string = tl.getInput("azureDevOpsUser");
+ let systemAccessUser: string = tl.getInput('azureDevOpsUser');
let systemAccessToken: string = getAzureDevOpsAccessToken();
// Prepare variables for auto complete
- let setAutoComplete = tl.getBoolInput("setAutoComplete", false);
- let mergeStrategy = tl.getInput("mergeStrategy", true);
- let autoCompleteIgnoreConfigIds = tl
- .getDelimitedInput("autoCompleteIgnoreConfigIds", ";", false)
- .map(Number);
+ let setAutoComplete = tl.getBoolInput('setAutoComplete', false);
+ let mergeStrategy = tl.getInput('mergeStrategy', true);
+ let autoCompleteIgnoreConfigIds = tl.getDelimitedInput('autoCompleteIgnoreConfigIds', ';', false).map(Number);
// Prepare variables for auto approve
- let autoApprove: boolean = tl.getBoolInput("autoApprove", false);
- let autoApproveUserToken: string = tl.getInput("autoApproveUserToken");
+ let autoApprove: boolean = tl.getBoolInput('autoApprove', false);
+ let autoApproveUserToken: string = tl.getInput('autoApproveUserToken');
// Prepare control flow variables
- let failOnException = tl.getBoolInput("failOnException", true);
- let excludeRequirementsToUnlock =
- tl.getInput("excludeRequirementsToUnlock") || "";
- let updaterOptions = tl.getInput("updaterOptions");
+ let failOnException = tl.getBoolInput('failOnException', true);
+ let excludeRequirementsToUnlock = tl.getInput('excludeRequirementsToUnlock') || '';
+ let updaterOptions = tl.getInput('updaterOptions');
- let debug: boolean = tl.getVariable("System.Debug")?.localeCompare("true") === 0;
+ let debug: boolean = tl.getVariable('System.Debug')?.match(/true/i) ? true : false;
// Get the target identifiers
- let targetUpdateIds = tl
- .getDelimitedInput("targetUpdateIds", ";", false)
- .map(Number);
+ let targetUpdateIds = tl.getDelimitedInput('targetUpdateIds', ';', false).map(Number);
// Prepare other variables
- let securityAdvisoriesFile: string | undefined = tl.getInput(
- "securityAdvisoriesFile"
- );
- let skipPullRequests: boolean = tl.getBoolInput("skipPullRequests", false);
- let commentPullRequests: boolean = tl.getBoolInput("commentPullRequests", false);
- let abandonUnwantedPullRequests: boolean = tl.getBoolInput("abandonUnwantedPullRequests", true);
-
- let extraEnvironmentVariables = tl.getDelimitedInput(
- "extraEnvironmentVariables",
- ";",
- false
- );
-
- let forwardHostSshSocket: boolean = tl.getBoolInput(
- "forwardHostSshSocket",
- false
- );
+ let securityAdvisoriesFile: string | undefined = tl.getInput('securityAdvisoriesFile');
+ let skipPullRequests: boolean = tl.getBoolInput('skipPullRequests', false);
+ let commentPullRequests: boolean = tl.getBoolInput('commentPullRequests', false);
+ let abandonUnwantedPullRequests: boolean = tl.getBoolInput('abandonUnwantedPullRequests', true);
+
+ let extraEnvironmentVariables = tl.getDelimitedInput('extraEnvironmentVariables', ';', false);
+
+ let forwardHostSshSocket: boolean = tl.getBoolInput('forwardHostSshSocket', false);
// Prepare variables for the docker image to use
let dockerImageTag: string = getDockerImageTag();
- let command: string = tl.getBoolInput("useUpdateScriptvNext", false)
- ? "update_script_vnext"
- : "update_script";
+ let command: string = tl.getBoolInput('useUpdateScriptvNext', false) ? 'update_script_vnext' : 'update_script';
return {
organizationUrl: formattedOrganizationUrl,
@@ -187,7 +167,7 @@ export default function getSharedVariables(): ISharedVariables {
failOnException,
excludeRequirementsToUnlock,
updaterOptions,
-
+
debug,
targetUpdateIds,
@@ -196,13 +176,13 @@ export default function getSharedVariables(): ISharedVariables {
skipPullRequests,
commentPullRequests,
abandonUnwantedPullRequests,
-
+
extraEnvironmentVariables,
forwardHostSshSocket,
dockerImageTag,
- command
+ command,
};
}
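
The `System.Debug` check changes from a locale-sensitive string comparison to a case-insensitive regex match, which is slightly more permissive. A standalone illustration (not part of the patch):

  const byLocaleCompare = (v?: string) => v?.localeCompare('true') === 0; // previous check
  const byRegex = (v?: string) => (v?.match(/true/i) ? true : false);     // new check

  byLocaleCompare('True');  // false under the default collation (case-sensitive)
  byRegex('True');          // true, case-insensitive
  byRegex('untrue');        // true, any value containing "true" also matches
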
diff --git a/extension/tests/utils/parseConfigFile.test.ts b/extension/tasks/dependabotV1/utils/parseConfigFile.test.ts
similarity index 66%
rename from extension/tests/utils/parseConfigFile.test.ts
rename to extension/tasks/dependabotV1/utils/parseConfigFile.test.ts
index 4e039cc4..2e908379 100644
--- a/extension/tests/utils/parseConfigFile.test.ts
+++ b/extension/tasks/dependabotV1/utils/parseConfigFile.test.ts
@@ -1,12 +1,11 @@
-import { load } from "js-yaml";
-import * as fs from "fs";
-import * as path from "path";
-import { parseRegistries, parseUpdates, validateConfiguration } from "../../task/utils/parseConfigFile";
-import { IDependabotRegistry, IDependabotUpdate } from "../../task/IDependabotConfig";
+import * as fs from 'fs';
+import { load } from 'js-yaml';
+import { IDependabotRegistry, IDependabotUpdate } from './IDependabotConfig';
+import { parseRegistries, parseUpdates, validateConfiguration } from './parseConfigFile';
-describe("Parse configuration file", () => {
- it("Parsing works as expected", () => {
- let config: any = load(fs.readFileSync('tests/utils/dependabot.yml', "utf-8"));
+describe('Parse configuration file', () => {
+ it('Parsing works as expected', () => {
+ let config: any = load(fs.readFileSync('tests/config/dependabot.yml', 'utf-8'));
let updates = parseUpdates(config);
expect(updates.length).toBe(3);
@@ -31,13 +30,13 @@ describe("Parse configuration file", () => {
expect(third.directory).toBe(undefined);
expect(third.directories).toEqual(['/src/client', '/src/server']);
expect(third.packageEcosystem).toBe('nuget');
- expect(third.groups).toBe('{\"microsoft\":{\"patterns\":[\"microsoft*\"],\"update-types\":[\"minor\",\"patch\"]}}');
+ expect(third.groups).toBe('{"microsoft":{"patterns":["microsoft*"],"update-types":["minor","patch"]}}');
});
});
-describe("Parse registries", () => {
- it("Parsing works as expected", () => {
- let config: any = load(fs.readFileSync('tests/utils/sample-registries.yml', "utf-8"));
+describe('Parse registries', () => {
+ it('Parsing works as expected', () => {
+ let config: any = load(fs.readFileSync('tests/config/sample-registries.yml', 'utf-8'));
let registries = parseRegistries(config);
expect(Object.keys(registries).length).toBe(11);
@@ -45,69 +44,69 @@ describe("Parse registries", () => {
var registry = registries['composer'];
expect(registry.type).toBe('composer_repository');
expect(registry.url).toBe('https://repo.packagist.com/example-company/');
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
- expect(registry.host).toBe(undefined);
+ expect(registry.host).toBe('repo.packagist.com');
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('octocat');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
// docker-registry
registry = registries['dockerhub'];
expect(registry.type).toBe('docker_registry');
expect(registry.url).toBe(undefined);
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe('registry.hub.docker.com');
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('octocat');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(true);
+ expect(registry['replaces-base']).toBe(true);
// git
registry = registries['github-octocat'];
expect(registry.type).toBe('git');
expect(registry.url).toBe('https://github.com');
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('x-access-token');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
// hex-organization
registry = registries['github-hex-org'];
expect(registry.type).toBe('hex_organization');
expect(registry.url).toBe(undefined);
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe('key_1234567890');
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe('github');
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe(undefined);
expect(registry.password).toBe(undefined);
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
// hex-repository
registry = registries['github-hex-repository'];
@@ -119,142 +118,142 @@ describe("Parse registries", () => {
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe('private-repo');
- expect(registry["auth-key"]).toBe('ak_1234567890');
- expect(registry["public-key-fingerprint"]).toBe('pkf_1234567890');
+ expect(registry['auth-key']).toBe('ak_1234567890');
+ expect(registry['public-key-fingerprint']).toBe('pkf_1234567890');
expect(registry.username).toBe(undefined);
expect(registry.password).toBe(undefined);
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
// maven-repository
registry = registries['maven-artifactory'];
expect(registry.type).toBe('maven_repository');
expect(registry.url).toBe('https://artifactory.example.com');
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('octocat');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(true);
+ expect(registry['replaces-base']).toBe(true);
// npm-registry
registry = registries['npm-github'];
expect(registry.type).toBe('npm_registry');
expect(registry.url).toBe(undefined);
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe('npm.pkg.github.com');
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe('tkn_1234567890');
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe(undefined);
expect(registry.password).toBe(undefined);
- expect(registry["replaces-base"]).toBe(true);
+ expect(registry['replaces-base']).toBe(true);
// nuget-feed
registry = registries['nuget-azure-devops'];
expect(registry.type).toBe('nuget_feed');
expect(registry.url).toBe('https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json');
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('octocat@example.com');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
// python-index
registry = registries['python-azure'];
expect(registry.type).toBe('python_index');
expect(registry.url).toBe(undefined);
- expect(registry["index-url"]).toBe('https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example');
+ expect(registry['index-url']).toBe('https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example');
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe(undefined);
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe('octocat@example.com');
expect(registry.password).toBe('pwd_1234567890');
- expect(registry["replaces-base"]).toBe(true);
+ expect(registry['replaces-base']).toBe(true);
// rubygems-server
registry = registries['ruby-github'];
expect(registry.type).toBe('rubygems_server');
expect(registry.url).toBe('https://rubygems.pkg.github.com/octocat/github_api');
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe(undefined);
expect(registry.key).toBe(undefined);
expect(registry.token).toBe('tkn_1234567890');
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe(undefined);
expect(registry.password).toBe(undefined);
- expect(registry["replaces-base"]).toBe(false);
+ expect(registry['replaces-base']).toBe(false);
// terraform-registry
registry = registries['terraform-example'];
expect(registry.type).toBe('terraform_registry');
expect(registry.url).toBe(undefined);
- expect(registry["index-url"]).toBe(undefined);
+ expect(registry['index-url']).toBe(undefined);
expect(registry.registry).toBe(undefined);
expect(registry.host).toBe('terraform.example.com');
expect(registry.key).toBe(undefined);
expect(registry.token).toBe('tkn_1234567890');
expect(registry.organization).toBe(undefined);
expect(registry.repo).toBe(undefined);
- expect(registry["auth-key"]).toBe(undefined);
- expect(registry["public-key-fingerprint"]).toBe(undefined);
+ expect(registry['auth-key']).toBe(undefined);
+ expect(registry['public-key-fingerprint']).toBe(undefined);
expect(registry.username).toBe(undefined);
expect(registry.password).toBe(undefined);
- expect(registry["replaces-base"]).toBe(undefined);
+ expect(registry['replaces-base']).toBe(undefined);
});
});
-describe("Validate registries", () => {
- it("Validation works as expected", () => {
+describe('Validate registries', () => {
+ it('Validation works as expected', () => {
// let config: any = load(fs.readFileSync('tests/utils/dependabot.yml', "utf-8"));
// let updates = parseUpdates(config);
// expect(updates.length).toBe(2);
var updates: IDependabotUpdate[] = [
{
- packageEcosystem: "npm",
- directory: "/",
- registries: ["dummy1", "dummy2"],
+ packageEcosystem: 'npm',
+ directory: '/',
+ registries: ['dummy1', 'dummy2'],
},
];
var registries: Record<string, IDependabotRegistry> = {
- 'dummy1': {
+ dummy1: {
type: 'nuget',
- url: "https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json",
- token: "pwd_1234567890",
+ url: 'https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json',
+ token: 'pwd_1234567890',
},
- 'dummy2': {
- type: "python-index",
- url: "https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example",
- username: "octocat@example.com",
- password: "pwd_1234567890",
- "replaces-base": true,
+ dummy2: {
+ 'type': 'python-index',
+ 'url': 'https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example',
+ 'username': 'octocat@example.com',
+ 'password': 'pwd_1234567890',
+ 'replaces-base': true,
},
};
@@ -263,10 +262,14 @@ describe("Validate registries", () => {
// fails: registry not referenced
updates[0].registries = [];
- expect(() => validateConfiguration(updates, registries)).toThrow(`Registries: 'dummy1,dummy2' have not been referenced by any update`);
+ expect(() => validateConfiguration(updates, registries)).toThrow(
+ `Registries: 'dummy1,dummy2' have not been referenced by any update`,
+ );
// fails: registry not configured
- updates[0].registries = ["dummy1", "dummy2", "dummy3",];
- expect(() => validateConfiguration(updates, registries)).toThrow(`Referenced registries: 'dummy3' have not been configured in the root of dependabot.yml`);
+ updates[0].registries = ['dummy1', 'dummy2', 'dummy3'];
+ expect(() => validateConfiguration(updates, registries)).toThrow(
+ `Referenced registries: 'dummy3' have not been configured in the root of dependabot.yml`,
+ );
});
});
diff --git a/extension/task/utils/parseConfigFile.ts b/extension/tasks/dependabotV1/utils/parseConfigFile.ts
similarity index 50%
rename from extension/task/utils/parseConfigFile.ts
rename to extension/tasks/dependabotV1/utils/parseConfigFile.ts
index 33bb814d..4b394949 100644
--- a/extension/task/utils/parseConfigFile.ts
+++ b/extension/tasks/dependabotV1/utils/parseConfigFile.ts
@@ -1,16 +1,13 @@
-import {
- IDependabotConfig,
- IDependabotRegistry,
- IDependabotUpdate,
-} from "../IDependabotConfig";
-import { load } from "js-yaml";
-import * as fs from "fs";
-import * as path from "path";
-import * as tl from "azure-pipelines-task-lib/task";
-import { getVariable } from "azure-pipelines-task-lib/task";
-import { ISharedVariables } from "./getSharedVariables";
-import { convertPlaceholder } from "./convertPlaceholder";
-import axios from "axios";
+import axios from 'axios';
+import * as tl from 'azure-pipelines-task-lib/task';
+import { getVariable } from 'azure-pipelines-task-lib/task';
+import * as fs from 'fs';
+import { load } from 'js-yaml';
+import * as path from 'path';
+import { URL } from 'url';
+import { IDependabotConfig, IDependabotRegistry, IDependabotUpdate } from './IDependabotConfig';
+import { convertPlaceholder } from './convertPlaceholder';
+import { ISharedVariables } from './getSharedVariables';
/**
* Parse the dependabot config YAML file to specify update configuration
@@ -25,11 +22,11 @@ import axios from "axios";
*/
async function parseConfigFile(variables: ISharedVariables): Promise<IDependabotConfig> {
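+  // Candidate paths where the dependabot config file may be located in the target repository.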
const possibleFilePaths = [
- "/.azuredevops/dependabot.yml",
- "/.azuredevops/dependabot.yaml",
+ '/.azuredevops/dependabot.yml',
+ '/.azuredevops/dependabot.yaml',
- "/.github/dependabot.yaml",
- "/.github/dependabot.yml",
+ '/.github/dependabot.yaml',
+ '/.github/dependabot.yml',
];
let contents: null | string;
@@ -50,11 +47,11 @@ async function parseConfigFile(variables: ISharedVariables): Promise {
var dependabotUpdate: IDependabotUpdate = {
- packageEcosystem: update["package-ecosystem"],
- directory: update["directory"],
- directories: update["directories"] || [],
-
- openPullRequestsLimit: update["open-pull-requests-limit"],
- registries: update["registries"] || [],
-
- targetBranch: update["target-branch"],
- vendor: update["vendor"] ? JSON.parse(update["vendor"]) : null,
- versioningStrategy: update["versioning-strategy"],
- milestone: update["milestone"],
- branchNameSeparator: update["pull-request-branch-name"]
- ? update["pull-request-branch-name"]["separator"]
+ packageEcosystem: update['package-ecosystem'],
+ directory: update['directory'],
+ directories: update['directories'] || [],
+
+ openPullRequestsLimit: update['open-pull-requests-limit'],
+ registries: update['registries'] || [],
+
+ targetBranch: update['target-branch'],
+ vendor: update['vendor'] ? JSON.parse(update['vendor']) : null,
+ versioningStrategy: update['versioning-strategy'],
+ milestone: update['milestone'],
+ branchNameSeparator: update['pull-request-branch-name']
+ ? update['pull-request-branch-name']['separator']
: undefined,
- insecureExternalCodeExecution: update["insecure-external-code-execution"],
+ insecureExternalCodeExecution: update['insecure-external-code-execution'],
// We are well aware that ignore is not parsed here. It is intentional.
// The ruby script in the docker container does it automatically.
@@ -186,40 +170,27 @@ function parseUpdates(config: any): IDependabotUpdate[] {
// You can also test against various reproductions such as https://dev.azure.com/tingle/dependabot/_git/repro-582
// Convert to JSON or as required by the script
- allow: update["allow"] ? JSON.stringify(update["allow"]) : undefined,
- ignore: update["ignore"] ? JSON.stringify(update["ignore"]) : undefined,
- labels: update["labels"] ? JSON.stringify(update["labels"]) : undefined,
- reviewers: update["reviewers"]
- ? update["reviewers"]
- : undefined,
- assignees: update["assignees"]
- ? update["assignees"]
- : undefined,
- commitMessage: update["commit-message"]
- ? JSON.stringify(update["commit-message"])
- : undefined,
- groups: update["groups"]
- ? JSON.stringify(update["groups"])
- : undefined,
+ allow: update['allow'] ? JSON.stringify(update['allow']) : undefined,
+ ignore: update['ignore'] ? JSON.stringify(update['ignore']) : undefined,
+ labels: update['labels'] ? JSON.stringify(update['labels']) : undefined,
+ reviewers: update['reviewers'] ? update['reviewers'] : undefined,
+ assignees: update['assignees'] ? update['assignees'] : undefined,
+ commitMessage: update['commit-message'] ? JSON.stringify(update['commit-message']) : undefined,
+ groups: update['groups'] ? JSON.stringify(update['groups']) : undefined,
};
if (!dependabotUpdate.packageEcosystem) {
- throw new Error(
- "The value 'package-ecosystem' in dependency update config is missing"
- );
+ throw new Error("The value 'package-ecosystem' in dependency update config is missing");
}
// zero is a valid value
- if (
- !dependabotUpdate.openPullRequestsLimit &&
- dependabotUpdate.openPullRequestsLimit !== 0
- ) {
+ if (!dependabotUpdate.openPullRequestsLimit && dependabotUpdate.openPullRequestsLimit !== 0) {
dependabotUpdate.openPullRequestsLimit = 5;
}
if (!dependabotUpdate.directory && dependabotUpdate.directories.length === 0) {
throw new Error(
- "The values 'directory' and 'directories' in dependency update config is missing, you must specify at least one"
+ "The values 'directory' and 'directories' in dependency update config is missing, you must specify at least one",
);
}
@@ -231,7 +202,7 @@ function parseUpdates(config: any): IDependabotUpdate[] {
function parseRegistries(config: any): Record<string, IDependabotRegistry> {
var registries: Record<string, IDependabotRegistry> = {};
- var rawRegistries = config["registries"];
+ var rawRegistries = config['registries'];
if (rawRegistries == undefined) return registries;
@@ -241,85 +212,91 @@ function parseRegistries(config: any): Record {
var registryConfig = item[1];
// parse the type
- var rawType = registryConfig["type"]
+ var rawType = registryConfig['type'];
if (!rawType) {
- throw new Error(
- `The value for 'type' in dependency registry config '${registryConfigKey}' is missing`
- );
+ throw new Error(`The value for 'type' in dependency registry config '${registryConfigKey}' is missing`);
}
// ensure the type is a known one
if (!KnownRegistryTypes.includes(rawType)) {
throw new Error(
- `The value '${rawType}' for 'type' in dependency registry config '${registryConfigKey}' is not among the supported values.`
+ `The value '${rawType}' for 'type' in dependency registry config '${registryConfigKey}' is not among the supported values.`,
);
}
- var type = rawType?.replace("-", "_");
+ var type = rawType?.replace('-', '_');
- var parsed: IDependabotRegistry = { type: type, };
+ var parsed: IDependabotRegistry = { type: type };
registries[registryConfigKey] = parsed;
// handle special fields for 'hex-organization' types
if (type === 'hex_organization') {
- var organization = registryConfig["organization"];
+ var organization = registryConfig['organization'];
if (!organization) {
- throw new Error(
- `The value 'organization' in dependency registry config '${registryConfigKey}' is missing`
- );
+ throw new Error(`The value 'organization' in dependency registry config '${registryConfigKey}' is missing`);
}
parsed.organization = organization;
}
// handle special fields for 'hex-repository' types
if (type === 'hex_repository') {
- var repo = registryConfig["repo"];
+ var repo = registryConfig['repo'];
if (!repo) {
- throw new Error(
- `The value 'repo' in dependency registry config '${registryConfigKey}' is missing`
- );
+ throw new Error(`The value 'repo' in dependency registry config '${registryConfigKey}' is missing`);
}
parsed.repo = repo;
- parsed["auth-key"] = registryConfig["auth-key"];
- parsed["public-key-fingerprint"] = registryConfig["public-key-fingerprint"];
+ parsed['auth-key'] = registryConfig['auth-key'];
+ parsed['public-key-fingerprint'] = registryConfig['public-key-fingerprint'];
}
// parse username, password, key, and token while replacing tokens where necessary
- parsed.username = convertPlaceholder(registryConfig["username"]);
- parsed.password = convertPlaceholder(registryConfig["password"]);
- parsed.key = convertPlaceholder(registryConfig["key"]);
- parsed.token = convertPlaceholder(registryConfig["token"]);
+ parsed.username = convertPlaceholder(registryConfig['username']);
+ parsed.password = convertPlaceholder(registryConfig['password']);
+ parsed.key = convertPlaceholder(registryConfig['key']);
+ parsed.token = convertPlaceholder(registryConfig['token']);
// add "replaces-base" if present
- var replacesBase = registryConfig["replaces-base"];
+ var replacesBase = registryConfig['replaces-base'];
if (replacesBase !== undefined) {
- parsed["replaces-base"] = replacesBase;
+ parsed['replaces-base'] = replacesBase;
}
// parse the url
- var url = registryConfig["url"];
+ var url = registryConfig['url'];
if (!url && type !== 'hex_organization') {
- throw new Error(
- `The value 'url' in dependency registry config '${registryConfigKey}' is missing`
- );
+ throw new Error(`The value 'url' in dependency registry config '${registryConfigKey}' is missing`);
}
if (url) {
- // Some credentials do not use the 'url' property in the Ruby updater.
- // npm_registry and docker_registry use 'registry' which should be stripped off the scheme.
- // terraform_registry uses 'host' which is the hostname from the given URL.
-
- if (type === 'docker_registry' || type === 'npm_registry') {
- parsed.registry = url.replace("https://", "").replace("http://", "");
- }
- else if (type === 'terraform_registry') {
- parsed.host = new URL(url).hostname;
- }
- else if (type === 'python_index') {
- parsed["index-url"] = url;
- }
- else {
- parsed.url = url;
+ /*
+ * Some credentials do not use the 'url' property in the Ruby updater.
+ * The 'host' and 'registry' properties are derived from the given URL.
+ * The 'registry' property is derived from the 'url' by stripping off the scheme.
+ * The 'host' property is derived from the hostname of the 'url'.
+ *
+ * 'npm_registry' and 'docker_registry' use 'registry' only.
+ * 'terraform_registry' uses 'host' only.
+ * 'composer_repository' uses both 'url' and 'host'.
+ * 'python_index' uses 'index-url' instead of 'url'.
+ */
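+
+      // For example (derived from the rules above; these cases are covered by the config parsing tests):
+      //   docker_registry     'https://registry.hub.docker.com'             -> registry: 'registry.hub.docker.com', url dropped
+      //   terraform_registry  'https://terraform.example.com'               -> host: 'terraform.example.com', url dropped
+      //   composer_repository 'https://repo.packagist.com/example-company/' -> url kept, host: 'repo.packagist.com'
+      //   python_index        'https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example' -> 'index-url' set, url dropped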
+
+ if (URL.canParse(url)) {
+ const parsedUrl = new URL(url);
+
+ const addRegistry = type === 'docker_registry' || type === 'npm_registry';
+ if (addRegistry) parsed.registry = url.replace('https://', '').replace('http://', '');
+
+ const addHost = type === 'terraform_registry' || type === 'composer_repository';
+ if (addHost) parsed.host = parsedUrl.hostname;
}
+
+ if (type === 'python_index') parsed['index-url'] = url;
+
+ const skipUrl =
+ type === 'docker_registry' ||
+ type === 'npm_registry' ||
+ type === 'terraform_registry' ||
+ type === 'python_index';
+ if (!skipUrl) parsed.url = url;
}
});
return registries;
@@ -328,38 +305,35 @@ function parseRegistries(config: any): Record {
function validateConfiguration(updates: IDependabotUpdate[], registries: Record<string, IDependabotRegistry>) {
const configured = Object.keys(registries);
const referenced: string[] = [];
- for (const u of updates) referenced.push(...u.registries);
+ for (const u of updates) referenced.push(...(u.registries ?? []));
// ensure there are no configured registries that have not been referenced
const missingConfiguration = referenced.filter((el) => !configured.includes(el));
if (missingConfiguration.length > 0) {
throw new Error(
- `Referenced registries: '${missingConfiguration.join(',')}' have not been configured in the root of dependabot.yml`
+ `Referenced registries: '${missingConfiguration.join(',')}' have not been configured in the root of dependabot.yml`,
);
}
// ensure there are no registries referenced but not configured
const missingReferences = configured.filter((el) => !referenced.includes(el));
- if (missingReferences.length > 0)
- {
- throw new Error(
- `Registries: '${missingReferences.join(',')}' have not been referenced by any update`
- );
+ if (missingReferences.length > 0) {
+ throw new Error(`Registries: '${missingReferences.join(',')}' have not been referenced by any update`);
}
}
const KnownRegistryTypes = [
- "composer-repository",
- "docker-registry",
- "git",
- "hex-organization",
- "hex-repository",
- "maven-repository",
- "npm-registry",
- "nuget-feed",
- "python-index",
- "rubygems-server",
- "terraform-registry",
+ 'composer-repository',
+ 'docker-registry',
+ 'git',
+ 'hex-organization',
+ 'hex-repository',
+ 'maven-repository',
+ 'npm-registry',
+ 'nuget-feed',
+ 'python-index',
+ 'rubygems-server',
+ 'terraform-registry',
];
-export { parseConfigFile, parseUpdates, parseRegistries, validateConfiguration, };
+export { parseConfigFile, parseRegistries, parseUpdates, validateConfiguration };
diff --git a/extension/tests/utils/resolveAzureDevOpsIdentities.test.ts b/extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.test.ts
similarity index 50%
rename from extension/tests/utils/resolveAzureDevOpsIdentities.test.ts
rename to extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.test.ts
index 9b39605b..1e0d91cf 100644
--- a/extension/tests/utils/resolveAzureDevOpsIdentities.test.ts
+++ b/extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.test.ts
@@ -1,58 +1,57 @@
-import { isHostedAzureDevOps, resolveAzureDevOpsIdentities } from "../../task/utils/resolveAzureDevOpsIdentities";
-import { describe } from "node:test";
-import axios from "axios";
+import axios from 'axios';
+import { describe } from 'node:test';
+import { isHostedAzureDevOps, resolveAzureDevOpsIdentities } from './resolveAzureDevOpsIdentities';
-describe("isHostedAzureDevOps", () => {
- it("Old visualstudio url is hosted.", () => {
- const url = new URL("https://example.visualstudio.com/abc")
+describe('isHostedAzureDevOps', () => {
+ it('Old visualstudio url is hosted.', () => {
+ const url = new URL('https://example.visualstudio.com/abc');
const result = isHostedAzureDevOps(url);
expect(result).toBeTruthy();
});
- it("Dev Azure url is hosted.", () => {
- const url = new URL("https://dev.azure.com/example")
+ it('Dev Azure url is hosted.', () => {
+ const url = new URL('https://dev.azure.com/example');
const result = isHostedAzureDevOps(url);
expect(result).toBeTruthy();
});
- it("private url is not hosted.", () => {
- const url = new URL("https://tfs.example.com/tfs/Collection")
+ it('private url is not hosted.', () => {
+ const url = new URL('https://tfs.example.com/tfs/Collection');
const result = isHostedAzureDevOps(url);
expect(result).toBeFalsy();
});
});
-
jest.mock('axios');
const mockedAxios = axios as jest.Mocked<typeof axios>;
const aliceOnPrem = {
- id: "any id",
- email: "alice@example.com",
- providerDisplayName: "Alice"
-}
+ id: 'any id',
+ email: 'alice@example.com',
+ providerDisplayName: 'Alice',
+};
-const aliceHostedId = "any Id"
+const aliceHostedId = 'any Id';
const aliceHosted = {
- descriptor: "aad." + Buffer.from(aliceHostedId, 'utf8').toString('base64'),
- email: "alice@example.com",
- providerDisplayName: "Alice"
-}
+ descriptor: 'aad.' + Buffer.from(aliceHostedId, 'utf8').toString('base64'),
+ email: 'alice@example.com',
+ providerDisplayName: 'Alice',
+};
-describe("resolveAzureDevOpsIdentities", () => {
- it("No email input, is directly returned.", async () => {
- const url = new URL("https://example.visualstudio.com/abc")
+describe('resolveAzureDevOpsIdentities', () => {
+ it('No email input, is directly returned.', async () => {
+ const url = new URL('https://example.visualstudio.com/abc');
- const input = ["be9321e2-f404-4ffa-8d6b-44efddb04865"];
+ const input = ['be9321e2-f404-4ffa-8d6b-44efddb04865'];
const results = await resolveAzureDevOpsIdentities(url, input);
- const outputs = results.map(identity => identity.id);
+ const outputs = results.map((identity) => identity.id);
expect(outputs).toHaveLength(1);
expect(outputs).toContain(input[0]);
});
- it("successfully resolve id for azure devops server", async () => {
- const url = new URL("https://example.onprem.com/abc")
+ it('successfully resolve id for azure devops server', async () => {
+ const url = new URL('https://example.onprem.com/abc');
// Provide the data object to be returned
mockedAxios.get.mockResolvedValue({
@@ -66,13 +65,12 @@ describe("resolveAzureDevOpsIdentities", () => {
const input = [aliceOnPrem.email];
const results = await resolveAzureDevOpsIdentities(url, input);
- const outputs = results.map(identity => identity.id);
+ const outputs = results.map((identity) => identity.id);
expect(outputs).toHaveLength(1);
expect(outputs).toContain(aliceOnPrem.id);
});
- it("successfully resolve id for hosted azure devops", async () => {
- const url = new URL("https://dev.azure.com/exampleorganization")
-
+ it('successfully resolve id for hosted azure devops', async () => {
+ const url = new URL('https://dev.azure.com/exampleorganization');
// Provide the data object to be returned
mockedAxios.post.mockResolvedValue({
@@ -86,8 +84,8 @@ describe("resolveAzureDevOpsIdentities", () => {
const input = [aliceHosted.email];
const results = await resolveAzureDevOpsIdentities(url, input);
- const outputs = results.map(identity => identity.id);
+ const outputs = results.map((identity) => identity.id);
expect(outputs).toHaveLength(1);
expect(outputs).toContain(aliceHostedId);
});
-});
\ No newline at end of file
+});
diff --git a/extension/task/utils/resolveAzureDevOpsIdentities.ts b/extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.ts
similarity index 65%
rename from extension/task/utils/resolveAzureDevOpsIdentities.ts
rename to extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.ts
index 58ff56c0..08f03141 100644
--- a/extension/task/utils/resolveAzureDevOpsIdentities.ts
+++ b/extension/tasks/dependabotV1/utils/resolveAzureDevOpsIdentities.ts
@@ -1,37 +1,37 @@
-import * as tl from "azure-pipelines-task-lib/task";
-import axios from "axios";
-import extractOrganization from "./extractOrganization";
+import axios from 'axios';
+import * as tl from 'azure-pipelines-task-lib/task';
+import extractOrganization from './extractOrganization';
export interface IIdentity {
/**
* The identity id to use for PR reviewer or assignee Id.
*/
- id: string,
+ id: string;
/**
* Human readable Username.
*/
- displayName?: string,
+ displayName?: string;
/**
* The provided input to use for searching an identity.
*/
- input: string,
+ input: string;
}
/**
* Resolves the given input email addresses to an array of IIdentity information.
- * It also handles non email input, which is assumed to be already an identity id
+ * It also handles non email input, which is assumed to be already an identity id
* to pass as reviewer id to a PR.
- *
- * @param organizationUrl
- * @param inputs
- * @returns
+ *
+ * @param organizationUrl
+ * @param inputs
+ * @returns
*/
export async function resolveAzureDevOpsIdentities(organizationUrl: URL, inputs: string[]): Promise<IIdentity[]> {
const result: IIdentity[] = [];
tl.debug(`Attempting to fetch configuration file via REST API ...`);
for (const input of inputs) {
- if (input.indexOf("@") > 0 ) {
+ if (input.indexOf('@') > 0) {
// input is email to look-up
const identityInfo = await querySubject(organizationUrl, input);
if (identityInfo) {
@@ -39,7 +39,7 @@ export async function resolveAzureDevOpsIdentities(organizationUrl: URL, inputs:
}
} else {
// input is already identity id
- result.push({id: input, input: input});
+ result.push({ id: input, input: input });
}
}
return result;
@@ -55,20 +55,19 @@ export function isHostedAzureDevOps(uri: URL): boolean {
return hostname === 'dev.azure.com' || hostname.endsWith('.visualstudio.com');
}
-function decodeBase64(input: string):string {
+function decodeBase64(input: string): string {
return Buffer.from(input, 'base64').toString('utf8');
}
-function encodeBase64(input: string):string {
+function encodeBase64(input: string): string {
return Buffer.from(input, 'utf8').toString('base64');
}
-function isSuccessStatusCode(statusCode?: number) : boolean {
- return (statusCode >= 200) && (statusCode <= 299);
+function isSuccessStatusCode(statusCode?: number): boolean {
+ return statusCode >= 200 && statusCode <= 299;
}
async function querySubject(organizationUrl: URL, email: string): Promise<IIdentity | undefined> {
-
if (isHostedAzureDevOps(organizationUrl)) {
const organization: string = extractOrganization(organizationUrl.toString());
return await querySubjectHosted(organization, email);
@@ -79,9 +78,9 @@ async function querySubject(organizationUrl: URL, email: string): Promise {
const url = `${organizationUrl}_apis/identities?searchFilter=MailAddress&queryMembership=None&filterValue=${email}`;
@@ -89,8 +88,8 @@ async function querySubjectOnPrem(organizationUrl: URL, email: string): Promise<
try {
const response = await axios.get(url, {
headers: {
- Authorization: `Basic ${encodeBase64("PAT:" + tl.getVariable("System.AccessToken"))}`,
- Accept: "application/json;api-version=5.0",
+ Authorization: `Basic ${encodeBase64('PAT:' + tl.getVariable('System.AccessToken'))}`,
+ Accept: 'application/json;api-version=5.0',
},
});
@@ -98,16 +97,15 @@ async function querySubjectOnPrem(organizationUrl: URL, email: string): Promise<
return {
id: response.data.value[0]?.id,
displayName: response.data.value[0]?.providerDisplayName,
- input: email}
+ input: email,
+ };
}
} catch (error) {
const responseStatusCode = error?.response?.status;
- tl.debug(`HTTP Response Status: ${responseStatusCode}`)
+ tl.debug(`HTTP Response Status: ${responseStatusCode}`);
if (responseStatusCode > 400 && responseStatusCode < 500) {
- tl.debug(`Access token is ${tl.getVariable("System.AccessToken")?.length > 0 ? "not" : ""} null or empty.`);
- throw new Error(
- `The access token provided is empty or does not have permissions to access '${url}'`
- );
+ tl.debug(`Access token is ${tl.getVariable('System.AccessToken')?.length > 0 ? 'not' : ''} null or empty.`);
+ throw new Error(`The access token provided is empty or does not have permissions to access '${url}'`);
} else {
throw error;
}
@@ -127,39 +125,35 @@ async function querySubjectHosted(organization: string, email: string): Promise<
try {
const response = await axios.post(url, {
headers: {
- Authorization: `Basic ${encodeBase64("PAT:" + tl.getVariable("System.AccessToken"))}`,
- Accept: "application/json;api-version=6.0-preview.1",
- "Content-Type": "application/json",
+ 'Authorization': `Basic ${encodeBase64('PAT:' + tl.getVariable('System.AccessToken'))}`,
+ 'Accept': 'application/json;api-version=6.0-preview.1',
+ 'Content-Type': 'application/json',
},
data: {
- "query": email,
- "subjectKind": [ "User" ]
- }
+ query: email,
+ subjectKind: ['User'],
+ },
});
tl.debug(`Got Http Response: ${response.status}`);
- if(!isSuccessStatusCode(response.status) || response.data.value.length === 0) {
- throw new Error(
- 'Failed to resolve given email in organization'
- );
+ if (!isSuccessStatusCode(response.status) || response.data.value.length === 0) {
+ throw new Error('Failed to resolve given email in organization');
}
- const descriptor: string = response.data.value[0]?.descriptor || "";
- const id = decodeBase64(descriptor.substring(descriptor.indexOf(".") + 1))
+ const descriptor: string = response.data.value[0]?.descriptor || '';
+ const id = decodeBase64(descriptor.substring(descriptor.indexOf('.') + 1));
return {
id: id,
displayName: response.data.value[0]?.displayName,
- input: email
- }
+ input: email,
+ };
} catch (error) {
const responseStatusCode = error?.response?.status;
- tl.debug(`HTTP Response Status: ${responseStatusCode}`)
+ tl.debug(`HTTP Response Status: ${responseStatusCode}`);
if (responseStatusCode > 400 && responseStatusCode < 500) {
- tl.debug(`Access token is ${tl.getVariable("System.AccessToken")?.length > 0 ? "not" : ""} null or empty.`);
- throw new Error(
- `The access token provided is empty or does not have permissions to access '${url}'`
- );
+ tl.debug(`Access token is ${tl.getVariable('System.AccessToken')?.length > 0 ? 'not' : ''} null or empty.`);
+ throw new Error(`The access token provided is empty or does not have permissions to access '${url}'`);
} else {
throw error;
}
diff --git a/extension/tasks/dependabotV2/icon.png b/extension/tasks/dependabotV2/icon.png
new file mode 100644
index 00000000..ffa0fe7c
Binary files /dev/null and b/extension/tasks/dependabotV2/icon.png differ
diff --git a/extension/tasks/dependabotV2/index.ts b/extension/tasks/dependabotV2/index.ts
new file mode 100644
index 00000000..94c4c87f
--- /dev/null
+++ b/extension/tasks/dependabotV2/index.ts
@@ -0,0 +1,164 @@
+import { debug, error, setResult, TaskResult, warning, which } from 'azure-pipelines-task-lib/task';
+import { AzureDevOpsWebApiClient } from './utils/azure-devops/AzureDevOpsWebApiClient';
+import { DependabotCli } from './utils/dependabot-cli/DependabotCli';
+import { DependabotJobBuilder } from './utils/dependabot-cli/DependabotJobBuilder';
+import {
+ DependabotOutputProcessor,
+ parseProjectDependencyListProperty,
+ parsePullRequestProperties,
+} from './utils/dependabot-cli/DependabotOutputProcessor';
+import { IDependabotUpdate } from './utils/dependabot/interfaces/IDependabotConfig';
+import parseDependabotConfigFile from './utils/dependabot/parseConfigFile';
+import parseTaskInputConfiguration from './utils/getSharedVariables';
+
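+// Entry point for the V2 task: parses the task inputs and the dependabot.yml config, runs a Dependabot CLI
+// update job for each configured update, and processes the results into Azure DevOps pull requests.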
+async function run() {
+ let dependabot: DependabotCli = undefined;
+ let failedJobs: number = 0;
+ try {
+ // Check if required tools are installed
+ debug('Checking for `docker` install...');
+ which('docker', true);
+ debug('Checking for `go` install...');
+ which('go', true);
+
+ // Parse task input configuration
+ const taskInputs = parseTaskInputConfiguration();
+ if (!taskInputs) {
+ throw new Error('Failed to parse task input configuration');
+ }
+
+ // Parse dependabot.yaml configuration file
+ const dependabotConfig = await parseDependabotConfigFile(taskInputs);
+ if (!dependabotConfig) {
+ throw new Error('Failed to parse dependabot.yaml configuration file from the target repository');
+ }
+
+ // Initialise the DevOps API clients
+ // There are two clients; one for authoring pull requests and one for auto-approving pull requests (if configured)
+ const prAuthorClient = new AzureDevOpsWebApiClient(
+ taskInputs.organizationUrl.toString(),
+ taskInputs.systemAccessToken,
+ );
+ const prApproverClient = taskInputs.autoApprove
+ ? new AzureDevOpsWebApiClient(
+ taskInputs.organizationUrl.toString(),
+ taskInputs.autoApproveUserToken || taskInputs.systemAccessToken,
+ )
+ : null;
+
+ // Fetch the active pull requests created by the author user
+ const prAuthorActivePullRequests = await prAuthorClient.getActivePullRequestProperties(
+ taskInputs.project,
+ taskInputs.repository,
+ await prAuthorClient.getUserId(),
+ );
+
+ // Initialise the Dependabot updater
+ dependabot = new DependabotCli(
+ DependabotCli.CLI_IMAGE_LATEST, // TODO: Add config for this?
+ new DependabotOutputProcessor(taskInputs, prAuthorClient, prApproverClient, prAuthorActivePullRequests),
+ taskInputs.debug,
+ );
+
+ const dependabotUpdaterOptions = {
+ azureDevOpsAccessToken: taskInputs.systemAccessToken,
+ gitHubAccessToken: taskInputs.githubAccessToken,
+ collectorImage: undefined, // TODO: Add config for this?
+ proxyImage: undefined, // TODO: Add config for this?
+ updaterImage: undefined, // TODO: Add config for this?
+ flamegraph: taskInputs.debug,
+ };
+
+ // If update identifiers are specified, select them; otherwise handle all
+ let updates: IDependabotUpdate[] = [];
+ const targetIds = taskInputs.targetUpdateIds;
+ if (targetIds && targetIds.length > 0) {
+ for (const id of targetIds) {
+ updates.push(dependabotConfig.updates[id]);
+ }
+ } else {
+ updates = dependabotConfig.updates;
+ }
+
+ // Loop through the [targeted] update blocks in dependabot.yaml and perform updates
+ for (const update of updates) {
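+      // Use the zero-based position of the update block as the update/job identifier.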
+ const updateId = updates.indexOf(update).toString();
+
+ // Parse the last dependency list snapshot (if any) from the project properties.
+ // This is required when doing a security-only update as dependabot requires the list of vulnerable dependencies to be updated.
+ // Automatic discovery of vulnerable dependencies during a security-only update is not currently supported by dependabot-updater.
+ const dependencyList = parseProjectDependencyListProperty(
+ await prAuthorClient.getProjectProperties(taskInputs.projectId),
+ taskInputs.repository,
+ update['package-ecosystem'],
+ );
+
+ // Parse the Dependabot metadata for the existing pull requests that are related to this update
+ // Dependabot will use this to determine if we need to create new pull requests or update/close existing ones
+ const existingPullRequests = parsePullRequestProperties(prAuthorActivePullRequests, update['package-ecosystem']);
+ const existingPullRequestDependencies = Object.entries(existingPullRequests).map(([id, deps]) => deps);
+
+ // Run an update job for "all dependencies"; this will create new pull requests for dependencies that need updating
+ const allDependenciesJob = DependabotJobBuilder.newUpdateAllJob(
+ taskInputs,
+ updateId,
+ update,
+ dependabotConfig.registries,
+ dependencyList?.['dependencies'],
+ existingPullRequestDependencies,
+ );
+ const allDependenciesUpdateOutputs = await dependabot.update(allDependenciesJob, dependabotUpdaterOptions);
+ if (!allDependenciesUpdateOutputs || allDependenciesUpdateOutputs.filter((u) => !u.success).length > 0) {
+ allDependenciesUpdateOutputs?.filter((u) => !u.success)?.forEach((u) => exception(u.error));
+ failedJobs++;
+ }
+
+ // If there are existing pull requests, run an update job for each one; this will resolve merge conflicts and close pull requests that are no longer needed
+ const numberOfPullRequestsToUpdate = Object.keys(existingPullRequests).length;
+ if (numberOfPullRequestsToUpdate > 0) {
+ if (!taskInputs.skipPullRequests) {
+ for (const pullRequestId in existingPullRequests) {
+ const updatePullRequestJob = DependabotJobBuilder.newUpdatePullRequestJob(
+ taskInputs,
+ pullRequestId,
+ update,
+ dependabotConfig.registries,
+ existingPullRequestDependencies,
+ existingPullRequests[pullRequestId],
+ );
+ const updatePullRequestOutputs = await dependabot.update(updatePullRequestJob, dependabotUpdaterOptions);
+ if (!updatePullRequestOutputs || updatePullRequestOutputs.filter((u) => !u.success).length > 0) {
+ updatePullRequestOutputs?.filter((u) => !u.success)?.forEach((u) => exception(u.error));
+ failedJobs++;
+ }
+ }
+ } else {
+ warning(
+ `Skipping update of ${numberOfPullRequestsToUpdate} existing pull request(s) as 'skipPullRequests' is set to 'true'`,
+ );
+ }
+ }
+ }
+
+ setResult(
+ failedJobs ? TaskResult.Failed : TaskResult.Succeeded,
+ failedJobs
+ ? `${failedJobs} update job(s) failed, check logs for more information`
+ : `All update jobs completed successfully`,
+ );
+ } catch (e) {
+ setResult(TaskResult.Failed, e?.message);
+ exception(e);
+ } finally {
+ dependabot?.cleanup();
+ }
+}
+
+function exception(e: Error) {
+ if (e) {
+ error(`An unhandled exception occurred: ${e}`);
+ console.debug(e); // Dump the stack trace to help with debugging
+ }
+}
+
+run();
diff --git a/extension/tasks/dependabotV2/task.json b/extension/tasks/dependabotV2/task.json
new file mode 100644
index 00000000..0909dea7
--- /dev/null
+++ b/extension/tasks/dependabotV2/task.json
@@ -0,0 +1,239 @@
+{
+ "$schema": "https://raw.githubusercontent.com/Microsoft/azure-pipelines-task-lib/master/tasks.schema.json",
+ "id": "d98b873d-cf18-41eb-8ff5-234f14697896",
+ "name": "dependabot",
+ "friendlyName": "Dependabot",
+ "description": "Automatically update dependencies and vulnerabilities in your code using [Dependabot CLI](https://github.com/dependabot/cli)",
+ "helpMarkDown": "For help please visit https://github.com/tinglesoftware/dependabot-azure-devops/issues",
+ "helpUrl": "https://github.com/tinglesoftware/dependabot-azure-devops/issues",
+ "releaseNotes": "https://github.com/tinglesoftware/dependabot-azure-devops/releases",
+ "category": "Utility",
+ "visibility": ["Build", "Release"],
+ "runsOn": ["Agent", "DeploymentGroup"],
+ "author": "Tingle Software",
+ "demands": [],
+ "version": {
+ "Major": 2,
+ "Minor": 0,
+ "Patch": 0
+ },
+ "preview": true,
+ "instanceNameFormat": "Dependabot",
+ "minimumAgentVersion": "3.232.1",
+ "groups": [
+ {
+ "name": "pull_requests",
+ "displayName": "Pull request options",
+ "isExpanded": true
+ },
+ {
+ "name": "security_updates",
+ "displayName": "Security advisories and vulnerabilities",
+ "isExpanded": false
+ },
+ {
+ "name": "devops",
+ "displayName": "Azure DevOps authentication",
+ "isExpanded": false
+ },
+ {
+ "name": "github",
+ "displayName": "GitHub authentication",
+ "isExpanded": false
+ },
+ {
+ "name": "advanced",
+ "displayName": "Advanced",
+ "isExpanded": false
+ }
+ ],
+ "inputs": [
+ {
+ "name": "skipPullRequests",
+ "type": "boolean",
+ "groupName": "pull_requests",
+ "label": "Skip creation and updating of pull requests.",
+ "defaultValue": false,
+ "required": false,
+ "helpMarkDown": "When set to `true` the logic to update the dependencies is executed but the actual Pull Requests are not created/updated. Defaults to `false`."
+ },
+ {
+ "name": "abandonUnwantedPullRequests",
+ "type": "boolean",
+ "groupName": "pull_requests",
+ "label": "Abandon unwanted pull requests.",
+ "defaultValue": false,
+ "required": false,
+ "helpMarkDown": "When set to `true` pull requests that are no longer needed are closed at the tail end of the execution. Defaults to `false`."
+ },
+ {
+ "name": "commentPullRequests",
+ "type": "boolean",
+ "groupName": "pull_requests",
+ "label": "Comment on abandoned pull requests with close reason.",
+ "defaultValue": false,
+ "required": false,
+      "helpMarkDown": "When set to `true`, a comment will be added to abandoned pull requests explaining why they were closed. Defaults to `false`.",
+ "visibleRule": "abandonUnwantedPullRequests=true"
+ },
+ {
+ "name": "setAutoComplete",
+ "type": "boolean",
+ "groupName": "pull_requests",
+ "label": "Auto-complete pull requests when all policies pass",
+ "defaultValue": false,
+ "required": false,
+ "helpMarkDown": "When set to `true`, pull requests that pass all policies will be merged automatically. Defaults to `false`."
+ },
+ {
+ "name": "mergeStrategy",
+ "type": "pickList",
+ "groupName": "pull_requests",
+      "label": "Auto-complete merge strategy",
+ "defaultValue": "squash",
+ "required": true,
+ "helpMarkDown": "The merge strategy to use. Learn more [here](https://learn.microsoft.com/en-us/rest/api/azure/devops/git/pull-requests/update?view=azure-devops-rest-5.1&tabs=HTTP#gitpullrequestmergestrategy).",
+ "options": {
+ "noFastForward": "No fast forward",
+ "rebase": "Rebase",
+ "rebaseMerge": "Rebase merge",
+ "squash": "Squash"
+ },
+ "visibleRule": "setAutoComplete=true"
+ },
+ {
+ "name": "autoCompleteIgnoreConfigIds",
+ "type": "string",
+ "groupName": "pull_requests",
+ "label": "Semicolon delimited list of any policy configuration IDs which auto-complete should not wait for.",
+ "defaultValue": "",
+ "required": false,
+ "helpMarkDown": "A semicolon (`;`) delimited list of any policy configuration IDs which auto-complete should not wait for. Only applies to optional policies (isBlocking == false). Auto-complete always waits for required policies (isBlocking == true).",
+ "visibleRule": "setAutoComplete=true"
+ },
+ {
+ "name": "autoApprove",
+ "type": "boolean",
+ "groupName": "pull_requests",
+ "label": "Auto-approve pull requests",
+ "defaultValue": false,
+ "required": false,
+ "helpMarkDown": "When set to `true`, pull requests will automatically be approved by the specified user. Defaults to `false`."
+ },
+ {
+ "name": "autoApproveUserToken",
+ "type": "string",
+ "groupName": "pull_requests",
+ "label": "A personal access token of the user that should approve the PR.",
+ "defaultValue": "",
+ "required": false,
+      "helpMarkDown": "A personal access token of the user that shall be used to approve the created PR automatically. If the same user that creates the PR should also approve it, this can be left empty. This won't work with the build service account!",
+ "visibleRule": "autoApprove=true"
+ },
+ {
+ "name": "authorEmail",
+ "type": "string",
+ "groupName": "pull_requests",
+      "label": "Git commit author email address",
+ "defaultValue": "",
+ "required": false,
+ "helpMarkDown": "The email address to use for the change commit author. Can be used to associate the committer with an existing account, to provide a profile picture. Defaults to `noreply@github.com`."
+ },
+ {
+ "name": "authorName",
+ "type": "string",
+ "groupName": "pull_requests",
+ "label": "Git commit author name",
+ "defaultValue": "",
+ "required": false,
+ "helpMarkDown": "The name to use as the git commit author of the pull requests. Defaults to `dependabot[bot]`."
+ },
+
+ {
+ "name": "securityAdvisoriesFile",
+ "type": "string",
+ "label": "Path for the file containing security advisories in JSON format.",
+ "groupName": "security_updates",
+ "helpMarkDown": "The file containing security advisories.",
+ "required": false
+ },
+
+ {
+ "name": "azureDevOpsServiceConnection",
+ "type": "connectedService:Externaltfs",
+ "groupName": "devops",
+ "label": "Azure DevOps Service Connection to use.",
+ "required": false,
+ "helpMarkDown": "Specify a service connection to use, if you want to use a different service principal than the default to create your PRs."
+ },
+ {
+ "name": "azureDevOpsAccessToken",
+ "type": "string",
+ "groupName": "devops",
+ "label": "Azure DevOps Personal Access Token.",
+ "required": false,
+ "helpMarkDown": "The Personal Access Token for accessing Azure DevOps repositories. Supply a value here to avoid using permissions for the Build Service either because you cannot change its permissions or because you prefer that the Pull Requests be done by a different user. Use this in place of `azureDevOpsServiceConnection` such as when it is not possible to create a service connection."
+ },
+
+ {
+ "name": "gitHubConnection",
+ "type": "connectedService:github:OAuth,PersonalAccessToken,InstallationToken,Token",
+ "groupName": "github",
+ "label": "GitHub connection (OAuth or PAT)",
+ "defaultValue": "",
+ "required": false,
+ "helpMarkDown": "Specify the name of the GitHub service connection to use to connect to the GitHub repositories. The connection must be based on a GitHub user's OAuth or a GitHub personal access token. Learn more about service connections [here](https://aka.ms/AA3am5s)."
+ },
+ {
+ "name": "gitHubAccessToken",
+ "type": "string",
+ "groupName": "github",
+ "label": "GitHub Personal Access Token.",
+ "defaultValue": "",
+ "required": false,
+ "helpMarkDown": "The raw Personal Access Token for accessing GitHub repositories. Use this in place of `gitHubConnection` such as when it is not possible to create a service connection."
+ },
+
+ {
+ "name": "storeDependencyList",
+ "type": "boolean",
+ "groupName": "advanced",
+ "label": "Monitor the discovered dependencies",
+ "defaultValue": false,
+ "required": false,
+      "helpMarkDown": "Determines whether the last known dependency list information should be stored in the parent DevOps project properties. If enabled, the authenticated user must have the `Project & Team (Write)` permission for the project. Enabling this option improves performance when doing security-only updates. Defaults to `false`."
+ },
+ {
+ "name": "targetRepositoryName",
+ "type": "string",
+ "groupName": "advanced",
+ "label": "Target Repository Name",
+ "required": false,
+ "helpMarkDown": "The name of the repository to target for processing. If this value is not supplied then the Build Repository Name is used. Supplying this value allows creation of a single pipeline that runs Dependabot against multiple repositories."
+ },
+ {
+ "name": "targetUpdateIds",
+ "type": "string",
+ "groupName": "advanced",
+ "label": "Semicolon delimited list of update identifiers to run.",
+ "defaultValue": "",
+ "required": false,
+      "helpMarkDown": "A semicolon (`;`) delimited list of update identifiers to run. Indices are zero-based and follow the order written in the configuration file. When not present, all the updates are run. This is meant for scenarios where you want to run updates at different times from the same configuration file, given that you cannot schedule them independently in the pipeline."
+ },
+ {
+ "name": "experiments",
+ "type": "string",
+ "groupName": "advanced",
+ "label": "Dependabot updater experiments",
+ "required": false,
+      "helpMarkDown": "Comma-separated list of key/value pairs representing the enabled Dependabot experiments, e.g. `experiments: 'tidy=true,vendor=true,goprivate=*'`. Available options vary depending on the package ecosystem. See [configuring experiments](https://github.com/tinglesoftware/dependabot-azure-devops/#configuring-experiments) for more details."
+ }
+ ],
+ "dataSourceBindings": [],
+ "execution": {
+ "Node20_1": {
+ "target": "index.js",
+ "argumentFormat": ""
+ }
+ }
+}
diff --git a/extension/tasks/dependabotV2/utils/azure-devops/AzureDevOpsWebApiClient.ts b/extension/tasks/dependabotV2/utils/azure-devops/AzureDevOpsWebApiClient.ts
new file mode 100644
index 00000000..a01362c2
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/azure-devops/AzureDevOpsWebApiClient.ts
@@ -0,0 +1,558 @@
+import { WebApi, getPersonalAccessTokenHandler } from 'azure-devops-node-api';
+import {
+ CommentThreadStatus,
+ CommentType,
+ IdentityRefWithVote,
+ ItemContentType,
+ PullRequestAsyncStatus,
+ PullRequestStatus,
+} from 'azure-devops-node-api/interfaces/GitInterfaces';
+import { error, warning } from 'azure-pipelines-task-lib/task';
+import { IFileChange } from './interfaces/IFileChange';
+import { IPullRequest } from './interfaces/IPullRequest';
+import { IPullRequestProperties } from './interfaces/IPullRequestProperties';
+import { resolveAzureDevOpsIdentities } from './resolveAzureDevOpsIdentities';
+
+/**
+ * Wrapper for DevOps WebApi client with helper methods for easier management of dependabot pull requests
+ */
+export class AzureDevOpsWebApiClient {
+ private readonly organisationApiUrl: string;
+ private readonly accessToken: string;
+ private readonly connection: WebApi;
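+  // Cache of resolved user ids, keyed by email address (or by the access token for the authenticated user).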
+  private cachedUserIds: Record<string, string>;
+
+ constructor(organisationApiUrl: string, accessToken: string) {
+ this.organisationApiUrl = organisationApiUrl;
+ this.accessToken = accessToken;
+ this.connection = new WebApi(organisationApiUrl, getPersonalAccessTokenHandler(accessToken));
+ this.cachedUserIds = {};
+ }
+
+ /**
+ * Get the identity of a user by email address. If no email is provided, the identity of the authenticated user is returned.
+ * @param email
+ * @returns
+ */
+  public async getUserId(email?: string): Promise<string> {
+ // If no email is provided, resolve to the authenticated user
+ if (!email) {
+ this.cachedUserIds[this.accessToken] ||= (await this.connection.connect())?.authenticatedUser?.id || '';
+ return this.cachedUserIds[this.accessToken];
+ }
+
+ // Otherwise, do a cached identity lookup of the supplied email address
+ // TODO: When azure-devops-node-api supports Graph API, use that instead of the REST API
+ else if (!this.cachedUserIds[email]) {
+ const identities = await resolveAzureDevOpsIdentities(new URL(this.organisationApiUrl), [email]);
+ identities.forEach((i) => (this.cachedUserIds[i.input] ||= i.id));
+ }
+
+ return this.cachedUserIds[email];
+ }
+
+ /**
+ * Get the default branch for a repository
+ * @param project
+ * @param repository
+ * @returns
+ */
+  public async getDefaultBranch(project: string, repository: string): Promise<string | undefined> {
+ try {
+ const git = await this.connection.getGitApi();
+ const repo = await git.getRepository(repository, project);
+ if (!repo) {
+ throw new Error(`Repository '${project}/${repository}' not found`);
+ }
+
+      // Strip the 'refs/heads/' prefix from the branch name; the caller doesn't need to know about it
+ return repo.defaultBranch?.replace(/^refs\/heads\//i, '');
+ } catch (e) {
+ error(`Failed to get default branch for '${project}/${repository}': ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return undefined;
+ }
+ }
+
+ /**
+ * Get the properties for all active pull request created by the supplied user
+ * @param project
+ * @param repository
+ * @param creator
+ * @returns
+ */
+ public async getActivePullRequestProperties(
+ project: string,
+ repository: string,
+ creator: string,
+  ): Promise<IPullRequestProperties[]> {
+ try {
+ const git = await this.connection.getGitApi();
+ const pullRequests = await git.getPullRequests(
+ repository,
+ {
+ creatorId: isGuid(creator) ? creator : await this.getUserId(creator),
+ status: PullRequestStatus.Active,
+ },
+ project,
+ );
+ if (!pullRequests || pullRequests.length === 0) {
+ return [];
+ }
+
+ return await Promise.all(
+ pullRequests.map(async (pr) => {
+ const properties = (await git.getPullRequestProperties(repository, pr.pullRequestId, project))?.value || {};
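+          // Flatten the DevOps property bag into simple name/value pairs; '$value' holds the stored property value.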
+ return {
+ id: pr.pullRequestId,
+ properties:
+ Object.keys(properties).map((key) => {
+ return {
+ name: key,
+ value: properties[key]?.$value,
+ };
+ }) || [],
+ };
+ }),
+ );
+ } catch (e) {
+ error(`Failed to list active pull request properties: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return [];
+ }
+ }
+
+ /**
+ * Create a new pull request
+ * @param pr
+ * @returns
+ */
+  public async createPullRequest(pr: IPullRequest): Promise<number | null> {
+ console.info(`Creating pull request '${pr.title}'...`);
+ try {
+ const userId = await this.getUserId();
+ const git = await this.connection.getGitApi();
+
+ // Create the source branch and commit the file changes
+ console.info(` - Pushing ${pr.changes.length} change(s) to branch '${pr.source.branch}'...`);
+ const push = await git.createPush(
+ {
+ refUpdates: [
+ {
+ name: `refs/heads/${pr.source.branch}`,
+ oldObjectId: pr.source.commit,
+ },
+ ],
+ commits: [
+ {
+ comment: pr.commitMessage,
+ author: pr.author,
+ changes: pr.changes.map((change) => {
+ return {
+ changeType: change.changeType,
+ item: {
+ path: normalizeDevOpsPath(change.path),
+ },
+ newContent: {
+ content: Buffer.from(change.content, change.encoding).toString('base64'),
+ contentType: ItemContentType.Base64Encoded,
+ },
+ };
+ }),
+ },
+ ],
+ },
+ pr.repository,
+ pr.project,
+ );
+
+ // Build the list of the pull request reviewers
+ // NOTE: Azure DevOps does not have a concept of assignees, only reviewers.
+ // We treat assignees as required reviewers and all other reviewers as optional.
+ const allReviewers: IdentityRefWithVote[] = [];
+ if (pr.assignees?.length > 0) {
+ for (const assignee of pr.assignees) {
+ const identityId = isGuid(assignee) ? assignee : await this.getUserId(assignee);
+ if (identityId) {
+ allReviewers.push({
+ id: identityId,
+ isRequired: true,
+ isFlagged: true,
+ });
+ } else {
+ warning(` - Unable to resolve assignee identity '${assignee}'`);
+ }
+ }
+ }
+ if (pr.reviewers?.length > 0) {
+ for (const reviewer of pr.reviewers) {
+ const identityId = isGuid(reviewer) ? reviewer : await this.getUserId(reviewer);
+ if (identityId) {
+ allReviewers.push({
+ id: identityId,
+ });
+ } else {
+ warning(` - Unable to resolve reviewer identity '${reviewer}'`);
+ }
+ }
+ }
+
+ // Create the pull request
+ console.info(` - Creating pull request to merge '${pr.source.branch}' into '${pr.target.branch}'...`);
+ const pullRequest = await git.createPullRequest(
+ {
+ sourceRefName: `refs/heads/${pr.source.branch}`,
+ targetRefName: `refs/heads/${pr.target.branch}`,
+ title: pr.title,
+ description: pr.description,
+ reviewers: allReviewers,
+ workItemRefs: pr.workItems?.map((id) => {
+ return { id: id };
+ }),
+ labels: pr.labels?.map((label) => {
+ return { name: label };
+ }),
+ isDraft: false, // TODO: Add config for this?
+ },
+ pr.repository,
+ pr.project,
+ true,
+ );
+
+ // Add the pull request properties
+ if (pr.properties?.length > 0) {
+ console.info(` - Adding dependency metadata to pull request properties...`);
+ await git.updatePullRequestProperties(
+ null,
+ pr.properties.map((property) => {
+ return {
+ op: 'add',
+ path: '/' + property.name,
+ value: property.value,
+ };
+ }),
+ pr.repository,
+ pullRequest.pullRequestId,
+ pr.project,
+ );
+ }
+
+ // TODO: Upload the pull request description as a 'changes.md' file attachment?
+ // This might be a way to work around the 4000 character limit for PR descriptions, but needs more investigation.
+ // https://learn.microsoft.com/en-us/rest/api/azure/devops/git/pull-request-attachments/create?view=azure-devops-rest-7.1
+
+ // Set the pull request auto-complete status
+ if (pr.autoComplete) {
+ console.info(` - Setting auto-complete...`);
+ await git.updatePullRequest(
+ {
+ autoCompleteSetBy: {
+ id: userId,
+ },
+ completionOptions: {
+ autoCompleteIgnoreConfigIds: pr.autoComplete.ignorePolicyConfigIds,
+ deleteSourceBranch: true,
+ mergeCommitMessage: mergeCommitMessage(pullRequest.pullRequestId, pr.title, pr.description),
+ mergeStrategy: pr.autoComplete.mergeStrategy,
+ transitionWorkItems: false,
+ },
+ },
+ pr.repository,
+ pullRequest.pullRequestId,
+ pr.project,
+ );
+ }
+
+ console.info(` - Pull request #${pullRequest.pullRequestId} was created successfully.`);
+ return pullRequest.pullRequestId;
+ } catch (e) {
+ error(`Failed to create pull request: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return null;
+ }
+ }
+
+ /**
+ * Update a pull request
+ * @param options
+ * @returns
+ */
+ public async updatePullRequest(options: {
+ project: string;
+ repository: string;
+ pullRequestId: number;
+ changes: IFileChange[];
+ skipIfCommitsFromUsersOtherThan?: string;
+ skipIfNoConflicts?: boolean;
+ }): Promise<boolean> {
+ console.info(`Updating pull request #${options.pullRequestId}...`);
+ try {
+ const userId = await this.getUserId();
+ const git = await this.connection.getGitApi();
+
+ // Get the pull request details
+ const pullRequest = await git.getPullRequest(options.repository, options.pullRequestId, options.project);
+ if (!pullRequest) {
+ throw new Error(`Pull request #${options.pullRequestId} not found`);
+ }
+
+ // Skip if no merge conflicts
+ if (options.skipIfNoConflicts && pullRequest.mergeStatus !== PullRequestAsyncStatus.Conflicts) {
+ console.info(` - Skipping update as pull request has no merge conflicts.`);
+ return true;
+ }
+
+ // Skip if the pull request has been modified by another user
+ const commits = await git.getPullRequestCommits(options.repository, options.pullRequestId, options.project);
+ if (
+ options.skipIfCommitsFromUsersOtherThan &&
+ commits.some((c) => c.author?.email !== options.skipIfCommitsFromUsersOtherThan)
+ ) {
+ console.info(` - Skipping update as pull request has been modified by another user.`);
+ return true;
+ }
+
+ // Push changes to the source branch
+ console.info(` - Pushing ${options.changes.length} change(s) to branch '${pullRequest.sourceRefName}'...`);
+ const push = await git.createPush(
+ {
+ refUpdates: [
+ {
+ name: pullRequest.sourceRefName,
+ oldObjectId: pullRequest.lastMergeSourceCommit.commitId,
+ },
+ ],
+ commits: [
+ {
+ comment:
+ pullRequest.mergeStatus === PullRequestAsyncStatus.Conflicts
+ ? 'Resolve merge conflicts'
+ : 'Update dependency files',
+ changes: options.changes.map((change) => {
+ return {
+ changeType: change.changeType,
+ item: {
+ path: normalizeDevOpsPath(change.path),
+ },
+ newContent: {
+ content: Buffer.from(change.content, change.encoding).toString('base64'),
+ contentType: ItemContentType.Base64Encoded,
+ },
+ };
+ }),
+ },
+ ],
+ },
+ options.repository,
+ options.project,
+ );
+
+ console.info(` - Pull request #${options.pullRequestId} was updated successfully.`);
+ return true;
+ } catch (e) {
+ error(`Failed to update pull request: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return false;
+ }
+ }
+
+ /**
+ * Approve a pull request
+ * @param options
+ * @returns
+ */
+ public async approvePullRequest(options: {
+ project: string;
+ repository: string;
+ pullRequestId: number;
+ }): Promise<boolean> {
+ console.info(`Approving pull request #${options.pullRequestId}...`);
+ try {
+ const userId = await this.getUserId();
+ const git = await this.connection.getGitApi();
+
+ // Approve the pull request
+ console.info(` - Creating reviewer vote on pull request...`);
+ await git.createPullRequestReviewer(
+ {
+ vote: 10, // 10=approved; 5=approved with suggestions; 0=no vote; -5=waiting for author; -10=rejected
+ isReapprove: true,
+ },
+ options.repository,
+ options.pullRequestId,
+ userId,
+ options.project,
+ );
+
+ console.info(` - Pull request #${options.pullRequestId} was approved.`);
+ return true;
+ } catch (e) {
+ error(`Failed to approve pull request: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return false;
+ }
+ }
+
+ /**
+ * Close a pull request
+ * @param options
+ * @returns
+ */
+ public async closePullRequest(options: {
+ project: string;
+ repository: string;
+ pullRequestId: number;
+ comment: string;
+ deleteSourceBranch: boolean;
+ }): Promise<boolean> {
+ console.info(`Closing pull request #${options.pullRequestId}...`);
+ try {
+ const userId = await this.getUserId();
+ const git = await this.connection.getGitApi();
+
+ // Add a comment to the pull request, if supplied
+ if (options.comment) {
+ console.info(` - Adding comment to pull request...`);
+ await git.createThread(
+ {
+ status: CommentThreadStatus.Closed,
+ comments: [
+ {
+ author: {
+ id: userId,
+ },
+ content: options.comment,
+ commentType: CommentType.System,
+ },
+ ],
+ },
+ options.repository,
+ options.pullRequestId,
+ options.project,
+ );
+ }
+
+ // Close the pull request
+ console.info(` - Abandoning pull request...`);
+ const pullRequest = await git.updatePullRequest(
+ {
+ status: PullRequestStatus.Abandoned,
+ closedBy: {
+ id: userId,
+ },
+ },
+ options.repository,
+ options.pullRequestId,
+ options.project,
+ );
+
+ // Delete the source branch if required
+ if (options.deleteSourceBranch) {
+ console.info(` - Deleting source branch...`);
+ await git.updateRef(
+ {
+ name: pullRequest.sourceRefName, // sourceRefName already contains the 'refs/heads/' prefix
+ oldObjectId: pullRequest.lastMergeSourceCommit.commitId,
+ newObjectId: '0000000000000000000000000000000000000000',
+ isLocked: false,
+ },
+ options.repository,
+ '',
+ options.project,
+ );
+ }
+
+ console.info(` - Pull request #${options.pullRequestId} was closed successfully.`);
+ return true;
+ } catch (e) {
+ error(`Failed to close pull request: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return false;
+ }
+ }
+
+ /**
+ * Get project properties
+ * @param projectId
+ * @returns
+ */
+ public async getProjectProperties(projectId: string): Promise<Record<string, string> | undefined> {
+ try {
+ const core = await this.connection.getCoreApi();
+ const properties = await core.getProjectProperties(projectId);
+ return properties?.map((p) => ({ [p.name]: p.value }))?.reduce((a, b) => ({ ...a, ...b }), {});
+ } catch (e) {
+ error(`Failed to get project properties: ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ return undefined;
+ }
+ }
+
+ /**
+ * Update a project property
+ * @param projectId
+ * @param name
+ * @param valueBuilder
+ * @returns
+ */
+ public async updateProjectProperty(
+ projectId: string,
+ name: string,
+ valueBuilder: (existingValue: string) => string,
+ ): Promise<void> {
+ try {
+ // Get the existing project property value
+ const core = await this.connection.getCoreApi();
+ const properties = await core.getProjectProperties(projectId);
+ const propertyValue = properties?.find((p) => p.name === name)?.value;
+
+ // Update the project property
+ await core.setProjectProperties(undefined, projectId, [
+ {
+ op: 'add',
+ path: '/' + name,
+ value: valueBuilder(propertyValue || ''),
+ },
+ ]);
+ } catch (e) {
+ error(`Failed to update project property '${name}': ${e}`);
+ console.debug(e); // Dump the error stack trace to help with debugging
+ }
+ }
+}
+
+function normalizeDevOpsPath(path: string): string {
+ // Convert backslashes to forward slashes, convert './' => '/', and ensure the path starts with a forward slash if it doesn't already; this is how DevOps paths are formatted.
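+ // e.g. (illustrative) 'src\project.csproj' => '/src/project.csproj' and './package.json' => '/package.json'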
+ return path
+ .replace(/\\/g, '/')
+ .replace(/^\.\//, '/')
+ .replace(/^([^/])/, '/$1');
+}
+
+function mergeCommitMessage(id: number, title: string, description: string): string {
+ //
+ // The merge commit message should contain the PR number and title for tracking.
+ // This is the default behaviour in Azure DevOps.
+ // Example:
+ // Merged PR 24093: Bump Tingle.Extensions.Logging.LogAnalytics from 3.4.2-ci0005 to 3.4.2-ci0006
+ //
+ // Bumps [Tingle.Extensions.Logging.LogAnalytics](...) from 3.4.2-ci0005 to 3.4.2-ci0006
+ // - [Release notes](....)
+ // - [Changelog](....)
+ // - [Commits](....)
+ //
+ // There appears to be a DevOps bug when setting "completeOptions" with a "mergeCommitMessage" even when truncated to 4000 characters.
+ // The error message is:
+ // Invalid argument value.
+ // Parameter name: Completion options have exceeded the maximum encoded length (4184/4000)
+ //
+ // The effective limit seems to be about 3500 characters:
+ // https://developercommunity.visualstudio.com/t/raise-the-character-limit-for-pull-request-descrip/365708#T-N424531
+ //
+ return `Merged PR ${id}: ${title}\n\n${description}`.slice(0, 3500);
+}
+
+function isGuid(guid: string): boolean {
+ const regex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;
+ return regex.test(guid);
+}
diff --git a/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IFileChange.ts b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IFileChange.ts
new file mode 100644
index 00000000..5d6ccbef
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IFileChange.ts
@@ -0,0 +1,11 @@
+import { VersionControlChangeType } from 'azure-devops-node-api/interfaces/TfvcInterfaces';
+
+/**
+ * File change
+ */
+export interface IFileChange {
+ changeType: VersionControlChangeType;
+ path: string;
+ content: string;
+ encoding: string;
+}
diff --git a/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequest.ts b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequest.ts
new file mode 100644
index 00000000..aaa21a02
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequest.ts
@@ -0,0 +1,37 @@
+import { GitPullRequestMergeStrategy } from 'azure-devops-node-api/interfaces/GitInterfaces';
+import { IFileChange } from './IFileChange';
+
+/**
+ * Pull request creation
+ */
+export interface IPullRequest {
+ project: string;
+ repository: string;
+ source: {
+ commit: string;
+ branch: string;
+ };
+ target: {
+ branch: string;
+ };
+ author?: {
+ email: string;
+ name: string;
+ };
+ title: string;
+ description: string;
+ commitMessage: string;
+ autoComplete?: {
+ ignorePolicyConfigIds?: number[];
+ mergeStrategy?: GitPullRequestMergeStrategy;
+ };
+ assignees?: string[];
+ reviewers?: string[];
+ labels?: string[];
+ workItems?: string[];
+ changes: IFileChange[];
+ properties?: {
+ name: string;
+ value: string;
+ }[];
+}
diff --git a/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequestProperties.ts b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequestProperties.ts
new file mode 100644
index 00000000..2fd2e6fb
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/azure-devops/interfaces/IPullRequestProperties.ts
@@ -0,0 +1,10 @@
+/**
+ * Pull request properties
+ */
+export interface IPullRequestProperties {
+ id: number;
+ properties?: {
+ name: string;
+ value: string;
+ }[];
+}
diff --git a/extension/tasks/dependabotV2/utils/azure-devops/resolveAzureDevOpsIdentities.ts b/extension/tasks/dependabotV2/utils/azure-devops/resolveAzureDevOpsIdentities.ts
new file mode 100644
index 00000000..77e0a617
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/azure-devops/resolveAzureDevOpsIdentities.ts
@@ -0,0 +1,161 @@
+import axios from 'axios';
+import * as tl from 'azure-pipelines-task-lib/task';
+import extractOrganization from '../extractOrganization';
+
+export interface IIdentity {
+ /**
+ * The identity id to use for PR reviewer or assignee Id.
+ */
+ id: string;
+ /**
+ * Human readable Username.
+ */
+ displayName?: string;
+ /**
+ * The provided input to use for searching an identity.
+ */
+ input: string;
+}
+
+/**
+ * Resolves the given input email addresses to an array of IIdentity information.
+ * It also handles non-email input, which is assumed to already be an identity id
+ * that can be passed directly as a reviewer id on a PR.
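+ * e.g. (illustrative) inputs ['jane@contoso.com', 'c27a5b8e-1234-...'] would resolve the email via the
+ * identity API and pass the second value through unchanged as an identity id.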
+ *
+ * @param organizationUrl
+ * @param inputs
+ * @returns
+ */
+export async function resolveAzureDevOpsIdentities(organizationUrl: URL, inputs: string[]): Promise<IIdentity[]> {
+ const result: IIdentity[] = [];
+
+ tl.debug(`Attempting to resolve identities via the REST API ...`);
+ for (const input of inputs) {
+ if (input.indexOf('@') > 0) {
+ // input is email to look-up
+ const identityInfo = await querySubject(organizationUrl, input);
+ if (identityInfo) {
+ result.push(identityInfo);
+ }
+ } else {
+ // input is already identity id
+ result.push({ id: input, input: input });
+ }
+ }
+ return result;
+}
+
+/**
+ * Returns whether the extension is run in a hosted environment (as opposed to an on-premise environment).
+ * In Azure DevOps terms, hosted environment is also known as "Azure DevOps Services" and on-premise environment is known as
+ * "Team Foundation Server" or "Azure DevOps Server".
+ */
+export function isHostedAzureDevOps(uri: URL): boolean {
+ const hostname = uri.hostname.toLowerCase();
+ return hostname === 'dev.azure.com' || hostname.endsWith('.visualstudio.com');
+}
+
+function decodeBase64(input: string): string {
+ return Buffer.from(input, 'base64').toString('utf8');
+}
+
+function encodeBase64(input: string): string {
+ return Buffer.from(input, 'utf8').toString('base64');
+}
+
+function isSuccessStatusCode(statusCode?: number): boolean {
+ return statusCode >= 200 && statusCode <= 299;
+}
+
+async function querySubject(organizationUrl: URL, email: string): Promise<IIdentity | undefined> {
+ if (isHostedAzureDevOps(organizationUrl)) {
+ const organization: string = extractOrganization(organizationUrl.toString());
+ return await querySubjectHosted(organization, email);
+ } else {
+ return await querySubjectOnPrem(organizationUrl, email);
+ }
+}
+
+/**
+ * Make the HTTP Request for an OnPrem Azure DevOps Server to resolve an email to an IIdentity
+ * @param organizationUrl
+ * @param email
+ * @returns
+ */
+async function querySubjectOnPrem(organizationUrl: URL, email: string): Promise<IIdentity | undefined> {
+ const url = `${organizationUrl}_apis/identities?searchFilter=MailAddress&queryMembership=None&filterValue=${email}`;
+ tl.debug(`GET ${url}`);
+ try {
+ const response = await axios.get(url, {
+ headers: {
+ Authorization: `Basic ${encodeBase64('PAT:' + tl.getVariable('System.AccessToken'))}`,
+ Accept: 'application/json;api-version=5.0',
+ },
+ });
+
+ if (isSuccessStatusCode(response.status)) {
+ return {
+ id: response.data.value[0]?.id,
+ displayName: response.data.value[0]?.providerDisplayName,
+ input: email,
+ };
+ }
+ } catch (error) {
+ const responseStatusCode = error?.response?.status;
+ tl.debug(`HTTP Response Status: ${responseStatusCode}`);
+ if (responseStatusCode > 400 && responseStatusCode < 500) {
+ tl.debug(`Access token is ${tl.getVariable('System.AccessToken')?.length > 0 ? 'not' : ''} null or empty.`);
+ throw new Error(`The access token provided is empty or does not have permissions to access '${url}'`);
+ } else {
+ throw error;
+ }
+ }
+}
+
+/**
+ * Make the HTTP request for a hosted Azure DevOps Services organization to resolve an email to an IIdentity
+ * @param organization
+ * @param email
+ * @returns
+ */
+async function querySubjectHosted(organization: string, email: string): Promise<IIdentity | undefined> {
+ // make HTTP request
+ const url = `https://vssps.dev.azure.com/${organization}/_apis/graph/subjectquery`;
+ tl.debug(`POST ${url}`);
+ try {
+ // axios.post() takes the request body as the second argument and the request config (headers) as the third
+ const response = await axios.post(
+ url,
+ {
+ query: email,
+ subjectKind: ['User'],
+ },
+ {
+ headers: {
+ 'Authorization': `Basic ${encodeBase64('PAT:' + tl.getVariable('System.AccessToken'))}`,
+ 'Accept': 'application/json;api-version=6.0-preview.1',
+ 'Content-Type': 'application/json',
+ },
+ },
+ );
+
+ tl.debug(`Got Http Response: ${response.status}`);
+
+ if (!isSuccessStatusCode(response.status) || response.data.value.length === 0) {
+ throw new Error('Failed to resolve given email in organization');
+ }
+
+ const descriptor: string = response.data.value[0]?.descriptor || '';
+ const id = decodeBase64(descriptor.substring(descriptor.indexOf('.') + 1));
+ return {
+ id: id,
+ displayName: response.data.value[0]?.displayName,
+ input: email,
+ };
+ } catch (error) {
+ const responseStatusCode = error?.response?.status;
+ tl.debug(`HTTP Response Status: ${responseStatusCode}`);
+ if (responseStatusCode > 400 && responseStatusCode < 500) {
+ tl.debug(`Access token is ${tl.getVariable('System.AccessToken')?.length > 0 ? 'not' : ''} null or empty.`);
+ throw new Error(`The access token provided is empty or does not have permissions to access '${url}'`);
+ } else {
+ throw error;
+ }
+ }
+}
diff --git a/extension/tasks/dependabotV2/utils/convertPlaceholder.ts b/extension/tasks/dependabotV2/utils/convertPlaceholder.ts
new file mode 100644
index 00000000..d4e96d5d
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/convertPlaceholder.ts
@@ -0,0 +1,27 @@
+import { getVariable } from 'azure-pipelines-task-lib/task';
+
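+/**
+ * Replaces `${{ VARIABLE_NAME }}` placeholders in the input string with the value of the matching
+ * pipeline variable; placeholders that cannot be resolved are left unchanged.
+ * e.g. (illustrative) 'token: ${{ MY_PAT }}' becomes 'token: abc123' when the pipeline variable 'MY_PAT' is set to 'abc123'.
+ */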
+function convertPlaceholder(input: string): string {
+ var matches: RegExpExecArray[] = extractPlaceholder(input);
+ var result = input;
+ for (const match of matches) {
+ var placeholder = match[0];
+ var name = match[1];
+ var value = getVariable(name) ?? placeholder;
+ result = result.replace(placeholder, value);
+ }
+ return result;
+}
+
+function extractPlaceholder(input: string) {
+ const regexp: RegExp = new RegExp('\\${{\\s*([a-zA-Z_]+[a-zA-Z0-9\\._-]*)\\s*}}', 'g');
+
+ return matchAll(input, regexp);
+}
+
+function matchAll(input: string, rExp: RegExp, matches: Array<RegExpExecArray> = []) {
+ const matchIfAny = rExp.exec(input);
+ matchIfAny && matches.push(matchIfAny) && matchAll(input, rExp, matches);
+ return matches;
+}
+
+export { convertPlaceholder, extractPlaceholder };
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotCli.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotCli.ts
new file mode 100644
index 00000000..476ddf22
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotCli.ts
@@ -0,0 +1,211 @@
+import { command, debug, error, tool, which } from 'azure-pipelines-task-lib/task';
+import { ToolRunner } from 'azure-pipelines-task-lib/toolrunner';
+import * as fs from 'fs';
+import * as yaml from 'js-yaml';
+import * as os from 'os';
+import * as path from 'path';
+import { IDependabotUpdateJobConfig } from './interfaces/IDependabotUpdateJobConfig';
+import { IDependabotUpdateOperation } from './interfaces/IDependabotUpdateOperation';
+import { IDependabotUpdateOperationResult } from './interfaces/IDependabotUpdateOperationResult';
+import { IDependabotUpdateOutputProcessor } from './interfaces/IDependabotUpdateOutputProcessor';
+
+/**
+ * Wrapper class for running updates using dependabot-cli
+ */
+export class DependabotCli {
+ private readonly jobsPath: string;
+ private readonly toolImage: string;
+ private readonly outputProcessor: IDependabotUpdateOutputProcessor;
+ private readonly debug: boolean;
+
+ private toolPath: string;
+
+ public static readonly CLI_IMAGE_LATEST = 'github.com/dependabot/cli/cmd/dependabot@latest';
+
+ constructor(cliToolImage: string, outputProcessor: IDependabotUpdateOutputProcessor, debug: boolean) {
+ this.jobsPath = path.join(os.tmpdir(), 'dependabot-jobs');
+ this.toolImage = cliToolImage;
+ this.outputProcessor = outputProcessor;
+ this.debug = debug;
+ this.ensureJobsPathExists();
+ }
+
+ /**
+ * Run dependabot update job
+ * @param operation
+ * @param options
+ * @returns
+ */
+ public async update(
+ operation: IDependabotUpdateOperation,
+ options?: {
+ azureDevOpsAccessToken?: string;
+ gitHubAccessToken?: string;
+ collectorImage?: string;
+ proxyImage?: string;
+ updaterImage?: string;
+ flamegraph?: boolean;
+ },
+ ): Promise<IDependabotUpdateOperationResult[] | undefined> {
+ // Find the dependabot tool path, or install it if missing
+ const dependabotPath = await this.getDependabotToolPath();
+
+ // Create the job directory
+ const jobId = operation.job.id;
+ const jobPath = path.join(this.jobsPath, jobId.toString());
+ const jobInputPath = path.join(jobPath, 'job.yaml');
+ const jobOutputPath = path.join(jobPath, 'scenario.yaml');
+ this.ensureJobsPathExists();
+ if (!fs.existsSync(jobPath)) {
+ fs.mkdirSync(jobPath);
+ }
+
+ // Compile dependabot cmd arguments
+ // See: https://github.com/dependabot/cli/blob/main/cmd/dependabot/internal/cmd/root.go
+ // https://github.com/dependabot/cli/blob/main/cmd/dependabot/internal/cmd/update.go
+ let dependabotArguments = ['update', '--file', jobInputPath, '--output', jobOutputPath];
+ if (options?.collectorImage) {
+ dependabotArguments.push('--collector-image', options.collectorImage);
+ }
+ if (options?.proxyImage) {
+ dependabotArguments.push('--proxy-image', options.proxyImage);
+ }
+ if (options?.updaterImage) {
+ dependabotArguments.push('--updater-image', options.updaterImage);
+ }
+ if (options?.flamegraph) {
+ dependabotArguments.push('--flamegraph');
+ }
+
+ // Generate the job input file
+ writeJobConfigFile(jobInputPath, operation);
+
+ // Run dependabot update
+ if (!fs.existsSync(jobOutputPath) || fs.statSync(jobOutputPath)?.size == 0) {
+ console.info(`Running Dependabot update job '${jobInputPath}'...`);
+ const dependabotTool = tool(dependabotPath).arg(dependabotArguments);
+ const dependabotResultCode = await dependabotTool.execAsync({
+ failOnStdErr: false,
+ ignoreReturnCode: true,
+ env: {
+ DEPENDABOT_JOB_ID: jobId.replace(/-/g, '_'), // replace hyphens with underscores
+ LOCAL_GITHUB_ACCESS_TOKEN: options?.gitHubAccessToken, // avoid rate-limiting when pulling images from GitHub container registries
+ LOCAL_AZURE_ACCESS_TOKEN: options?.azureDevOpsAccessToken, // technically not needed since we already supply this in our 'git_source' registry, but included for consistency
+ },
+ });
+ if (dependabotResultCode != 0) {
+ error(`Dependabot failed with exit code ${dependabotResultCode}`);
+ }
+ }
+
+ // If flamegraph is enabled, upload the report to the pipeline timeline so the user can download it
+ const flamegraphPath = path.join(process.cwd(), 'flamegraph.html');
+ if (options?.flamegraph && fs.existsSync(flamegraphPath)) {
+ const jobFlamegraphPath = path.join(process.cwd(), `dependabot-${operation.job.id}-flamegraph.html`);
+ fs.renameSync(flamegraphPath, jobFlamegraphPath);
+ console.info(`Uploading flamegraph report '${jobFlamegraphPath}' to pipeline timeline...`);
+ command('task.uploadfile', {}, jobFlamegraphPath);
+ }
+
+ // Process the job output
+ const operationResults = Array<IDependabotUpdateOperationResult>();
+ if (fs.existsSync(jobOutputPath)) {
+ const jobOutputs = readJobScenarioOutputFile(jobOutputPath);
+ if (jobOutputs?.length > 0) {
+ console.info(`Processing outputs from '${jobOutputPath}'...`);
+ for (const output of jobOutputs) {
+ // Documentation on the scenario model can be found here:
+ // https://github.com/dependabot/cli/blob/main/internal/model/scenario.go
+ const type = output['type'];
+ const data = output['expect']?.['data'];
+ var operationResult = {
+ success: true,
+ error: null,
+ output: {
+ type: type,
+ data: data,
+ },
+ };
+ try {
+ operationResult.success = await this.outputProcessor.process(operation, type, data);
+ } catch (e) {
+ operationResult.success = false;
+ operationResult.error = e;
+ } finally {
+ operationResults.push(operationResult);
+ }
+ }
+ }
+ }
+
+ return operationResults.length > 0 ? operationResults : undefined;
+ }
+
+ // Get the dependabot tool path and install if missing
+ private async getDependabotToolPath(installIfMissing: boolean = true): Promise<string> {
+ debug('Checking for `dependabot` install...');
+ this.toolPath ||= which('dependabot', false);
+ if (this.toolPath) {
+ return this.toolPath;
+ }
+ if (!installIfMissing) {
+ throw new Error('Dependabot CLI install not found');
+ }
+
+ console.info('Dependabot CLI install was not found, installing now with `go install dependabot`...');
+ const goTool: ToolRunner = tool(which('go', true));
+ goTool.arg(['install', this.toolImage]);
+ await goTool.execAsync();
+
+ // Depending on how Go is configured on the host agent, the "go/bin" path may not be in the PATH environment variable.
+ // If dependabot still cannot be found using `which()` after install, we must manually resolve the path;
+ // It will either be "$GOPATH/bin/dependabot" or "$HOME/go/bin/dependabot", if GOPATH is not set.
+ const goBinPath = process.env.GOPATH ? path.join(process.env.GOPATH, 'bin') : path.join(os.homedir(), 'go', 'bin');
+ return (this.toolPath ||= which('dependabot', false) || path.join(goBinPath, 'dependabot'));
+ }
+
+ // Create the jobs directory if it does not exist
+ private ensureJobsPathExists(): void {
+ if (!fs.existsSync(this.jobsPath)) {
+ fs.mkdirSync(this.jobsPath);
+ }
+ }
+
+ // Clean up the jobs directory and its contents
+ public cleanup(): void {
+ if (fs.existsSync(this.jobsPath)) {
+ fs.rmSync(this.jobsPath, {
+ recursive: true,
+ force: true,
+ });
+ }
+ }
+}
+
+// Documentation on the job model can be found here:
+// https://github.com/dependabot/cli/blob/main/internal/model/job.go
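+// Illustrative example of the generated file (names and values below are examples only, not taken from a real job):
+//   job:
+//     id: update-0-nuget-all
+//     package-manager: nuget
+//     allowed-updates:
+//       - dependency-type: all
+//     source:
+//       provider: azure
+//       hostname: dev.azure.com
+//       repo: contoso/MyProject/_git/my-repo
+//   credentials:
+//     - type: git_source
+//       host: dev.azure.com
+//       username: x-access-token
+//       password: '***'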
+function writeJobConfigFile(path: string, config: IDependabotUpdateJobConfig): void {
+ fs.writeFileSync(
+ path,
+ yaml.dump({
+ job: config.job,
+ credentials: config.credentials,
+ }),
+ );
+}
+
+// Documentation on the scenario model can be found here:
+// https://github.com/dependabot/cli/blob/main/internal/model/scenario.go
+function readJobScenarioOutputFile(path: string): any[] {
+ const scenarioContent = fs.readFileSync(path, 'utf-8');
+ if (!scenarioContent || typeof scenarioContent !== 'string') {
+ return []; // No outputs or failed scenario
+ }
+
+ const scenario: any = yaml.load(scenarioContent);
+ if (scenario === null || typeof scenario !== 'object') {
+ throw new Error('Invalid scenario object');
+ }
+
+ return scenario['output'] || [];
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotJobBuilder.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotJobBuilder.ts
new file mode 100644
index 00000000..005a4c18
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotJobBuilder.ts
@@ -0,0 +1,270 @@
+import { warning } from 'azure-pipelines-task-lib';
+import {
+ IDependabotAllowCondition,
+ IDependabotGroup,
+ IDependabotRegistry,
+ IDependabotUpdate,
+} from '../dependabot/interfaces/IDependabotConfig';
+import { ISharedVariables } from '../getSharedVariables';
+import { IDependabotUpdateOperation } from './interfaces/IDependabotUpdateOperation';
+
+/**
+ * Wrapper class for building dependabot update job objects
+ */
+export class DependabotJobBuilder {
+ /**
+ * Create a dependabot update job that updates all dependencies for a package ecosystem
+ * @param taskInputs
+ * @param update
+ * @param registries
+ * @param dependencyList
+ * @param existingPullRequests
+ * @returns
+ */
+ public static newUpdateAllJob(
+ taskInputs: ISharedVariables,
+ id: string,
+ update: IDependabotUpdate,
+ registries: Record<string, IDependabotRegistry>,
+ dependencyList: any[],
+ existingPullRequests: any[],
+ ): IDependabotUpdateOperation {
+ const packageEcosystem = update['package-ecosystem'];
+ const securityUpdatesOnly = update['open-pull-requests-limit'] == 0;
+ const updateDependencyNames = securityUpdatesOnly ? mapDependenciesForSecurityUpdate(dependencyList) : undefined;
+ return buildUpdateJobConfig(
+ `update-${id}-${packageEcosystem}-${securityUpdatesOnly ? 'security-only' : 'all'}`,
+ taskInputs,
+ update,
+ registries,
+ false,
+ undefined,
+ updateDependencyNames,
+ existingPullRequests,
+ );
+ }
+
+ /**
+ * Create a dependabot update job that updates a single pull request
+ * @param taskInputs
+ * @param update
+ * @param registries
+ * @param existingPullRequests
+ * @param pullRequestToUpdate
+ * @returns
+ */
+ public static newUpdatePullRequestJob(
+ taskInputs: ISharedVariables,
+ id: string,
+ update: IDependabotUpdate,
+ registries: Record<string, IDependabotRegistry>,
+ existingPullRequests: any[],
+ pullRequestToUpdate: any,
+ ): IDependabotUpdateOperation {
+ const dependencyGroupName = pullRequestToUpdate['dependency-group-name'];
+ const dependencies = (dependencyGroupName ? pullRequestToUpdate['dependencies'] : pullRequestToUpdate)?.map(
+ (d) => d['dependency-name'],
+ );
+ return buildUpdateJobConfig(
+ `update-pr-${id}`,
+ taskInputs,
+ update,
+ registries,
+ true,
+ dependencyGroupName,
+ dependencies,
+ existingPullRequests,
+ );
+ }
+}
+
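+/**
+ * Maps a dependabot.yaml update configuration and the shared task inputs to a Dependabot CLI update job config.
+ */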
+function buildUpdateJobConfig(
+ id: string,
+ taskInputs: ISharedVariables,
+ update: IDependabotUpdate,
+ registries: Record<string, IDependabotRegistry>,
+ updatingPullRequest: boolean,
+ updateDependencyGroupName: string | undefined,
+ updateDependencyNames: string[] | undefined,
+ existingPullRequests: any[],
+) {
+ const hasMultipleDirectories = update.directories?.length > 1;
+ return {
+ config: update,
+ job: {
+ 'id': id,
+ 'package-manager': update['package-ecosystem'],
+ 'update-subdependencies': true, // TODO: add config for this?
+ 'updating-a-pull-request': updatingPullRequest,
+ 'dependency-group-to-refresh': updateDependencyGroupName,
+ 'dependency-groups': mapGroupsFromDependabotConfigToJobConfig(update.groups),
+ 'dependencies': updateDependencyNames,
+ 'allowed-updates': mapAllowedUpdatesFromDependabotConfigToJobConfig(update.allow),
+ 'ignore-conditions': mapIgnoreConditionsFromDependabotConfigToJobConfig(update.ignore),
+ 'security-updates-only': update['open-pull-requests-limit'] == 0,
+ 'security-advisories': [], // TODO: add config for this!
+ 'source': {
+ 'provider': 'azure',
+ 'api-endpoint': taskInputs.apiEndpointUrl,
+ 'hostname': taskInputs.hostname,
+ 'repo': `${taskInputs.organization}/${taskInputs.project}/_git/${taskInputs.repository}`,
+ 'branch': update['target-branch'],
+ 'commit': undefined, // use latest commit of target branch
+ 'directory': hasMultipleDirectories ? undefined : update.directory || '/',
+ 'directories': hasMultipleDirectories ? update.directories : undefined,
+ },
+ 'existing-pull-requests': existingPullRequests.filter((pr) => !pr['dependency-group-name']),
+ 'existing-group-pull-requests': existingPullRequests.filter((pr) => pr['dependency-group-name']),
+ 'commit-message-options':
+ update['commit-message'] === undefined
+ ? undefined
+ : {
+ 'prefix': update['commit-message']?.['prefix'],
+ 'prefix-development': update['commit-message']?.['prefix-development'],
+ 'include-scope': update['commit-message']?.['include'],
+ },
+ 'experiments': taskInputs.experiments,
+ 'max-updater-run-time': undefined, // TODO: add config for this?
+ 'reject-external-code': update['insecure-external-code-execution']?.toLocaleLowerCase() == 'allow',
+ 'repo-private': undefined, // TODO: add config for this?
+ 'repo-contents-path': undefined, // TODO: add config for this?
+ 'requirements-update-strategy': mapVersionStrategyToRequirementsUpdateStrategy(update['versioning-strategy']),
+ 'lockfile-only': update['versioning-strategy'] === 'lockfile-only',
+ 'vendor-dependencies': update.vendor,
+ 'debug': taskInputs.debug,
+ },
+ credentials: mapRegistryCredentialsFromDependabotConfigToJobConfig(taskInputs, registries),
+ };
+}
+
+function mapDependenciesForSecurityUpdate(dependencyList: any[]): string[] {
+ if (!dependencyList || dependencyList.length == 0) {
+ // This happens when no previous dependency list snapshot exists yet;
+ // TODO: Find a way to discover dependencies for a first-time security-only update (no existing dependency list snapshot).
+ // It would be nice if we could use dependabot-cli for this (e.g. `dependabot --discover-only`), but this is not supported currently.
+ // TODO: Open an issue in the dependabot-cli project, ask how we should handle this scenario.
+ warning(
+ 'Security updates can only be performed if there is a previous dependency list snapshot available, but there is none as you have not completed a successful update job yet. ' +
+ 'Dependabot does not currently support discovering vulnerable dependencies during security-only updates and it is likely that this update operation will fail.',
+ );
+
+ // Attempt to do a security update for "all dependencies"; it will probably fail as this is not supported in dependabot-updater yet, but it is the best we can do...
+ return [];
+ }
+
+ // Return only dependencies that are vulnerable, ignore the rest
+ const dependencyNames = dependencyList.map((dependency) => dependency['name']);
+ const dependencyVulnerabilities = {}; // TODO: getGitHubSecurityAdvisoriesForDependencies(dependencyNames);
+ return dependencyNames.filter((dependency) => dependencyVulnerabilities[dependency]?.length > 0);
+}
+
+function mapGroupsFromDependabotConfigToJobConfig(dependencyGroups: Record<string, IDependabotGroup>): any[] {
+ if (!dependencyGroups) {
+ return undefined;
+ }
+ return Object.keys(dependencyGroups).map((name) => {
+ const group = dependencyGroups[name];
+ return {
+ 'name': name,
+ 'applies-to': group['applies-to'],
+ 'rules': {
+ 'patterns': group['patterns'],
+ 'exclude-patterns': group['exclude-patterns'],
+ 'dependency-type': group['dependency-type'],
+ 'update-types': group['update-types'],
+ },
+ };
+ });
+}
+
+function mapAllowedUpdatesFromDependabotConfigToJobConfig(allowedUpdates: IDependabotAllowCondition[]): any[] {
+ if (!allowedUpdates) {
+ // If no allow conditions are specified, update all dependencies by default
+ return [{ 'dependency-type': 'all' }];
+ }
+ return allowedUpdates.map((allow) => {
+ return {
+ 'dependency-name': allow['dependency-name'],
+ 'dependency-type': allow['dependency-type'],
+ //'update-type': allow["update-type"] // TODO: This is missing from dependabot.yaml docs, but is used in the dependabot-core job model!?
+ };
+ });
+}
+
+function mapIgnoreConditionsFromDependabotConfigToJobConfig(ignoreConditions: IDependabotAllowCondition[]): any[] {
+ if (!ignoreConditions) {
+ return undefined;
+ }
+ return ignoreConditions.map((ignore) => {
+ return {
+ 'dependency-name': ignore['dependency-name'],
+ //'source': ignore["source"], // TODO: This is missing from dependabot.yaml docs, but is used in the dependabot-core job model!?
+ 'update-types': ignore['update-types'],
+ //'updated-at': ignore["updated-at"], // TODO: This is missing from dependabot.yaml docs, but is used in the dependabot-core job model!?
+ 'version-requirement': (<string[]>ignore['versions'])?.join(', '), // TODO: Test this, not sure how this should be parsed...
+ };
+ });
+}
+
+function mapVersionStrategyToRequirementsUpdateStrategy(versioningStrategy: string): string | undefined {
+ if (!versioningStrategy) {
+ return undefined;
+ }
+ switch (versioningStrategy) {
+ case 'auto':
+ return undefined;
+ case 'increase':
+ return 'bump_versions';
+ case 'increase-if-necessary':
+ return 'bump_versions_if_necessary';
+ case 'lockfile-only':
+ return 'lockfile_only';
+ case 'widen':
+ return 'widen_ranges';
+ default:
+ throw new Error(`Invalid dependabot.yaml versioning strategy option '${versioningStrategy}'`);
+ }
+}
+
+function mapRegistryCredentialsFromDependabotConfigToJobConfig(
+ taskInputs: ISharedVariables,
+ registries: Record<string, IDependabotRegistry>,
+): any[] {
+ let registryCredentials = new Array();
+ if (taskInputs.systemAccessToken) {
+ // Required to authenticate with the Azure DevOps git repository when cloning the source code
+ registryCredentials.push({
+ type: 'git_source',
+ host: taskInputs.hostname,
+ username: taskInputs.systemAccessUser?.trim()?.length > 0 ? taskInputs.systemAccessUser : 'x-access-token',
+ password: taskInputs.systemAccessToken,
+ });
+ }
+ if (taskInputs.githubAccessToken) {
+ // Required to avoid rate-limiting errors when generating pull request descriptions (e.g. fetching release notes, commit messages, etc)
+ registryCredentials.push({
+ type: 'git_source',
+ host: 'github.com',
+ username: 'x-access-token',
+ password: taskInputs.githubAccessToken,
+ });
+ }
+ if (registries) {
+ // Required to authenticate with private package feeds when finding the latest version of dependencies
+ for (const key in registries) {
+ const registry = registries[key];
+ registryCredentials.push({
+ 'type': registry.type,
+ 'host': registry.host,
+ 'url': registry.url,
+ 'registry': registry.registry,
+ 'username': registry.username,
+ 'password': registry.password,
+ 'token': registry.token,
+ 'replaces-base': registry['replaces-base'],
+ });
+ }
+ }
+
+ return registryCredentials;
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotOutputProcessor.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotOutputProcessor.ts
new file mode 100644
index 00000000..97ab9d00
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/DependabotOutputProcessor.ts
@@ -0,0 +1,419 @@
+import { GitPullRequestMergeStrategy, VersionControlChangeType } from 'azure-devops-node-api/interfaces/GitInterfaces';
+import { error, warning } from 'azure-pipelines-task-lib/task';
+import * as crypto from 'crypto';
+import * as path from 'path';
+import { AzureDevOpsWebApiClient } from '../azure-devops/AzureDevOpsWebApiClient';
+import { IPullRequestProperties } from '../azure-devops/interfaces/IPullRequestProperties';
+import { IDependabotUpdate } from '../dependabot/interfaces/IDependabotConfig';
+import { ISharedVariables } from '../getSharedVariables';
+import { IDependabotUpdateOperation } from './interfaces/IDependabotUpdateOperation';
+import { IDependabotUpdateOutputProcessor } from './interfaces/IDependabotUpdateOutputProcessor';
+
+/**
+ * Processes dependabot update outputs using the DevOps API
+ */
+export class DependabotOutputProcessor implements IDependabotUpdateOutputProcessor {
+ private readonly prAuthorClient: AzureDevOpsWebApiClient;
+ private readonly prApproverClient: AzureDevOpsWebApiClient;
+ private readonly existingPullRequests: IPullRequestProperties[];
+ private readonly taskInputs: ISharedVariables;
+
+ // Custom properties used to store dependabot metadata in projects.
+ // https://learn.microsoft.com/en-us/rest/api/azure/devops/core/projects/set-project-properties
+ public static PROJECT_PROPERTY_NAME_DEPENDENCY_LIST = 'Dependabot.DependencyList';
+
+ // Custom properties used to store dependabot metadata in pull requests.
+ // https://learn.microsoft.com/en-us/rest/api/azure/devops/git/pull-request-properties
+ public static PR_PROPERTY_NAME_PACKAGE_MANAGER = 'Dependabot.PackageManager';
+ public static PR_PROPERTY_NAME_DEPENDENCIES = 'Dependabot.Dependencies';
+
+ public static PR_DEFAULT_AUTHOR_EMAIL = 'noreply@github.com';
+ public static PR_DEFAULT_AUTHOR_NAME = 'dependabot[bot]';
+
+ constructor(
+ taskInputs: ISharedVariables,
+ prAuthorClient: AzureDevOpsWebApiClient,
+ prApproverClient: AzureDevOpsWebApiClient,
+ existingPullRequests: IPullRequestProperties[],
+ ) {
+ this.taskInputs = taskInputs;
+ this.prAuthorClient = prAuthorClient;
+ this.prApproverClient = prApproverClient;
+ this.existingPullRequests = existingPullRequests;
+ }
+
+ /**
+ * Process the appropriate DevOps API actions for the supplied dependabot update output
+ * @param update
+ * @param type
+ * @param data
+ * @returns
+ */
+ public async process(update: IDependabotUpdateOperation, type: string, data: any): Promise<boolean> {
+ console.debug(`Processing output '${type}' with data:`, data);
+ const project = this.taskInputs.project;
+ const repository = this.taskInputs.repository;
+ switch (type) {
+ // Documentation on the 'data' model for each output type can be found here:
+ // See: https://github.com/dependabot/cli/blob/main/internal/model/update.go
+
+ case 'update_dependency_list':
+ // Store the dependency list snapshot in project properties, if configured
+ if (this.taskInputs.storeDependencyList) {
+ console.info(`Storing the dependency list snapshot for project '${project}'...`);
+ await this.prAuthorClient.updateProjectProperty(
+ this.taskInputs.projectId,
+ DependabotOutputProcessor.PROJECT_PROPERTY_NAME_DEPENDENCY_LIST,
+ function (existingValue: string) {
+ const repoDependencyLists = JSON.parse(existingValue || '{}');
+ repoDependencyLists[repository] = repoDependencyLists[repository] || {};
+ repoDependencyLists[repository][update.job['package-manager']] = {
+ 'dependencies': data['dependencies'],
+ 'dependency-files': data['dependency_files'],
+ 'last-updated': new Date().toISOString(),
+ };
+
+ return JSON.stringify(repoDependencyLists);
+ },
+ );
+ console.info(`Dependency list snapshot was updated for project '${project}'`);
+ }
+
+ return true;
+
+ case 'create_pull_request':
+ if (this.taskInputs.skipPullRequests) {
+ warning(`Skipping pull request creation as 'skipPullRequests' is set to 'true'`);
+ return true;
+ }
+
+ // Skip if active pull request limit reached.
+ const openPullRequestLimit = update.config['open-pull-requests-limit'];
+ if (openPullRequestLimit > 0 && this.existingPullRequests.length >= openPullRequestLimit) {
+ warning(
+ `Skipping pull request creation as the maximum number of active pull requests (${openPullRequestLimit}) has been reached`,
+ );
+ return true;
+ }
+
+ // Create a new pull request
+ const dependencies = getPullRequestDependenciesPropertyValueForOutputData(data);
+ const targetBranch =
+ update.config['target-branch'] || (await this.prAuthorClient.getDefaultBranch(project, repository));
+ const sourceBranch = getSourceBranchNameForUpdate(update.config, targetBranch, dependencies);
+ const newPullRequestId = await this.prAuthorClient.createPullRequest({
+ project: project,
+ repository: repository,
+ source: {
+ commit: data['base-commit-sha'] || update.job.source.commit,
+ branch: sourceBranch,
+ },
+ target: {
+ branch: targetBranch,
+ },
+ author: {
+ email: this.taskInputs.authorEmail || DependabotOutputProcessor.PR_DEFAULT_AUTHOR_EMAIL,
+ name: this.taskInputs.authorName || DependabotOutputProcessor.PR_DEFAULT_AUTHOR_NAME,
+ },
+ title: data['pr-title'],
+ description: data['pr-body'],
+ commitMessage: data['commit-message'],
+ autoComplete: this.taskInputs.setAutoComplete
+ ? {
+ ignorePolicyConfigIds: this.taskInputs.autoCompleteIgnoreConfigIds,
+ mergeStrategy: (() => {
+ switch (this.taskInputs.mergeStrategy) {
+ case 'noFastForward':
+ return GitPullRequestMergeStrategy.NoFastForward;
+ case 'squash':
+ return GitPullRequestMergeStrategy.Squash;
+ case 'rebase':
+ return GitPullRequestMergeStrategy.Rebase;
+ case 'rebaseMerge':
+ return GitPullRequestMergeStrategy.RebaseMerge;
+ default:
+ return GitPullRequestMergeStrategy.Squash;
+ }
+ })(),
+ }
+ : undefined,
+ assignees: update.config.assignees,
+ reviewers: update.config.reviewers,
+ labels: update.config.labels?.map((label) => label?.trim()) || [],
+ workItems: update.config.milestone ? [update.config.milestone] : [],
+ changes: getPullRequestChangedFilesForOutputData(data),
+ properties: buildPullRequestProperties(update.job['package-manager'], dependencies),
+ });
+
+ // Auto-approve the pull request, if required
+ if (this.taskInputs.autoApprove && this.prApproverClient && newPullRequestId) {
+ await this.prApproverClient.approvePullRequest({
+ project: project,
+ repository: repository,
+ pullRequestId: newPullRequestId,
+ });
+ }
+
+ return newPullRequestId > 0;
+
+ case 'update_pull_request':
+ if (this.taskInputs.skipPullRequests) {
+ warning(`Skipping pull request update as 'skipPullRequests' is set to 'true'`);
+ return true;
+ }
+
+ // Find the pull request to update
+ const pullRequestToUpdate = this.getPullRequestForDependencyNames(
+ update.job['package-manager'],
+ data['dependency-names'],
+ );
+ if (!pullRequestToUpdate) {
+ error(
+ `Could not find pull request to update for package manager '${update.job['package-manager']}' and dependencies '${data['dependency-names'].join(', ')}'`,
+ );
+ return false;
+ }
+
+ // Update the pull request
+ const pullRequestWasUpdated = await this.prAuthorClient.updatePullRequest({
+ project: project,
+ repository: repository,
+ pullRequestId: pullRequestToUpdate.id,
+ changes: getPullRequestChangedFilesForOutputData(data),
+ skipIfCommitsFromUsersOtherThan:
+ this.taskInputs.authorEmail || DependabotOutputProcessor.PR_DEFAULT_AUTHOR_EMAIL,
+ skipIfNoConflicts: true,
+ });
+
+ // Re-approve the pull request, if required
+ if (this.taskInputs.autoApprove && this.prApproverClient && pullRequestWasUpdated) {
+ await this.prApproverClient.approvePullRequest({
+ project: project,
+ repository: repository,
+ pullRequestId: pullRequestToUpdate.id,
+ });
+ }
+
+ return pullRequestWasUpdated;
+
+ case 'close_pull_request':
+ if (!this.taskInputs.abandonUnwantedPullRequests) {
+ warning(`Skipping pull request closure as 'abandonUnwantedPullRequests' is set to 'false'`);
+ return true;
+ }
+
+ // Find the pull request to close
+ const pullRequestToClose = this.getPullRequestForDependencyNames(
+ update.job['package-manager'],
+ data['dependency-names'],
+ );
+ if (!pullRequestToClose) {
+ error(
+ `Could not find pull request to close for package manager '${update.job['package-manager']}' and dependencies '${data['dependency-names'].join(', ')}'`,
+ );
+ return false;
+ }
+
+ // TODO: GitHub Dependabot will close with reason "Superseded by ${new_pull_request_id}" when another PR supersedes it.
+ // How do we detect this? Do we need to?
+
+ // Close the pull request
+ return await this.prAuthorClient.closePullRequest({
+ project: project,
+ repository: repository,
+ pullRequestId: pullRequestToClose.id,
+ comment: this.taskInputs.commentPullRequests ? getPullRequestCloseReasonForOutputData(data) : undefined,
+ deleteSourceBranch: true,
+ });
+
+ case 'mark_as_processed':
+ // No action required
+ return true;
+
+ case 'record_ecosystem_versions':
+ // No action required
+ return true;
+
+ case 'record_update_job_error':
+ error(`Update job error: ${data['error-type']} ${JSON.stringify(data['error-details'])}`);
+ return false;
+
+ case 'record_update_job_unknown_error':
+ error(`Update job unknown error: ${data['error-type']}, ${JSON.stringify(data['error-details'])}`);
+ return false;
+
+ case 'increment_metric':
+ // No action required
+ return true;
+
+ default:
+ warning(`Unknown dependabot output type '${type}', ignoring...`);
+ return true;
+ }
+ }
+
+ private getPullRequestForDependencyNames(
+ packageManager: string,
+ dependencyNames: string[],
+ ): IPullRequestProperties | undefined {
+ return this.existingPullRequests.find((pr) => {
+ return (
+ pr.properties.find(
+ (p) => p.name === DependabotOutputProcessor.PR_PROPERTY_NAME_PACKAGE_MANAGER && p.value === packageManager,
+ ) &&
+ pr.properties.find(
+ (p) =>
+ p.name === DependabotOutputProcessor.PR_PROPERTY_NAME_DEPENDENCIES &&
+ areEqual(getDependencyNames(JSON.parse(p.value)), dependencyNames),
+ )
+ );
+ });
+ }
+}
+
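+/**
+ * Builds the custom property name/value pairs that are stored on a pull request to track
+ * the package manager and the dependencies it updates.
+ */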
+export function buildPullRequestProperties(packageManager: string, dependencies: any): any[] {
+ return [
+ {
+ name: DependabotOutputProcessor.PR_PROPERTY_NAME_PACKAGE_MANAGER,
+ value: packageManager,
+ },
+ {
+ name: DependabotOutputProcessor.PR_PROPERTY_NAME_DEPENDENCIES,
+ value: JSON.stringify(dependencies),
+ },
+ ];
+}
+
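+/**
+ * Parses the dependency list snapshot for the given repository and package manager
+ * from the project properties stored by the 'update_dependency_list' output handler.
+ */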
+export function parseProjectDependencyListProperty(
+ properties: Record<string, string>,
+ repository: string,
+ packageManager: string,
+): any {
+ const dependencyList = properties?.[DependabotOutputProcessor.PROJECT_PROPERTY_NAME_DEPENDENCY_LIST] || '{}';
+ const repoDependencyLists = JSON.parse(dependencyList);
+ return repoDependencyLists[repository]?.[packageManager];
+}
+
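+/**
+ * Parses the dependency metadata from the properties of the given pull requests,
+ * optionally filtered by package manager, keyed by pull request id.
+ */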
+export function parsePullRequestProperties(
+ pullRequests: IPullRequestProperties[],
+ packageManager: string | null,
+): Record<string, any> {
+ return Object.fromEntries(
+ pullRequests
+ .filter((pr) => {
+ return pr.properties.find(
+ (p) =>
+ p.name === DependabotOutputProcessor.PR_PROPERTY_NAME_PACKAGE_MANAGER &&
+ (packageManager === null || p.value === packageManager),
+ );
+ })
+ .map((pr) => {
+ return [
+ pr.id,
+ JSON.parse(
+ pr.properties.find((p) => p.name === DependabotOutputProcessor.PR_PROPERTY_NAME_DEPENDENCIES)?.value,
+ ),
+ ];
+ }),
+ );
+}
+
+function getSourceBranchNameForUpdate(update: IDependabotUpdate, targetBranch: string, dependencies: any): string {
+ const prefix = 'dependabot'; // TODO: Add config for this? Task V1 supported this via DEPENDABOT_BRANCH_NAME_PREFIX
+ const separator = update['pull-request-branch-name'].separator || '/';
+ const packageEcosystem = update['package-ecosystem'];
+ const targetBranchName = targetBranch?.replace(/^\/+|\/+$/g, ''); // strip leading/trailing slashes
+ if (dependencies['dependency-group-name']) {
+ // Group dependency update
+ // e.g. dependabot/nuget/main/microsoft-3b49c54d9e
+ const dependencyGroupName = dependencies['dependency-group-name'];
+ const dependencyHash = crypto
+ .createHash('md5')
+ .update(dependencies['dependencies'].map((d) => `${d['dependency-name']}-${d['dependency-version']}`).join(','))
+ .digest('hex')
+ .substring(0, 10);
+ return `${prefix}${separator}${packageEcosystem}${separator}${targetBranchName}${separator}${dependencyGroupName}-${dependencyHash}`;
+ } else {
+ // Single dependency update
+ // e.g. dependabot/nuget/main/Microsoft.Extensions.Logging-1.0.0
+ const leadDependency = dependencies.length === 1 ? dependencies[0] : null;
+ return `${prefix}${separator}${packageEcosystem}${separator}${targetBranchName}${separator}${leadDependency['dependency-name']}-${leadDependency['dependency-version']}`;
+ }
+}
+
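+// Maps the 'updated-dependency-files' from the output data to the file change model used by the DevOps client.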
+function getPullRequestChangedFilesForOutputData(data: any): any {
+ return data['updated-dependency-files']
+ .filter((file) => file['type'] === 'file')
+ .map((file) => {
+ let changeType = VersionControlChangeType.None;
+ if (file['deleted'] === true) {
+ changeType = VersionControlChangeType.Delete;
+ } else if (file['operation'] === 'update') {
+ changeType = VersionControlChangeType.Edit;
+ } else {
+ changeType = VersionControlChangeType.Add;
+ }
+ return {
+ changeType: changeType,
+ path: path.join(file['directory'], file['name']),
+ content: file['content'],
+ encoding: file['content_encoding'],
+ };
+ });
+}
+
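+// Builds a human-readable comment explaining why a pull request is being closed, based on the 'reason' in the output data.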
+function getPullRequestCloseReasonForOutputData(data: any): string {
+ // The first dependency is the "lead" dependency in a multi-dependency update
+ const leadDependencyName = data['dependency-names'][0];
+ let reason: string = null;
+ switch (data['reason']) {
+ case 'dependencies_changed':
+ reason = `Looks like the dependencies have changed`;
+ break;
+ case 'dependency_group_empty':
+ reason = `Looks like the dependencies in this group are now empty`;
+ break;
+ case 'dependency_removed':
+ reason = `Looks like ${leadDependencyName} is no longer a dependency`;
+ break;
+ case 'up_to_date':
+ reason = `Looks like ${leadDependencyName} is up-to-date now`;
+ break;
+ case 'update_no_longer_possible':
+ reason = `Looks like ${leadDependencyName} can no longer be updated`;
+ break;
+ }
+ if (reason?.length > 0) {
+ reason += ', so this is no longer needed.';
+ }
+ return reason;
+}
+
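+// Builds the dependencies property value stored on a pull request; grouped updates are wrapped with their group name.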
+function getPullRequestDependenciesPropertyValueForOutputData(data: any): any {
+ const dependencyGroupName = data['dependency-group']?.['name'];
+ let dependencies: any = data['dependencies']?.map((dep) => {
+ return {
+ 'dependency-name': dep['name'],
+ 'dependency-version': dep['version'],
+ 'directory': dep['directory'],
+ };
+ });
+ if (dependencyGroupName) {
+ dependencies = {
+ 'dependency-group-name': dependencyGroupName,
+ 'dependencies': dependencies,
+ };
+ }
+ return dependencies;
+}
+
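+// Extracts the dependency names from a dependencies property value, whether it is a plain list or a grouped update.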
+function getDependencyNames(dependencies: any): string[] {
+ return (dependencies['dependency-group-name'] ? dependencies['dependencies'] : dependencies)?.map((dep) =>
+ dep['dependency-name']?.toString(),
+ );
+}
+
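+// Compares two lists of dependency names for equality, ignoring order.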
+function areEqual(a: string[], b: string[]): boolean {
+ if (a.length !== b.length) return false;
+ return a.every((name) => b.includes(name));
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateJobConfig.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateJobConfig.ts
new file mode 100644
index 00000000..fa8b18e8
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateJobConfig.ts
@@ -0,0 +1,101 @@
+/**
+ * Represents the Dependabot CLI update job.yaml configuration file options.
+ */
+export interface IDependabotUpdateJobConfig {
+ // The dependabot "updater" job configuration
+ // See: https://github.com/dependabot/cli/blob/main/internal/model/job.go
+ // https://github.com/dependabot/dependabot-core/blob/main/updater/lib/dependabot/job.rb
+ job: {
+ 'id': string;
+ 'package-manager': string;
+ 'update-subdependencies'?: boolean;
+ 'updating-a-pull-request'?: boolean;
+ 'dependency-group-to-refresh'?: string;
+ 'dependency-groups'?: {
+ 'name': string;
+ 'applies-to'?: string;
+ 'rules': {
+ 'patterns'?: string[];
+ 'exclude-patterns'?: string[];
+ 'dependency-type'?: string;
+ 'update-types'?: string[];
+ };
+ }[];
+ 'dependencies'?: string[];
+ 'allowed-updates'?: {
+ 'dependency-name'?: string;
+ 'dependency-type'?: string;
+ 'update-type'?: string;
+ }[];
+ 'ignore-conditions'?: {
+ 'dependency-name'?: string;
+ 'source'?: string;
+ 'update-types'?: string[];
+ 'updated-at'?: string;
+ 'version-requirement'?: string;
+ }[];
+ 'security-updates-only': boolean;
+ 'security-advisories'?: {
+ 'dependency-name': string;
+ 'affected-versions': string[];
+ 'patched-versions': string[];
+ 'unaffected-versions': string[];
+ // TODO: The below configs are not in the dependabot-cli model, but are in the dependabot-core model
+ 'title'?: string;
+ 'description'?: string;
+ 'source-name'?: string;
+ 'source-url'?: string;
+ }[];
+ 'source': {
+ 'provider': string;
+ 'api-endpoint'?: string;
+ 'hostname': string;
+ 'repo': string;
+ 'branch'?: string;
+ 'commit'?: string;
+ 'directory'?: string;
+ 'directories'?: string[];
+ };
+ 'existing-pull-requests'?: {
+ 'dependency-name': string;
+ 'dependency-version': string;
+ 'directory': string;
+ }[][];
+ 'existing-group-pull-requests'?: {
+ 'dependency-group-name': string;
+ 'dependencies': {
+ 'dependency-name': string;
+ 'dependency-version': string;
+ 'directory': string;
+ }[];
+ }[];
+ 'commit-message-options'?: {
+ 'prefix'?: string;
+ 'prefix-development'?: string;
+ 'include-scope'?: string;
+ };
+ 'experiments'?: Record<string, string | boolean>;
+ 'max-updater-run-time'?: number;
+ 'reject-external-code'?: boolean;
+ 'repo-private'?: boolean;
+ 'repo-contents-path'?: string;
+ 'requirements-update-strategy'?: string;
+ 'lockfile-only'?: boolean;
+ 'vendor-dependencies'?: boolean;
+ 'debug'?: boolean;
+ };
+
+ // The dependabot "proxy" registry credentials
+ // See: https://github.com/dependabot/dependabot-core/blob/main/common/lib/dependabot/credential.rb
+ credentials: {
+ 'type': string;
+ 'host'?: string;
+ 'url'?: string;
+ 'registry'?: string;
+ 'region'?: string;
+ 'username'?: string;
+ 'password'?: string;
+ 'token'?: string;
+ 'replaces-base'?: boolean;
+ }[];
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperation.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperation.ts
new file mode 100644
index 00000000..8986619d
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperation.ts
@@ -0,0 +1,9 @@
+import { IDependabotUpdate } from '../../dependabot/interfaces/IDependabotConfig';
+import { IDependabotUpdateJobConfig } from './IDependabotUpdateJobConfig';
+
+/**
+ * Represents a single Dependabot CLI update operation
+ */
+export interface IDependabotUpdateOperation extends IDependabotUpdateJobConfig {
+ config: IDependabotUpdate;
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperationResult.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperationResult.ts
new file mode 100644
index 00000000..8c565ca1
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOperationResult.ts
@@ -0,0 +1,11 @@
+/**
+ * Represents the output of a Dependabot CLI update operation
+ */
+export interface IDependabotUpdateOperationResult {
+ success: boolean;
+ error: Error;
+ output: {
+ type: string;
+ data: any;
+ };
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOutputProcessor.ts b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOutputProcessor.ts
new file mode 100644
index 00000000..fc7da30e
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot-cli/interfaces/IDependabotUpdateOutputProcessor.ts
@@ -0,0 +1,14 @@
+import { IDependabotUpdateOperation } from './IDependabotUpdateOperation';
+
+/**
+ * Represents a processor for Dependabot update operation outputs
+ */
+export interface IDependabotUpdateOutputProcessor {
+ /**
+ * Process the output of a Dependabot update operation
+ * @param update The update operation
+ * @param type The output type (e.g. "create-pull-request", "update-pull-request", etc.)
+ * @param data The output data object related to the type
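+ *
+ * @example
+ * // Illustrative call only; the data payload shape depends on the output type emitted by the CLI:
+ * // await processor.process(operation, 'create-pull-request', { 'dependencies': [] });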
+ */
+ process(update: IDependabotUpdateOperation, type: string, data: any): Promise<boolean>;
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot/interfaces/IDependabotConfig.ts b/extension/tasks/dependabotV2/utils/dependabot/interfaces/IDependabotConfig.ts
new file mode 100644
index 00000000..2f530a72
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot/interfaces/IDependabotConfig.ts
@@ -0,0 +1,102 @@
+/**
+ * Represents the dependabot.yaml configuration file options.
+ * See: https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#configuration-options-for-dependabotyml
+ */
+export interface IDependabotConfig {
+ /**
+ * Mandatory. The configuration file version.
+ **/
+ 'version': number;
+
+ /**
+ * Mandatory. Configures how Dependabot updates the versions of project dependencies.
+ * Each entry configures the update settings for a particular package manager.
+ */
+ 'updates': IDependabotUpdate[];
+
+ /**
+ * Optional.
+ * Specify authentication details to access private package registries.
+ */
+ 'registries'?: Record<string, IDependabotRegistry>;
+
+ /**
+ * Optional. Enables updates for ecosystems that are not yet generally available.
+ * https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#enable-beta-ecosystems
+ */
+ 'enable-beta-ecosystems'?: boolean;
+}
+
+export interface IDependabotUpdate {
+ 'package-ecosystem': string;
+ 'directory': string;
+ 'directories': string[];
+ 'allow'?: IDependabotAllowCondition[];
+ 'assignees'?: string[];
+ 'commit-message'?: IDependabotCommitMessage;
+ 'groups'?: Record<string, IDependabotGroup>;
+ 'ignore'?: IDependabotIgnoreCondition[];
+ 'insecure-external-code-execution'?: string;
+ 'labels': string[];
+ 'milestone'?: string;
+ 'open-pull-requests-limit'?: number;
+ 'pull-request-branch-name'?: IDependabotPullRequestBranchName;
+ 'rebase-strategy'?: string;
+ 'registries'?: string[];
+ 'reviewers'?: string[];
+ 'schedule'?: IDependabotSchedule;
+ 'target-branch'?: string;
+ 'vendor'?: boolean;
+ 'versioning-strategy'?: string;
+}
+
+export interface IDependabotRegistry {
+ 'type': string;
+ 'url'?: string;
+ 'username'?: string;
+ 'password'?: string;
+ 'key'?: string;
+ 'token'?: string;
+ 'replaces-base'?: boolean;
+ 'host'?: string; // for terraform and composer only
+ 'registry'?: string; // for npm and docker only
+ 'index-url'?: string; // for python-index only
+ 'organization'?: string; // for hex-organization only
+ 'repo'?: string; // for hex-repository only
+ 'auth-key'?: string; // for hex-repository only
+ 'public-key-fingerprint'?: string; // for hex-repository only
+}
+
+export interface IDependabotGroup {
+ 'applies-to'?: string;
+ 'dependency-type'?: string;
+ 'patterns'?: string[];
+ 'exclude-patterns'?: string[];
+ 'update-types'?: string[];
+}
+
+export interface IDependabotAllowCondition {
+ 'dependency-name'?: string;
+ 'dependency-type'?: string;
+}
+
+export interface IDependabotIgnoreCondition {
+ 'dependency-name'?: string;
+ 'versions'?: string[];
+ 'update-types'?: string[];
+}
+
+export interface IDependabotSchedule {
+ interval?: string;
+ day?: string;
+ time?: string;
+ timezone?: string;
+}
+
+export interface IDependabotCommitMessage {
+ 'prefix'?: string;
+ 'prefix-development'?: string;
+ 'include'?: string;
+}
+
+export interface IDependabotPullRequestBranchName {
+ separator?: string;
+}
diff --git a/extension/tasks/dependabotV2/utils/dependabot/parseConfigFile.ts b/extension/tasks/dependabotV2/utils/dependabot/parseConfigFile.ts
new file mode 100644
index 00000000..5440cf0a
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/dependabot/parseConfigFile.ts
@@ -0,0 +1,338 @@
+import axios from 'axios';
+import * as tl from 'azure-pipelines-task-lib/task';
+import { getVariable } from 'azure-pipelines-task-lib/task';
+import * as fs from 'fs';
+import { load } from 'js-yaml';
+import * as path from 'path';
+import { URL } from 'url';
+import { convertPlaceholder } from '../convertPlaceholder';
+import { ISharedVariables } from '../getSharedVariables';
+import { IDependabotConfig, IDependabotRegistry, IDependabotUpdate } from './interfaces/IDependabotConfig';
+
+/**
+ * Parse the dependabot config YAML file to specify update configuration.
+ * The file should be located at '/.azuredevops/dependabot.yml' or '/.github/dependabot.yml'
+ *
+ * To view YAML file format, visit
+ * https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#allow
+ *
+ * @param taskInputs the input variables of the task
+ * @returns {IDependabotConfig} config - the dependabot configuration
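+ *
+ * @example
+ * // Illustrative usage (taskInputs comes from getSharedVariables):
+ * // const config = await parseConfigFile(taskInputs);
+ * // console.log(config.updates.length);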
+ */
+export default async function parseConfigFile(taskInputs: ISharedVariables): Promise<IDependabotConfig> {
+ const possibleFilePaths = [
+ '/.azuredevops/dependabot.yml',
+ '/.azuredevops/dependabot.yaml',
+ '/.github/dependabot.yaml',
+ '/.github/dependabot.yml',
+ ];
+
+ let contents: null | string = null;
+
+ /*
+ * The configuration file can be available locally if the repository is cloned.
+ * Otherwise, we should get it via the API which supports 2 scenarios:
+ * 1. Running the pipeline without cloning, which is useful for huge repositories (multiple submodules or large commit log)
+ * 2. Running a single pipeline to update multiple repositories https://github.com/tinglesoftware/dependabot-azure-devops/issues/328
+ */
+ if (taskInputs.repositoryOverridden) {
+ tl.debug(`Attempting to fetch configuration file via REST API ...`);
+ for (const fp of possibleFilePaths) {
+ // make HTTP request
+ var url = `${taskInputs.organizationUrl}${taskInputs.project}/_apis/git/repositories/${taskInputs.repository}/items?path=${fp}`;
+ tl.debug(`GET ${url}`);
+
+ try {
+ var response = await axios.get(url, {
+ auth: {
+ username: 'x-access-token',
+ password: taskInputs.systemAccessToken,
+ },
+ headers: {
+ Accept: '*/*', // required; the API call fails without an explicit Accept header
+ },
+ });
+ if (response.status === 200) {
+ tl.debug(`Found configuration file at '${url}'`);
+ contents = response.data;
+ break;
+ }
+ } catch (error) {
+ var responseStatusCode = error?.response?.status;
+
+ if (responseStatusCode === 404) {
+ tl.debug(`No configuration file at '${url}'`);
+ continue;
+ } else if (responseStatusCode === 401) {
+ throw new Error(`No access token has been provided to access '${url}'`);
+ } else if (responseStatusCode === 403) {
+ throw new Error(`The access token provided does not have permissions to access '${url}'`);
+ } else {
+ throw error;
+ }
+ }
+ }
+ } else {
+ let rootDir = getVariable('Build.SourcesDirectory');
+ for (const fp of possibleFilePaths) {
+ var filePath = path.join(rootDir, fp);
+ if (fs.existsSync(filePath)) {
+ tl.debug(`Found configuration file cloned at ${filePath}`);
+ contents = fs.readFileSync(filePath, 'utf-8');
+ break;
+ } else {
+ tl.debug(`No configuration file cloned at ${filePath}`);
+ }
+ }
+ }
+
+ // Ensure we have file contents; otherwise, throw a readable error.
+ if (!contents || typeof contents !== 'string') {
+ throw new Error(`Configuration file not found at possible locations: ${possibleFilePaths.join(', ')}`);
+ } else {
+ tl.debug('Configuration file contents read.');
+ }
+
+ let config: any = load(contents);
+
+ // Ensure the config object parsed is an object
+ if (config === null || typeof config !== 'object') {
+ throw new Error('Invalid dependabot config object');
+ } else {
+ tl.debug('Parsed YAML content from configuration file contents.');
+ }
+
+ const rawVersion = config['version'];
+ let version = -1;
+
+ // Ensure the version has been specified
+ if (!rawVersion) throw new Error('The version must be specified in dependabot.yml');
+
+ // Try to convert the version to an integer (parseInt does not throw for invalid numeric strings, so check for NaN instead)
+ version = parseInt(rawVersion, 10);
+ if (isNaN(version)) {
+ throw new Error('Dependabot version specified must be a valid integer');
+ }
+
+ // Ensure the version is == 2
+ if (version !== 2) {
+ throw new Error('Only version 2 of dependabot is supported. Version specified: ' + version);
+ }
+
+ const updates = parseUpdates(config);
+ const registries = parseRegistries(config);
+ validateConfiguration(updates, registries);
+
+ return {
+ version: version,
+ updates: updates,
+ registries: registries,
+ };
+}
+
+function parseUpdates(config: any): IDependabotUpdate[] {
+ var updates: IDependabotUpdate[] = [];
+
+ // Check the updates parsed
+ var rawUpdates = config['updates'];
+
+ // Check if the array of updates exists
+ if (!Array.isArray(rawUpdates)) {
+ throw new Error('Invalid dependabot config object: Dependency updates config array not found');
+ }
+
+ // Parse the value of each of the updates obtained from the file
+ rawUpdates.forEach((update) => {
+ var dependabotUpdate: IDependabotUpdate = update;
+
+ if (!dependabotUpdate['package-ecosystem']) {
+ throw new Error("The value 'package-ecosystem' in dependency update config is missing");
+ }
+
+ // Remap the package ecosystem name from config to a value that dependabot-core/cli understands.
+ // Config values: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
+ // Core/CLI values: https://github.com/dependabot/dependabot-core/blob/main/common/lib/dependabot/config/file.rb
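+ // For example: 'github-actions' becomes 'github_actions', 'gomod' becomes 'go_modules',
+ // and 'npm'/'yarn'/'pnpm' all map to 'npm_and_yarn' (see the switch below).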
+ dependabotUpdate['package-ecosystem'] = (() => {
+ const ecosystem = dependabotUpdate['package-ecosystem'].toLowerCase();
+ switch (ecosystem) {
+ case 'devcontainer':
+ return 'devcontainers';
+ case 'github-actions':
+ return 'github_actions';
+ case 'gitsubmodule':
+ return 'submodules';
+ case 'gomod':
+ return 'go_modules';
+ case 'mix':
+ return 'hex';
+ case 'npm':
+ return 'npm_and_yarn';
+ // Additional aliases, for convenience
+ case 'pipenv':
+ return 'pip';
+ case 'pip-compile':
+ return 'pip';
+ case 'poetry':
+ return 'pip';
+ case 'pnpm':
+ return 'npm_and_yarn';
+ case 'yarn':
+ return 'npm_and_yarn';
+ default:
+ return ecosystem;
+ }
+ })();
+
+ // zero is a valid value
+ if (!dependabotUpdate['open-pull-requests-limit'] && dependabotUpdate['open-pull-requests-limit'] !== 0) {
+ dependabotUpdate['open-pull-requests-limit'] = 5;
+ }
+
+ if (!dependabotUpdate.directory && !dependabotUpdate.directories?.length) {
+ throw new Error(
+ "The values 'directory' and 'directories' in dependency update config are missing; you must specify at least one",
+ );
+ }
+
+ updates.push(dependabotUpdate);
+ });
+ return updates;
+}
+
+function parseRegistries(config: any): Record<string, IDependabotRegistry> {
+ var registries: Record<string, IDependabotRegistry> = {};
+
+ var rawRegistries = config['registries'];
+
+ if (rawRegistries == undefined) return registries;
+
+ // Parse the value of each of the registries obtained from the file
+ Object.entries(rawRegistries).forEach((item) => {
+ var registryConfigKey = item[0];
+ var registryConfig = item[1];
+
+ // parse the type
+ var rawType = registryConfig['type'];
+ if (!rawType) {
+ throw new Error(`The value for 'type' in dependency registry config '${registryConfigKey}' is missing`);
+ }
+
+ // ensure the type is a known one
+ if (!KnownRegistryTypes.includes(rawType)) {
+ throw new Error(
+ `The value '${rawType}' for 'type' in dependency registry config '${registryConfigKey}' is not among the supported values.`,
+ );
+ }
+ var type = rawType?.replace('-', '_');
+
+ var parsed: IDependabotRegistry = { type: type };
+ registries[registryConfigKey] = parsed;
+
+ // handle special fields for 'hex-organization' types
+ if (type === 'hex_organization') {
+ var organization = registryConfig['organization'];
+ if (!organization) {
+ throw new Error(`The value 'organization' in dependency registry config '${registryConfigKey}' is missing`);
+ }
+ parsed.organization = organization;
+ }
+
+ // handle special fields for 'hex-repository' types
+ if (type === 'hex_repository') {
+ var repo = registryConfig['repo'];
+ if (!repo) {
+ throw new Error(`The value 'repo' in dependency registry config '${registryConfigKey}' is missing`);
+ }
+
+ parsed.repo = repo;
+ parsed['auth-key'] = registryConfig['auth-key'];
+ parsed['public-key-fingerprint'] = registryConfig['public-key-fingerprint'];
+ }
+
+ // parse username, password, key, and token while replacing tokens where necessary
+ parsed.username = convertPlaceholder(registryConfig['username']);
+ parsed.password = convertPlaceholder(registryConfig['password']);
+ parsed.key = convertPlaceholder(registryConfig['key']);
+ parsed.token = convertPlaceholder(registryConfig['token']);
+
+ // add "replaces-base" if present
+ var replacesBase = registryConfig['replaces-base'];
+ if (replacesBase !== undefined) {
+ parsed['replaces-base'] = replacesBase;
+ }
+
+ // parse the url
+ var url = registryConfig['url'];
+ if (!url && type !== 'hex_organization') {
+ throw new Error(`The value 'url' in dependency registry config '${registryConfigKey}' is missing`);
+ }
+ if (url) {
+ /*
+ * Some credentials do not use the 'url' property in the Ruby updater.
+ * The 'host' and 'registry' properties are derived from the given URL.
+ * The 'registry' property is derived from the 'url' by stripping off the scheme.
+ * The 'host' property is derived from the hostname of the 'url'.
+ *
+ * 'npm_registry' and 'docker_registry' use 'registry' only.
+ * 'terraform_registry' uses 'host' only.
+ * 'composer_repository' uses both 'url' and 'host'.
+ * 'python_index' uses 'index-url' instead of 'url'.
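+ *
+ * Example (hypothetical feed URL): for url 'https://pkgs.example.com/feed/',
+ * 'registry' would be 'pkgs.example.com/feed/' and 'host' would be 'pkgs.example.com'.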
+ */
+
+ if (URL.canParse(url)) {
+ const parsedUrl = new URL(url);
+
+ const addRegistry = type === 'docker_registry' || type === 'npm_registry';
+ if (addRegistry) parsed.registry = url.replace('https://', '').replace('http://', '');
+
+ const addHost = type === 'terraform_registry' || type === 'composer_repository';
+ if (addHost) parsed.host = parsedUrl.hostname;
+ }
+
+ if (type === 'python_index') parsed['index-url'] = url;
+
+ const skipUrl =
+ type === 'docker_registry' ||
+ type === 'npm_registry' ||
+ type === 'terraform_registry' ||
+ type === 'python_index';
+ if (!skipUrl) parsed.url = url;
+ }
+ });
+ return registries;
+}
+
+function validateConfiguration(updates: IDependabotUpdate[], registries: Record<string, IDependabotRegistry>) {
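+ // For example: an update with `registries: ['reg1']` must have a matching top-level `registries.reg1` entry,
+ // and every top-level registry entry must be referenced by at least one update.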
+ const configured = Object.keys(registries);
+ const referenced: string[] = [];
+ for (const u of updates) referenced.push(...(u.registries ?? []));
+
+ // ensure all referenced registries have been configured
+ const missingConfiguration = referenced.filter((el) => !configured.includes(el));
+ if (missingConfiguration.length > 0) {
+ throw new Error(
+ `Referenced registries: '${missingConfiguration.join(',')}' have not been configured in the root of dependabot.yml`,
+ );
+ }
+
+ // ensure all configured registries are referenced by at least one update
+ const missingReferences = configured.filter((el) => !referenced.includes(el));
+ if (missingReferences.length > 0) {
+ throw new Error(`Registries: '${missingReferences.join(',')}' have not been referenced by any update`);
+ }
+}
+
+const KnownRegistryTypes = [
+ 'composer-repository',
+ 'docker-registry',
+ 'git',
+ 'hex-organization',
+ 'hex-repository',
+ 'maven-repository',
+ 'npm-registry',
+ 'nuget-feed',
+ 'python-index',
+ 'rubygems-server',
+ 'terraform-registry',
+];
diff --git a/extension/tasks/dependabotV2/utils/extractHostname.ts b/extension/tasks/dependabotV2/utils/extractHostname.ts
new file mode 100644
index 00000000..2126530d
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/extractHostname.ts
@@ -0,0 +1,13 @@
+/**
+ * Extract a dependabot compatible hostname from a TeamFoundationCollection URL
+ * @param organizationUrl A URL object constructed from the `System.TeamFoundationCollectionUri` variable.
+ * @returns The hostname component of the {@see organizationUrl} parameter or `dev.azure.com` if the parameter points to an old `*.visualstudio.com` URL.
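+ *
+ * @example
+ * // extractHostname(new URL('https://contoso.visualstudio.com')) => 'dev.azure.com'
+ * // extractHostname(new URL('https://dev.azure.com/contoso'))    => 'dev.azure.com'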
+ */
+export default function extractHostname(organizationUrl: URL): string {
+ const visualStudioUrlRegex = /^(?<prefix>\S+)\.visualstudio\.com$/iu;
+ let hostname = organizationUrl.hostname;
+ if (visualStudioUrlRegex.test(hostname)) {
+ return 'dev.azure.com';
+ }
+ return hostname;
+}
diff --git a/extension/tasks/dependabotV2/utils/extractOrganization.ts b/extension/tasks/dependabotV2/utils/extractOrganization.ts
new file mode 100644
index 00000000..2b43561b
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/extractOrganization.ts
@@ -0,0 +1,31 @@
+/**
+ * Extract organization name from organization URL
+ *
+ * @param organizationUrl
+ *
+ * @returns organization name
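+ *
+ * @example
+ * // extractOrganization('https://server.domain.com/tfs/contoso/') => 'contoso'
+ * // extractOrganization('https://dev.azure.com/contoso/')         => 'contoso'
+ * // extractOrganization('https://contoso.visualstudio.com/')      => 'contoso'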
+ */
+export default function extractOrganization(organizationUrl: string): string {
+ let parts = organizationUrl.split('/');
+
+ // Check for on-premise style: https://server.domain.com/tfs/x/
+ if (parts.length === 6) {
+ return parts[4];
+ }
+
+ // Check for new style: https://dev.azure.com/x/
+ if (parts.length === 5) {
+ return parts[3];
+ }
+
+ // Check for old style: https://x.visualstudio.com/
+ if (parts.length === 4) {
+ // Get x.visualstudio.com part.
+ let part = parts[2];
+
+ // Return organization part (x).
+ return part.split('.')[0];
+ }
+
+ throw new Error(`Error parsing organization from organization url: '${organizationUrl}'.`);
+}
diff --git a/extension/tasks/dependabotV2/utils/extractVirtualDirectory.ts b/extension/tasks/dependabotV2/utils/extractVirtualDirectory.ts
new file mode 100644
index 00000000..02db1447
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/extractVirtualDirectory.ts
@@ -0,0 +1,23 @@
+/**
+ * Extract virtual directory from organization URL
+ *
+ * Virtual Directories are sometimes used in on-premises installations.
+ * @param organizationUrl
+ *
+ * @returns virtual directory
+ *
+ * @example URLs typically are like this:`https://server.domain.com/tfs/x/` and `tfs` is the virtual directory
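+ *
+ * // extractVirtualDirectory(new URL('https://server.domain.com/tfs/x/')) => 'tfs'
+ * // extractVirtualDirectory(new URL('https://dev.azure.com/x/'))         => ''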
+ */
+export default function extractVirtualDirectory(organizationUrl: URL): string {
+ // extract the pathname from the URL, then split it
+ // the pathname takes the shape '/tfs/x/'
+ let path = organizationUrl.pathname.split('/');
+
+ // Virtual Directories are sometimes used in on-premises installations.
+ // URLs typically are like this: https://server.domain.com/tfs/x/
+ // The pathname extracted looks like this: '/tfs/x/'
+ if (path.length == 4) {
+ return path[1];
+ }
+ return '';
+}
diff --git a/extension/tasks/dependabotV2/utils/getAzureDevOpsAccessToken.ts b/extension/tasks/dependabotV2/utils/getAzureDevOpsAccessToken.ts
new file mode 100644
index 00000000..646747af
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/getAzureDevOpsAccessToken.ts
@@ -0,0 +1,26 @@
+import { debug, getEndpointAuthorizationParameter, getInput } from 'azure-pipelines-task-lib/task';
+
+/**
+ * Prepare the access token for Azure DevOps Repos.
+ *
+ *
+ * If the user has not provided one, we use the one from the SystemVssConnection
+ *
+ * @returns Azure DevOps Access Token
+ */
+export default function getAzureDevOpsAccessToken() {
+ let systemAccessToken: string = getInput('azureDevOpsAccessToken');
+ if (systemAccessToken) {
+ debug('azureDevOpsAccessToken provided, using for authenticating');
+ return systemAccessToken;
+ }
+
+ let serviceConnectionName: string = getInput('azureDevOpsServiceConnection');
+ if (serviceConnectionName) {
+ debug('TFS connection supplied. A token shall be extracted from it.');
+ return getEndpointAuthorizationParameter(serviceConnectionName, 'apitoken', false);
+ }
+
+ debug("No custom token provided. The SystemVssConnection's AccessToken shall be used.");
+ return getEndpointAuthorizationParameter('SystemVssConnection', 'AccessToken', false);
+}
diff --git a/extension/tasks/dependabotV2/utils/getGithubAccessToken.ts b/extension/tasks/dependabotV2/utils/getGithubAccessToken.ts
new file mode 100644
index 00000000..ab660444
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/getGithubAccessToken.ts
@@ -0,0 +1,53 @@
+import { debug, getEndpointAuthorization, getInput, loc } from 'azure-pipelines-task-lib/task';
+
+/**
+ * Extract access token from Github endpoint
+ *
+ * @param githubEndpoint
+ * @returns
+ */
+function getGithubEndPointToken(githubEndpoint: string): string {
+ const githubEndpointObject = getEndpointAuthorization(githubEndpoint, false);
+ let githubEndpointToken: string = null;
+
+ if (!!githubEndpointObject) {
+ debug('Endpoint scheme: ' + githubEndpointObject.scheme);
+
+ if (githubEndpointObject.scheme === 'PersonalAccessToken') {
+ githubEndpointToken = githubEndpointObject.parameters.accessToken;
+ } else if (githubEndpointObject.scheme === 'OAuth') {
+ githubEndpointToken = githubEndpointObject.parameters.AccessToken;
+ } else if (githubEndpointObject.scheme === 'Token') {
+ githubEndpointToken = githubEndpointObject.parameters.AccessToken;
+ } else if (githubEndpointObject.scheme) {
+ throw new Error(loc('InvalidEndpointAuthScheme', githubEndpointObject.scheme));
+ }
+ }
+
+ if (!githubEndpointToken) {
+ throw new Error(loc('InvalidGitHubEndpoint', githubEndpoint));
+ }
+
+ return githubEndpointToken;
+}
+
+/**
+ * Extract the Github access token from `gitHubAccessToken` and `gitHubConnection` inputs
+ *
+ * @returns the Github access token
+ */
+export default function getGithubAccessToken() {
+ let gitHubAccessToken: string = getInput('gitHubAccessToken');
+ if (gitHubAccessToken) {
+ debug('gitHubAccessToken provided, using for authenticating');
+ return gitHubAccessToken;
+ }
+
+ const githubEndpointId = getInput('gitHubConnection');
+ if (githubEndpointId) {
+ debug('GitHub connection supplied. A token shall be extracted from it.');
+ gitHubAccessToken = getGithubEndPointToken(githubEndpointId);
+ }
+
+ return gitHubAccessToken;
+}
diff --git a/extension/tasks/dependabotV2/utils/getSharedVariables.ts b/extension/tasks/dependabotV2/utils/getSharedVariables.ts
new file mode 100644
index 00000000..40e6d797
--- /dev/null
+++ b/extension/tasks/dependabotV2/utils/getSharedVariables.ts
@@ -0,0 +1,188 @@
+import * as tl from 'azure-pipelines-task-lib/task';
+import extractHostname from './extractHostname';
+import extractOrganization from './extractOrganization';
+import extractVirtualDirectory from './extractVirtualDirectory';
+import getAzureDevOpsAccessToken from './getAzureDevOpsAccessToken';
+import getGithubAccessToken from './getGithubAccessToken';
+
+export interface ISharedVariables {
+ /** URL of the organization. This may lack the project name */
+ organizationUrl: URL;
+
+ /** Organization URL protocol */
+ protocol: string;
+ /** Organization URL hostname */
+ hostname: string;
+ /** Organization URL port */
+ port: string;
+ /** Organization URL virtual directory */
+ virtualDirectory: string;
+ /** Organization name */
+ organization: string;
+ /** Project ID */
+ projectId: string;
+ /** Project name */
+ project: string;
+ /** Repository name */
+ repository: string;
+ /** Whether the repository was overridden via input */
+ repositoryOverridden: boolean;
+
+ /** Organization API endpoint URL */
+ apiEndpointUrl: string;
+
+ /** The github token */
+ githubAccessToken: string;
+ /** The access User for Azure DevOps Repos */
+ systemAccessUser: string;
+ /** The access token for Azure DevOps Repos */
+ systemAccessToken: string;
+
+ authorEmail?: string;
+ authorName?: string;
+
+ storeDependencyList: boolean;
+
+ /** Determines if the pull requests that dependabot creates should have auto complete set */
+ setAutoComplete: boolean;
+ /** Merge strategies which can be used to complete a pull request */
+ mergeStrategy: string;
+ /** List of any policy configuration IDs which auto-complete should not wait for */
+ autoCompleteIgnoreConfigIds: number[];
+
+ /** Determines if the pull requests that dependabot creates should be automatically approved */
+ autoApprove: boolean;
+ /** A personal access token of the user that should approve the PR */
+ autoApproveUserToken: string;
+
+ experiments: Record<string, string | boolean>;
+
+ /** Determines if verbose log messages are logged */
+ debug: boolean;
+
+ /** List of update identifiers to run */
+ targetUpdateIds: number[];
+
+ securityAdvisoriesFile: string | undefined;
+
+ /** Determines whether to skip creating/updating pull requests */
+ skipPullRequests: boolean;
+ /** Determines whether to comment on pull requests with an explanation of the reason for closing */
+ commentPullRequests: boolean;
+ /** Determines whether to abandon unwanted pull requests */
+ abandonUnwantedPullRequests: boolean;
+}
+
+/**
+ * Extract shared variables
+ *
+ * @returns shared variables
+ */
+export default function getSharedVariables(): ISharedVariables {
+ let organizationUrl = tl.getVariable('System.TeamFoundationCollectionUri');
+
+ // convert the URL string into a valid JS URL object
+ let formattedOrganizationUrl = new URL(organizationUrl);
+ let protocol: string = formattedOrganizationUrl.protocol.slice(0, -1);
+ let hostname: string = extractHostname(formattedOrganizationUrl);
+ let port: string = formattedOrganizationUrl.port;
+ let virtualDirectory: string = extractVirtualDirectory(formattedOrganizationUrl);
+ let organization: string = extractOrganization(organizationUrl);
+ let projectId: string = tl.getVariable('System.TeamProjectId');
+ let project: string = encodeURI(tl.getVariable('System.TeamProject')); // encode special characters like spaces
+ let repository: string = tl.getInput('targetRepositoryName');
+ let repositoryOverridden = typeof repository === 'string';
+ if (!repositoryOverridden) {
+ tl.debug('No custom repository provided. The Pipeline Repository Name shall be used.');
+ repository = tl.getVariable('Build.Repository.Name');
+ }
+ repository = encodeURI(repository); // encode special characters like spaces
+
+ const virtualDirectorySuffix = virtualDirectory?.length > 0 ? `${virtualDirectory}/` : '';
+ let apiEndpointUrl = `${protocol}://${hostname}:${port}/${virtualDirectorySuffix}`;
+
+ // Prepare the access credentials
+ let githubAccessToken: string = getGithubAccessToken();
+ let systemAccessUser: string = tl.getInput('azureDevOpsUser');
+ let systemAccessToken: string = getAzureDevOpsAccessToken();
+
+ let authorEmail: string | undefined = tl.getInput('authorEmail');
+ let authorName: string | undefined = tl.getInput('authorName');
+
+ // Prepare variables for auto complete
+ let setAutoComplete = tl.getBoolInput('setAutoComplete', false);
+ let mergeStrategy = tl.getInput('mergeStrategy', true);
+ let autoCompleteIgnoreConfigIds = tl.getDelimitedInput('autoCompleteIgnoreConfigIds', ';', false).map(Number);
+
+ let storeDependencyList = tl.getBoolInput('storeDependencyList', false);
+
+ // Prepare variables for auto approve
+ let autoApprove: boolean = tl.getBoolInput('autoApprove', false);
+ let autoApproveUserToken: string = tl.getInput('autoApproveUserToken');
+
+ // Convert experiments from comma separated key value pairs to a record
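+ // e.g. 'flag_a=true,flag_b' => { flag_a: 'true', flag_b: true } (hypothetical experiment names)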
+ let experiments = tl
+ .getInput('experiments', false)
+ ?.split(',')
+ ?.reduce(
+ (acc, cur) => {
+ let [key, value] = cur.split('=', 2);
+ acc[key] = value || true;
+ return acc;
+ },
+ {} as Record<string, string | boolean>,
+ );
+
+ let debug: boolean = tl.getVariable('System.Debug')?.match(/true/i) ? true : false;
+
+ // Get the target identifiers
+ let targetUpdateIds = tl.getDelimitedInput('targetUpdateIds', ';', false).map(Number);
+
+ // Prepare other variables
+ let securityAdvisoriesFile: string | undefined = tl.getInput('securityAdvisoriesFile');
+ let skipPullRequests: boolean = tl.getBoolInput('skipPullRequests', false);
+ let commentPullRequests: boolean = tl.getBoolInput('commentPullRequests', false);
+ let abandonUnwantedPullRequests: boolean = tl.getBoolInput('abandonUnwantedPullRequests', true);
+
+ return {
+ organizationUrl: formattedOrganizationUrl,
+ protocol,
+ hostname,
+ port,
+ virtualDirectory,
+ organization,
+ projectId,
+ project,
+ repository,
+ repositoryOverridden,
+
+ apiEndpointUrl,
+
+ githubAccessToken,
+ systemAccessUser,
+ systemAccessToken,
+
+ authorEmail,
+ authorName,
+
+ storeDependencyList,
+
+ setAutoComplete,
+ mergeStrategy,
+ autoCompleteIgnoreConfigIds,
+
+ autoApprove,
+ autoApproveUserToken,
+
+ experiments,
+
+ debug,
+
+ targetUpdateIds,
+ securityAdvisoriesFile,
+
+ skipPullRequests,
+ commentPullRequests,
+ abandonUnwantedPullRequests,
+ };
+}
diff --git a/extension/tests/utils/dependabot.yml b/extension/tests/config/dependabot.yml
similarity index 70%
rename from extension/tests/utils/dependabot.yml
rename to extension/tests/config/dependabot.yml
index 35277f63..3acfcd72 100644
--- a/extension/tests/utils/dependabot.yml
+++ b/extension/tests/config/dependabot.yml
@@ -12,18 +12,18 @@ updates:
directory: '/client' # Location of package manifests
open-pull-requests-limit: 10
registries:
- - reg1
- - reg2
+ - reg1
+ - reg2
insecure-external-code-execution: 'deny'
ignore:
- - dependency-name: 'react'
- update-types: ['version-update:semver-major']
- - dependency-name: 'react-dom'
- update-types: ['version-update:semver-major']
- - dependency-name: '@types/react'
- update-types: ['version-update:semver-major']
- - dependency-name: '@types/react-dom'
- update-types: ['version-update:semver-major']
+ - dependency-name: 'react'
+ update-types: ['version-update:semver-major']
+ - dependency-name: 'react-dom'
+ update-types: ['version-update:semver-major']
+ - dependency-name: '@types/react'
+ update-types: ['version-update:semver-major']
+ - dependency-name: '@types/react-dom'
+ update-types: ['version-update:semver-major']
- package-ecosystem: 'nuget'
directories:
- '/src/client'
@@ -31,10 +31,10 @@ updates:
groups:
microsoft:
patterns:
- - "microsoft*"
+ - 'microsoft*'
update-types:
- - "minor"
- - "patch"
+ - 'minor'
+ - 'patch'
registries:
reg1:
type: nuget-feed
@@ -43,4 +43,4 @@ registries:
reg2:
type: npm-registry
url: 'https://pkgs.dev.azure.com/dependabot/_packaging/dependabot-npm/npm/registry/'
- token: 'tingle-npm:${{DEFAULT_TOKEN}}'
+ token: 'tingle-npm:${{ DEFAULT_TOKEN }}'
diff --git a/extension/tests/utils/sample-registries.yml b/extension/tests/config/sample-registries.yml
similarity index 100%
rename from extension/tests/utils/sample-registries.yml
rename to extension/tests/config/sample-registries.yml
diff --git a/extension/tests/utils/convertPlaceholder.test.ts b/extension/tests/utils/convertPlaceholder.test.ts
deleted file mode 100644
index 379a73bf..00000000
--- a/extension/tests/utils/convertPlaceholder.test.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { extractPlaceholder } from "../../task/utils/convertPlaceholder";
-
-describe("Parse property placeholder", () => {
- it("Should return key with underscores", () => {
- var matches: RegExpExecArray[] = extractPlaceholder(
- "PAT:${{MY_DEPENDABOT_ADO_PAT}}"
- );
- expect(matches[0][1]).toBe("MY_DEPENDABOT_ADO_PAT");
- });
-
- it("Should return the key", () => {
- var matches: RegExpExecArray[] = extractPlaceholder("PAT:${{PAT}}");
- expect(matches[0][1]).toBe("PAT");
- });
-
- it("Without PAT: prefix should return key", () => {
- var matches: RegExpExecArray[] = extractPlaceholder(
- "${{MY_DEPENDABOT_ADO_PAT}}"
- );
- expect(matches[0][1]).toBe("MY_DEPENDABOT_ADO_PAT");
- });
-
- it("With malformed brackets should be null", () => {
- var matches: RegExpExecArray[] = extractPlaceholder(
- "${MY_DEPENDABOT_ADO_PAT}"
- );
- expect(matches[0]).toBe(undefined);
- });
-});
diff --git a/extension/tests/utils/extractHostname.test.ts b/extension/tests/utils/extractHostname.test.ts
deleted file mode 100644
index 29c52e38..00000000
--- a/extension/tests/utils/extractHostname.test.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import extractHostname from "../../task/utils/extractHostname";
-
-describe("Extract hostname", () => {
- it("Should convert old *.visualstudio.com hostname to dev.azure.com", () => {
- var url = new URL("https://contoso.visualstudio.com");
- var hostname = extractHostname(url);
-
- expect(hostname).toBe("dev.azure.com");
- });
-
- it("Should retain the hostname", () => {
- var url = new URL("https://dev.azure.com/Core/contoso");
- var hostname = extractHostname(url);
-
- expect(hostname).toBe("dev.azure.com");
- });
-
- it("Should retain localhost hostname", () => {
- var url = new URL("https://localhost:8080/contoso");
- var hostname = extractHostname(url);
-
- expect(hostname).toBe("localhost");
- });
-});
diff --git a/extension/tests/utils/extractOrganization.test.ts b/extension/tests/utils/extractOrganization.test.ts
deleted file mode 100644
index 5f078220..00000000
--- a/extension/tests/utils/extractOrganization.test.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import extractOrganization from "../../task/utils/extractOrganization";
-
-describe("Extract organization name", () => {
- it("Should extract organization for on-premise domain", () => {
- var url = "https://server.domain.com/tfs/contoso/";
- var organization = extractOrganization(url);
-
- expect(organization).toBe("contoso");
- });
-
- it("Should extract organization for azure devops domain", () => {
- var url = "https://dev.azure.com/contoso/";
- var organization = extractOrganization(url);
-
- expect(organization).toBe("contoso");
- });
-
- it("Should extract organization for old style devops url", () => {
- var url = "https://contoso.visualstudio.com/";
- var organization = extractOrganization(url);
-
- expect(organization).toBe("contoso");
- });
-});
diff --git a/extension/tests/utils/extractVirtualDirectory.test.ts b/extension/tests/utils/extractVirtualDirectory.test.ts
deleted file mode 100644
index 6e0b38ca..00000000
--- a/extension/tests/utils/extractVirtualDirectory.test.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import extractVirtualDirectory from "../../task/utils/extractVirtualDirectory";
-
-describe("Extract virtual directory", () => {
- it("Should extract virtual directory", () => {
- var url = new URL("https://server.domain.com/contoso/x/");
- var virtualDirectory = extractVirtualDirectory(url);
-
- expect(virtualDirectory).toBe("contoso");
- });
-
- it("Should return empty for dev.azure.com organization URL", () => {
- var url = new URL("https://dev.azure.com/contoso/");
- var virtualDirectory = extractVirtualDirectory(url);
-
- expect(virtualDirectory).toBe("");
- });
-});
diff --git a/extension/vss-extension.json b/extension/vss-extension.json
index dd3a1a8f..2f4c8707 100644
--- a/extension/vss-extension.json
+++ b/extension/vss-extension.json
@@ -1,51 +1,52 @@
{
- "manifestVersion": 1,
- "id": "dependabot",
- "name": "Dependabot",
- "version": "0.1.0",
- "publisher": "tingle-software",
- "public": false,
- "targets": [
- {
- "id": "Microsoft.VisualStudio.Services"
- }
- ],
- "description": "Automatically update dependencies and vulnerabilities in your code",
- "categories": [
- "Azure Pipelines"
- ],
- "icons": {
- "default": "images/icon.png"
+ "$schema": "http://json.schemastore.org/vss-extension.json",
+ "manifestVersion": 1,
+ "id": "dependabot",
+ "name": "Dependabot",
+ "version": "2.0.0",
+ "publisher": "tingle-software",
+ "public": false,
+ "targets": [
+ {
+ "id": "Microsoft.VisualStudio.Services"
+ }
+ ],
+ "description": "Automatically update dependencies and vulnerabilities in your code",
+ "categories": ["Azure Pipelines"],
+ "icons": {
+ "default": "images/icon.png"
+ },
+ "links": {
+ "support": {
+ "uri": "https://github.com/tinglesoftware/dependabot-azure-devops/issues"
+ }
+ },
+ "repository": {
+ "type": "git",
+ "uri": "https://github.com/tinglesoftware/dependabot-azure-devops"
+ },
+ "content": {
+ "details": {
+ "path": "README.md"
+ }
+ },
+ "files": [
+ {
+ "path": "tasks"
},
- "links": {
- "support": { "uri": "https://github.com/tinglesoftware/dependabot-azure-devops/issues" }
- },
- "repository": {
- "type": "git",
- "uri": "https://github.com/tinglesoftware/dependabot-azure-devops"
- },
- "content": {
- "details": { "path": "README.md" }
- },
- "files": [
- {
- "path": "task"
- },
- {
- "path": "images",
- "addressable": true
- }
- ],
- "contributions": [
- {
- "id": "dependabot",
- "type": "ms.vss-distributed-task.task",
- "targets": [
- "ms.vss-distributed-task.tasks"
- ],
- "properties": {
- "name": "task"
- }
- }
- ]
+ {
+ "path": "images",
+ "addressable": true
+ }
+ ],
+ "contributions": [
+ {
+ "id": "dependabot",
+ "type": "ms.vss-distributed-task.task",
+ "targets": ["ms.vss-distributed-task.tasks"],
+ "properties": {
+ "name": "tasks"
+ }
+ }
+ ]
}
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 00000000..28ce1a9d
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,63 @@
+{
+ "name": "dependabot-azure-devops",
+ "version": "0.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "dependabot-azure-devops",
+ "version": "0.0.0",
+ "devDependencies": {
+ "prettier": "3.3.3",
+ "prettier-plugin-organize-imports": "4.1.0"
+ }
+ },
+ "node_modules/prettier": {
+ "version": "3.3.3",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz",
+ "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "prettier": "bin/prettier.cjs"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/prettier/prettier?sponsor=1"
+ }
+ },
+ "node_modules/prettier-plugin-organize-imports": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-4.1.0.tgz",
+ "integrity": "sha512-5aWRdCgv645xaa58X8lOxzZoiHAldAPChljr/MT0crXVOWTZ+Svl4hIWlz+niYSlO6ikE5UXkN1JrRvIP2ut0A==",
+ "dev": true,
+ "peerDependencies": {
+ "prettier": ">=2.0",
+ "typescript": ">=2.9",
+ "vue-tsc": "^2.1.0"
+ },
+ "peerDependenciesMeta": {
+ "vue-tsc": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/typescript": {
+ "version": "5.5.4",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
+ "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "peer": true,
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ }
+ }
+}
diff --git a/package.json b/package.json
new file mode 100644
index 00000000..7fa79e2e
--- /dev/null
+++ b/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "dependabot-azure-devops",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "format": "prettier --write .",
+ "format:check": "prettier --check ."
+ },
+ "devDependencies": {
+ "prettier": "3.3.3",
+ "prettier-plugin-organize-imports": "4.1.0"
+ }
+}
diff --git a/server/Directory.Build.props b/server/Directory.Build.props
index ffab359d..e30a15f7 100644
--- a/server/Directory.Build.props
+++ b/server/Directory.Build.props
@@ -3,12 +3,10 @@
net8.0
-
-
-
latest
enable
enable
+ false
diff --git a/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs b/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs
index cd2ee46e..996d94d0 100644
--- a/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs
+++ b/server/Tingle.Dependabot.Tests/Models/DependabotConfigurationTests.cs
@@ -22,11 +22,12 @@ public void Deserialization_Works()
Assert.NotNull(configuration);
Assert.Equal(2, configuration.Version);
Assert.NotNull(configuration.Updates);
- Assert.Equal(2, configuration.Updates.Count);
+ Assert.Equal(3, configuration.Updates.Count);
var first = configuration.Updates[0];
Assert.Equal("/", first.Directory);
- Assert.Equal("docker", first.PackageEcosystem);
+ Assert.Null(first.Directories);
+ Assert.Equal("nuget", first.PackageEcosystem);
Assert.Equal(DependabotScheduleInterval.Weekly, first.Schedule?.Interval);
Assert.Equal(new TimeOnly(3, 0), first.Schedule?.Time);
Assert.Equal(DependabotScheduleDay.Sunday, first.Schedule?.Day);
@@ -36,6 +37,7 @@ public void Deserialization_Works()
var second = configuration.Updates[1];
Assert.Equal("/client", second.Directory);
+ Assert.Null(second.Directories);
Assert.Equal("npm", second.PackageEcosystem);
Assert.Equal(DependabotScheduleInterval.Daily, second.Schedule?.Interval);
Assert.Equal(new TimeOnly(3, 15), second.Schedule?.Time);
@@ -43,6 +45,17 @@ public void Deserialization_Works()
Assert.Equal("Etc/UTC", second.Schedule?.Timezone);
Assert.Equal("deny", second.InsecureExternalCodeExecution);
Assert.Equal(["reg1", "reg2"], second.Registries);
+
+ var third = configuration.Updates[2];
+ Assert.Null(third.Directory);
+ Assert.Equal(["**/*"], third.Directories);
+ Assert.Equal("docker", third.PackageEcosystem);
+ Assert.Equal(DependabotScheduleInterval.Daily, third.Schedule?.Interval);
+ Assert.Equal(new TimeOnly(2, 00), third.Schedule?.Time);
+ Assert.Equal(DependabotScheduleDay.Monday, third.Schedule?.Day);
+ Assert.Equal("Etc/UTC", third.Schedule?.Timezone);
+ Assert.Null(third.InsecureExternalCodeExecution);
+ Assert.Null(third.Registries);
}
[Fact]
@@ -95,7 +108,7 @@ public void Validation_Works()
Assert.NotNull(val.ErrorMessage);
Assert.Equal("Registries: 'dummy1,dummy2' have not been referenced by any update", val.ErrorMessage);
- // fails: registrynot configured
+ // fails: registry not configured
configuration.Updates[0].Registries?.AddRange(["dummy1", "dummy2", "dummy3"]);
results = [];
actual = RecursiveValidator.TryValidateObject(configuration, results);
diff --git a/server/Tingle.Dependabot.Tests/Models/DependabotUpdateTests.cs b/server/Tingle.Dependabot.Tests/Models/DependabotUpdateTests.cs
new file mode 100644
index 00000000..15958040
--- /dev/null
+++ b/server/Tingle.Dependabot.Tests/Models/DependabotUpdateTests.cs
@@ -0,0 +1,43 @@
+using System.ComponentModel.DataAnnotations;
+using Tingle.Dependabot.Models.Dependabot;
+using Xunit;
+using YamlDotNet.Serialization;
+using YamlDotNet.Serialization.NamingConventions;
+
+namespace Tingle.Dependabot.Tests.Models;
+
+public class DependabotUpdateTests
+{
+ [Fact]
+ public void Validation_Works()
+ {
+ var update = new DependabotUpdate
+ {
+ PackageEcosystem = "npm",
+ Directory = "/",
+ Directories = null,
+ Schedule = new()
+ {
+ Interval = DependabotScheduleInterval.Monthly,
+ Time = new(2, 0),
+ },
+ };
+
+ // works as expected
+ var results = new List<ValidationResult>();
+ var actual = RecursiveValidator.TryValidateObject(update, results);
+ Assert.True(actual);
+ Assert.Empty(results);
+
+ // fails: directory and directories not provided
+ update.Directory = null;
+ update.Directories = null;
+ results = [];
+ actual = RecursiveValidator.TryValidateObject(update, results);
+ Assert.False(actual);
+ var val = Assert.Single(results);
+ Assert.Equal([nameof(update.Directory), nameof(update.Directories)], val.MemberNames);
+ Assert.NotNull(val.ErrorMessage);
+ Assert.Equal("Either 'directory' or 'directories' must be provided", val.ErrorMessage);
+ }
+}
diff --git a/server/Tingle.Dependabot.Tests/PeriodicTasks/MissedTriggerCheckerTaskTests.cs b/server/Tingle.Dependabot.Tests/PeriodicTasks/MissedTriggerCheckerTaskTests.cs
index 46b27353..afef3117 100644
--- a/server/Tingle.Dependabot.Tests/PeriodicTasks/MissedTriggerCheckerTaskTests.cs
+++ b/server/Tingle.Dependabot.Tests/PeriodicTasks/MissedTriggerCheckerTaskTests.cs
@@ -119,6 +119,7 @@ await context.Repositories.AddAsync(new Repository
{
PackageEcosystem = "npm",
Directory = "/",
+ Directories = null,
Schedule = new DependabotUpdateSchedule
{
Interval = DependabotScheduleInterval.Daily,
@@ -129,7 +130,8 @@ await context.Repositories.AddAsync(new Repository
new RepositoryUpdate
{
PackageEcosystem = "npm",
- Directory = "/legacy",
+ Directory = null,
+ Directories = ["/legacy"],
Schedule = new DependabotUpdateSchedule
{
Interval = DependabotScheduleInterval.Daily,
diff --git a/server/Tingle.Dependabot.Tests/PeriodicTasks/UpdateJobsCleanerTaskTests.cs b/server/Tingle.Dependabot.Tests/PeriodicTasks/UpdateJobsCleanerTaskTests.cs
index 522832b4..3065a5a8 100644
--- a/server/Tingle.Dependabot.Tests/PeriodicTasks/UpdateJobsCleanerTaskTests.cs
+++ b/server/Tingle.Dependabot.Tests/PeriodicTasks/UpdateJobsCleanerTaskTests.cs
@@ -34,6 +34,7 @@ await context.UpdateJobs.AddAsync(new UpdateJob
Created = DateTimeOffset.UtcNow.AddMinutes(-19),
PackageEcosystem = "npm",
Directory = "/",
+ Directories = null,
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
Status = UpdateJobStatus.Succeeded,
@@ -47,6 +48,7 @@ await context.UpdateJobs.AddAsync(new UpdateJob
Created = DateTimeOffset.UtcNow.AddHours(-100),
PackageEcosystem = "nuget",
Directory = "/",
+ Directories = null,
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
Status = UpdateJobStatus.Succeeded,
@@ -59,7 +61,8 @@ await context.UpdateJobs.AddAsync(new UpdateJob
RepositorySlug = "test-repo",
Created = DateTimeOffset.UtcNow.AddMinutes(-30),
PackageEcosystem = "docker",
- Directory = "/",
+ Directory = null,
+ Directories = ["**/*"],
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
Status = UpdateJobStatus.Running,
@@ -90,6 +93,7 @@ await context.UpdateJobs.AddAsync(new UpdateJob
Created = DateTimeOffset.UtcNow.AddDays(-80),
PackageEcosystem = "npm",
Directory = "/",
+ Directories = null,
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
});
@@ -102,6 +106,7 @@ await context.UpdateJobs.AddAsync(new UpdateJob
Created = DateTimeOffset.UtcNow.AddDays(-100),
PackageEcosystem = "nuget",
Directory = "/",
+ Directories = null,
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
});
@@ -113,7 +118,8 @@ await context.UpdateJobs.AddAsync(new UpdateJob
RepositorySlug = "test-repo",
Created = DateTimeOffset.UtcNow.AddDays(-120),
PackageEcosystem = "docker",
- Directory = "/",
+ Directory = null,
+ Directories = ["**/*"],
Resources = new(0.25, 0.2),
AuthKey = Guid.NewGuid().ToString("n"),
});
@@ -168,6 +174,7 @@ await context.Repositories.AddAsync(new Repository
{
PackageEcosystem = "npm",
Directory = "/",
+ Directories = null,
Schedule = new DependabotUpdateSchedule
{
Interval = DependabotScheduleInterval.Daily,
@@ -177,7 +184,8 @@ await context.Repositories.AddAsync(new Repository
new RepositoryUpdate
{
PackageEcosystem = "npm",
- Directory = "/legacy",
+ Directory = null,
+ Directories = ["/legacy"],
Schedule = new DependabotUpdateSchedule
{
Interval = DependabotScheduleInterval.Daily,
diff --git a/server/Tingle.Dependabot.Tests/Samples/dependabot.yml b/server/Tingle.Dependabot.Tests/Samples/dependabot.yml
index d290fc9c..0ff3070c 100644
--- a/server/Tingle.Dependabot.Tests/Samples/dependabot.yml
+++ b/server/Tingle.Dependabot.Tests/Samples/dependabot.yml
@@ -5,7 +5,7 @@
version: 2
updates:
- - package-ecosystem: 'docker' # See documentation for possible values
+ - package-ecosystem: 'nuget' # See documentation for possible values
directory: '/' # Location of package manifests
schedule:
interval: 'weekly'
@@ -19,18 +19,22 @@ updates:
time: '03:15'
open-pull-requests-limit: 10
registries:
- - reg1
- - reg2
+ - reg1
+ - reg2
insecure-external-code-execution: 'deny'
ignore:
- - dependency-name: 'react'
- update-types: ['version-update:semver-major']
- - dependency-name: 'react-dom'
- update-types: ['version-update:semver-major']
- - dependency-name: '@types/react'
- update-types: ['version-update:semver-major']
- - dependency-name: '@types/react-dom'
- update-types: ['version-update:semver-major']
+ - dependency-name: 'react'
+ update-types: ['version-update:semver-major']
+ - dependency-name: 'react-dom'
+ update-types: ['version-update:semver-major']
+ - dependency-name: '@types/react'
+ update-types: ['version-update:semver-major']
+ - dependency-name: '@types/react-dom'
+ update-types: ['version-update:semver-major']
+ - package-ecosystem: 'docker' # See documentation for possible values
+ directories: ['**/*'] # Location of package manifests
+ schedule:
+ interval: 'daily'
registries:
reg1:
type: nuget-feed
diff --git a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json
index d939acaf..13aa5fe2 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-1.json
@@ -128,4 +128,4 @@
}
},
"createdDate": "2023-01-21T12:42:13.182Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json
index a1e56378..46fc186b 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git-pullrequest-comment-event-2.json
@@ -183,4 +183,4 @@
}
},
"createdDate": "2023-01-21T13:54:58.3779564Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json
index 4e792462..4b142474 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-1.json
@@ -85,4 +85,4 @@
}
},
"createdDate": "2016-09-19T13:03:27.3156388Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json
index 10df2936..e60f635e 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.merged-2.json
@@ -145,4 +145,4 @@
}
},
"createdDate": "2023-01-18T04:03:28.114Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json
index 117ec135..8ddede37 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-1.json
@@ -91,4 +91,4 @@
}
},
"createdDate": "2016-09-19T13:03:27.2813828Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json
index 0c65de38..e7a74719 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.pullrequest.updated-2.json
@@ -144,4 +144,4 @@
}
},
"createdDate": "2023-01-18T04:03:28.114Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-1.json b/server/Tingle.Dependabot.Tests/Samples/git.push-1.json
index b0d362d5..0eeac484 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.push-1.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.push-1.json
@@ -75,4 +75,4 @@
}
},
"createdDate": "2016-09-19T13:03:27.0379153Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-2.json b/server/Tingle.Dependabot.Tests/Samples/git.push-2.json
index f847f2f5..f073cf65 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.push-2.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.push-2.json
@@ -96,4 +96,4 @@
}
},
"createdDate": "2020-12-23T02:04:55.1406608Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Samples/git.push-3.json b/server/Tingle.Dependabot.Tests/Samples/git.push-3.json
index ea2804a4..568cc8e5 100644
--- a/server/Tingle.Dependabot.Tests/Samples/git.push-3.json
+++ b/server/Tingle.Dependabot.Tests/Samples/git.push-3.json
@@ -111,4 +111,4 @@
}
},
"createdDate": "2020-12-23T05:47:19.8108134Z"
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj b/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj
index c900f267..7c41a85a 100644
--- a/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj
+++ b/server/Tingle.Dependabot.Tests/Tingle.Dependabot.Tests.csproj
@@ -14,8 +14,8 @@
-
-
+
+
diff --git a/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs b/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs
index 97b377ac..10dc0a55 100644
--- a/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs
+++ b/server/Tingle.Dependabot.Tests/Workflow/UpdateRunnerTests.cs
@@ -101,7 +101,8 @@ public void MakeExtraCredentials_Works()
Assert.Equal("composer_repository", Assert.Contains("type", credential));
Assert.Equal("https://repo.packagist.com/example-company/", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
- Assert.DoesNotContain("host", credential);
+ Assert.DoesNotContain("index-url", credential);
+ Assert.Equal("repo.packagist.com", Assert.Contains("host", credential));
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
Assert.DoesNotContain("organization", credential);
@@ -117,6 +118,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("docker_registry", Assert.Contains("type", credential));
Assert.DoesNotContain("url", credential);
Assert.Equal("registry.hub.docker.com", Assert.Contains("registry", credential));
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -133,6 +135,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("git", Assert.Contains("type", credential));
Assert.Equal("https://github.com", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -149,6 +152,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("hex_organization", Assert.Contains("type", credential));
Assert.DoesNotContain("url", credential);
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.Equal("key_1234567890", Assert.Contains("key", credential));
Assert.DoesNotContain("token", credential);
@@ -165,6 +169,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("hex_repository", Assert.Contains("type", credential));
Assert.Equal("https://private-repo.example.com", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -181,6 +186,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("maven_repository", Assert.Contains("type", credential));
Assert.Equal("https://artifactory.example.com", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -197,6 +203,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("npm_registry", Assert.Contains("type", credential));
Assert.DoesNotContain("url", credential);
Assert.Equal("npm.pkg.github.com", Assert.Contains("registry", credential));
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.Equal("tkn_1234567890", Assert.Contains("token", credential));
@@ -213,6 +220,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("nuget_feed", Assert.Contains("type", credential));
Assert.Equal("https://pkgs.dev.azure.com/contoso/_packaging/My_Feed/nuget/v3/index.json", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -227,8 +235,9 @@ public void MakeExtraCredentials_Works()
// python-index
credential = credentials[8];
Assert.Equal("python_index", Assert.Contains("type", credential));
- Assert.Equal("https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example", Assert.Contains("url", credential));
+ Assert.DoesNotContain("url", credential);
Assert.DoesNotContain("registry", credential);
+ Assert.Equal("https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example", Assert.Contains("index-url", credential));
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.DoesNotContain("token", credential);
@@ -236,6 +245,7 @@ public void MakeExtraCredentials_Works()
Assert.DoesNotContain("repo", credential);
Assert.DoesNotContain("auth-key", credential);
Assert.DoesNotContain("public-key-fingerprint", credential);
+ Assert.Equal("https://pkgs.dev.azure.com/octocat/_packaging/my-feed/pypi/example", Assert.Contains("index-url", credential));
Assert.Equal("octocat@example.com", Assert.Contains("username", credential));
Assert.Equal("pwd_1234567890", Assert.Contains("password", credential));
Assert.Equal("true", Assert.Contains("replaces-base", credential));
@@ -245,6 +255,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("rubygems_server", Assert.Contains("type", credential));
Assert.Equal("https://rubygems.pkg.github.com/octocat/github_api", Assert.Contains("url", credential));
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.DoesNotContain("host", credential);
Assert.DoesNotContain("key", credential);
Assert.Equal("tkn_1234567890", Assert.Contains("token", credential));
@@ -261,6 +272,7 @@ public void MakeExtraCredentials_Works()
Assert.Equal("terraform_registry", Assert.Contains("type", credential));
Assert.DoesNotContain("url", credential);
Assert.DoesNotContain("registry", credential);
+ Assert.DoesNotContain("index-url", credential);
Assert.Equal("terraform.example.com", Assert.Contains("host", credential));
Assert.DoesNotContain("key", credential);
Assert.Equal("tkn_1234567890", Assert.Contains("token", credential));
@@ -273,16 +285,18 @@ public void MakeExtraCredentials_Works()
Assert.DoesNotContain("replaces-base", credential);
}
- [Fact]
- public void ConvertPlaceholder_Works()
+ [Theory]
+ [InlineData(":${{MY-p_aT}}", ":cake")]
+ [InlineData(":${{ MY-p_aT }}", ":cake")]
+ [InlineData(":${MY-p_aT}", ":${MY-p_aT}")]
+ public void ConvertPlaceholder_Works(string input, string expected)
{
- var input = ":${{MY-p_aT}}";
var secrets = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["my-p_at"] = "cake",
};
- var result = UpdateRunner.ConvertPlaceholder(input, secrets);
- Assert.Equal(":cake", result);
+ var actual = UpdateRunner.ConvertPlaceholder(input, secrets);
+ Assert.Equal(expected, actual);
}
[Theory]
diff --git a/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs b/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs
index 71cb34da..7a1c0d96 100644
--- a/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs
+++ b/server/Tingle.Dependabot/Consumers/TriggerUpdateJobsEventConsumer.cs
@@ -57,7 +57,12 @@ public async Task ConsumeAsync(EventContext context, Can
var ecosystem = update.PackageEcosystem!;
// check if there is an existing one
- var job = await dbContext.UpdateJobs.SingleOrDefaultAsync(j => j.PackageEcosystem == ecosystem && j.Directory == update.Directory && j.EventBusId == eventBusId, cancellationToken);
+ var job = await (from j in dbContext.UpdateJobs
+ where j.PackageEcosystem == ecosystem
+ where j.Directory == update.Directory
+ where j.Directories == update.Directories
+ where j.EventBusId == eventBusId
+ select j).SingleOrDefaultAsync(cancellationToken);
if (job is not null)
{
logger.SkippingTriggerJobAlreadyExists(repositoryId: repository.Id,
@@ -89,6 +94,7 @@ public async Task ConsumeAsync(EventContext context, Can
Commit = repository.LatestCommit,
PackageEcosystem = ecosystem,
Directory = update.Directory,
+ Directories = update.Directories,
Resources = resources,
AuthKey = Guid.NewGuid().ToString("n"),
diff --git a/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs b/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs
index ff4ca65a..e72cfd77 100644
--- a/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs
+++ b/server/Tingle.Dependabot/Consumers/UpdateJobEventsConsumer.cs
@@ -71,7 +71,11 @@ public async Task ConsumeAsync(EventContext context, C
var repository = await dbContext.Repositories.SingleOrDefaultAsync(r => r.Id == job.RepositoryId, cancellationToken);
if (repository is not null)
{
- var update = repository.Updates.SingleOrDefault(u => u.PackageEcosystem == job.PackageEcosystem && u.Directory == job.Directory);
+ var update = (from u in repository.Updates
+ where u.PackageEcosystem == job.PackageEcosystem
+ where u.Directory == job.Directory
+ where u.Directories == job.Directories
+ select u).SingleOrDefault();
if (update is not null && update.LatestJobId == job.Id)
{
update.LatestJobStatus = job.Status;
diff --git a/server/Tingle.Dependabot/Controllers/UpdateJobsController.cs b/server/Tingle.Dependabot/Controllers/UpdateJobsController.cs
index 0f74e49b..4b2ac6b2 100644
--- a/server/Tingle.Dependabot/Controllers/UpdateJobsController.cs
+++ b/server/Tingle.Dependabot/Controllers/UpdateJobsController.cs
@@ -103,7 +103,11 @@ public async Task UpdateDependencyListAsync([FromRoute, Required]
var repository = await dbContext.Repositories.SingleAsync(r => r.Id == job.RepositoryId);
// update the database
- var update = repository.Updates.SingleOrDefault(u => u.PackageEcosystem == job.PackageEcosystem && u.Directory == job.Directory);
+ var update = (from u in repository.Updates
+ where u.PackageEcosystem == job.PackageEcosystem
+ where u.Directory == job.Directory
+ where u.Directories == job.Directories
+ select u).SingleOrDefault();
if (update is not null)
{
update.Files = model.Data?.DependencyFiles ?? [];
diff --git a/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.Designer.cs b/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.Designer.cs
new file mode 100644
index 00000000..568bdfb7
--- /dev/null
+++ b/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.Designer.cs
@@ -0,0 +1,390 @@
+// <auto-generated />
+using System;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Metadata;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Tingle.Dependabot.Models;
+
+#nullable disable
+
+namespace Tingle.Dependabot.Migrations
+{
+ [DbContext(typeof(MainDbContext))]
+ [Migration("20240824085208_DirectoriesAndGroups")]
+ partial class DirectoriesAndGroups
+ {
+ /// <inheritdoc />
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "8.0.8")
+ .HasAnnotation("Relational:MaxIdentifierLength", 128);
+
+ SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder);
+
+ modelBuilder.Entity("Microsoft.AspNetCore.DataProtection.EntityFrameworkCore.DataProtectionKey", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("int");
+
+ SqlServerPropertyBuilderExtensions.UseIdentityColumn(b.Property("Id"));
+
+ b.Property("FriendlyName")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Xml")
+ .HasColumnType("nvarchar(max)");
+
+ b.HasKey("Id");
+
+ b.ToTable("DataProtectionKeys");
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.Project", b =>
+ {
+ b.Property("Id")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("Created")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Description")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Etag")
+ .IsConcurrencyToken()
+ .ValueGeneratedOnAddOrUpdate()
+ .HasColumnType("rowversion");
+
+ b.Property("GithubToken")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Location")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Password")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Private")
+ .HasColumnType("bit");
+
+ b.Property("ProviderId")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Secrets")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Slug")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Synchronized")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Token")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Type")
+ .HasColumnType("int");
+
+ b.Property("Updated")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("UpdaterImageTag")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Url")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.HasKey("Id");
+
+ b.HasIndex("Created")
+ .IsDescending();
+
+ b.HasIndex("Password")
+ .IsUnique();
+
+ b.HasIndex("ProviderId")
+ .IsUnique();
+
+ b.ToTable("Projects");
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.Repository", b =>
+ {
+ b.Property("Id")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("ConfigFileContents")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Created")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Etag")
+ .IsConcurrencyToken()
+ .ValueGeneratedOnAddOrUpdate()
+ .HasColumnType("rowversion");
+
+ b.Property("LatestCommit")
+ .HasMaxLength(200)
+ .HasColumnType("nvarchar(200)");
+
+ b.Property("Name")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("ProjectId")
+ .IsRequired()
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("ProviderId")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Registries")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Slug")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("SyncException")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Updated")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Updates")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.HasKey("Id");
+
+ b.HasIndex("Created")
+ .IsDescending();
+
+ b.HasIndex("ProjectId");
+
+ b.HasIndex("ProviderId")
+ .IsUnique();
+
+ b.ToTable("Repositories");
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.UpdateJob", b =>
+ {
+ b.Property("Id")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("AuthKey")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Commit")
+ .HasMaxLength(50)
+ .HasColumnType("nvarchar(50)");
+
+ b.Property("Created")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Directories")
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Directory")
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Duration")
+ .HasColumnType("bigint");
+
+ b.Property("End")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Etag")
+ .IsConcurrencyToken()
+ .ValueGeneratedOnAddOrUpdate()
+ .HasColumnType("rowversion");
+
+ b.Property("EventBusId")
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("Log")
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("PackageEcosystem")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("ProjectId")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("RepositoryId")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b.Property("RepositorySlug")
+ .IsRequired()
+ .HasColumnType("nvarchar(max)");
+
+ b.Property("Start")
+ .HasColumnType("datetimeoffset");
+
+ b.Property("Status")
+ .HasColumnType("int");
+
+ b.Property("Trigger")
+ .HasColumnType("int");
+
+ b.Property("UpdaterImage")
+ .HasColumnType("nvarchar(max)");
+
+ b.HasKey("Id");
+
+ b.HasIndex("AuthKey")
+ .IsUnique();
+
+ b.HasIndex("Created")
+ .IsDescending();
+
+ b.HasIndex("ProjectId");
+
+ b.HasIndex("RepositoryId");
+
+ b.HasIndex("PackageEcosystem", "Directory", "Directories");
+
+ b.HasIndex("PackageEcosystem", "Directory", "Directories", "EventBusId")
+ .IsUnique()
+ .HasFilter("[Directory] IS NOT NULL AND [Directories] IS NOT NULL AND [EventBusId] IS NOT NULL");
+
+ b.ToTable("UpdateJobs");
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.Project", b =>
+ {
+ b.OwnsOne("Tingle.Dependabot.Models.Management.ProjectAutoApprove", "AutoApprove", b1 =>
+ {
+ b1.Property("ProjectId")
+ .HasColumnType("nvarchar(50)");
+
+ b1.Property("Enabled")
+ .HasColumnType("bit");
+
+ b1.HasKey("ProjectId");
+
+ b1.ToTable("Projects");
+
+ b1.WithOwner()
+ .HasForeignKey("ProjectId");
+ });
+
+ b.OwnsOne("Tingle.Dependabot.Models.Management.ProjectAutoComplete", "AutoComplete", b1 =>
+ {
+ b1.Property("ProjectId")
+ .HasColumnType("nvarchar(50)");
+
+ b1.Property("Enabled")
+ .HasColumnType("bit");
+
+ b1.Property("IgnoreConfigs")
+ .HasColumnType("nvarchar(max)");
+
+ b1.Property("MergeStrategy")
+ .HasColumnType("int");
+
+ b1.HasKey("ProjectId");
+
+ b1.ToTable("Projects");
+
+ b1.WithOwner()
+ .HasForeignKey("ProjectId");
+ });
+
+ b.Navigation("AutoApprove")
+ .IsRequired();
+
+ b.Navigation("AutoComplete")
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.Repository", b =>
+ {
+ b.HasOne("Tingle.Dependabot.Models.Management.Project", null)
+ .WithMany("Repositories")
+ .HasForeignKey("ProjectId")
+ .OnDelete(DeleteBehavior.Cascade)
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.UpdateJob", b =>
+ {
+ b.OwnsOne("Tingle.Dependabot.Models.Management.UpdateJobError", "Error", b1 =>
+ {
+ b1.Property("UpdateJobId")
+ .HasColumnType("nvarchar(50)");
+
+ b1.Property("Detail")
+ .HasColumnType("nvarchar(max)");
+
+ b1.Property("Type")
+ .IsRequired()
+ .HasColumnType("nvarchar(450)");
+
+ b1.HasKey("UpdateJobId");
+
+ b1.HasIndex("Type");
+
+ b1.ToTable("UpdateJobs");
+
+ b1.WithOwner()
+ .HasForeignKey("UpdateJobId");
+ });
+
+ b.OwnsOne("Tingle.Dependabot.Models.Management.UpdateJobResources", "Resources", b1 =>
+ {
+ b1.Property("UpdateJobId")
+ .HasColumnType("nvarchar(50)");
+
+ b1.Property("Cpu")
+ .HasColumnType("float");
+
+ b1.Property("Memory")
+ .HasColumnType("float");
+
+ b1.HasKey("UpdateJobId");
+
+ b1.ToTable("UpdateJobs");
+
+ b1.WithOwner()
+ .HasForeignKey("UpdateJobId");
+ });
+
+ b.Navigation("Error");
+
+ b.Navigation("Resources")
+ .IsRequired();
+ });
+
+ modelBuilder.Entity("Tingle.Dependabot.Models.Management.Project", b =>
+ {
+ b.Navigation("Repositories");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.cs b/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.cs
new file mode 100644
index 00000000..26b761ff
--- /dev/null
+++ b/server/Tingle.Dependabot/Migrations/20240824085208_DirectoriesAndGroups.cs
@@ -0,0 +1,85 @@
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace Tingle.Dependabot.Migrations;
+
+/// <inheritdoc />
+public partial class DirectoriesAndGroups : Migration
+{
+ /// <inheritdoc />
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory",
+ table: "UpdateJobs");
+
+ migrationBuilder.DropIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_EventBusId",
+ table: "UpdateJobs");
+
+ migrationBuilder.AlterColumn<string>(
+ name: "Directory",
+ table: "UpdateJobs",
+ type: "nvarchar(450)",
+ nullable: true,
+ oldClrType: typeof(string),
+ oldType: "nvarchar(450)");
+
+ migrationBuilder.AddColumn<string>(
+ name: "Directories",
+ table: "UpdateJobs",
+ type: "nvarchar(450)",
+ nullable: true);
+
+ migrationBuilder.CreateIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_Directories",
+ table: "UpdateJobs",
+ columns: new[] { "PackageEcosystem", "Directory", "Directories" });
+
+ migrationBuilder.CreateIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_Directories_EventBusId",
+ table: "UpdateJobs",
+ columns: new[] { "PackageEcosystem", "Directory", "Directories", "EventBusId" },
+ unique: true,
+ filter: "[Directory] IS NOT NULL AND [Directories] IS NOT NULL AND [EventBusId] IS NOT NULL");
+ }
+
+ /// <inheritdoc />
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_Directories",
+ table: "UpdateJobs");
+
+ migrationBuilder.DropIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_Directories_EventBusId",
+ table: "UpdateJobs");
+
+ migrationBuilder.DropColumn(
+ name: "Directories",
+ table: "UpdateJobs");
+
+ migrationBuilder.AlterColumn<string>(
+ name: "Directory",
+ table: "UpdateJobs",
+ type: "nvarchar(450)",
+ nullable: false,
+ defaultValue: "",
+ oldClrType: typeof(string),
+ oldType: "nvarchar(450)",
+ oldNullable: true);
+
+ migrationBuilder.CreateIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory",
+ table: "UpdateJobs",
+ columns: new[] { "PackageEcosystem", "Directory" });
+
+ migrationBuilder.CreateIndex(
+ name: "IX_UpdateJobs_PackageEcosystem_Directory_EventBusId",
+ table: "UpdateJobs",
+ columns: new[] { "PackageEcosystem", "Directory", "EventBusId" },
+ unique: true,
+ filter: "[EventBusId] IS NOT NULL");
+ }
+}
diff --git a/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs b/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs
index 69056547..56759b4f 100644
--- a/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs
+++ b/server/Tingle.Dependabot/Migrations/MainDbContextModelSnapshot.cs
@@ -17,7 +17,7 @@ protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
- .HasAnnotation("ProductVersion", "8.0.1")
+ .HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 128);
SqlServerModelBuilderExtensions.UseIdentityColumns(modelBuilder);
@@ -200,8 +200,10 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.Property("Created")
.HasColumnType("datetimeoffset");
+ b.Property("Directories")
+ .HasColumnType("nvarchar(450)");
+
b.Property("Directory")
- .IsRequired()
.HasColumnType("nvarchar(450)");
b.Property("Duration")
@@ -261,11 +263,11 @@ protected override void BuildModel(ModelBuilder modelBuilder)
b.HasIndex("RepositoryId");
- b.HasIndex("PackageEcosystem", "Directory");
+ b.HasIndex("PackageEcosystem", "Directory", "Directories");
- b.HasIndex("PackageEcosystem", "Directory", "EventBusId")
+ b.HasIndex("PackageEcosystem", "Directory", "Directories", "EventBusId")
.IsUnique()
- .HasFilter("[EventBusId] IS NOT NULL");
+ .HasFilter("[Directory] IS NOT NULL AND [Directories] IS NOT NULL AND [EventBusId] IS NOT NULL");
b.ToTable("UpdateJobs");
});
diff --git a/server/Tingle.Dependabot/Models/Dependabot/DependabotConfiguration.cs b/server/Tingle.Dependabot/Models/Dependabot/DependabotConfiguration.cs
index c9f1e5f3..aee973f6 100644
--- a/server/Tingle.Dependabot/Models/Dependabot/DependabotConfiguration.cs
+++ b/server/Tingle.Dependabot/Models/Dependabot/DependabotConfiguration.cs
@@ -38,17 +38,19 @@ public IEnumerable<ValidationResult> Validate(ValidationContext validationContex
}
}
-public record DependabotUpdate
+public record DependabotUpdate : IValidatableObject
{
/// Ecosystem for the update.
[Required]
[JsonPropertyName("package-ecosystem")]
public string? PackageEcosystem { get; set; }
- [Required]
[JsonPropertyName("directory")]
public string? Directory { get; set; }
+ [JsonPropertyName("directories")]
+ public List<string>? Directories { get; set; }
+
[Required]
[JsonPropertyName("schedule")]
public DependabotUpdateSchedule? Schedule { get; set; }
@@ -61,6 +63,10 @@ public record DependabotUpdate
[JsonPropertyName("allow")]
public List<DependabotAllowDependency>? Allow { get; set; }
+
+ [JsonPropertyName("groups")]
+ public List? Groups { get; set; }
+
[JsonPropertyName("ignore")]
public List? Ignore { get; set; }
[JsonPropertyName("commit-message")]
@@ -81,6 +87,16 @@ public record DependabotUpdate
public bool Vendor { get; set; } = false;
[JsonPropertyName("versioning-strategy")]
public string VersioningStrategy { get; set; } = "auto";
+
+ public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
+ {
+ if (string.IsNullOrWhiteSpace(Directory) && (Directories is null || Directories.Count == 0))
+ {
+ yield return new ValidationResult(
+ "Either 'directory' or 'directories' must be provided",
+ memberNames: [nameof(Directory), nameof(Directories)]);
+ }
+ }
}
public class DependabotUpdateSchedule
@@ -119,6 +135,20 @@ public string GenerateCron()
}
}
+public class DependabotGroupDependency
+{
+ [JsonPropertyName("applies-to")]
+ public string? AppliesTo { get; set; }
+ [JsonPropertyName("dependency-type")]
+ public string? DependencyType { get; set; }
+ [JsonPropertyName("patterns")]
+ public List<string>? Patterns { get; set; }
+ [JsonPropertyName("exclude-patterns")]
+ public List<string>? ExcludePatterns { get; set; }
+ [JsonPropertyName("update-types")]
+ public List<string>? UpdateTypes { get; set; }
+}
+
public class DependabotAllowDependency : IValidatableObject
{
[JsonPropertyName("dependency-name")]
diff --git a/server/Tingle.Dependabot/Models/MainDbContext.cs b/server/Tingle.Dependabot/Models/MainDbContext.cs
index bdf236bb..5d414ed8 100644
--- a/server/Tingle.Dependabot/Models/MainDbContext.cs
+++ b/server/Tingle.Dependabot/Models/MainDbContext.cs
@@ -61,8 +61,8 @@ protected override void OnModelCreating(ModelBuilder modelBuilder)
builder.HasIndex(j => j.Created).IsDescending(); // faster filtering
builder.HasIndex(j => j.ProjectId);
builder.HasIndex(j => j.RepositoryId);
- builder.HasIndex(j => new { j.PackageEcosystem, j.Directory, }); // faster filtering
- builder.HasIndex(j => new { j.PackageEcosystem, j.Directory, j.EventBusId, }).IsUnique();
+ builder.HasIndex(j => new { j.PackageEcosystem, j.Directory, j.Directories }); // faster filtering
+ builder.HasIndex(j => new { j.PackageEcosystem, j.Directory, j.Directories, j.EventBusId, }).IsUnique();
builder.HasIndex(j => j.AuthKey).IsUnique();
builder.OwnsOne(j => j.Resources);
diff --git a/server/Tingle.Dependabot/Models/Management/UpdateJob.cs b/server/Tingle.Dependabot/Models/Management/UpdateJob.cs
index 01736d80..47df0cbc 100644
--- a/server/Tingle.Dependabot/Models/Management/UpdateJob.cs
+++ b/server/Tingle.Dependabot/Models/Management/UpdateJob.cs
@@ -49,10 +49,12 @@ public class UpdateJob
[JsonIgnore] // only for internal use
public string? PackageEcosystem { get; set; }
- /// Identifier of the repository update.
- [Required]
+ /// Directory targeted by the repository update.
public string? Directory { get; set; }
+ /// Directories targeted by the repository update.
+ public List<string>? Directories { get; set; }
+
/// Resources provisioned for the update.
[Required]
public UpdateJobResources? Resources { get; set; }
diff --git a/server/Tingle.Dependabot/Properties/launchSettings.json b/server/Tingle.Dependabot/Properties/launchSettings.json
index 32bf3c92..349d6fba 100644
--- a/server/Tingle.Dependabot/Properties/launchSettings.json
+++ b/server/Tingle.Dependabot/Properties/launchSettings.json
@@ -25,4 +25,4 @@
"useSSL": true
}
}
-}
\ No newline at end of file
+}
diff --git a/server/Tingle.Dependabot/Tingle.Dependabot.csproj b/server/Tingle.Dependabot/Tingle.Dependabot.csproj
index 4092ab23..a7af168e 100644
--- a/server/Tingle.Dependabot/Tingle.Dependabot.csproj
+++ b/server/Tingle.Dependabot/Tingle.Dependabot.csproj
@@ -18,10 +18,10 @@
-
+
-
-
+
+
@@ -29,18 +29,18 @@
-
+
-
-
-
+
+
+
-
+
diff --git a/server/Tingle.Dependabot/Workflow/UpdateRunner.cs b/server/Tingle.Dependabot/Workflow/UpdateRunner.cs
index 47bba628..0c41f804 100644
--- a/server/Tingle.Dependabot/Workflow/UpdateRunner.cs
+++ b/server/Tingle.Dependabot/Workflow/UpdateRunner.cs
@@ -77,7 +77,7 @@ public async Task CreateAsync(Project project, Repository repository, Repository
Name = UpdaterContainerName,
Image = $"ghcr.io/tinglesoftware/dependabot-updater-{ecosystem}:{updaterImageTag}",
Resources = job.Resources!,
- Args = { useV2 ? "update_files" : "update_script", },
+ Args = { useV2 ? "update_files" : "update_script_vnext", },
VolumeMounts = { new ContainerAppVolumeMount { VolumeName = volumeName, MountPath = options.WorkingDirectory, }, },
};
var env = await CreateEnvironmentVariables(project, repository, update, job, directory, credentials, cancellationToken);
@@ -117,10 +117,11 @@ public async Task CreateAsync(Project project, Repository repository, Repository
["purpose"] = "dependabot",
["ecosystem"] = ecosystem,
["repository"] = job.RepositorySlug,
- ["directory"] = job.Directory,
["machine-name"] = Environment.MachineName,
},
};
+ data.Tags.AddIfNotDefault("directory", job.Directory);
+ data.Tags.AddIfNotDefault("directories", ToJson(job.Directories));
// write job definition file
var experiments = new Dictionary
@@ -174,13 +175,7 @@ public async Task DeleteAsync(UpdateJob job, CancellationToken cancellationToken
var execution = executions.SingleOrDefault();
if (execution is null) return null;
- // this is a temporary workaround
- // TODO: remove this after https://github.com/Azure/azure-sdk-for-net/issues/38385 is fixed
- var rr = await resource.GetContainerAppJobExecutionAsync(execution.Data.Name, cancellationToken);
- var properties = JsonNode.Parse(rr.GetRawResponse().Content.ToString())!.AsObject()["properties"]!;
-
- //var status = execution.Data.Properties.Status.ToString() switch
- var status = properties["status"]!.GetValue<string>() switch
+ var status = execution.Data.Status.ToString() switch
{
"Succeeded" => UpdateJobStatus.Succeeded,
"Running" => UpdateJobStatus.Running,
@@ -199,8 +194,7 @@ public async Task DeleteAsync(UpdateJob job, CancellationToken cancellationToken
}
// get the period
- //DateTimeOffset? start = execution.Data.Properties.StartTime, end = execution.Data.Properties.EndTime;
- DateTimeOffset? start = properties["startTime"]?.GetValue<DateTimeOffset>(), end = properties["endTime"]?.GetValue<DateTimeOffset>();
+ DateTimeOffset? start = execution.Data.StartOn, end = execution.Data.EndOn;
// create and return state
return new UpdateRunnerState(status, start, end);
@@ -238,9 +232,6 @@ internal async Task> CreateEnvironmentVariables(Proj
IList> credentials,
CancellationToken cancellationToken = default) // TODO: unit test this
{
- [return: NotNullIfNotNull(nameof(value))]
- static string? ToJson<T>(T? value) => value is null ? null : JsonSerializer.Serialize(value, serializerOptions); // null ensures we do not add to the values
-
// check if debug and determinism is enabled for the project via Feature Management
var fmc = MakeTargetingContext(project, job);
var debugAllJobs = await featureManager.IsEnabledAsync(FeatureNames.DebugAllJobs); // context is not passed because this is global
@@ -263,7 +254,6 @@ internal async Task> CreateEnvironmentVariables(Proj
// env for v1
["DEPENDABOT_PACKAGE_MANAGER"] = job.PackageEcosystem!,
- ["DEPENDABOT_DIRECTORY"] = job.Directory!,
["DEPENDABOT_OPEN_PULL_REQUESTS_LIMIT"] = update.OpenPullRequestsLimit.ToString(),
["DEPENDABOT_EXTRA_CREDENTIALS"] = ToJson(credentials),
["DEPENDABOT_FAIL_ON_EXCEPTION"] = "false", // we the script to run to completion so that we get notified of job completion
@@ -272,10 +262,13 @@ internal async Task> CreateEnvironmentVariables(Proj
// Add optional values
values.AddIfNotDefault("GITHUB_ACCESS_TOKEN", project.GithubToken ?? options.GithubToken)
.AddIfNotDefault("DEPENDABOT_REBASE_STRATEGY", update.RebaseStrategy)
+ .AddIfNotDefault("DEPENDABOT_DIRECTORY", update.Directory)
+ .AddIfNotDefault("DEPENDABOT_DIRECTORIES", ToJson(update.Directories))
.AddIfNotDefault("DEPENDABOT_TARGET_BRANCH", update.TargetBranch)
.AddIfNotDefault("DEPENDABOT_VENDOR", update.Vendor ? "true" : null)
.AddIfNotDefault("DEPENDABOT_REJECT_EXTERNAL_CODE", string.Equals(update.InsecureExternalCodeExecution, "deny").ToString().ToLowerInvariant())
.AddIfNotDefault("DEPENDABOT_VERSIONING_STRATEGY", update.VersioningStrategy)
+ .AddIfNotDefault("DEPENDABOT_DEPENDENCY_GROUPS", ToJson(update.Groups))
.AddIfNotDefault("DEPENDABOT_ALLOW_CONDITIONS", ToJson(update.Allow))
.AddIfNotDefault("DEPENDABOT_IGNORE_CONDITIONS", ToJson(update.Ignore))
.AddIfNotDefault("DEPENDABOT_COMMIT_MESSAGE_OPTIONS", ToJson(update.CommitMessage))
@@ -320,10 +313,12 @@ internal async Task WriteJobDefinitionAsync(Project project,
{
["job"] = new JsonObject
{
+ ["dependency-groups"] = ToJsonNode(update.Groups ?? []),
["allowed-updates"] = ToJsonNode(update.Allow ?? []),
["credentials-metadata"] = ToJsonNode(credentialsMetadata).AsArray(),
// ["dependencies"] = null, // object array
["directory"] = job.Directory,
+ ["directories"] = ToJsonNode(job.Directories),
// ["existing-pull-requests"] = null, // object array
["experiments"] = ToJsonNode(experiments),
["ignore-conditions"] = ToJsonNode(update.Ignore ?? []),
@@ -335,6 +330,7 @@ internal async Task WriteJobDefinitionAsync(Project project,
["provider"] = "azure",
["repo"] = job.RepositorySlug,
["directory"] = job.Directory,
+ ["directories"] = ToJsonNode(job.Directories),
["branch"] = update.TargetBranch,
["hostname"] = url.Hostname,
["api-endpoint"] = new UriBuilder
@@ -381,18 +377,17 @@ internal static IList> MakeCredentialsMetadata(IList<
return credentials.Select(cred =>
{
var values = new Dictionary<string, string> { ["type"] = cred["type"], };
- cred.TryGetValue("host", out var host);
- // pull host from registry if available
- if (string.IsNullOrWhiteSpace(host))
+ // if no host, pull host from url, index-url, or registry if available
+ if (!cred.TryGetValue("host", out var host) || string.IsNullOrWhiteSpace(host))
{
- host = cred.TryGetValue("registry", out var registry) && Uri.TryCreate($"https://{registry}", UriKind.Absolute, out var u) ? u.Host : host;
- }
+ if (cred.TryGetValue("url", out var url) || cred.TryGetValue("index-url", out url)) { }
+ else if (cred.TryGetValue("registry", out var registry)) url = $"https://{registry}";
- // pull host from registry if url
- if (string.IsNullOrWhiteSpace(host))
- {
- host = cred.TryGetValue("url", out var url) && Uri.TryCreate(url, UriKind.Absolute, out var u) ? u.Host : host;
+ if (url is not null && Uri.TryCreate(url, UriKind.Absolute, out var u))
+ {
+ host = u.Host;
+ }
}
values.AddIfNotDefault("host", host);
@@ -424,23 +419,31 @@ internal static IList> MakeExtraCredentials(ICollecti
values.AddIfNotDefault("token", ConvertPlaceholder(v.Token, secrets));
values.AddIfNotDefault("replaces-base", v.ReplacesBase is true ? "true" : null);
- // Some credentials do not use the 'url' property in the Ruby updater.
- // npm_registry and docker_registry use 'registry' which should be stripped off the scheme.
- // terraform_registry uses 'host' which is the hostname from the given URL.
-
- if (type == "docker_registry" || type == "npm_registry")
+ /*
+ * Some credentials do not use the 'url' property in the Ruby updater.
+ * The 'host' and 'registry' properties are derived from the given URL.
+ * The 'registry' property is derived from the 'url' by stripping off the scheme.
+ * The 'host' property is derived from the hostname of the 'url'.
+ *
+ * 'npm_registry' and 'docker_registry' use 'registry' only.
+ * 'terraform_registry' uses 'host' only.
+ * 'composer_repository' uses both 'url' and 'host'.
+ * 'python_index' uses 'index-url' instead of 'url'.
+ */
+
+ if (Uri.TryCreate(v.Url, UriKind.Absolute, out var url))
{
- values.Add("registry", v.Url!.Replace("https://", "").Replace("http://", ""));
- }
- else if (type == "terraform_registry")
- {
- values.Add("host", new Uri(v.Url!).Host);
- }
- else
- {
- values.AddIfNotDefault("url", v.Url!);
+ var addRegistry = type is "docker_registry" or "npm_registry";
+ if (addRegistry) values.Add("registry", $"{url.Host}{url.PathAndQuery}".TrimEnd('/'));
+
+ var addHost = type is "terraform_registry" or "composer_repository";
+ if (addHost) values.Add("host", url.Host);
}
- var useRegistryProperty = type.Contains("npm") || type.Contains("docker");
+
+ if (type is "python_index") values.AddIfNotDefault("index-url", v.Url);
+
+ var skipUrl = type is "docker_registry" or "npm_registry" or "terraform_registry" or "python_index";
+ if (!skipUrl) values.AddIfNotDefault("url", v.Url);
return values;
}).ToList();
@@ -485,6 +488,9 @@ internal static IList> MakeExtraCredentials(ICollecti
_ => ecosystem,
};
}
+
+ [return: NotNullIfNotNull(nameof(value))]
+ private static string? ToJson<T>(T? value) => value is null ? null : JsonSerializer.Serialize(value, serializerOptions); // null ensures we do not add to the values
}
public readonly record struct UpdateRunnerState(UpdateJobStatus Status, DateTimeOffset? Start, DateTimeOffset? End)
diff --git a/server/main.bicep b/server/main.bicep
index 8409aa60..5ba5ad18 100644
--- a/server/main.bicep
+++ b/server/main.bicep
@@ -103,10 +103,6 @@ resource appConfiguration 'Microsoft.AppConfiguration/configurationStores@2023-0
'${managedIdentity.id}': {/*ttk bug*/ }
}
}
-
- // override the default updater image tag for nuget jobs
- // TODO: remove this here and on Azure once the authentication issues are resolved (https://github.com/tinglesoftware/dependabot-azure-devops/issues/921)
- resource nugetVersion 'keyValues' = { name: 'Workflow:UpdaterImageTags:nuget$Production', properties: { value: '1.24' } }
}
/* Storage Account */
diff --git a/server/main.json b/server/main.json
index 3babaf67..ab52bc5b 100644
--- a/server/main.json
+++ b/server/main.json
@@ -1,6 +1,13 @@
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
+ "metadata": {
+ "_generator": {
+ "name": "bicep",
+ "version": "0.29.47.4906",
+ "templateHash": "3661784367808800983"
+ }
+ },
"parameters": {
"location": {
"type": "string",
@@ -143,17 +150,6 @@
"[resourceId('Microsoft.KeyVault/vaults', parameters('name'))]"
]
},
- {
- "type": "Microsoft.AppConfiguration/configurationStores/keyValues",
- "apiVersion": "2023-03-01",
- "name": "[format('{0}/{1}', parameters('name'), 'Workflow:UpdaterImageTags:nuget$Production')]",
- "properties": {
- "value": "1.24"
- },
- "dependsOn": [
- "[resourceId('Microsoft.AppConfiguration/configurationStores', parameters('name'))]"
- ]
- },
{
"copy": {
"name": "shares",
diff --git a/update-files.ps1 b/update-files.ps1
index 4341ddbe..83a81ae9 100644
--- a/update-files.ps1
+++ b/update-files.ps1
@@ -51,7 +51,9 @@ $files = @(
"updater/lib/dependabot/environment.rb"
"updater/lib/dependabot/file_fetcher_command.rb"
"updater/lib/dependabot/job.rb"
+ "updater/lib/dependabot/notices_helpers.rb"
"updater/lib/dependabot/opentelemetry.rb"
+ "updater/lib/dependabot/pull_request.rb"
"updater/lib/dependabot/sentry.rb"
"updater/lib/dependabot/service.rb"
"updater/lib/dependabot/setup.rb"
diff --git a/updater/Gemfile b/updater/Gemfile
index 440679f1..7732c5ac 100644
--- a/updater/Gemfile
+++ b/updater/Gemfile
@@ -8,20 +8,20 @@ source "https://rubygems.org"
# They are so many, our reference won't be found for it to be updated.
# Hence adding the branch.
-gem "dependabot-omnibus", "~>0.268.0"
+gem "dependabot-omnibus", "~>0.278.0"
# gem "dependabot-omnibus", github: "dependabot/dependabot-core", branch: "main"
# gem "dependabot-omnibus", github: "dependabot/dependabot-core", tag: "v0.232.0"
# gem "dependabot-omnibus", github: "dependabot/dependabot-core", ref: "ffde6f6"
gem "http", "~> 5.2"
gem "octokit", "6.1.1"
-gem "opentelemetry-exporter-otlp", "~> 0.28"
+gem "opentelemetry-exporter-otlp", "~> 0.29"
gem "opentelemetry-instrumentation-excon", "~> 0.22"
gem "opentelemetry-instrumentation-faraday", "~> 0.24"
gem "opentelemetry-instrumentation-http", "~> 0.23"
gem "opentelemetry-instrumentation-net_http", "~> 0.22"
gem "opentelemetry-sdk", "~> 1.5"
-gem "sentry-opentelemetry", "~> 5.19"
+gem "sentry-opentelemetry", "~> 5.20"
gem "sentry-ruby", "~> 5.17"
gem "terminal-table", "~> 3.0.2"
@@ -40,7 +40,7 @@ group :test do
gem "rubocop-sorbet", "~> 0.8.1"
gem "simplecov", "~> 0.22.0"
gem "turbo_tests", "~> 2.2.0"
- gem "vcr", "~> 6.1"
+ gem "vcr", "~> 6.3"
gem "webmock", "~> 3.18"
gem "webrick", ">= 1.7"
end
diff --git a/updater/Gemfile.lock b/updater/Gemfile.lock
index 68188682..d885188a 100644
--- a/updater/Gemfile.lock
+++ b/updater/Gemfile.lock
@@ -5,19 +5,19 @@ GEM
public_suffix (>= 2.0.2, < 7.0)
ast (2.4.2)
aws-eventstream (1.3.0)
- aws-partitions (1.961.0)
- aws-sdk-codecommit (1.72.0)
- aws-sdk-core (~> 3, >= 3.201.0)
+ aws-partitions (1.981.0)
+ aws-sdk-codecommit (1.78.0)
+ aws-sdk-core (~> 3, >= 3.207.0)
aws-sigv4 (~> 1.5)
- aws-sdk-core (3.201.3)
+ aws-sdk-core (3.209.1)
aws-eventstream (~> 1, >= 1.3.0)
aws-partitions (~> 1, >= 1.651.0)
- aws-sigv4 (~> 1.8)
+ aws-sigv4 (~> 1.9)
jmespath (~> 1, >= 1.6.1)
- aws-sdk-ecr (1.79.0)
- aws-sdk-core (~> 3, >= 3.201.0)
+ aws-sdk-ecr (1.87.0)
+ aws-sdk-core (~> 3, >= 3.207.0)
aws-sigv4 (~> 1.5)
- aws-sigv4 (1.9.1)
+ aws-sigv4 (1.10.0)
aws-eventstream (~> 1, >= 1.0.2)
base64 (0.2.0)
bigdecimal (3.1.8)
@@ -31,12 +31,12 @@ GEM
debug (1.9.2)
irb (~> 1.10)
reline (>= 0.3.8)
- dependabot-bundler (0.268.0)
- dependabot-common (= 0.268.0)
+ dependabot-bundler (0.278.0)
+ dependabot-common (= 0.278.0)
parallel (~> 1.24)
- dependabot-cargo (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-common (0.268.0)
+ dependabot-cargo (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-common (0.278.0)
aws-sdk-codecommit (~> 1.28)
aws-sdk-ecr (~> 1.5)
bundler (>= 1.16, < 3.0.0)
@@ -45,71 +45,71 @@ GEM
excon (~> 0.109)
faraday (= 2.7.11)
faraday-retry (= 2.2.0)
- gitlab (= 4.19.0)
+ gitlab (= 5.0.0)
json (< 2.7)
nokogiri (~> 1.8)
- octokit (>= 4.6, < 7.0)
+ octokit (>= 4.6, < 8.0)
opentelemetry-sdk (~> 1.3)
parser (>= 2.5, < 4.0)
psych (~> 5.0)
sorbet-runtime (~> 0.5.11178)
stackprof (~> 0.2.16)
toml-rb (>= 1.1.2, < 4.0)
- dependabot-composer (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-devcontainers (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-docker (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-elm (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-git_submodules (0.268.0)
- dependabot-common (= 0.268.0)
+ dependabot-composer (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-devcontainers (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-docker (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-elm (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-git_submodules (0.278.0)
+ dependabot-common (= 0.278.0)
parseconfig (~> 1.0, < 1.1.0)
- dependabot-github_actions (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-go_modules (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-gradle (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-maven (= 0.268.0)
- dependabot-hex (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-maven (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-npm_and_yarn (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-nuget (0.268.0)
- dependabot-common (= 0.268.0)
+ dependabot-github_actions (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-go_modules (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-gradle (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-maven (= 0.278.0)
+ dependabot-hex (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-maven (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-npm_and_yarn (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-nuget (0.278.0)
+ dependabot-common (= 0.278.0)
rubyzip (>= 2.3.2, < 3.0)
- dependabot-omnibus (0.268.0)
- dependabot-bundler (= 0.268.0)
- dependabot-cargo (= 0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-composer (= 0.268.0)
- dependabot-devcontainers (= 0.268.0)
- dependabot-docker (= 0.268.0)
- dependabot-elm (= 0.268.0)
- dependabot-git_submodules (= 0.268.0)
- dependabot-github_actions (= 0.268.0)
- dependabot-go_modules (= 0.268.0)
- dependabot-gradle (= 0.268.0)
- dependabot-hex (= 0.268.0)
- dependabot-maven (= 0.268.0)
- dependabot-npm_and_yarn (= 0.268.0)
- dependabot-nuget (= 0.268.0)
- dependabot-pub (= 0.268.0)
- dependabot-python (= 0.268.0)
- dependabot-swift (= 0.268.0)
- dependabot-terraform (= 0.268.0)
- dependabot-pub (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-python (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-swift (0.268.0)
- dependabot-common (= 0.268.0)
- dependabot-terraform (0.268.0)
- dependabot-common (= 0.268.0)
+ dependabot-omnibus (0.278.0)
+ dependabot-bundler (= 0.278.0)
+ dependabot-cargo (= 0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-composer (= 0.278.0)
+ dependabot-devcontainers (= 0.278.0)
+ dependabot-docker (= 0.278.0)
+ dependabot-elm (= 0.278.0)
+ dependabot-git_submodules (= 0.278.0)
+ dependabot-github_actions (= 0.278.0)
+ dependabot-go_modules (= 0.278.0)
+ dependabot-gradle (= 0.278.0)
+ dependabot-hex (= 0.278.0)
+ dependabot-maven (= 0.278.0)
+ dependabot-npm_and_yarn (= 0.278.0)
+ dependabot-nuget (= 0.278.0)
+ dependabot-pub (= 0.278.0)
+ dependabot-python (= 0.278.0)
+ dependabot-swift (= 0.278.0)
+ dependabot-terraform (= 0.278.0)
+ dependabot-pub (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-python (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-swift (0.278.0)
+ dependabot-common (= 0.278.0)
+ dependabot-terraform (0.278.0)
+ dependabot-common (= 0.278.0)
diff-lcs (1.5.1)
docile (1.4.1)
docker_registry2 (1.18.2)
@@ -138,28 +138,28 @@ GEM
ffi (>= 1.15.5)
rake
flamegraph (0.9.5)
- gitlab (4.19.0)
+ gitlab (5.0.0)
httparty (~> 0.20)
terminal-table (>= 1.5.1)
- google-protobuf (4.27.3)
+ google-protobuf (4.28.2)
bigdecimal
rake (>= 13)
- google-protobuf (4.27.3-aarch64-linux)
+ google-protobuf (4.28.2-aarch64-linux)
bigdecimal
rake (>= 13)
- google-protobuf (4.27.3-arm64-darwin)
+ google-protobuf (4.28.2-arm64-darwin)
bigdecimal
rake (>= 13)
- google-protobuf (4.27.3-x86-linux)
+ google-protobuf (4.28.2-x86-linux)
bigdecimal
rake (>= 13)
- google-protobuf (4.27.3-x86_64-darwin)
+ google-protobuf (4.28.2-x86_64-darwin)
bigdecimal
rake (>= 13)
- google-protobuf (4.27.3-x86_64-linux)
+ google-protobuf (4.28.2-x86_64-linux)
bigdecimal
rake (>= 13)
- googleapis-common-protos-types (1.15.0)
+ googleapis-common-protos-types (1.16.0)
google-protobuf (>= 3.18, < 5.a)
gpgme (2.0.24)
mini_portile2 (~> 2.7)
@@ -171,7 +171,7 @@ GEM
http-form_data (~> 2.2)
llhttp-ffi (~> 0.5.0)
http-accept (1.7.0)
- http-cookie (1.0.6)
+ http-cookie (1.0.7)
domain_name (~> 0.5)
http-form_data (2.3.0)
httparty (0.22.0)
@@ -190,7 +190,7 @@ GEM
rake (~> 13.0)
mime-types (3.5.2)
mime-types-data (~> 3.2015)
- mime-types-data (3.2024.0702)
+ mime-types-data (3.2024.0903)
mini_mime (1.1.5)
mini_portile2 (2.8.7)
multi_xml (0.7.1)
@@ -214,17 +214,17 @@ GEM
octokit (6.1.1)
faraday (>= 1, < 3)
sawyer (~> 0.9)
- opentelemetry-api (1.3.0)
+ opentelemetry-api (1.4.0)
opentelemetry-common (0.21.0)
opentelemetry-api (~> 1.0)
- opentelemetry-exporter-otlp (0.28.1)
+ opentelemetry-exporter-otlp (0.29.0)
google-protobuf (>= 3.18)
googleapis-common-protos-types (~> 1.3)
opentelemetry-api (~> 1.1)
opentelemetry-common (~> 0.20)
opentelemetry-sdk (~> 1.2)
opentelemetry-semantic_conventions
- opentelemetry-instrumentation-base (0.22.5)
+ opentelemetry-instrumentation-base (0.22.6)
opentelemetry-api (~> 1.0)
opentelemetry-common (~> 0.21)
opentelemetry-registry (~> 0.1)
@@ -249,11 +249,11 @@ GEM
opentelemetry-semantic_conventions
opentelemetry-semantic_conventions (1.10.1)
opentelemetry-api (~> 1.0)
- parallel (1.25.1)
- parallel_tests (4.7.1)
+ parallel (1.26.3)
+ parallel_tests (4.7.2)
parallel
parseconfig (1.0.8)
- parser (3.3.4.0)
+ parser (3.3.5.0)
ast (~> 2.4.1)
racc
psych (5.1.2)
@@ -265,22 +265,21 @@ GEM
rdoc (6.7.0)
psych (>= 4.0.0)
regexp_parser (2.9.2)
- reline (0.5.9)
+ reline (0.5.10)
io-console (~> 0.5)
rest-client (2.1.0)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
- rexml (3.3.4)
- strscan
+ rexml (3.3.7)
rspec (3.13.0)
rspec-core (~> 3.13.0)
rspec-expectations (~> 3.13.0)
rspec-mocks (~> 3.13.0)
- rspec-core (3.13.0)
+ rspec-core (3.13.1)
rspec-support (~> 3.13.0)
- rspec-expectations (3.13.1)
+ rspec-expectations (3.13.3)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.13.0)
rspec-its (1.3.0)
@@ -303,7 +302,7 @@ GEM
rubocop-ast (>= 1.31.1, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.31.3)
+ rubocop-ast (1.32.3)
parser (>= 3.3.1.0)
rubocop-capybara (2.21.0)
rubocop (~> 1.41)
@@ -327,22 +326,21 @@ GEM
sawyer (0.9.2)
addressable (>= 2.3.5)
faraday (>= 0.17.3, < 3)
- sentry-opentelemetry (5.19.0)
+ sentry-opentelemetry (5.20.1)
opentelemetry-sdk (~> 1.0)
- sentry-ruby (~> 5.19.0)
- sentry-ruby (5.19.0)
+ sentry-ruby (~> 5.20.1)
+ sentry-ruby (5.20.1)
bigdecimal
concurrent-ruby (~> 1.0, >= 1.0.2)
simplecov (0.22.0)
docile (~> 1.1)
simplecov-html (~> 0.11)
simplecov_json_formatter (~> 0.1)
- simplecov-html (0.12.3)
+ simplecov-html (0.13.1)
simplecov_json_formatter (0.1.4)
- sorbet-runtime (0.5.11506)
+ sorbet-runtime (0.5.11589)
stackprof (0.2.26)
stringio (3.1.1)
- strscan (3.1.0)
terminal-table (3.0.2)
unicode-display_width (>= 1.1.1, < 3)
toml-rb (3.0.1)
@@ -351,13 +349,14 @@ GEM
turbo_tests (2.2.4)
parallel_tests (>= 3.3.0, < 5)
rspec (>= 3.10)
- unicode-display_width (2.5.0)
- vcr (6.2.0)
+ unicode-display_width (2.6.0)
+ vcr (6.3.1)
+ base64
webmock (3.23.1)
addressable (>= 2.8.0)
crack (>= 0.3.2)
hashdiff (>= 0.4.0, < 2.0.0)
- webrick (1.8.1)
+ webrick (1.8.2)
PLATFORMS
aarch64-linux
@@ -378,12 +377,12 @@ PLATFORMS
DEPENDENCIES
debug (~> 1.9.2)
- dependabot-omnibus (~> 0.268.0)
+ dependabot-omnibus (~> 0.278.0)
flamegraph (~> 0.9.5)
gpgme (~> 2.0)
http (~> 5.2)
octokit (= 6.1.1)
- opentelemetry-exporter-otlp (~> 0.28)
+ opentelemetry-exporter-otlp (~> 0.29)
opentelemetry-instrumentation-excon (~> 0.22)
opentelemetry-instrumentation-faraday (~> 0.24)
opentelemetry-instrumentation-http (~> 0.23)
@@ -397,12 +396,12 @@ DEPENDENCIES
rubocop-performance (~> 1.21.0)
rubocop-rspec (~> 2.29.1)
rubocop-sorbet (~> 0.8.1)
- sentry-opentelemetry (~> 5.19)
+ sentry-opentelemetry (~> 5.20)
sentry-ruby (~> 5.17)
simplecov (~> 0.22.0)
terminal-table (~> 3.0.2)
turbo_tests (~> 2.2.0)
- vcr (~> 6.1)
+ vcr (~> 6.3)
webmock (~> 3.18)
webrick (>= 1.7)
diff --git a/updater/lib/dependabot/api_client.rb b/updater/lib/dependabot/api_client.rb
index 65be0c1d..24b40822 100644
--- a/updater/lib/dependabot/api_client.rb
+++ b/updater/lib/dependabot/api_client.rb
@@ -102,8 +102,11 @@ def close_pull_request(dependency_names, reason)
sig { params(error_type: T.any(String, Symbol), error_details: T.nilable(T::Hash[T.untyped, T.untyped])).void }
def record_update_job_error(error_type:, error_details:)
::Dependabot::OpenTelemetry.tracer.in_span("record_update_job_error", kind: :internal) do |_span|
- ::Dependabot::OpenTelemetry.record_update_job_error(job_id: job_id, error_type: error_type,
- error_details: error_details)
+ ::Dependabot::OpenTelemetry.record_update_job_error(
+ job_id: job_id,
+ error_type: error_type,
+ error_details: error_details
+ )
api_url = "#{base_url}/update_jobs/#{job_id}/record_update_job_error"
body = {
data: {
@@ -123,6 +126,41 @@ def record_update_job_error(error_type:, error_details:)
end
end
+ sig do
+ params(
+ warn_type: T.any(String, Symbol),
+ warn_title: String,
+ warn_description: String
+ ).void
+ end
+ def record_update_job_warning(warn_type:, warn_title:, warn_description:)
+ ::Dependabot::OpenTelemetry.tracer.in_span("record_update_job_message", kind: :internal) do |_span|
+ ::Dependabot::OpenTelemetry.record_update_job_warning(
+ job_id: job_id,
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+ api_url = "#{base_url}/update_jobs/#{job_id}/record_update_job_warning"
+ body = {
+ data: {
+ "warn-type": warn_type,
+ "warn-title": warn_title,
+ "warn-description": warn_description
+ }
+ }
+ response = http_client.post(api_url, json: body)
+ raise ApiError, response.body if response.code >= 400
+ rescue HTTP::ConnectionError, OpenSSL::SSL::SSLError
+ retry_count ||= 0
+ retry_count += 1
+ raise if retry_count > 3
+
+ sleep(rand(3.0..10.0))
+ retry
+ end
+ end
+
sig { params(error_type: T.any(Symbol, String), error_details: T.nilable(T::Hash[T.untyped, T.untyped])).void }
def record_update_job_unknown_error(error_type:, error_details:)
error_type = "unknown_error" if error_type.nil?
diff --git a/updater/lib/dependabot/dependency_change.rb b/updater/lib/dependabot/dependency_change.rb
index 161cbb50..a7065986 100644
--- a/updater/lib/dependabot/dependency_change.rb
+++ b/updater/lib/dependabot/dependency_change.rb
@@ -45,15 +45,19 @@ def initialize(deps_no_previous_version:, deps_no_change:)
sig { returns(T.nilable(Dependabot::DependencyGroup)) }
attr_reader :dependency_group
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
+
sig do
params(
job: Dependabot::Job,
updated_dependencies: T::Array[Dependabot::Dependency],
updated_dependency_files: T::Array[Dependabot::DependencyFile],
- dependency_group: T.nilable(Dependabot::DependencyGroup)
+ dependency_group: T.nilable(Dependabot::DependencyGroup),
+ notices: T::Array[Dependabot::Notice]
).void
end
- def initialize(job:, updated_dependencies:, updated_dependency_files:, dependency_group: nil)
+ def initialize(job:, updated_dependencies:, updated_dependency_files:, dependency_group: nil, notices: [])
@job = job
@updated_dependencies = updated_dependencies
@updated_dependency_files = updated_dependency_files
@@ -61,6 +65,7 @@ def initialize(job:, updated_dependencies:, updated_dependency_files:, dependenc
@pr_message = T.let(nil, T.nilable(Dependabot::PullRequestCreator::Message))
ensure_dependencies_have_directories
+ @notices = notices
end
sig { returns(Dependabot::PullRequestCreator::Message) }
@@ -90,7 +95,8 @@ def pr_message
dependency_group: dependency_group,
pr_message_max_length: pr_message_max_length,
pr_message_encoding: pr_message_encoding,
- ignore_conditions: job.ignore_conditions
+ ignore_conditions: job.ignore_conditions,
+ notices: notices
).message
@pr_message = message
@@ -135,9 +141,11 @@ def merge_changes!(dependency_changes)
dependency_changes.each do |dependency_change|
updated_dependencies.concat(dependency_change.updated_dependencies)
updated_dependency_files.concat(dependency_change.updated_dependency_files)
+ notices.concat(dependency_change.notices)
end
updated_dependencies.compact!
updated_dependency_files.compact!
+ notices.compact!
end
sig { returns(T::Boolean) }
@@ -171,11 +179,7 @@ def matches_existing_pr?
Set.new(pr["dependencies"]) == updated_dependencies_set(should_consider_directory: directories_in_use)
end
else
- job.existing_pull_requests.any? do |pr|
- directories_in_use = pr.all? { |dep| dep["directory"] }
-
- Set.new(pr) == updated_dependencies_set(should_consider_directory: directories_in_use)
- end
+ job.existing_pull_requests.any?(new_pr)
end
end
@@ -197,6 +201,12 @@ def updated_dependencies_set(should_consider_directory:)
)
end
+ sig { returns(PullRequest) }
+ def new_pr
+ @new_pr ||= T.let(PullRequest.create_from_updated_dependencies(updated_dependencies),
+ T.nilable(Dependabot::PullRequest))
+ end
+
sig { returns(T::Array[Dependabot::Dependency]) }
def ensure_dependencies_have_directories
updated_dependencies.each do |dep|
diff --git a/updater/lib/dependabot/dependency_change_builder.rb b/updater/lib/dependabot/dependency_change_builder.rb
index 6e09dcf2..7ae3894e 100644
--- a/updater/lib/dependabot/dependency_change_builder.rb
+++ b/updater/lib/dependabot/dependency_change_builder.rb
@@ -30,15 +30,17 @@ class DependencyChangeBuilder
job: Dependabot::Job,
dependency_files: T::Array[Dependabot::DependencyFile],
updated_dependencies: T::Array[Dependabot::Dependency],
- change_source: T.any(Dependabot::Dependency, Dependabot::DependencyGroup)
+ change_source: T.any(Dependabot::Dependency, Dependabot::DependencyGroup),
+ notices: T::Array[Dependabot::Notice]
).returns(Dependabot::DependencyChange)
end
- def self.create_from(job:, dependency_files:, updated_dependencies:, change_source:)
+ def self.create_from(job:, dependency_files:, updated_dependencies:, change_source:, notices: [])
new(
job: job,
dependency_files: dependency_files,
updated_dependencies: updated_dependencies,
- change_source: change_source
+ change_source: change_source,
+ notices: notices
).run
end
@@ -47,10 +49,11 @@ def self.create_from(job:, dependency_files:, updated_dependencies:, change_sour
job: Dependabot::Job,
dependency_files: T::Array[Dependabot::DependencyFile],
updated_dependencies: T::Array[Dependabot::Dependency],
- change_source: T.any(Dependabot::Dependency, Dependabot::DependencyGroup)
+ change_source: T.any(Dependabot::Dependency, Dependabot::DependencyGroup),
+ notices: T::Array[Dependabot::Notice]
).void
end
- def initialize(job:, dependency_files:, updated_dependencies:, change_source:)
+ def initialize(job:, dependency_files:, updated_dependencies:, change_source:, notices: [])
@job = job
dir = Pathname.new(job.source.directory).cleanpath
@@ -61,6 +64,7 @@ def initialize(job:, dependency_files:, updated_dependencies:, change_source:)
@updated_dependencies = updated_dependencies
@change_source = change_source
+ @notices = notices
end
sig { returns(Dependabot::DependencyChange) }
@@ -84,7 +88,8 @@ def run
job: job,
updated_dependencies: updated_deps,
updated_dependency_files: updated_files,
- dependency_group: source_dependency_group
+ dependency_group: source_dependency_group,
+ notices: notices
)
end
@@ -102,6 +107,9 @@ def run
sig { returns(T.any(Dependabot::Dependency, Dependabot::DependencyGroup)) }
attr_reader :change_source
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
+
sig { returns(T.nilable(String)) }
def source_dependency_name
return nil unless change_source.is_a? Dependabot::Dependency
diff --git a/updater/lib/dependabot/dependency_snapshot.rb b/updater/lib/dependabot/dependency_snapshot.rb
index 9fe29693..b9c2c853 100644
--- a/updater/lib/dependabot/dependency_snapshot.rb
+++ b/updater/lib/dependabot/dependency_snapshot.rb
@@ -5,6 +5,7 @@
require "sorbet-runtime"
require "dependabot/file_parsers"
+require "dependabot/notices_helpers"
# This class describes the dependencies obtained from a project at a specific commit SHA
# including both the Dependabot::DependencyFile objects at that reference as well as
@@ -15,6 +16,7 @@
module Dependabot
class DependencySnapshot
extend T::Sig
+ include NoticesHelpers
sig do
params(job: Dependabot::Job, job_definition: T::Hash[String, T.untyped]).returns(Dependabot::DependencySnapshot)
@@ -65,6 +67,18 @@ def dependencies
T.must(@dependencies[@current_directory])
end
+ sig { returns(T.nilable(Dependabot::PackageManagerBase)) }
+ def package_manager
+ @package_manager[@current_directory]
+ end
+
+ sig { returns(T::Array[Dependabot::Notice]) }
+ def notices
+ # The notices array in the dependency snapshot stays immutable,
+ # so we can return a copy
+ @notices[@current_directory]&.dup || []
+ end
+
# Returns the subset of all project dependencies which are permitted
# by the project configuration.
sig { returns(T::Array[Dependabot::Dependency]) }
@@ -167,6 +181,9 @@ def initialize(job:, base_commit_sha:, dependency_files:) # rubocop:disable Metr
@current_directory = T.let("", String)
@dependencies = T.let({}, T::Hash[String, T::Array[Dependabot::Dependency]])
+ @package_manager = T.let({}, T::Hash[String, T.nilable(Dependabot::PackageManagerBase)])
+ @notices = T.let({}, T::Hash[String, T::Array[Dependabot::Notice]])
+
directories.each do |dir|
@current_directory = dir
@dependencies[dir] = parse_files!
@@ -216,7 +233,7 @@ def parse_files!
def dependency_file_parser
assert_current_directory_set!
job.source.directory = @current_directory
- Dependabot::FileParsers.for_package_manager(job.package_manager).new(
+ parser = Dependabot::FileParsers.for_package_manager(job.package_manager).new(
dependency_files: dependency_files,
repo_contents_path: job.repo_contents_path,
source: job.source,
@@ -224,6 +241,23 @@ def dependency_file_parser
reject_external_code: job.reject_external_code?,
options: job.experiments
)
+ # Add 'package_manager' to the dependency_snapshot to use it in operations
+ package_manager = parser.package_manager
+
+ @package_manager[@current_directory] = package_manager
+
+ # Log deprecation notices if the package manager is deprecated
+ # and add them to the notices array
+ notices_for_current_directory = []
+
+ # add deprecation notices for the package manager
+ add_deprecation_notice(
+ notices: notices_for_current_directory,
+ package_manager: package_manager
+ )
+ @notices[@current_directory] = notices_for_current_directory
+
+ parser
end
sig { params(group: Dependabot::DependencyGroup).returns(T::Array[T::Hash[String, String]]) }
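# Illustrative sketch (not part of the patch): how the new per-directory state on
# DependencySnapshot is expected to be read. The create_from_job_definition
# constructor name and the `job`/`job_definition` objects are assumed from the
# surrounding updater code rather than shown in this diff.
require "dependabot/dependency_snapshot"

snapshot = Dependabot::DependencySnapshot.create_from_job_definition(
  job: job,
  job_definition: job_definition
)

snapshot.current_directory = "/" # scope the accessors below to a single directory
snapshot.package_manager         # => the PackageManagerBase recorded while parsing "/" (or nil)
snapshot.notices                 # => a defensive copy of the notices collected for "/"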
diff --git a/updater/lib/dependabot/job.rb b/updater/lib/dependabot/job.rb
index 99fdc2d5..75f052e9 100644
--- a/updater/lib/dependabot/job.rb
+++ b/updater/lib/dependabot/job.rb
@@ -11,6 +11,7 @@
require "dependabot/experiments"
require "dependabot/requirements_update_strategy"
require "dependabot/source"
+require "dependabot/pull_request"
# Describes a single Dependabot workload within the GitHub-integrated Service
#
@@ -60,7 +61,7 @@ class Job
sig { returns(T.nilable(T::Array[String])) }
attr_reader :dependencies
- sig { returns(T::Array[T::Array[T::Hash[String, String]]]) }
+ sig { returns(T::Array[PullRequest]) }
attr_reader :existing_pull_requests
sig { returns(T::Array[T::Hash[String, T.untyped]]) }
@@ -139,8 +140,7 @@ def initialize(attributes) # rubocop:disable Metrics/AbcSize
end,
T::Array[Dependabot::Credential])
@dependencies = T.let(attributes.fetch(:dependencies), T.nilable(T::Array[T.untyped]))
- @existing_pull_requests = T.let(attributes.fetch(:existing_pull_requests),
- T::Array[T::Array[T::Hash[String, String]]])
+ @existing_pull_requests = T.let(PullRequest.create_from_job_definition(attributes), T::Array[PullRequest])
# TODO: Make this hash required
#
# We will need to do a pass updating the CLI and smoke tests before this is possible,
diff --git a/updater/lib/dependabot/notices_helpers.rb b/updater/lib/dependabot/notices_helpers.rb
new file mode 100644
index 00000000..b5a646c0
--- /dev/null
+++ b/updater/lib/dependabot/notices_helpers.rb
@@ -0,0 +1,75 @@
+# typed: strong
+# frozen_string_literal: true
+
+require "sorbet-runtime"
+require "dependabot/notices"
+require "dependabot/package_manager"
+
+# This module extracts helpers for notice generation that can be used
+# for showing notices in logs, PR messages and the alerts UI page.
+module Dependabot
+ module NoticesHelpers
+ extend T::Sig
+ extend T::Helpers
+
+ abstract!
+
+    # Add a deprecation notice to the notice list if the package manager is deprecated,
+    # e.g. notices << deprecation_notice if deprecation_notice
+ sig do
+ params(
+ notices: T::Array[Dependabot::Notice],
+ package_manager: T.nilable(PackageManagerBase)
+ )
+ .void
+ end
+ def add_deprecation_notice(notices:, package_manager:)
+ # Create a deprecation notice if the package manager is deprecated
+ deprecation_notice = create_deprecation_notice(package_manager)
+
+ return unless deprecation_notice
+
+ log_notice(deprecation_notice)
+
+ notices << deprecation_notice
+ end
+
+ sig { params(notice: Dependabot::Notice).void }
+ def log_notice(notice)
+ logger = Dependabot.logger
+      # Log each non-empty line of the notice description at its severity level
+ notice.description.each_line do |line|
+ line = line.strip
+ next if line.empty?
+
+ case notice.mode
+ when Dependabot::Notice::NoticeMode::INFO
+ logger.info(line)
+ when Dependabot::Notice::NoticeMode::WARN
+ logger.warn(line)
+ when Dependabot::Notice::NoticeMode::ERROR
+ logger.error(line)
+ else
+ logger.info(line)
+ end
+ end
+ end
+
+ private
+
+ sig { params(package_manager: T.nilable(PackageManagerBase)).returns(T.nilable(Dependabot::Notice)) }
+ def create_deprecation_notice(package_manager)
+      # Feature flag check to decide whether a deprecation notice should be added to notices.
+ return unless Dependabot::Experiments.enabled?(:add_deprecation_warn_to_pr_message)
+
+ return unless package_manager
+
+ return unless package_manager.is_a?(PackageManagerBase)
+
+ Notice.generate_pm_deprecation_notice(
+ package_manager
+ )
+ end
+ end
+end
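# Illustrative sketch (not part of the patch): a consumer mixes in NoticesHelpers
# and collects any deprecation notice for a package manager. `pm` stands in for
# whatever PackageManagerBase instance the file parser returned; a notice is only
# produced when the :add_deprecation_warn_to_pr_message experiment is enabled and
# the package manager reports itself as deprecated.
require "dependabot/notices_helpers"

class NoticeCollector
  include Dependabot::NoticesHelpers

  def collect(pm)
    notices = []
    # Logs the notice line by line and appends it to the array, when one applies.
    add_deprecation_notice(notices: notices, package_manager: pm)
    notices
  end
end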
diff --git a/updater/lib/dependabot/opentelemetry.rb b/updater/lib/dependabot/opentelemetry.rb
index 3d89b654..9a933b9f 100644
--- a/updater/lib/dependabot/opentelemetry.rb
+++ b/updater/lib/dependabot/opentelemetry.rb
@@ -10,6 +10,9 @@ module OpenTelemetry
module Attributes
JOB_ID = "dependabot.job.id"
+ WARN_TYPE = "dependabot.job.warn_type"
+ WARN_TITLE = "dependabot.job.warn_title"
+ WARN_DESCRIPTION = "dependabot.job.warn_description"
ERROR_TYPE = "dependabot.job.error_type"
ERROR_DETAILS = "dependabot.job.error_details"
METRIC = "dependabot.metric"
@@ -89,6 +92,26 @@ def self.record_update_job_error(job_id:, error_type:, error_details:)
current_span.add_event(error_type, attributes: attributes)
end
+ sig do
+ params(
+ job_id: T.any(String, Integer),
+ warn_type: T.any(String, Symbol),
+ warn_title: String,
+ warn_description: String
+ ).void
+ end
+ def self.record_update_job_warning(job_id:, warn_type:, warn_title:, warn_description:)
+ current_span = ::OpenTelemetry::Trace.current_span
+
+ attributes = {
+ Attributes::JOB_ID => job_id,
+ Attributes::WARN_TYPE => warn_type,
+ Attributes::WARN_TITLE => warn_title,
+ Attributes::WARN_DESCRIPTION => warn_description
+ }
+ current_span.add_event(warn_type, attributes: attributes)
+ end
+
sig do
params(
error: StandardError,
diff --git a/updater/lib/dependabot/pull_request.rb b/updater/lib/dependabot/pull_request.rb
new file mode 100644
index 00000000..f3f4e3a1
--- /dev/null
+++ b/updater/lib/dependabot/pull_request.rb
@@ -0,0 +1,108 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "sorbet-runtime"
+
+module Dependabot
+ class PullRequest
+ extend T::Sig
+
+ class Dependency
+ extend T::Sig
+
+ sig { returns(String) }
+ attr_reader :name
+
+ sig { returns(T.nilable(String)) }
+ attr_reader :version
+
+ sig { returns(T::Boolean) }
+ attr_reader :removed
+
+ sig { returns(T.nilable(String)) }
+ attr_reader :directory
+
+ sig { params(name: String, version: T.nilable(String), removed: T::Boolean, directory: T.nilable(String)).void }
+ def initialize(name:, version:, removed: false, directory: nil)
+ @name = name
+ @version = version
+ @removed = removed
+ @directory = directory
+ end
+
+ sig { returns(T::Hash[Symbol, T.untyped]) }
+ def to_h
+ {
+ name: name,
+ version: version,
+ removed: removed? ? true : nil,
+ directory: directory
+ }.compact
+ end
+
+ sig { returns(T::Boolean) }
+ def removed?
+ removed
+ end
+ end
+
+ sig { returns(T::Array[Dependency]) }
+ attr_reader :dependencies
+
+ sig { params(attributes: T::Hash[Symbol, T.untyped]).returns(T::Array[Dependabot::PullRequest]) }
+ def self.create_from_job_definition(attributes)
+ attributes.fetch(:existing_pull_requests).map do |pr|
+ new(
+ pr.map do |dep|
+ Dependency.new(
+ name: dep.fetch("dependency-name"),
+ version: dep.fetch("dependency-version", nil),
+ removed: dep.fetch("dependency-removed", false),
+ directory: dep.fetch("directory", nil)
+ )
+ end
+ )
+ end
+ end
+
+ sig { params(updated_dependencies: T::Array[Dependabot::Dependency]).returns(Dependabot::PullRequest) }
+ def self.create_from_updated_dependencies(updated_dependencies)
+ new(
+ updated_dependencies.filter_map do |dep|
+ Dependency.new(
+ name: dep.name,
+ version: dep.version,
+ removed: dep.removed?,
+ directory: dep.directory
+ )
+ end
+ )
+ end
+
+ sig { params(dependencies: T::Array[PullRequest::Dependency]).void }
+ def initialize(dependencies)
+ @dependencies = dependencies
+ end
+
+ sig { params(other: PullRequest).returns(T::Boolean) }
+ def ==(other)
+ if using_directory? && other.using_directory?
+ dependencies.map(&:to_h).difference(other.dependencies.map(&:to_h)).none?
+ else
+ dependencies.map { |dep| dep.to_h.except(:directory) }.difference(
+ other.dependencies.map { |dep| dep.to_h.except(:directory) }
+ ).none?
+ end
+ end
+
+ sig { params(name: String, version: String).returns(T::Boolean) }
+ def contains_dependency?(name, version)
+ dependencies.any? { |dep| dep.name == name && dep.version == version }
+ end
+
+ sig { returns(T::Boolean) }
+ def using_directory?
+ dependencies.all? { |dep| !!dep.directory }
+ end
+ end
+end
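# Illustrative sketch (not part of the patch): the round trip between the
# job-definition hash shape read by create_from_job_definition and the comparison
# helpers the operations below rely on. The dependency values are made up.
require "dependabot/pull_request"

existing = Dependabot::PullRequest.create_from_job_definition(
  { existing_pull_requests: [
    [{ "dependency-name" => "rails", "dependency-version" => "7.1.3" }]
  ] }
)

candidate = Dependabot::PullRequest.new(
  [Dependabot::PullRequest::Dependency.new(name: "rails", version: "7.1.3")]
)

existing.first == candidate                      # => true (no directories, so they are ignored)
candidate.contains_dependency?("rails", "7.1.3") # => true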
diff --git a/updater/lib/dependabot/service.rb b/updater/lib/dependabot/service.rb
index 5e69e3d2..c8cb0f2e 100644
--- a/updater/lib/dependabot/service.rb
+++ b/updater/lib/dependabot/service.rb
@@ -81,6 +81,21 @@ def record_update_job_error(error_type:, error_details:, dependency: nil)
client.record_update_job_error(error_type: error_type, error_details: error_details)
end
+ sig do
+ params(
+ warn_type: T.any(String, Symbol),
+ warn_title: String,
+ warn_description: String
+ ).void
+ end
+ def record_update_job_warning(warn_type:, warn_title:, warn_description:)
+ client.record_update_job_warning(
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+ end
+
sig { params(error_type: T.any(String, Symbol), error_details: T.nilable(T::Hash[T.untyped, T.untyped])).void }
def record_update_job_unknown_error(error_type:, error_details:)
client.record_update_job_unknown_error(error_type: error_type, error_details: error_details)
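# Illustrative sketch (not part of the patch): the call shape expected by the new
# wrapper, mirroring the PullRequestHelpers#send_alert_notice helper added further
# below. `service` and `notice` (a Dependabot::Notice) are assumed to already exist.
service.record_update_job_warning(
  warn_type: notice.type,
  warn_title: notice.title,
  warn_description: notice.description
)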
diff --git a/updater/lib/dependabot/updater/group_update_creation.rb b/updater/lib/dependabot/updater/group_update_creation.rb
index 50d5ddca..9494698c 100644
--- a/updater/lib/dependabot/updater/group_update_creation.rb
+++ b/updater/lib/dependabot/updater/group_update_creation.rb
@@ -6,6 +6,8 @@
require "dependabot/dependency_change_builder"
require "dependabot/updater/dependency_group_change_batch"
require "dependabot/workspace"
+require "dependabot/updater/security_update_helpers"
+require "dependabot/notices"
# This module contains the methods required to build a DependencyChange for
# a single DependencyGroup.
@@ -22,6 +24,7 @@ class Updater
module GroupUpdateCreation
extend T::Sig
extend T::Helpers
+ include PullRequestHelpers
abstract!
@@ -52,6 +55,9 @@ def compile_all_dependency_changes_for(group)
)
original_dependencies = dependency_snapshot.dependencies
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ notices = dependency_snapshot.notices
+
Dependabot.logger.info("Updating the #{job.source.directory} directory.")
group.dependencies.each do |dependency|
# We still want to update a dependency if it's been updated in another manifest files,
@@ -89,6 +95,8 @@ def compile_all_dependency_changes_for(group)
dep.name.casecmp(dependency.name)&.zero?
end
+ next unless lead_dependency
+
dependency_change = create_change_for(T.must(lead_dependency), updated_dependencies, dependency_files, group)
# Move on to the next dependency using the existing files if we
@@ -106,7 +114,8 @@ def compile_all_dependency_changes_for(group)
job: job,
updated_dependencies: group_changes.updated_dependencies,
updated_dependency_files: group_changes.updated_dependency_files,
- dependency_group: group
+ dependency_group: group,
+ notices: notices
)
if Experiments.enabled?("dependency_change_validation") && !dependency_change.all_have_previous_version?
@@ -114,6 +123,10 @@ def compile_all_dependency_changes_for(group)
return nil
end
+ # Send warning alerts to the API if any warning notices are present.
+ # Note that only notices with notice.show_alert set to true will be sent.
+ record_warning_notices(notices) if notices.any?
+
dependency_change
ensure
cleanup_workspace
@@ -228,6 +241,9 @@ def compile_updates_for(dependency, dependency_files, group) # rubocop:disable M
return []
end
+ # Raise an error if the package manager version is unsupported
+ dependency_snapshot.package_manager&.raise_if_unsupported!
+
checker.updated_dependencies(
requirements_to_unlock: requirements_to_unlock
)
diff --git a/updater/lib/dependabot/updater/operations.rb b/updater/lib/dependabot/updater/operations.rb
index b1b59187..9b33014d 100644
--- a/updater/lib/dependabot/updater/operations.rb
+++ b/updater/lib/dependabot/updater/operations.rb
@@ -34,8 +34,7 @@ module Operations
RefreshGroupUpdatePullRequest,
CreateSecurityUpdatePullRequest,
RefreshSecurityUpdatePullRequest,
- RefreshVersionUpdatePullRequest,
- UpdateAllVersions
+ RefreshVersionUpdatePullRequest
].freeze
def self.class_for(job:)
diff --git a/updater/lib/dependabot/updater/operations/create_security_update_pull_request.rb b/updater/lib/dependabot/updater/operations/create_security_update_pull_request.rb
index 941f469b..d10754aa 100644
--- a/updater/lib/dependabot/updater/operations/create_security_update_pull_request.rb
+++ b/updater/lib/dependabot/updater/operations/create_security_update_pull_request.rb
@@ -2,6 +2,7 @@
# frozen_string_literal: true
require "dependabot/updater/security_update_helpers"
+require "dependabot/notices"
# This class implements our strategy for updating a single, insecure dependency
# to a secure version. We attempt to make the smallest version update possible,
@@ -12,6 +13,7 @@ module Operations
class CreateSecurityUpdatePullRequest
extend T::Sig
include SecurityUpdateHelpers
+ include PullRequestHelpers
sig { params(job: Job).returns(T::Boolean) }
def self.applies_to?(job:)
@@ -42,7 +44,9 @@ def initialize(service:, job:, dependency_snapshot:, error_handler:)
@dependency_snapshot = dependency_snapshot
@error_handler = error_handler
# TODO: Collect @created_pull_requests on the Job object?
- @created_pull_requests = T.let([], T::Array[T::Array[T::Hash[String, String]]])
+ @created_pull_requests = T.let([], T::Array[PullRequest])
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ @notices = T.let([], T::Array[Dependabot::Notice])
end
# TODO: We currently tolerate multiple dependencies for this operation
@@ -55,6 +59,10 @@ def initialize(service:, job:, dependency_snapshot:, error_handler:)
def perform
Dependabot.logger.info("Starting security update job for #{job.source.repo}")
+ # Retrieve the list of initial notices from dependency snapshot
+ @notices = dependency_snapshot.notices
+ # More notices can be added during the update process
+
target_dependencies = dependency_snapshot.job_dependencies
if target_dependencies.empty?
@@ -74,8 +82,11 @@ def perform
attr_reader :dependency_snapshot
sig { returns(Dependabot::Updater::ErrorHandler) }
attr_reader :error_handler
- sig { returns(T::Array[T::Array[T::Hash[String, String]]]) }
+ sig { returns(T::Array[PullRequest]) }
attr_reader :created_pull_requests
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
sig { params(dependency: Dependabot::Dependency).void }
def check_and_create_pr_with_error_handling(dependency)
@@ -94,6 +105,7 @@ def check_and_create_pr_with_error_handling(dependency)
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength
+ # rubocop:disable Metrics/CyclomaticComplexity
sig { params(dependency: Dependabot::Dependency).void }
def check_and_create_pull_request(dependency)
dependency = vulnerable_version(dependency) if dependency.metadata[:all_versions]
@@ -134,6 +146,9 @@ def check_and_create_pull_request(dependency)
log_requirements_for_update(requirements_to_unlock, checker)
return record_security_update_not_possible_error(checker) if requirements_to_unlock == :update_not_possible
+ # Raise an error if the package manager version is unsupported
+ dependency_snapshot.package_manager&.raise_if_unsupported!
+
updated_deps = checker.updated_dependencies(
requirements_to_unlock: requirements_to_unlock
)
@@ -152,11 +167,11 @@ def check_and_create_pull_request(dependency)
# request)
record_pull_request_exists_for_security_update(existing_pr)
- deps = existing_pr.map do |dep|
- if dep.fetch("dependency-removed", false)
- "#{dep.fetch('dependency-name')}@removed"
+ deps = existing_pr.dependencies.map do |dep|
+ if dep.removed?
+ "#{dep.name}@removed"
else
- "#{dep.fetch('dependency-name')}@#{dep.fetch('dependency-version')}"
+ "#{dep.name}@#{dep.version}"
end
end
@@ -169,9 +184,15 @@ def check_and_create_pull_request(dependency)
job: job,
dependency_files: dependency_snapshot.dependency_files,
updated_dependencies: updated_deps,
- change_source: checker.dependency
+ change_source: checker.dependency,
+ # Sending notices to the pr message builder to be used in the PR message if show_in_pr is true
+ notices: @notices
)
+ # Send warning alerts to the API if any warning notices are present.
+ # Note that only notices with notice.show_alert set to true will be sent.
+ record_warning_notices(notices) if notices.any?
+
create_pull_request(dependency_change)
rescue Dependabot::AllVersionsIgnored
Dependabot.logger.info("All updates for #{dependency.name} were ignored")
@@ -181,6 +202,7 @@ def check_and_create_pull_request(dependency)
# rubocop:enable Metrics/MethodLength
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/PerceivedComplexity
+ # rubocop:enable Metrics/CyclomaticComplexity
sig { params(dependency: Dependabot::Dependency).returns(Dependabot::Dependency) }
def vulnerable_version(dependency)
return dependency if dependency.metadata[:all_versions].count == 1
@@ -243,29 +265,19 @@ def pr_exists_for_latest_version?(checker)
return false if latest_version.nil?
job.existing_pull_requests
- .select { |pr| pr.count == 1 }
- .map(&:first)
- .select { |pr| pr && pr.fetch("dependency-name") == checker.dependency.name }
- .any? { |pr| pr && pr.fetch("dependency-version", nil) == latest_version }
+ .any? { |pr| pr.contains_dependency?(checker.dependency.name, latest_version) } ||
+ created_pull_requests.any? { |pr| pr.contains_dependency?(checker.dependency.name, latest_version) }
end
sig do
params(updated_dependencies: T::Array[Dependabot::Dependency])
- .returns(T.nilable(T::Array[T::Hash[String, String]]))
+ .returns(T.nilable(PullRequest))
end
def existing_pull_request(updated_dependencies)
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil
- }.compact
- end
- )
+ new_pr = PullRequest.create_from_updated_dependencies(updated_dependencies)
- job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set } ||
- created_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
+ job.existing_pull_requests.find { |pr| pr == new_pr } ||
+ created_pull_requests.find { |pr| pr == new_pr }
end
sig { params(checker: Dependabot::UpdateCheckers::Base).returns(Symbol) }
@@ -289,18 +301,7 @@ def create_pull_request(dependency_change)
service.create_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
- created_pull_requests << dependency_change.updated_dependencies.map do |dep|
- create_pull_request_for_dependency(dep)
- end
- end
-
- sig { params(dependency: Dependabot::Dependency).returns(T::Hash[String, String]) }
- def create_pull_request_for_dependency(dependency)
- {
- "dependency-name" => dependency.name,
- "dependency-version" => dependency.version,
- "dependency-removed" => dependency.removed? ? true : nil
- }.compact
+ created_pull_requests << PullRequest.create_from_updated_dependencies(dependency_change.updated_dependencies)
end
end
end
diff --git a/updater/lib/dependabot/updater/operations/group_update_all_versions.rb b/updater/lib/dependabot/updater/operations/group_update_all_versions.rb
index afb90751..9eaf7a79 100644
--- a/updater/lib/dependabot/updater/operations/group_update_all_versions.rb
+++ b/updater/lib/dependabot/updater/operations/group_update_all_versions.rb
@@ -22,14 +22,12 @@ class GroupUpdateAllVersions
include GroupUpdateCreation
sig { params(job: Dependabot::Job).returns(T::Boolean) }
- def self.applies_to?(job:) # rubocop:disable Metrics/PerceivedComplexity
+ def self.applies_to?(job:)
return false if job.updating_a_pull_request?
if Dependabot::Experiments.enabled?(:grouped_security_updates_disabled) && job.security_updates_only?
return false
end
- return true if job.source.directories && T.must(job.source.directories).count > 1
-
if job.security_updates_only?
return true if job.dependencies && T.must(job.dependencies).count > 1
return true if job.dependency_groups.any? { |group| group["applies-to"] == "security-updates" }
@@ -37,7 +35,7 @@ def self.applies_to?(job:) # rubocop:disable Metrics/PerceivedComplexity
return false
end
- job.dependency_groups.any?
+ true
end
sig { returns(Symbol) }
@@ -63,25 +61,7 @@ def initialize(service:, job:, dependency_snapshot:, error_handler:)
sig { void }
def perform
- if dependency_snapshot.groups.any?
- run_grouped_dependency_updates
- else
- # We shouldn't have selected this operation if no groups were defined
- # due to the rules in `::applies_to?`, but if it happens it isn't
- # enough reasons to fail the job.
- Dependabot.logger.warn(
- "No dependency groups defined!"
- )
-
- # We should warn our exception tracker in case this represents an
- # unexpected problem hydrating groups we have swallowed and then
- # delegate everything to run_ungrouped_dependency_updates.
- service.capture_exception(
- error: DependabotError.new("Attempted a grouped update with no groups defined."),
- job: job
- )
- end
-
+ run_grouped_dependency_updates if dependency_snapshot.groups.any?
run_ungrouped_dependency_updates
end
@@ -139,8 +119,15 @@ def run_grouped_update_for(group)
sig { void }
def run_ungrouped_dependency_updates
- if job.source.directories.nil?
- return if dependency_snapshot.ungrouped_dependencies.empty?
+ directories.each do |directory|
+ job.source.directory = directory
+ dependency_snapshot.current_directory = directory
+ next unless dependency_snapshot.dependencies.any?
+
+ if dependency_snapshot.ungrouped_dependencies.empty?
+ Dependabot.logger.info("Found no dependencies to update after filtering allowed updates in #{directory}")
+ next
+ end
Dependabot::Updater::Operations::UpdateAllVersions.new(
service: service,
@@ -148,19 +135,15 @@ def run_ungrouped_dependency_updates
dependency_snapshot: dependency_snapshot,
error_handler: error_handler
).perform
+ end
+ end
+
+ sig { returns(T::Array[String]) }
+ def directories
+ if job.source.directories.nil?
+ [T.must(job.source.directory)]
else
- T.must(job.source.directories).each do |directory|
- job.source.directory = directory
- dependency_snapshot.current_directory = directory
- next if dependency_snapshot.ungrouped_dependencies.empty?
-
- Dependabot::Updater::Operations::UpdateAllVersions.new(
- service: service,
- job: job,
- dependency_snapshot: dependency_snapshot,
- error_handler: error_handler
- ).perform
- end
+ T.must(job.source.directories)
end
end
end
diff --git a/updater/lib/dependabot/updater/operations/refresh_security_update_pull_request.rb b/updater/lib/dependabot/updater/operations/refresh_security_update_pull_request.rb
index 1b1aefa6..6202dc7a 100644
--- a/updater/lib/dependabot/updater/operations/refresh_security_update_pull_request.rb
+++ b/updater/lib/dependabot/updater/operations/refresh_security_update_pull_request.rb
@@ -1,6 +1,9 @@
-# typed: true
+# typed: strong
# frozen_string_literal: true
+require "dependabot/updater/security_update_helpers"
+require "dependabot/notices"
+
# This class implements our strategy for 'refreshing' an existing Pull Request
# that updates an insecure dependency.
#
@@ -14,8 +17,11 @@ module Dependabot
class Updater
module Operations
class RefreshSecurityUpdatePullRequest
+ extend T::Sig
include SecurityUpdateHelpers
+ include PullRequestHelpers
+ sig { params(job: Job).returns(T::Boolean) }
def self.applies_to?(job:)
return false unless job.security_updates_only?
# If we haven't been given metadata about the dependencies present
@@ -25,31 +31,56 @@ def self.applies_to?(job:)
job.updating_a_pull_request?
end
+ sig { returns(Symbol) }
def self.tag_name
:update_security_pr
end
+ sig do
+ params(service: Service, job: Job, dependency_snapshot: DependencySnapshot, error_handler: ErrorHandler)
+ .void
+ end
def initialize(service:, job:, dependency_snapshot:, error_handler:)
@service = service
@job = job
@dependency_snapshot = dependency_snapshot
@error_handler = error_handler
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ @notices = T.let([], T::Array[Dependabot::Notice])
end
+ sig { void }
def perform
- dependency = dependencies.last
+ Dependabot.logger.info("Starting update job for #{job.source.repo}")
+ Dependabot.logger.info("Checking and updating security pull requests...")
+
+ # Raise an error if the package manager version is unsupported
+ dependency_snapshot.package_manager&.raise_if_unsupported!
+
+ # Retrieve the list of initial notices from dependency snapshot
+ @notices = dependency_snapshot.notices
+ # More notices can be added during the update process
+
check_and_update_pull_request(dependencies)
rescue StandardError => e
- error_handler.handle_dependency_error(error: e, dependency: dependency)
+ error_handler.handle_dependency_error(error: e, dependency: dependencies.last)
end
private
+ sig { returns(Dependabot::Job) }
attr_reader :job
+ sig { returns(Dependabot::Service) }
attr_reader :service
+ sig { returns(Dependabot::DependencySnapshot) }
attr_reader :dependency_snapshot
+ sig { returns(Dependabot::Updater::ErrorHandler) }
attr_reader :error_handler
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
+ sig { returns(T::Array[Dependabot::Dependency]) }
def dependencies
dependency_snapshot.job_dependencies
end
@@ -57,8 +88,18 @@ def dependencies
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength
+ # rubocop:disable Metrics/CyclomaticComplexity
+ sig { params(dependencies: T::Array[Dependabot::Dependency]).void }
def check_and_update_pull_request(dependencies)
- if dependencies.count != job.dependencies.count
+ # If the job dependencies are empty, then we should close the PR
+ job_dependencies = job.dependencies
+ unless job_dependencies
+ Dependabot.logger.info("No dependencies to update")
+ close_pull_request(reason: :dependencies_removed)
+ return
+ end
+
+ if dependencies.count != job_dependencies.count
# If the job dependencies mismatch the parsed dependencies, then
# we should close the PR as at least one thing we changed has been
# removed from the project.
@@ -72,12 +113,14 @@ def check_and_update_pull_request(dependencies)
# pull request is rebased.
if dependencies.none? { |d| job.allowed_update?(d) }
lead_dependency = dependencies.first
- if job.vulnerable?(lead_dependency)
+ if lead_dependency && job.vulnerable?(lead_dependency)
Dependabot.logger.info(
"Dependency no longer allowed to update #{lead_dependency.name} #{lead_dependency.version}"
)
- else
+ elsif lead_dependency
Dependabot.logger.info("No longer vulnerable #{lead_dependency.name} #{lead_dependency.version}")
+ else
+ Dependabot.logger.info("No dependencies to update")
end
close_pull_request(reason: :up_to_date)
return
@@ -89,10 +132,13 @@ def check_and_update_pull_request(dependencies)
# Note: Gradle, Maven and Nuget dependency names can be case-insensitive
# and the dependency name in the security advisory often doesn't match
# what users have specified in their manifest.
- lead_dep_name = job.dependencies.first.downcase
+ lead_dep_name = job_dependencies.first&.downcase
lead_dependency = dependencies.find do |dep|
dep.name.downcase == lead_dep_name
end
+
+ return close_pull_request(reason: :update_no_longer_possible) unless lead_dependency
+
checker = update_checker_for(lead_dependency)
log_checking_for_update(lead_dependency)
@@ -115,13 +161,19 @@ def check_and_update_pull_request(dependencies)
job: job,
dependency_files: dependency_snapshot.dependency_files,
updated_dependencies: updated_deps,
- change_source: checker.dependency
+ change_source: checker.dependency,
+ # Sending notices to the pr message builder to be used in the PR message if show_in_pr is true
+ notices: @notices
)
+ # Send warning alerts to the API if any warning notices are present.
+ # Note that only notices with notice.show_alert set to true will be sent.
+ record_warning_notices(notices) if notices.any?
+
# NOTE: Gradle, Maven and Nuget dependency names can be case-insensitive
# and the dependency name in the security advisory often doesn't match
# what users have specified in their manifest.
- job_dependencies = job.dependencies.map(&:downcase)
+ job_dependencies = job_dependencies.map(&:downcase)
if dependency_change.updated_dependencies.map { |x| x.name.downcase } != job_dependencies
# The dependencies being updated have changed. Close the existing
# multi-dependency PR and try creating a new one.
@@ -135,7 +187,7 @@ def check_and_update_pull_request(dependencies)
create_pull_request(dependency_change)
end
rescue Dependabot::AllVersionsIgnored
- Dependabot.logger.info("All updates for #{job.dependencies.first} were ignored")
+ Dependabot.logger.info("All updates for #{job_dependencies&.first} were ignored")
# Report this error to the backend to create an update job error
raise
@@ -143,7 +195,9 @@ def check_and_update_pull_request(dependencies)
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength
+ # rubocop:enable Metrics/CyclomaticComplexity
+ sig { params(checker: Dependabot::UpdateCheckers::Base).returns(Symbol) }
def requirements_to_unlock(checker)
if !checker.requirements_unlocked_or_can_be?
if checker.can_update?(requirements_to_unlock: :none) then :none
@@ -157,6 +211,7 @@ def requirements_to_unlock(checker)
end
end
+ sig { params(dependency: Dependabot::Dependency).returns(Dependabot::UpdateCheckers::Base) }
def update_checker_for(dependency)
Dependabot::UpdateCheckers.for_package_manager(job.package_manager).new(
dependency: dependency,
@@ -171,6 +226,7 @@ def update_checker_for(dependency)
)
end
+ sig { params(dependency: Dependabot::Dependency).void }
def log_checking_for_update(dependency)
Dependabot.logger.info(
"Checking if #{dependency.name} #{dependency.version} needs updating"
@@ -178,12 +234,14 @@ def log_checking_for_update(dependency)
job.log_ignore_conditions_for(dependency)
end
+ sig { params(dependency: Dependabot::Dependency).void }
def log_up_to_date(dependency)
Dependabot.logger.info(
"No update needed for #{dependency.name} #{dependency.version}"
)
end
+ sig { params(requirements_to_unlock: Symbol, checker: Dependabot::UpdateCheckers::Base).void }
def log_requirements_for_update(requirements_to_unlock, checker)
Dependabot.logger.info("Requirements to unlock #{requirements_to_unlock}")
@@ -194,35 +252,16 @@ def log_requirements_for_update(requirements_to_unlock, checker)
)
end
+ sig do
+ params(updated_dependencies: T::Array[Dependabot::Dependency])
+ .returns(T.nilable(PullRequest))
+ end
def existing_pull_request(updated_dependencies)
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil,
- "directory" => dep.directory
- }.compact
- end
- )
-
- existing = job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
- return existing if existing
-
- # try the search again without directory
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil
- }.compact
- end
- )
-
- job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
+ new_pr = PullRequest.create_from_updated_dependencies(updated_dependencies)
+ job.existing_pull_requests.find { |pr| pr == new_pr }
end
+ sig { params(dependency_change: Dependabot::DependencyChange).void }
def create_pull_request(dependency_change)
Dependabot.logger.info("Submitting #{dependency_change.updated_dependencies.map(&:name).join(', ')} " \
"pull request for creation")
@@ -230,6 +269,7 @@ def create_pull_request(dependency_change)
service.create_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
end
+ sig { params(dependency_change: Dependabot::DependencyChange).void }
def update_pull_request(dependency_change)
Dependabot.logger.info("Submitting #{dependency_change.updated_dependencies.map(&:name).join(', ')} " \
"pull request for update")
@@ -237,11 +277,16 @@ def update_pull_request(dependency_change)
service.update_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
end
+ sig { params(reason: Symbol).void }
def close_pull_request(reason:)
reason_string = reason.to_s.tr("_", " ")
+
+ job_dependencies = job.dependencies || []
+
Dependabot.logger.info("Telling backend to close pull request for " \
- "#{job.dependencies.join(', ')} - #{reason_string}")
- service.close_pull_request(job.dependencies, reason)
+ "#{job_dependencies.join(', ')} - #{reason_string}")
+
+ service.close_pull_request(job_dependencies, reason)
end
end
end
diff --git a/updater/lib/dependabot/updater/operations/refresh_version_update_pull_request.rb b/updater/lib/dependabot/updater/operations/refresh_version_update_pull_request.rb
index 81c1d660..4919c758 100644
--- a/updater/lib/dependabot/updater/operations/refresh_version_update_pull_request.rb
+++ b/updater/lib/dependabot/updater/operations/refresh_version_update_pull_request.rb
@@ -1,6 +1,9 @@
-# typed: true
+# typed: strong
# frozen_string_literal: true
+require "dependabot/updater/security_update_helpers"
+require "dependabot/notices"
+
# This class implements our strategy for 'refreshing' an existing Pull Request
# that updates a dependnency to the latest permitted version.
#
@@ -12,6 +15,10 @@ module Dependabot
class Updater
module Operations
class RefreshVersionUpdatePullRequest
+ extend T::Sig
+ include PullRequestHelpers
+
+ sig { params(job: Dependabot::Job).returns(T::Boolean) }
def self.applies_to?(job:)
return false if job.security_updates_only?
# If we haven't been given metadata about the dependencies present
@@ -21,24 +28,42 @@ def self.applies_to?(job:)
job.updating_a_pull_request?
end
+ sig { returns(Symbol) }
def self.tag_name
:update_version_pr
end
+ sig do
+ params(
+ service: Dependabot::Service,
+ job: Dependabot::Job,
+ dependency_snapshot: Dependabot::DependencySnapshot,
+ error_handler: ErrorHandler
+ ).void
+ end
def initialize(service:, job:, dependency_snapshot:, error_handler:)
@service = service
@job = job
@dependency_snapshot = dependency_snapshot
@error_handler = error_handler
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ @notices = T.let([], T::Array[Dependabot::Notice])
- return unless job.source.directory.nil? && job.source.directories.count == 1
+ return unless job.source.directory.nil? && job.source.directories&.count == 1
- job.source.directory = job.source.directories.first
+ job.source.directory = job.source.directories&.first
end
+ sig { void }
def perform
- Dependabot.logger.info("Starting PR update job for #{job.source.repo}")
+ Dependabot.logger.info("Starting update job for #{job.source.repo}")
+ Dependabot.logger.info("Checking and updating versions pull requests...")
dependency = dependencies.last
+
+ # Retrieve the list of initial notices from dependency snapshot
+ @notices = dependency_snapshot.notices
+ # More notices can be added during the update process
+
check_and_update_pull_request(dependencies)
rescue StandardError => e
error_handler.handle_dependency_error(error: e, dependency: dependency)
@@ -46,20 +71,33 @@ def perform
private
+ sig { returns(Dependabot::Job) }
attr_reader :job
+ sig { returns(Dependabot::Service) }
attr_reader :service
+ sig { returns(Dependabot::DependencySnapshot) }
attr_reader :dependency_snapshot
+ sig { returns(Dependabot::Updater::ErrorHandler) }
attr_reader :error_handler
- attr_reader :created_pull_requests
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
+ sig { returns(T::Array[Dependabot::Dependency]) }
def dependencies
dependency_snapshot.job_dependencies
end
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/PerceivedComplexity
+ # rubocop:disable Metrics/MethodLength
+ sig do
+ params(dependencies: T::Array[Dependabot::Dependency]).void
+ end
def check_and_update_pull_request(dependencies)
- if dependencies.count != job.dependencies.count
+ job_dependencies = T.must(job.dependencies)
+
+ if job_dependencies.count.zero? || dependencies.count != job_dependencies.count
# If the job dependencies mismatch the parsed dependencies, then
# we should close the PR as at least one thing we changed has been
# removed from the project.
@@ -73,10 +111,19 @@ def check_and_update_pull_request(dependencies)
# Note: Gradle, Maven and Nuget dependency names can be case-insensitive
# and the dependency name in the security advisory often doesn't match
# what users have specified in their manifest.
- lead_dep_name = job.dependencies.first.downcase
+ lead_dep_name = T.must(job_dependencies.first).downcase
lead_dependency = dependencies.find do |dep|
dep.name.downcase == lead_dep_name
end
+
+ if lead_dependency.nil?
+ # If the lead dependency is not found, it indicates that one of the dependencies
+ # we attempted to update has been removed from the project. Therefore, we should
+ # close the PR.
+ close_pull_request(reason: :dependency_removed)
+ return
+ end
+
checker = update_checker_for(lead_dependency, raise_on_ignored: raise_on_ignored?(lead_dependency))
log_checking_for_update(lead_dependency)
@@ -91,6 +138,9 @@ def check_and_update_pull_request(dependencies)
return close_pull_request(reason: :update_no_longer_possible)
end
+ # Raise an error if the package manager version is unsupported
+ dependency_snapshot.package_manager&.raise_if_unsupported!
+
updated_deps = checker.updated_dependencies(
requirements_to_unlock: requirements_to_unlock
)
@@ -99,13 +149,19 @@ def check_and_update_pull_request(dependencies)
job: job,
dependency_files: dependency_snapshot.dependency_files,
updated_dependencies: updated_deps,
- change_source: checker.dependency
+ change_source: checker.dependency,
+ # Sending notices to the pr message builder to be used in the PR message if show_in_pr is true
+ notices: @notices
)
+ # Send warning alerts to the API if any warning notices are present.
+ # Note that only notices with notice.show_alert set to true will be sent.
+ record_warning_notices(notices) if notices.any?
+
# NOTE: Gradle, Maven and Nuget dependency names can be case-insensitive
# and the dependency name in the security advisory often doesn't match
# what users have specified in their manifest.
- job_dependencies = job.dependencies.map(&:downcase)
+ job_dependencies = job_dependencies.map(&:downcase)
if dependency_change.updated_dependencies.map { |x| x.name.downcase } != job_dependencies
# The dependencies being updated have changed. Close the existing
# multi-dependency PR and try creating a new one.
@@ -121,7 +177,9 @@ def check_and_update_pull_request(dependencies)
end
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/PerceivedComplexity
+ # rubocop:enable Metrics/MethodLength
+ sig { params(dependency_change: Dependabot::DependencyChange).void }
def create_pull_request(dependency_change)
Dependabot.logger.info("Submitting #{dependency_change.updated_dependencies.map(&:name).join(', ')} " \
"pull request for creation")
@@ -129,6 +187,7 @@ def create_pull_request(dependency_change)
service.create_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
end
+ sig { params(dependency_change: Dependabot::DependencyChange).void }
def update_pull_request(dependency_change)
Dependabot.logger.info("Submitting #{dependency_change.updated_dependencies.map(&:name).join(', ')} " \
"pull request for update")
@@ -136,17 +195,25 @@ def update_pull_request(dependency_change)
service.update_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
end
+ sig { params(reason: Symbol).void }
def close_pull_request(reason:)
+ job_dependencies = T.must(job.dependencies)
+
reason_string = reason.to_s.tr("_", " ")
Dependabot.logger.info("Telling backend to close pull request for " \
- "#{job.dependencies.join(', ')} - #{reason_string}")
- service.close_pull_request(job.dependencies, reason)
+ "#{job_dependencies.join(', ')} - #{reason_string}")
+ service.close_pull_request(job_dependencies, reason)
end
+ sig { params(dependency: Dependabot::Dependency).returns(T::Boolean) }
def raise_on_ignored?(dependency)
job.ignore_conditions_for(dependency).any?
end
+ sig do
+ params(dependency: Dependabot::Dependency, raise_on_ignored: T::Boolean)
+ .returns(Dependabot::UpdateCheckers::Base)
+ end
def update_checker_for(dependency, raise_on_ignored:)
Dependabot::UpdateCheckers.for_package_manager(job.package_manager).new(
dependency: dependency,
@@ -161,6 +228,7 @@ def update_checker_for(dependency, raise_on_ignored:)
)
end
+ sig { params(dependency: Dependabot::Dependency).void }
def log_checking_for_update(dependency)
Dependabot.logger.info(
"Checking if #{dependency.name} #{dependency.version} needs updating"
@@ -168,6 +236,9 @@ def log_checking_for_update(dependency)
job.log_ignore_conditions_for(dependency)
end
+ sig do
+ params(dependency: Dependabot::Dependency, checker: Dependabot::UpdateCheckers::Base).returns(T::Boolean)
+ end
def all_versions_ignored?(dependency, checker)
Dependabot.logger.info("Latest version is #{checker.latest_version}")
false
@@ -176,6 +247,9 @@ def all_versions_ignored?(dependency, checker)
true
end
+ sig do
+ params(checker: Dependabot::UpdateCheckers::Base).returns(Symbol)
+ end
def requirements_to_unlock(checker)
if !checker.requirements_unlocked_or_can_be?
if checker.can_update?(requirements_to_unlock: :none) then :none
@@ -189,6 +263,9 @@ def requirements_to_unlock(checker)
end
end
+ sig do
+ params(requirements_to_unlock: Symbol, checker: Dependabot::UpdateCheckers::Base).void
+ end
def log_requirements_for_update(requirements_to_unlock, checker)
Dependabot.logger.info("Requirements to unlock #{requirements_to_unlock}")
@@ -199,33 +276,13 @@ def log_requirements_for_update(requirements_to_unlock, checker)
)
end
+ sig do
+ params(updated_dependencies: T::Array[Dependabot::Dependency])
+ .returns(T.nilable(Dependabot::PullRequest))
+ end
def existing_pull_request(updated_dependencies)
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil,
- "directory" => dep.directory
- }.compact
- end
- )
-
- existing = job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
- return existing if existing
-
- # try the search again without directory
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil
- }.compact
- end
- )
-
- job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
+ new_pr = PullRequest.create_from_updated_dependencies(updated_dependencies)
+ job.existing_pull_requests.find { |pr| pr == new_pr }
end
end
end
diff --git a/updater/lib/dependabot/updater/operations/update_all_versions.rb b/updater/lib/dependabot/updater/operations/update_all_versions.rb
index 453f5249..40e63264 100644
--- a/updater/lib/dependabot/updater/operations/update_all_versions.rb
+++ b/updater/lib/dependabot/updater/operations/update_all_versions.rb
@@ -1,6 +1,9 @@
-# typed: true
+# typed: strong
# frozen_string_literal: true
+require "dependabot/updater/security_update_helpers"
+require "dependabot/notices"
+
# This class implements our strategy for iterating over all of the dependencies
# for a specific project folder to find those that are out of date and create
# a single PR per Dependency.
@@ -8,45 +11,71 @@ module Dependabot
class Updater
module Operations
class UpdateAllVersions
- def self.applies_to?(job:)
- return false if job.security_updates_only?
- return false if job.updating_a_pull_request?
- return false if job.dependencies&.any?
+ extend T::Sig
+ include PullRequestHelpers
- true
+ sig { params(_job: Dependabot::Job).returns(T::Boolean) }
+ def self.applies_to?(_job:)
+          false # never selected via applies_to?; this operation is only instantiated directly (e.g. by GroupUpdateAllVersions)
end
+ sig { returns(Symbol) }
def self.tag_name
:update_all_versions
end
+ sig do
+ params(
+ service: Dependabot::Service,
+ job: Dependabot::Job,
+ dependency_snapshot: Dependabot::DependencySnapshot,
+ error_handler: ErrorHandler
+ ).void
+ end
def initialize(service:, job:, dependency_snapshot:, error_handler:)
@service = service
@job = job
@dependency_snapshot = dependency_snapshot
@error_handler = error_handler
# TODO: Collect @created_pull_requests on the Job object?
- @created_pull_requests = []
+ @created_pull_requests = T.let([], T::Array[PullRequest])
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ @notices = T.let([], T::Array[Dependabot::Notice])
- return unless job.source.directory.nil? && job.source.directories.count == 1
+ return unless job.source.directory.nil? && job.source.directories&.count == 1
- job.source.directory = job.source.directories.first
+ job.source.directory = job.source.directories&.first
end
+ sig { void }
def perform
Dependabot.logger.info("Starting update job for #{job.source.repo}")
Dependabot.logger.info("Checking all dependencies for version updates...")
+
+ # Retrieve the list of initial notices from dependency snapshot
+ @notices = dependency_snapshot.notices
+ # More notices can be added during the update process
+
dependencies.each { |dep| check_and_create_pr_with_error_handling(dep) }
end
private
+ sig { returns(Dependabot::Job) }
attr_reader :job
+ sig { returns(Dependabot::Service) }
attr_reader :service
+ sig { returns(Dependabot::DependencySnapshot) }
attr_reader :dependency_snapshot
+ sig { returns(Dependabot::Updater::ErrorHandler) }
attr_reader :error_handler
+ sig { returns(T::Array[PullRequest]) }
attr_reader :created_pull_requests
+ # A list of notices that will be used in PR messages and/or sent to the dependabot github alerts.
+ sig { returns(T::Array[Dependabot::Notice]) }
+ attr_reader :notices
+ sig { returns(T::Array[Dependabot::Dependency]) }
def dependencies
if dependency_snapshot.dependencies.any? && dependency_snapshot.allowed_dependencies.none?
Dependabot.logger.info("Found no dependencies to update after filtering allowed updates")
@@ -60,12 +89,12 @@ def dependencies
end
end
+ sig { params(dependency: Dependabot::Dependency).void }
def check_and_create_pr_with_error_handling(dependency)
check_and_create_pull_request(dependency)
rescue URI::InvalidURIError => e
- msg = e.class.to_s + " with message: " + e.message
- e = Dependabot::DependencyFileNotResolvable.new(msg)
- error_handler.handle_dependency_error(error: e, dependency: dependency)
+ error_handler.handle_dependency_error(error: Dependabot::DependencyFileNotResolvable.new(e.message),
+ dependency: dependency)
rescue Dependabot::InconsistentRegistryResponse => e
error_handler.log_dependency_error(
dependency: dependency,
@@ -80,6 +109,7 @@ def check_and_create_pr_with_error_handling(dependency)
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/MethodLength
# rubocop:disable Metrics/PerceivedComplexity
+ sig { params(dependency: Dependabot::Dependency).void }
def check_and_create_pull_request(dependency)
checker = update_checker_for(dependency, raise_on_ignored: raise_on_ignored?(dependency))
@@ -113,11 +143,11 @@ def check_and_create_pull_request(dependency)
end
if (existing_pr = existing_pull_request(updated_deps))
- deps = existing_pr.map do |dep|
- if dep.fetch("dependency-removed", false)
- "#{dep.fetch('dependency-name')}@removed"
+ deps = existing_pr.dependencies.map do |dep|
+ if dep.removed?
+ "#{dep.name}@removed"
else
- "#{dep.fetch('dependency-name')}@#{dep.fetch('dependency-version')}"
+ "#{dep.name}@#{dep.version}"
end
end
@@ -137,29 +167,44 @@ def check_and_create_pull_request(dependency)
job: job,
dependency_files: dependency_snapshot.dependency_files,
updated_dependencies: updated_deps,
- change_source: checker.dependency
+ change_source: checker.dependency,
+ # Sending notices to the pr message builder to be used in the PR message if show_in_pr is true
+ notices: @notices
)
+ # Raise an error if the package manager version is unsupported
+ dependency_snapshot.package_manager&.raise_if_unsupported!
+
if dependency_change.updated_dependency_files.empty?
raise "UpdateChecker found viable dependencies to be updated, but FileUpdater failed to update any files"
end
+ # Send warning alerts to the API if any warning notices are present.
+ # Note that only notices with notice.show_alert set to true will be sent.
+ record_warning_notices(notices) if notices.any?
+
create_pull_request(dependency_change)
end
# rubocop:enable Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength
# rubocop:enable Metrics/AbcSize
+ sig { params(dependency: Dependabot::Dependency).void }
def log_up_to_date(dependency)
Dependabot.logger.info(
"No update needed for #{dependency.name} #{dependency.version}"
)
end
+ sig { params(dependency: Dependabot::Dependency).returns(T::Boolean) }
def raise_on_ignored?(dependency)
job.ignore_conditions_for(dependency).any?
end
+ sig do
+ params(dependency: Dependabot::Dependency, raise_on_ignored: T::Boolean)
+ .returns(Dependabot::UpdateCheckers::Base)
+ end
def update_checker_for(dependency, raise_on_ignored:)
Dependabot::UpdateCheckers.for_package_manager(job.package_manager).new(
dependency: dependency,
@@ -174,6 +219,7 @@ def update_checker_for(dependency, raise_on_ignored:)
)
end
+ sig { params(dependency: Dependabot::Dependency).void }
def log_checking_for_update(dependency)
Dependabot.logger.info(
"Checking if #{dependency.name} #{dependency.version} needs updating"
@@ -181,6 +227,7 @@ def log_checking_for_update(dependency)
job.log_ignore_conditions_for(dependency)
end
+ sig { params(error: StandardError, dependency: Dependabot::Dependency).returns(T.untyped) }
def process_dependency_error(error, dependency)
if error.class.to_s.include?("RegistryError")
ex = Dependabot::DependencyFileNotResolvable.new(error.message)
@@ -190,6 +237,10 @@ def process_dependency_error(error, dependency)
end
end
+ sig do
+ params(dependency: Dependabot::Dependency, checker: Dependabot::UpdateCheckers::Base)
+ .returns(T::Boolean)
+ end
def all_versions_ignored?(dependency, checker)
Dependabot.logger.info("Latest version is #{checker.latest_version}")
false
@@ -198,32 +249,28 @@ def all_versions_ignored?(dependency, checker)
true
end
+ sig { params(checker: Dependabot::UpdateCheckers::Base).returns(T::Boolean) }
def pr_exists_for_latest_version?(checker)
latest_version = checker.latest_version&.to_s
return false if latest_version.nil?
job.existing_pull_requests
- .select { |pr| pr.count == 1 }
- .map(&:first)
- .select { |pr| pr.fetch("dependency-name") == checker.dependency.name }
- .any? { |pr| pr.fetch("dependency-version", nil) == latest_version }
+ .any? { |pr| pr.contains_dependency?(checker.dependency.name, latest_version) } ||
+ created_pull_requests.any? { |pr| pr.contains_dependency?(checker.dependency.name, latest_version) }
end
+ sig do
+ params(updated_dependencies: T::Array[Dependabot::Dependency])
+ .returns(T.nilable(Dependabot::PullRequest))
+ end
def existing_pull_request(updated_dependencies)
- new_pr_set = Set.new(
- updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil
- }.compact
- end
- )
+ new_pr = PullRequest.create_from_updated_dependencies(updated_dependencies)
- job.existing_pull_requests.find { |pr| Set.new(pr) == new_pr_set } ||
- created_pull_requests.find { |pr| Set.new(pr) == new_pr_set }
+ job.existing_pull_requests.find { |pr| pr == new_pr } ||
+ created_pull_requests.find { |pr| pr == new_pr }
end
+ sig { params(checker: Dependabot::UpdateCheckers::Base).returns(Symbol) }
def requirements_to_unlock(checker)
if !checker.requirements_unlocked_or_can_be?
if checker.can_update?(requirements_to_unlock: :none) then :none
@@ -237,6 +284,7 @@ def requirements_to_unlock(checker)
end
end
+ sig { params(requirements_to_unlock: Symbol, checker: Dependabot::UpdateCheckers::Base).void }
def log_requirements_for_update(requirements_to_unlock, checker)
Dependabot.logger.info("Requirements to unlock #{requirements_to_unlock}")
@@ -249,17 +297,19 @@ def log_requirements_for_update(requirements_to_unlock, checker)
# If a version update for a peer dependency is possible we should
# defer to the PR that will be created for it to avoid duplicate PRs.
+ sig { params(dependency_name: String, updated_deps: T::Array[Dependabot::Dependency]).returns(T::Boolean) }
def peer_dependency_should_update_instead?(dependency_name, updated_deps)
updated_deps
.reject { |dep| dep.name == dependency_name }
.any? do |dep|
next true if existing_pull_request([dep])
+
next false if dep.previous_requirements.nil?
original_peer_dep = ::Dependabot::Dependency.new(
name: dep.name,
version: dep.previous_version,
- requirements: dep.previous_requirements,
+ requirements: T.must(dep.previous_requirements),
package_manager: dep.package_manager
)
update_checker_for(original_peer_dep, raise_on_ignored: false)
@@ -267,19 +317,14 @@ def peer_dependency_should_update_instead?(dependency_name, updated_deps)
end
end
+ sig { params(dependency_change: Dependabot::DependencyChange).void }
def create_pull_request(dependency_change)
Dependabot.logger.info("Submitting #{dependency_change.updated_dependencies.map(&:name).join(', ')} " \
"pull request for creation")
service.create_pull_request(dependency_change, dependency_snapshot.base_commit_sha)
- created_pull_requests << dependency_change.updated_dependencies.map do |dep|
- {
- "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "dependency-removed" => dep.removed? ? true : nil
- }.compact
- end
+ created_pull_requests << PullRequest.create_from_updated_dependencies(dependency_change.updated_dependencies)
end
end
end
diff --git a/updater/lib/dependabot/updater/security_update_helpers.rb b/updater/lib/dependabot/updater/security_update_helpers.rb
index c409a521..e4827eea 100644
--- a/updater/lib/dependabot/updater/security_update_helpers.rb
+++ b/updater/lib/dependabot/updater/security_update_helpers.rb
@@ -120,13 +120,13 @@ def record_pull_request_exists_for_latest_version(checker)
)
end
- sig { params(existing_pull_request: T::Array[T::Hash[String, String]]).void }
+ sig { params(existing_pull_request: PullRequest).void }
def record_pull_request_exists_for_security_update(existing_pull_request)
- updated_dependencies = existing_pull_request.map do |dep|
+ updated_dependencies = existing_pull_request.dependencies.map do |dep|
{
- "dependency-name": dep.fetch("dependency-name"),
- "dependency-version": dep.fetch("dependency-version", nil),
- "dependency-removed": dep.fetch("dependency-removed", nil)
+ "dependency-name": dep.name,
+ "dependency-version": dep.version,
+ "dependency-removed": dep.removed? ? true : nil
}.compact
end
@@ -181,5 +181,52 @@ def security_update_not_possible_message(checker, latest_allowed_version, confli
end
end
end
+
+ module PullRequestHelpers
+ extend T::Sig
+ extend T::Helpers
+
+ sig { returns(Dependabot::Service) }
+ attr_reader :service
+
+ abstract!
+
+ sig { params(notices: T.nilable(T::Array[Dependabot::Notice])).void }
+ def record_warning_notices(notices)
+ return if !notices || notices.empty?
+
+      # Find unique warning notices which are going to be shown on the insights page.
+ warn_notices = unique_warn_notices(notices)
+
+ warn_notices.each do |notice|
+        # If alerting is enabled, send the warning notice to the service so it shows on the insights page
+ send_alert_notice(notice) if notice.show_alert
+ end
+ rescue StandardError => e
+ Dependabot.logger.error(
+ "Failed to send notice warning: #{e.message}"
+ )
+ end
+
+ private
+
+    # Returns unique warning notices which are going to be shown on the insights page.
+ sig { params(notices: T::Array[Dependabot::Notice]).returns(T::Array[Dependabot::Notice]) }
+ def unique_warn_notices(notices)
+ notices
+ .select { |notice| notice.mode == Dependabot::Notice::NoticeMode::WARN }
+ .uniq { |notice| [notice.type, notice.package_manager_name] }
+ end
+
+ sig { params(notice: Dependabot::Notice).void }
+ def send_alert_notice(notice)
+        # Send the notice to the service so it is shown on the Dependabot insights page
+ service.record_update_job_warning(
+ warn_type: notice.type,
+ warn_title: notice.title,
+ warn_description: notice.description
+ )
+ end
+ end
end
end
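# Illustrative sketch (not part of the patch): an operation-style class mixing in
# PullRequestHelpers. The namespace is assumed from the module definition above,
# and @service is expected to behave like Dependabot::Service.
require "dependabot/updater/security_update_helpers"

module Dependabot
  class ExampleOperation
    include PullRequestHelpers

    def initialize(service:, notices:)
      @service = service
      @notices = notices
    end

    def perform
      # Only unique WARN-mode notices with show_alert enabled are forwarded to the
      # service; failures are logged rather than raised.
      record_warning_notices(@notices)
    end
  end
end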
diff --git a/updater/lib/tinglesoftware/dependabot/job.rb b/updater/lib/tinglesoftware/dependabot/job.rb
index aa765974..b7ca9d8c 100644
--- a/updater/lib/tinglesoftware/dependabot/job.rb
+++ b/updater/lib/tinglesoftware/dependabot/job.rb
@@ -310,7 +310,7 @@ def azure_virtual_directory
end
def azure_api_endpoint
- virual_directory = azure_virtual_directory.empty? ? "" : "#{azure_virtual_directory}/}"
+ virual_directory = azure_virtual_directory.empty? ? "" : "#{azure_virtual_directory}/"
"#{azure_protocol}://#{azure_hostname}:#{azure_port}/#{virual_directory}"
end
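The one-character fix above removes a stray "}" that leaked into the interpolated endpoint whenever a virtual directory was configured. An illustration with assumed values for an on-premises Azure DevOps Server:

  # azure_protocol = "https", azure_hostname = "devops.contoso.local",
  # azure_port = "443", azure_virtual_directory = "tfs"
  #
  # Before the fix: "https://devops.contoso.local:443/tfs/}"  (malformed base URL)
  # After the fix:  "https://devops.contoso.local:443/tfs/"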
diff --git a/updater/lib/tinglesoftware/dependabot/vulnerabilities.rb b/updater/lib/tinglesoftware/dependabot/vulnerabilities.rb
index e1dc23a7..4204c170 100644
--- a/updater/lib/tinglesoftware/dependabot/vulnerabilities.rb
+++ b/updater/lib/tinglesoftware/dependabot/vulnerabilities.rb
@@ -23,7 +23,8 @@ class QueryError < StandardError; end
"pip" => "PIP",
"pub" => "PUB",
"bundler" => "RUBYGEMS",
- "cargo" => "RUST"
+ "cargo" => "RUST",
+ "swift" => "SWIFT"
}.freeze
GRAPHQL_QUERY = <<-GRAPHQL
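The added "swift" => "SWIFT" entry extends the map from Dependabot package-manager names to the ecosystem values understood by GitHub's GraphQL security-advisory API, so Swift dependencies can be checked for known vulnerabilities as well. A hedged lookup sketch; the constant's actual name sits outside this hunk, so ECOSYSTEM_LOOKUP below is an assumption:

  # Assumed name for the frozen hash shown above.
  ecosystem = ECOSYSTEM_LOOKUP.fetch("swift", nil)
  # => "SWIFT", the value supplied to GRAPHQL_QUERY when fetching
  #    security advisories for a Swift dependency.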
diff --git a/updater/spec/dependabot/api_client_spec.rb b/updater/spec/dependabot/api_client_spec.rb
index f40c6229..36779ba6 100644
--- a/updater/spec/dependabot/api_client_spec.rb
+++ b/updater/spec/dependabot/api_client_spec.rb
@@ -379,6 +379,47 @@
end
end
+ describe "record_update_job_warning" do
+ let(:record_update_job_warning_url) { "http://example.com/update_jobs/1/record_update_job_warning" }
+
+ let(:warn_type) { "test_warning_type" }
+ let(:warn_title) { "Test Warning Title" }
+ let(:warn_description) { "Test Warning Description" }
+
+ before do
+ stub_request(:post, record_update_job_warning_url)
+ .to_return(status: 204, headers: headers)
+ end
+
+ it "hits the correct endpoint" do
+ client.record_update_job_warning(
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+
+ expect(WebMock)
+ .to have_requested(:post, record_update_job_warning_url)
+ .with(headers: { "Authorization" => "token" })
+ end
+
+ it "encodes the payload correctly" do
+ client.record_update_job_warning(
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+
+ expect(WebMock).to(have_requested(:post, record_update_job_warning_url).with do |req|
+ data = JSON.parse(req.body)["data"]
+
+ expect(data["warn-type"]).to eq(warn_type)
+ expect(data["warn-title"]).to eq(warn_title)
+ expect(data["warn-description"]).to eq(warn_description)
+ end)
+ end
+ end
+
describe "mark_job_as_processed" do
let(:url) { "http://example.com/update_jobs/1/mark_as_processed" }
let(:base_commit) { "sha" }
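For reference, this is the request shape the new specs pin down: a POST to the job's record_update_job_warning endpoint, authenticated with the job token and carrying the warning fields under a top-level "data" key. A reconstruction from the assertions above:

  # POST http://example.com/update_jobs/1/record_update_job_warning
  # Authorization: token
  payload = {
    "data" => {
      "warn-type"        => "test_warning_type",
      "warn-title"       => "Test Warning Title",
      "warn-description" => "Test Warning Description"
    }
  }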
diff --git a/updater/spec/dependabot/dependency_change_spec.rb b/updater/spec/dependabot/dependency_change_spec.rb
index 61d6fe0e..4b6acbc3 100644
--- a/updater/spec/dependabot/dependency_change_spec.rb
+++ b/updater/spec/dependabot/dependency_change_spec.rb
@@ -7,6 +7,7 @@
require "dependabot/pull_request_creator"
require "dependabot/dependency_change"
require "dependabot/job"
+require "dependabot/pull_request"
RSpec.describe Dependabot::DependencyChange do
subject(:dependency_change) do
@@ -114,7 +115,8 @@
dependency_group: nil,
pr_message_encoding: nil,
pr_message_max_length: 65_535,
- ignore_conditions: []
+ ignore_conditions: [],
+ notices: []
)
expect(dependency_change.pr_message.pr_message).to eql("Hello World!")
@@ -141,7 +143,8 @@
dependency_group: group,
pr_message_encoding: nil,
pr_message_max_length: 65_535,
- ignore_conditions: []
+ ignore_conditions: [],
+ notices: []
)
expect(dependency_change.pr_message&.pr_message).to eql("Hello World!")
@@ -297,11 +300,15 @@
end
let(:existing_pull_requests) do
[
- updated_dependencies.map do |dep|
- { "dependency-name" => dep.name,
- "dependency-version" => dep.version,
- "directory" => dep.directory }
- end
+ Dependabot::PullRequest.new(
+ updated_dependencies.map do |dep|
+ Dependabot::PullRequest::Dependency.new(
+ name: dep.name,
+ version: dep.version,
+ directory: dep.directory
+ )
+ end
+ )
]
end
let(:dependency_change) do
@@ -319,10 +326,14 @@
context "when there's no directory in an existing PR that otherwise matches" do
let(:existing_pull_requests) do
[
- updated_dependencies.map do |dep|
- { "dependency-name" => dep.name,
- "dependency-version" => dep.version }
- end
+ Dependabot::PullRequest.new(
+ updated_dependencies.map do |dep|
+ Dependabot::PullRequest::Dependency.new(
+ name: dep.name,
+ version: dep.version
+ )
+ end
+ )
]
end
diff --git a/updater/spec/dependabot/service_spec.rb b/updater/spec/dependabot/service_spec.rb
index a0f9572d..2af5f2a5 100644
--- a/updater/spec/dependabot/service_spec.rb
+++ b/updater/spec/dependabot/service_spec.rb
@@ -23,7 +23,8 @@
update_pull_request: nil,
close_pull_request: nil,
record_update_job_error: nil,
- record_update_job_unknown_error: nil
+ record_update_job_unknown_error: nil,
+ record_update_job_warning: nil
})
allow(api_client).to receive(:is_a?).with(Dependabot::ApiClient).and_return(true)
api_client
@@ -305,6 +306,28 @@
end
end
+ describe "#record_update_job_warning" do
+ let(:warn_type) { :deprecated_dependency }
+ let(:warn_title) { "Deprecated Dependency Used" }
+ let(:warn_description) { "The dependency xyz is deprecated and should be updated or removed." }
+
+ before do
+ service.record_update_job_warning(
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+ end
+
+ it "delegates to @client" do
+ expect(mock_client).to have_received(:record_update_job_warning).with(
+ warn_type: warn_type,
+ warn_title: warn_title,
+ warn_description: warn_description
+ )
+ end
+ end
+
describe "#capture_exception" do
before do
allow(Dependabot::Experiments).to receive(:enabled?).with(:record_update_job_unknown_error).and_return(true)
diff --git a/updater/spec/dependabot/updater/operations_spec.rb b/updater/spec/dependabot/updater/operations_spec.rb
index 6afb24fe..fb110a28 100644
--- a/updater/spec/dependabot/updater/operations_spec.rb
+++ b/updater/spec/dependabot/updater/operations_spec.rb
@@ -12,22 +12,6 @@
Dependabot::Experiments.reset!
end
- it "returns nil if no operation matches" do
- # We always expect jobs that update a pull request to specify their
- # existing dependency changes, a job with this set of conditions
- # should never exist.
- source = instance_double(Dependabot::Source, directory: "/.", directories: nil)
- job = instance_double(Dependabot::Job,
- source: source,
- security_updates_only?: false,
- updating_a_pull_request?: true,
- dependencies: [],
- dependency_groups: [],
- is_a?: true)
-
- expect(described_class.class_for(job: job)).to be_nil
- end
-
it "returns the UpdateAllVersions class when the Job is for a fresh, non-security update with no dependencies" do
source = instance_double(Dependabot::Source, directory: "/.", directories: nil)
job = instance_double(Dependabot::Job,
@@ -38,7 +22,7 @@
dependency_groups: [],
is_a?: true)
- expect(described_class.class_for(job: job)).to be(Dependabot::Updater::Operations::UpdateAllVersions)
+ expect(described_class.class_for(job: job)).to be(Dependabot::Updater::Operations::GroupUpdateAllVersions)
end
it "returns the GroupUpdateAllVersions class when the Job is for a fresh, version update with no dependencies" do
diff --git a/updater/spec/support/dummy_pkg_helpers.rb b/updater/spec/support/dummy_pkg_helpers.rb
index de4b572f..2f207f5f 100644
--- a/updater/spec/support/dummy_pkg_helpers.rb
+++ b/updater/spec/support/dummy_pkg_helpers.rb
@@ -1,6 +1,7 @@
# typed: false
# frozen_string_literal: true
+require "dependabot/package_manager"
require "dependabot/dependency_file"
# This module provides some shortcuts for working with our two mock RubyGems packages:
@@ -61,4 +62,37 @@ def create_temporary_content_directory(fixture:, directory: "/", state: "origina
def updated_bundler_files_hash(fixture: "bundler")
updated_bundler_files(fixture: fixture).map(&:to_h)
end
+
+ # Stub PackageManagerBase
+ class StubPackageManager < Dependabot::PackageManagerBase
+ def initialize(name:, version:, deprecated_versions: [], unsupported_versions: [], supported_versions: [])
+ @name = name
+ @version = version
+ @deprecated_versions = deprecated_versions
+ @unsupported_versions = unsupported_versions
+ @supported_versions = supported_versions
+ end
+
+ attr_reader :name
+ attr_reader :version
+ attr_reader :deprecated_versions
+ attr_reader :unsupported_versions
+ attr_reader :supported_versions
+
+ sig { override.returns(T::Boolean) }
+ def deprecated?
+ # If the version is unsupported, the unsupported error is raised separately.
+ return false if unsupported?
+
+ deprecated_versions.include?(version)
+ end
+
+ sig { override.returns(T::Boolean) }
+ def unsupported?
+ # Check if the feature flag for Bundler v1 unsupported error is enabled.
+ return false unless name == "bundler" && Dependabot::Experiments.enabled?(:bundler_v1_unsupported_error)
+
+ version < supported_versions.first
+ end
+ end
end
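A hedged usage sketch for the stub above, assuming versions are comparable objects such as Dependabot::Version (the enclosing helper module's name is outside this hunk, so the class is referenced directly):

  package_manager = StubPackageManager.new(
    name: "bundler",
    version: Dependabot::Version.new("1.17.3"),
    deprecated_versions: [Dependabot::Version.new("1.17.3")],
    supported_versions: [Dependabot::Version.new("2.5.0")]
  )

  # With the :bundler_v1_unsupported_error experiment enabled:
  package_manager.unsupported? # => true  (1.17.3 < 2.5.0)
  package_manager.deprecated?  # => false (unsupported takes precedence)

  # With the experiment disabled:
  package_manager.unsupported? # => false
  package_manager.deprecated?  # => true  (1.17.3 is listed as deprecated)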